Dataset columns (name, type, min – max):

  hexsha              string    length 40 – 40
  size                int64     2 – 1.01M
  content             string    length 2 – 1.01M
  avg_line_length     float64   1.5 – 100
  max_line_length     int64     2 – 1k
  alphanum_fraction   float64   0.25 – 1
hexsha: 4a77d047e15594b11f4584a7d40d19b372f94c32
size: 919
content:

class CreateMcPayloadStageClasses < ActiveRecord::Migration
  #
  # CONSTANTS
  #

  # Name of the table being created
  TABLE_NAME = :mc_payload_staged_classes

  #
  # Instance Methods
  #

  # Drop {TABLE_NAME}.
  #
  # @return [void]
  def down
    drop_table TABLE_NAME
  end

  # Create {TABLE_NAME}.
  #
  # @return [void]
  def up
    create_table TABLE_NAME do |t|
      #
      # References
      #

      t.references :payload_stage_instance, null: false
      t.references :payload_stager_instance, null: false
    end

    change_table TABLE_NAME do |t|
      t.index :payload_stage_instance_id, unique: false
      t.index :payload_stager_instance_id, unique: false
      t.index [:payload_stager_instance_id, :payload_stage_instance_id],
              name: 'unique_mc_payload_staged_classes',
              unique: true
    end
  end
end

avg_line_length: 20.422222
max_line_length: 72
alphanum_fraction: 0.618063
hexsha: 3977617d142d02ac8a643d27222db7bae4f6692e
size: 827
content:

=begin
#Tatum API

## Authentication <!-- ReDoc-Inject: <security-definitions> -->

OpenAPI spec version: 3.9.0

Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.31
=end

require 'spec_helper'
require 'json'
require 'date'

# Unit tests for Tatum::OneOfflowAccountBody1
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'OneOfflowAccountBody1' do
  before do
    # run before each test
    @instance = Tatum::OneOfflowAccountBody1.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of OneOfflowAccountBody1' do
    it 'should create an instance of OneOfflowAccountBody1' do
      expect(@instance).to be_instance_of(Tatum::OneOfflowAccountBody1)
    end
  end
end

avg_line_length: 23.628571
max_line_length: 85
alphanum_fraction: 0.750907
hexsha: 6a42281d40be5103b84abf14c0340408cfdcc222
size: 170
content:

require File.dirname(__FILE__) + '/test_helper.rb'

class TestTamarind < Test::Unit::TestCase

  def setup
  end

  def test_truth
    assert true
  end
end

avg_line_length: 14.166667
max_line_length: 51
alphanum_fraction: 0.658824
hexsha: 260c644bae4cbfc29bfb525604de2f2ad8fbe039
size: 121
content:

class AddRoleToMembers < ActiveRecord::Migration[5.1]
  def change
    add_column :memberships, :role, :string
  end
end

avg_line_length: 20.166667
max_line_length: 53
alphanum_fraction: 0.743802
hexsha: 1d3eb054f3ce8138b63b03ce96ecf3a2ec629d67
size: 649
content:

Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'activerecord4-bigquery-adapter'
  s.version = '0.0.2'
  s.summary = 'BigQuery adapter for ActiveRecord 4'
  s.description = 'BigQuery _makeshift_ adapter for ActiveRecord 4.'
  s.license = 'MIT'

  s.authors = ['Mark Slemko']
  s.email = '[email protected]'
  s.homepage = 'http://github.com/markslemko/activerecord4-bigquery-adapter'

  s.files = Dir.glob(['LICENSE', 'README.md', 'lib/**/*.rb'])
  s.require_path = 'lib'

  s.required_ruby_version = '>= 2.0.0'

  s.add_dependency 'google-cloud-bigquery', ['~> 0.25']
  s.add_dependency 'activerecord', ['~> 4.2']
end

avg_line_length: 32.45
max_line_length: 76
alphanum_fraction: 0.676425
hexsha: ac71cb7f2389a94f465f21e3108262bf49358e40
size: 1,502
content:

# frozen_string_literal: true

# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

module Google
  module Ads
    module GoogleAds
      module V7
        module Enums
          # Container for enum describing the type of invoices.
          class InvoiceTypeEnum
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # The possible type of invoices.
            module InvoiceType
              # Not specified.
              UNSPECIFIED = 0

              # Used for return value only. Represents value unknown in this version.
              UNKNOWN = 1

              # An invoice with a negative amount. The account receives a credit.
              CREDIT_MEMO = 2

              # An invoice with a positive amount. The account owes a balance.
              INVOICE = 3
            end
          end
        end
      end
    end
  end
end

avg_line_length: 30.04
max_line_length: 85
alphanum_fraction: 0.648469
hexsha: ff79822e0f9e87cc828d037ec6488e2c084ded76
size: 1,213
content:

# frozen_string_literal: true

class SuperAdmin::OrgSwapsController < ApplicationController

  include OrgSelectable

  after_action :verify_authorized

  def create
    # Allows the user to swap their org affiliation on the fly
    authorize current_user, :org_swap?

    # See if the user selected a new Org via the Org Lookup and
    # convert it into an Org
    lookup = org_from_params(params_in: org_swap_params)

    # rubocop:disable Layout/LineLength
    if lookup.present? && !lookup.new_record?
      current_user.org = lookup
      if current_user.save
        redirect_back(fallback_location: root_path,
                      notice: _("Your organisation affiliation has been changed. You may now edit templates for %{org_name}.") % { org_name: current_user.org.name })
      else
        redirect_back(fallback_location: root_path,
                      alert: _("Unable to change your organisation affiliation at this time."))
      end
    else
      redirect_back(fallback_location: root_path, alert: _("Unknown organisation."))
    end
    # rubocop:enable Layout/LineLength
  end

  private

  def org_swap_params
    params.require(:user).permit(:org_id, :org_name, :org_crosswalk)
  end
end

avg_line_length: 30.325
max_line_length: 165
alphanum_fraction: 0.70404
hexsha: d58e3b2841ec30bd7f89c8c589e26d3ae5825c62
size: 10,962
content:
require 'spec_helper' describe 'Dashboard Todos' do let(:user) { create(:user) } let(:author) { create(:user) } let(:project) { create(:project, :public) } let(:issue) { create(:issue, due_date: Date.today) } context 'User does not have todos' do before do sign_in(user) visit dashboard_todos_path end it 'shows "All done" message' do expect(page).to have_content 'Todos let you see what you should do next' end end context 'when the todo references a merge request' do let(:referenced_mr) { create(:merge_request, source_project: project) } let(:note) { create(:note, project: project, note: "Check out #{referenced_mr.to_reference}") } let!(:todo) { create(:todo, :mentioned, user: user, project: project, author: author, note: note) } before do sign_in(user) visit dashboard_todos_path end it 'renders the mr link with the extra attributes' do link = page.find_link(referenced_mr.to_reference) expect(link).not_to be_nil expect(link['data-iid']).to eq(referenced_mr.iid.to_s) expect(link['data-project-path']).to eq(referenced_mr.project.full_path) expect(link['data-mr-title']).to eq(referenced_mr.title) end end context 'User has a todo', :js do before do create(:todo, :mentioned, user: user, project: project, target: issue, author: author) sign_in(user) visit dashboard_todos_path end it 'has todo present' do expect(page).to have_selector('.todos-list .todo', count: 1) end it 'shows due date as today' do within first('.todo') do expect(page).to have_content 'Due today' end end shared_examples 'deleting the todo' do before do within first('.todo') do click_link 'Done' end end it 'is marked as done-reversible in the list' do expect(page).to have_selector('.todos-list .todo.todo-pending.done-reversible') end it 'shows Undo button' do expect(page).to have_selector('.js-undo-todo', visible: true) expect(page).to have_selector('.js-done-todo', visible: false) end it 'updates todo count' do expect(page).to have_content 'Todos 0' expect(page).to have_content 'Done 1' end it 'has not "All done" message' do expect(page).not_to have_selector('.todos-all-done') end end shared_examples 'deleting and restoring the todo' do before do within first('.todo') do click_link 'Done' wait_for_requests click_link 'Undo' end end it 'is marked back as pending in the list' do expect(page).not_to have_selector('.todos-list .todo.todo-pending.done-reversible') expect(page).to have_selector('.todos-list .todo.todo-pending') end it 'shows Done button' do expect(page).to have_selector('.js-undo-todo', visible: false) expect(page).to have_selector('.js-done-todo', visible: true) end it 'updates todo count' do expect(page).to have_content 'Todos 1' expect(page).to have_content 'Done 0' end end it_behaves_like 'deleting the todo' it_behaves_like 'deleting and restoring the todo' context 'todo is stale on the page' do before do todos = TodosFinder.new(user, state: :pending).execute TodoService.new.mark_todos_as_done(todos, user) end it_behaves_like 'deleting the todo' it_behaves_like 'deleting and restoring the todo' end end context 'User created todos for themself' do before do sign_in(user) end context 'issue assigned todo' do before do create(:todo, :assigned, user: user, project: project, target: issue, author: user) visit dashboard_todos_path end it 'shows issue assigned to yourself message' do page.within('.js-todos-all') do expect(page).to have_content("You assigned issue #{issue.to_reference(full: true)} to yourself") end end end context 'marked todo' do before do create(:todo, :marked, user: user, project: project, target: 
issue, author: user) visit dashboard_todos_path end it 'shows you added a todo message' do page.within('.js-todos-all') do expect(page).to have_content("You added a todo for issue #{issue.to_reference(full: true)}") expect(page).not_to have_content('to yourself') end end end context 'mentioned todo' do before do create(:todo, :mentioned, user: user, project: project, target: issue, author: user) visit dashboard_todos_path end it 'shows you mentioned yourself message' do page.within('.js-todos-all') do expect(page).to have_content("You mentioned yourself on issue #{issue.to_reference(full: true)}") expect(page).not_to have_content('to yourself') end end end context 'directly_addressed todo' do before do create(:todo, :directly_addressed, user: user, project: project, target: issue, author: user) visit dashboard_todos_path end it 'shows you directly addressed yourself message' do page.within('.js-todos-all') do expect(page).to have_content("You directly addressed yourself on issue #{issue.to_reference(full: true)}") expect(page).not_to have_content('to yourself') end end end context 'approval todo' do let(:merge_request) { create(:merge_request) } before do create(:todo, :approval_required, user: user, project: project, target: merge_request, author: user) visit dashboard_todos_path end it 'shows you set yourself as an approver message' do page.within('.js-todos-all') do expect(page).to have_content("You set yourself as an approver for merge request #{merge_request.to_reference(full: true)}") expect(page).not_to have_content('to yourself') end end end end context 'User has done todos', :js do before do create(:todo, :mentioned, :done, user: user, project: project, target: issue, author: author) sign_in(user) visit dashboard_todos_path(state: :done) end it 'has the done todo present' do expect(page).to have_selector('.todos-list .todo.todo-done', count: 1) end describe 'restoring the todo' do before do within first('.todo') do click_link 'Add todo' end end it 'is removed from the list' do expect(page).not_to have_selector('.todos-list .todo.todo-done') end it 'updates todo count' do expect(page).to have_content 'Todos 1' expect(page).to have_content 'Done 0' end end end context 'User has Todos with labels spanning multiple projects' do before do label1 = create(:label, project: project) note1 = create(:note_on_issue, note: "Hello #{label1.to_reference(format: :name)}", noteable_id: issue.id, noteable_type: 'Issue', project: issue.project) create(:todo, :mentioned, project: project, target: issue, user: user, note_id: note1.id) project2 = create(:project, :public) label2 = create(:label, project: project2) issue2 = create(:issue, project: project2) note2 = create(:note_on_issue, note: "Test #{label2.to_reference(format: :name)}", noteable_id: issue2.id, noteable_type: 'Issue', project: project2) create(:todo, :mentioned, project: project2, target: issue2, user: user, note_id: note2.id) gitlab_sign_in(user) visit dashboard_todos_path end it 'shows page with two Todos' do expect(page).to have_selector('.todos-list .todo', count: 2) end end context 'User has multiple pages of Todos' do before do allow(Todo).to receive(:default_per_page).and_return(1) # Create just enough records to cause us to paginate create_list(:todo, 2, :mentioned, user: user, project: project, target: issue, author: author) sign_in(user) end it 'is paginated' do visit dashboard_todos_path expect(page).to have_selector('.gl-pagination') end it 'is has the right number of pages' do visit dashboard_todos_path expect(page).to 
have_selector('.gl-pagination .js-pagination-page', count: 2) end describe 'mark all as done', :js do before do visit dashboard_todos_path find('.js-todos-mark-all').click end it 'shows "All done" message!' do expect(page).to have_content 'Todos 0' expect(page).to have_content "You're all done!" expect(page).not_to have_selector('.gl-pagination') end it 'shows "Undo mark all as done" button' do expect(page).to have_selector('.js-todos-mark-all', visible: false) expect(page).to have_selector('.js-todos-undo-all', visible: true) end end describe 'undo mark all as done', :js do before do visit dashboard_todos_path end it 'shows the restored todo list' do mark_all_and_undo expect(page).to have_selector('.todos-list .todo', count: 1) expect(page).to have_selector('.gl-pagination') expect(page).not_to have_content "You're all done!" end it 'updates todo count' do mark_all_and_undo expect(page).to have_content 'Todos 2' expect(page).to have_content 'Done 0' end it 'shows "Mark all as done" button' do mark_all_and_undo expect(page).to have_selector('.js-todos-mark-all', visible: true) expect(page).to have_selector('.js-todos-undo-all', visible: false) end context 'User has deleted a todo' do before do within first('.todo') do click_link 'Done' end end it 'shows the restored todo list with the deleted todo' do mark_all_and_undo expect(page).to have_selector('.todos-list .todo.todo-pending', count: 1) end end def mark_all_and_undo find('.js-todos-mark-all').click wait_for_requests find('.js-todos-undo-all').click wait_for_requests end end end context 'User has a Build Failed todo' do let!(:todo) { create(:todo, :build_failed, user: user, project: project, author: author) } before do sign_in(user) visit dashboard_todos_path end it 'shows the todo' do expect(page).to have_content 'The build failed for merge request' end it 'links to the pipelines for the merge request' do href = pipelines_project_merge_request_path(project, todo.target) expect(page).to have_link "merge request #{todo.target.to_reference(full: true)}", href: href end end end
avg_line_length: 30.534819
max_line_length: 160
alphanum_fraction: 0.640759
hexsha: ed5e0053a0030a57e53d3e2fb77df04a3394336f
size: 1,242
content:

# Copyright 2018 Google, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# [START vision_product_search_update_product_labels]
require "google/cloud/vision"

def product_search_update_product_labels(
  project_id = "your-project-id",
  location = "us-west1",
  product_id = "your-product-id"
)
  client = Google::Cloud::Vision::ProductSearch.new

  product_path = client.product_path project_id, location, product_id

  product = {
    name: product_path,
    product_labels: [{ key: "color", value: "green" }]
  }

  client.update_product product, update_mask: { "paths": ["product_labels"] }
end
# [END vision_product_search_update_product_labels]

product_search_update_product_labels *ARGV if $PROGRAM_NAME == __FILE__

avg_line_length: 34.5
max_line_length: 77
alphanum_fraction: 0.748792
hexsha: e96c13aede38b54840ada4bc41fdde51982e95c3
size: 469
content:

require 'spec_helper'

describe Gitlab::Ci::Status::Created do
  subject do
    described_class.new(double('subject'), double('user'))
  end

  describe '#text' do
    it { expect(subject.text).to eq 'created' }
  end

  describe '#label' do
    it { expect(subject.label).to eq 'created' }
  end

  describe '#icon' do
    it { expect(subject.icon).to eq 'icon_status_created' }
  end

  describe '#group' do
    it { expect(subject.group).to eq 'created' }
  end
end

avg_line_length: 19.541667
max_line_length: 59
alphanum_fraction: 0.658849
hexsha: 1d7786bcbd1bcb67cea09dde182c5bc8a0901aec
size: 1,220
content:

# frozen_string_literal: true

require 'application_system_test_case'

class UserInfosTest < ApplicationSystemTestCase
  setup do
    @user_info = user_infos(:one)
  end

  test 'visiting the index' do
    visit user_infos_url
    assert_selector 'h1', text: 'User Infos'
  end

  test 'creating a User info' do
    visit user_infos_url
    click_on 'New User Info'

    fill_in 'First name', with: @user_info.first_name
    fill_in 'Last name', with: @user_info.last_name
    fill_in 'Member status', with: @user_info.member_status
    click_on 'Create User info'

    assert_text 'User info was successfully created'
    click_on 'Back'
  end

  test 'updating a User info' do
    visit user_infos_url
    click_on 'Edit', match: :first

    fill_in 'First name', with: @user_info.first_name
    fill_in 'Last name', with: @user_info.last_name
    fill_in 'Member status', with: @user_info.member_status
    click_on 'Update User info'

    assert_text 'User info was successfully updated'
    click_on 'Back'
  end

  test 'destroying a User info' do
    visit user_infos_url
    page.accept_confirm do
      click_on 'Destroy', match: :first
    end

    assert_text 'User info was successfully destroyed'
  end
end

avg_line_length: 24.4
max_line_length: 59
alphanum_fraction: 0.715574
hexsha: 2172895d66cb9c01030cecbd5acb0cebd862c9ae
size: 906
content:

require 'active_support/concern'

module ModelCallbacks
  extend ActiveSupport::Concern

  def after_init
    names = self.class.after_init_names[self.class]
    names.each { |name| self.send(name) }
  end

  def self.included(klass)
    class << klass
      alias_method :__new, :new

      def new(*args)
        e = __new(*args)
        e.after_init
        e
      end
    end
  end

  def before_valid
    names = self.class.before_valid_names[self.class]
    names.each { |name| self.send(name) }
  end

  module ClassMethods
    def before_valid_names
      @@before_valid_names ||= Hash.new { |h, k| h[k] = [] }
    end

    def after_init_names
      @@after_init_names ||= Hash.new { |h, k| h[k] = [] }
    end

    def after_initialize(*names)
      after_init_names[self] += names
    end

    def before_validation(*names)
      before_valid_names[self] += names
    end
  end
end

avg_line_length: 19.695652
max_line_length: 60
alphanum_fraction: 0.620309
hexsha: 1aa62887f14714473ff5d985b136abe85ca1a9a6
size: 5,057
content:
# This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. # # Note that this schema.rb definition is the authoritative source for your # database schema. If you need to create the application database on another # system, you should be using db:schema:load, not running all the migrations # from scratch. The latter is a flawed and unsustainable approach (the more migrations # you'll amass, the slower it'll run and the greater likelihood for issues). # # It's strongly recommended that you check this file into your version control system. ActiveRecord::Schema.define(version: 20190702200657) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" enable_extension "hstore" create_table "benchmark_result_types", force: :cascade do |t| t.string "name", null: false t.string "unit", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.index ["name", "unit"], name: "index_benchmark_result_types_on_name_and_unit", unique: true, using: :btree end create_table "benchmark_runs", force: :cascade do |t| t.hstore "result", null: false t.text "environment", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.string "initiator_type" t.integer "initiator_id" t.integer "benchmark_type_id", default: 0, null: false t.integer "benchmark_result_type_id", null: false t.boolean "validity", default: true, null: false t.index ["benchmark_type_id"], name: "index_benchmark_runs_on_benchmark_type_id", using: :btree t.index ["initiator_type", "initiator_id"], name: "index_benchmark_runs_on_initiator_type_and_initiator_id", using: :btree end create_table "benchmark_types", force: :cascade do |t| t.string "category", null: false t.string "script_url", null: false t.integer "repo_id", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.string "digest" t.boolean "from_user", default: false, null: false t.index ["repo_id", "category", "script_url"], name: "index_benchmark_types_on_repo_id_and_category_and_script_url", unique: true, using: :btree t.index ["repo_id"], name: "index_benchmark_types_on_repo_id", using: :btree end create_table "benchmark_types_groups", id: false, force: :cascade do |t| t.integer "benchmark_type_id", null: false t.integer "group_id", null: false t.index ["benchmark_type_id", "group_id"], name: "index_benchmark_types_groups_on_benchmark_type_id_and_group_id", using: :btree t.index ["group_id", "benchmark_type_id"], name: "index_benchmark_types_groups_on_group_id_and_benchmark_type_id", using: :btree end create_table "commits", force: :cascade do |t| t.string "sha1", null: false t.string "url", null: false t.text "message", null: false t.integer "repo_id", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.index ["repo_id"], name: "index_commits_on_repo_id", using: :btree t.index ["sha1", "repo_id"], name: "index_commits_on_sha1_and_repo_id", unique: true, using: :btree end create_table "groups", force: :cascade do |t| t.string "name" t.text "description" t.datetime "created_at", null: false t.datetime "updated_at", null: false end create_table "organizations", force: :cascade do |t| t.string "name", null: false t.string "url", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.index ["name"], 
name: "index_organizations_on_name", unique: true, using: :btree end create_table "releases", force: :cascade do |t| t.integer "repo_id", null: false t.string "version", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.index ["repo_id"], name: "index_releases_on_repo_id", using: :btree end create_table "repos", force: :cascade do |t| t.string "name", null: false t.string "url", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "organization_id", null: false t.index ["name", "organization_id"], name: "index_repos_on_name_and_organization_id", unique: true, using: :btree t.index ["organization_id"], name: "index_repos_on_organization_id", using: :btree t.index ["url"], name: "index_repos_on_url", unique: true, using: :btree end end
avg_line_length: 47.707547
max_line_length: 148
alphanum_fraction: 0.664228
hexsha: 21ee35f09aa7611da0d39a18f48f548512547faa
size: 496
content:

require 'rails_helper'

feature 'capture complete step' do
  include IdvStepHelper
  include DocAuthHelper
  include DocCaptureHelper

  before do
    complete_doc_capture_steps_before_capture_complete_step
    allow_any_instance_of(DeviceDetector).to receive(:device_type).and_return('mobile')
  end

  it 'is on the correct page' do
    expect(page).to have_current_path(idv_capture_doc_capture_complete_step)
    expect(page).to have_content(t('doc_auth.headings.capture_complete'))
  end
end

avg_line_length: 27.555556
max_line_length: 87
alphanum_fraction: 0.802419
hexsha: 4ad2b59eddf54f9312553c1d83500ab2df907cf8
size: 207
content:

class CreateHeartbeats < ActiveRecord::Migration
  def change
    create_table :heartbeats do |t|
      t.references :device, index: true, foreign_key: true

      t.timestamps null: false
    end
  end
end

avg_line_length: 20.7
max_line_length: 58
alphanum_fraction: 0.705314
hexsha: 87b0bb43c99e12c4c57f3031e1c97ba7b0411882
size: 1,054
content:

require 'spec_helper'

RSpec.describe 'Yunbi integration specs' do
  let(:client) { Cryptoexchange::Client.new }
  let(:btc_cny_pair) { Cryptoexchange::Models::MarketPair.new(base: 'BTC', target: 'CNY', market: 'yunbi') }

  it 'fetch pairs' do
    pending ":error, yunbi's service is temporarily unavailable."
    pairs = client.pairs('yunbi')
    expect(pairs).not_to be_empty

    pair = pairs.first
    expect(pair.base).to_not be_nil
    expect(pair.target).to_not be_nil
    expect(pair.market).to eq 'yunbi'
  end

  it 'fetch ticker' do
    pending ":error, yunbi's service is temporarily unavailable."
    ticker = client.ticker(btc_cny_pair)
    expect(ticker.base).to eq 'BTC'
    expect(ticker.target).to eq 'CNY'
    expect(ticker.market).to eq 'yunbi'
    expect(ticker.last).to_not be nil
    expect(ticker.bid).to_not be nil
    expect(ticker.ask).to_not be nil
    expect(ticker.high).to_not be nil
    expect(ticker.volume).to_not be nil
    expect(ticker.timestamp).to be nil
    expect(ticker.payload).to_not be nil
  end
end

avg_line_length: 28.486486
max_line_length: 108
alphanum_fraction: 0.696395
hexsha: 79bdc4af2f2b5878bad873e1b045e5c65eb377d6
size: 47
content:

module Lookfile
  VERSION = '0.1.5'.freeze
end

avg_line_length: 11.75
max_line_length: 26
alphanum_fraction: 0.702128
hexsha: ed174a57121cd7f990a0e49888c4f5338df9640b
size: 1,773
content:
require 'spec_helper' module RubySpeech module SSML describe Sub do let(:doc) { Nokogiri::XML::Document.new } subject { described_class.new doc } its(:name) { should == 'sub' } describe "setting options in initializers" do subject { Sub.new doc, :alias => 'foo' } its(:alias) { should == 'foo' } end it 'registers itself' do Element.class_from_registration(:sub).should == Sub end describe "from a document" do let(:document) { '<sub alias="foo"/>' } subject { Element.import document } it { should be_instance_of Sub } its(:alias) { should == 'foo' } end describe "comparing objects" do it "should be equal if the content and alias are the same" do Sub.new(doc, :alias => 'jp', :content => "Hello there").should == Sub.new(doc, :alias => 'jp', :content => "Hello there") end describe "when the content is different" do it "should not be equal" do Sub.new(doc, :content => "Hello").should_not == Sub.new(doc, :content => "Hello there") end end describe "when the alias is different" do it "should not be equal" do Sub.new(doc, :alias => 'jp').should_not == Sub.new(doc, :alias => 'en') end end end describe "<<" do it "should accept String" do lambda { subject << 'anything' }.should_not raise_error end it "should raise InvalidChildError with non-acceptable objects" do lambda { subject << Voice.new(doc) }.should raise_error(InvalidChildError, "A Sub can only accept Strings as children") end end end # Desc end # SSML end # RubySpeech
avg_line_length: 28.596774
max_line_length: 131
alphanum_fraction: 0.57586
hexsha: 1d5ca2d23368f514ee0289a8f5e48b3de5cf8e58
size: 2,529
content:
class I686ElfGcc < Formula desc "GNU compiler collection for i686-elf" homepage "https://gcc.gnu.org" url "https://ftp.gnu.org/gnu/gcc/gcc-11.2.0/gcc-11.2.0.tar.xz" mirror "https://ftpmirror.gnu.org/gcc/gcc-11.2.0/gcc-11.2.0.tar.xz" sha256 "d08edc536b54c372a1010ff6619dd274c0f1603aa49212ba20f7aa2cda36fa8b" license "GPL-3.0-or-later" => { with: "GCC-exception-3.1" } bottle do sha256 arm64_big_sur: "03ea1c0b8db4064c4acd673fb1138d410699bd37c74db3e255479f93fe0f991b" sha256 big_sur: "cbf70ba4c4dcd222b84b9bde4e3f69a8c621959e20facaa8677ebd0b0f14d4da" sha256 catalina: "7aec8ee5b87f0a56236b59c4e796ec8eb5e991b3d2163a745bc9dbc12068592d" sha256 mojave: "b0f9c1aafd4ecaace843bb77ec4062a7dc0f3b61f40506c762ad79d586811a51" sha256 x86_64_linux: "33a850dd3e3c9b93c2899ff81ddd4199a942ac763691e829733ec87aa5eea732" end depends_on "gmp" depends_on "i686-elf-binutils" depends_on "libmpc" depends_on "mpfr" # Remove when upstream has Apple Silicon support if Hardware::CPU.arm? patch do # patch from gcc-11.1.0-arm branch url "https://github.com/fxcoudert/gcc/commit/eea3046c5fa62d4dee47e074c7a758570d9da61c.patch?full_index=1" sha256 "b55ca05a0ed32f69f63bbe708568df5ad62d938da0e34b515d601bb966d32d40" end end def install target = "i686-elf" mkdir "i686-elf-gcc-build" do system "../configure", "--target=#{target}", "--prefix=#{prefix}", "--infodir=#{info}/#{target}", "--disable-nls", "--without-isl", "--without-headers", "--with-as=#{Formula["i686-elf-binutils"].bin}/i686-elf-as", "--with-ld=#{Formula["i686-elf-binutils"].bin}/i686-elf-ld", "--enable-languages=c,c++" system "make", "all-gcc" system "make", "install-gcc" system "make", "all-target-libgcc" system "make", "install-target-libgcc" # FSF-related man pages may conflict with native gcc (share/"man/man7").rmtree end end test do (testpath/"test-c.c").write <<~EOS int main(void) { int i=0; while(i<10) i++; return i; } EOS system "#{bin}/i686-elf-gcc", "-c", "-o", "test-c.o", "test-c.c" assert_match "file format elf32-i386", shell_output("#{Formula["i686-elf-binutils"].bin}/i686-elf-objdump -a test-c.o") end end
avg_line_length: 37.746269
max_line_length: 111
alphanum_fraction: 0.62673
hexsha: 916381d41c5e30f4859538eeb01abb6ae8d7d3d3
size: 567
content:

class CopyAreaFeaturesAndAreasToAreaPlaces < ActiveRecord::Migration
  def up
    execute "INSERT INTO area_places (area_id,place_id)
             SELECT a.id, p.id FROM area_features af
             INNER JOIN places a ON af.area_id = a.area_id
             INNER JOIN places p ON af.feature_id = p.feature_id
             UNION
             SELECT a.id, p.id FROM area_areas aa
             INNER JOIN places a ON aa.parent_area_id = a.area_id
             INNER JOIN places p ON aa.child_area_id = p.area_id"
  end

  def down
    execute "DELETE FROM area_places"
  end
end

avg_line_length: 33.352941
max_line_length: 80
alphanum_fraction: 0.664903
hexsha: 5da539dd1b03d226b6911fc9ecfbc66844e4a233
size: 1,091
content:

class Pandaseq < Formula
  # cite Masella_2012: "https://doi.org/10.1186/1471-2105-13-31"
  desc "PAired-eND Assembler for DNA sequences"
  homepage "https://github.com/neufeld/pandaseq"
  url "https://github.com/neufeld/pandaseq/archive/v2.11.tar.gz"
  sha256 "6e3e35d88c95f57d612d559e093656404c1d48c341a8baa6bef7bb0f09fc8f82"
  license "GPL-3.0"

  bottle do
    root_url "https://archive.org/download/brewsci/bottles-bio"
    sha256 mojave: "a9e047bdfd18226644594b687f9daf13a7570f4e16f4a2c3860334932adee5a9"
    sha256 x86_64_linux: "7c5ba5be45e1505bbb4a6b7ca6d46a634a06edb302f8c76544b2feea36626b5e"
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "pkg-config" => :build
  depends_on "libtool"

  uses_from_macos "bzip2"
  uses_from_macos "curl"
  uses_from_macos "zlib"

  def install
    system "./autogen.sh"
    system "./configure", "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    # https://github.com/neufeld/pandaseq/issues/75
    assert_match version.to_s, shell_output("#{bin}/pandaseq -h 2>&1", 1)
  end
end

avg_line_length: 30.305556
max_line_length: 91
alphanum_fraction: 0.735105
hexsha: 618ad7feb4e0d2c208152fced135688effb77f6f
size: 10,585
content:
# frozen_string_literal: true require 'haml/attribute_builder' require 'haml/attribute_compiler' require 'haml/temple_line_counter' module Haml class Compiler include Haml::Util attr_accessor :options def initialize(options) @options = Options.wrap(options) @to_merge = [] @temple = [:multi] @node = nil @filters = Filters.defined.merge(options[:filters]) @attribute_compiler = AttributeCompiler.new(@options) end def call(node) compile(node) @temple end def compile(node) parent, @node = @node, node if node.children.empty? send(:"compile_#{node.type}") else send(:"compile_#{node.type}") {node.children.each {|c| compile c}} end ensure @node = parent end private def compile_root @output_line = 1 yield if block_given? flush_merged_text end def compile_plain push_text("#{@node.value[:text]}\n") end def nuke_inner_whitespace?(node) if node.value && node.value[:nuke_inner_whitespace] true elsif node.parent nuke_inner_whitespace?(node.parent) else false end end def compile_script(&block) push_script(@node.value[:text], :preserve_script => @node.value[:preserve], :escape_html => @node.value[:escape_html], :nuke_inner_whitespace => nuke_inner_whitespace?(@node), &block) end def compile_silent_script return if @options.suppress_eval push_silent(@node.value[:text]) keyword = @node.value[:keyword] if block_given? yield push_silent("end", :can_suppress) unless @node.value[:dont_push_end] elsif keyword == "end" if @node.parent.children.last.equal?(@node) # Since this "end" is ending the block, # we don't need to generate an additional one @node.parent.value[:dont_push_end] = true end # Don't restore dont_* for end because it isn't a conditional branch. end end def compile_haml_comment; end def compile_tag t = @node.value # Get rid of whitespace outside of the tag if we need to rstrip_buffer! if t[:nuke_outer_whitespace] if @options.suppress_eval object_ref = :nil parse = false value = t[:parse] ? nil : t[:value] dynamic_attributes = Haml::Parser::DynamicAttributes.new else object_ref = t[:object_ref] parse = t[:parse] value = t[:value] dynamic_attributes = t[:dynamic_attributes] end if @options[:trace] t[:attributes].merge!({"data-trace" => @options.filename.split('/views').last << ":" << @node.line.to_s}) end push_text("<#{t[:name]}") push_temple(@attribute_compiler.compile(t[:attributes], object_ref, dynamic_attributes)) push_text( if t[:self_closing] && @options.xhtml? " />#{"\n" unless t[:nuke_outer_whitespace]}" else ">#{"\n" unless (t[:self_closing] && @options.html?) ? t[:nuke_outer_whitespace] : (!block_given? || t[:preserve_tag] || t[:nuke_inner_whitespace])}" end ) if value && !parse push_text("#{value}</#{t[:name]}>#{"\n" unless t[:nuke_outer_whitespace]}") end return if t[:self_closing] if value.nil? yield if block_given? rstrip_buffer! if t[:nuke_inner_whitespace] push_text("</#{t[:name]}>#{"\n" unless t[:nuke_outer_whitespace]}") return end if parse push_script(value, t.merge(:in_tag => true)) push_text("</#{t[:name]}>#{"\n" unless t[:nuke_outer_whitespace]}") end end def compile_comment condition = "#{@node.value[:conditional]}>" if @node.value[:conditional] revealed = @node.value[:revealed] open = "<!--#{condition}#{'<!-->' if revealed}" close = "#{'<!--' if revealed}#{'<![endif]' if condition}-->" unless block_given? push_text("#{open} ") if @node.value[:parse] push_script(@node.value[:text], :in_tag => true, :nuke_inner_whitespace => true) else push_text(@node.value[:text]) end push_text(" #{close}\n") return end push_text("#{open}\n") yield if block_given? 
push_text("#{close}\n") end def compile_doctype doctype = text_for_doctype push_text("#{doctype}\n") if doctype end def compile_filter unless filter = @filters[@node.value[:name]] name = @node.value[:name] if ["maruku", "textile"].include?(name) raise Error.new(Error.message(:install_haml_contrib, name), @node.line - 1) else raise Error.new(Error.message(:filter_not_defined, name), @node.line - 1) end end filter.internal_compile(self, @node.value[:text]) end def text_for_doctype if @node.value[:type] == "xml" return nil if @options.html? wrapper = @options.attr_wrapper return "<?xml version=#{wrapper}1.0#{wrapper} encoding=#{wrapper}#{@node.value[:encoding] || "utf-8"}#{wrapper} ?>" end if @options.html5? '<!DOCTYPE html>' else if @options.xhtml? if @node.value[:version] == "1.1" '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">' elsif @node.value[:version] == "5" '<!DOCTYPE html>' else case @node.value[:type] when "strict"; '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">' when "frameset"; '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">' when "mobile"; '<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">' when "rdfa"; '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML+RDFa 1.0//EN" "http://www.w3.org/MarkUp/DTD/xhtml-rdfa-1.dtd">' when "basic"; '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">' else '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">' end end elsif @options.html4? case @node.value[:type] when "strict"; '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">' when "frameset"; '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Frameset//EN" "http://www.w3.org/TR/html4/frameset.dtd">' else '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">' end end end end # Evaluates `text` in the context of the scope object, but # does not output the result. def push_silent(text, can_suppress = false) flush_merged_text return if can_suppress && @options.suppress_eval? newline = (text == "end") ? ";" : "\n" @temple << [:code, "#{resolve_newlines}#{text}#{newline}"] @output_line = @output_line + text.count("\n") + newline.count("\n") end # Adds `text` to `@buffer`. def push_text(text) @to_merge << [:text, text] end def push_temple(temple) flush_merged_text @temple.concat([[:newline]] * resolve_newlines.count("\n")) @temple << temple @output_line += TempleLineCounter.count_lines(temple) end def flush_merged_text return if @to_merge.empty? @to_merge.each do |type, val| case type when :text @temple << [:static, val] when :script @temple << [:dynamic, val] else raise SyntaxError.new("[HAML BUG] Undefined entry in Haml::Compiler@to_merge.") end end @to_merge = [] end # Causes `text` to be evaluated in the context of # the scope object and the result to be added to `@buffer`. # # If `opts[:preserve_script]` is true, Haml::Helpers#find_and_preserve is run on # the result before it is added to `@buffer` def push_script(text, opts = {}) return if @options.suppress_eval? no_format = !(opts[:preserve_script] || opts[:preserve_tag] || opts[:escape_html]) unless block_given? push_generated_script(no_format ? 
"(#{text}\n).to_s" : build_script_formatter("(#{text}\n)", opts)) push_text("\n") unless opts[:in_tag] || opts[:nuke_inner_whitespace] return end flush_merged_text push_silent "haml_temp = #{text}" yield push_silent('end', :can_suppress) unless @node.value[:dont_push_end] @temple << [:dynamic, no_format ? 'haml_temp.to_s;' : build_script_formatter('haml_temp', opts)] end def build_script_formatter(text, opts) text = "(#{text}).to_s" if opts[:escape_html] text = "::Haml::Helpers.html_escape(#{text})" end if opts[:nuke_inner_whitespace] text = "(#{text}).strip" end if opts[:preserve_tag] text = "_hamlout.fix_textareas!(::Haml::Helpers.preserve(#{text}))" elsif opts[:preserve_script] text = "_hamlout.fix_textareas!(::Haml::Helpers.find_and_preserve(#{text}, _hamlout.options[:preserve]))" end "#{text};" end def push_generated_script(text) @to_merge << [:script, resolve_newlines + text] @output_line += text.count("\n") end def resolve_newlines diff = @node.line - @output_line return "" if diff <= 0 @output_line = @node.line "\n" * diff end # Get rid of and whitespace at the end of the buffer # or the merged text def rstrip_buffer!(index = -1) last = @to_merge[index] if last.nil? push_silent("_hamlout.rstrip!", false) return end case last.first when :text last[1] = last[1].rstrip if last[1].empty? @to_merge.slice! index rstrip_buffer! index end when :script last[1].gsub!(/\(haml_temp, (.*?)\);$/, '(haml_temp.rstrip, \1);') rstrip_buffer! index - 1 else raise SyntaxError.new("[HAML BUG] Undefined entry in Haml::Compiler@to_merge.") end end end end
avg_line_length: 31.88253
max_line_length: 159
alphanum_fraction: 0.586301
hexsha: 39d6c7e109a042bae3dcd437036bc62824581983
size: 1,450
content:

module SessionsHelper

  # Logs in the given user.
  def log_in(user)
    session[:user_id] = user.id
  end

  # Returns the current logged-in user (if any).
  def current_user
    @current_user ||= User.find_by(id: session[:user_id])
  end

  def current_user?(user)
    user == current_user
  end

  # Remembers a user in a persistent session.
  def remember(user)
    user.remember
    cookies.permanent.signed[:user_id] = user.id
    cookies.permanent[:remember_token] = user.remember_token
  end

  def current_user
    if (user_id = session[:user_id])
      @current_user ||= User.find_by(id: user_id)
    elsif (user_id = cookies.signed[:user_id])
      user = User.find_by(id: user_id)
      if user && user.authenticated?(:remember, cookies[:remember_token])
        log_in user
        @current_user = user
      end
    end
  end

  def forget(user)
    user.forget
    cookies.delete(:user_id)
    cookies.delete(:remember_token)
  end

  # Logs out the current user.
  def log_out
    forget(current_user)
    session.delete(:user_id)
    @current_user = nil
  end

  # Returns true if the user is logged in, false otherwise.
  def logged_in?
    !current_user.nil?
  end

  def redirect_back_or(default)
    redirect_to(session[:forwarding_url] || default)
    session.delete(:forwarding_url)
  end

  # Stores the URL trying to be accessed.
  def store_location
    session[:forwarding_url] = request.original_url if request.get?
  end
end

avg_line_length: 24.576271
max_line_length: 73
alphanum_fraction: 0.687586
hexsha: 8784e86f2839f51787bf3e3fcf91621f4fe9fbbe
size: 135
content:

require 'test_helper'

class HomePageControllerTest < ActionController::TestCase
  # test "the truth" do
  #   assert true
  # end
end

avg_line_length: 16.875
max_line_length: 57
alphanum_fraction: 0.733333
hexsha: bbcd8cac6fa76b6c2e518f5c4ef650972145306c
size: 166
content:

# frozen_string_literal: true

require_relative '../../common/spec/spec_helper_head'

require 'nanoc/external'

require_relative '../../common/spec/spec_helper_foot'

avg_line_length: 20.75
max_line_length: 53
alphanum_fraction: 0.777108
hexsha: 185b0db2294c4c826584e6f37ee0065bce688ed4
size: 5,455
content:
=begin #Swagger Petstore #This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ OpenAPI spec version: 1.0.0 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.4.16-SNAPSHOT =end require 'date' module Petstore class Category attr_accessor :id attr_accessor :name # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'id' => :'id', :'name' => :'name' } end # Attribute type mapping. def self.swagger_types { :'id' => :'Integer', :'name' => :'String' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v } if attributes.has_key?(:'id') self.id = attributes[:'id'] end if attributes.has_key?(:'name') self.name = attributes[:'name'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properties with the reasons def list_invalid_properties invalid_properties = Array.new invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? true end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && id == o.id && name == o.name end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Fixnum] Hash code def hash [id, name].hash end # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /\AArray<(.*)>/i # check to ensure the input is an array given that the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) }) end elsif !attributes[self.class.attribute_map[key]].nil? 
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # Deserializes the data based on type # @param string type Data type # @param string value Value to be deserialized # @return [Object] Deserialized data def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /\A(true|t|yes|y|1)\z/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model temp_model = Petstore.const_get(type).new temp_model.build_from_hash(value) end end # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # to_body is an alias to to_hash (backward compatibility) # @return [Hash] Returns the object in the form of hash def to_body to_hash end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
avg_line_length: 28.118557
max_line_length: 157
alphanum_fraction: 0.610816
hexsha: 39b9c83fcaa48993440206e114802ab124901877
size: 2,059
content:
class GmtAT4 < Formula desc "Manipulation of geographic and Cartesian data sets" homepage "https://gmt.soest.hawaii.edu/" url "ftp://ftp.soest.hawaii.edu/gmt/gmt-4.5.17-src.tar.bz2" mirror "https://fossies.org/linux/misc/GMT/gmt-4.5.17-src.tar.bz2" mirror "https://mirrors.ustc.edu.cn/gmt/gmt-4.5.17-src.tar.bz2" sha256 "d69c4e2075f16fb7c153ba77429a7b60e45c44583ebefd7aae63ae05439d1d41" revision 2 bottle do sha256 "1881b5ae177fd5a46b998c2b3b91b3f19d9e1f6d3d118e069255d2bd425f3017" => :high_sierra sha256 "d482672713a9cd06c8ea109d4df8ec4c08b315b6b5ddb4def68720773816c696" => :sierra sha256 "a862422e9ee1e2b2347f28f19326a4e507021ad8ab58cdc718dcecd15ccf26de" => :el_capitan sha256 "f34b9d1ba035a42a3f8535ca96715e7be9cb1a09b361a9a71b41f6545a57939a" => :x86_64_linux end keg_only :versioned_formula depends_on "gdal" depends_on "netcdf" resource "gshhg" do url "ftp://ftp.soest.hawaii.edu/gmt/gshhg-gmt-2.3.7.tar.gz" mirror "https://fossies.org/linux/misc/GMT/gshhg-gmt-2.3.7.tar.gz" mirror "https://mirrors.ustc.edu.cn/gmt/gshhg-gmt-2.3.7.tar.gz" sha256 "9bb1a956fca0718c083bef842e625797535a00ce81f175df08b042c2a92cfe7f" end def install ENV.deparallelize # Parallel builds don't work due to missing makefile dependencies system "./configure", "--prefix=#{prefix}", "--datadir=#{share}/gmt4", "--enable-gdal=#{Formula["gdal"].opt_prefix}", "--enable-netcdf=#{Formula["netcdf"].opt_prefix}", "--enable-shared", "--enable-triangle", "--disable-xgrid", "--disable-mex" system "make" system "make", "install-gmt", "install-data", "install-suppl", "install-man" (share/"gmt4").install resource("gshhg") end test do gmt = OS.mac? ? "gmt" : "GMT" system "#{bin}/#{gmt} pscoast -R-90/-70/0/20 -JM6i -P -Ba5 -Gchocolate > test.ps" assert_predicate testpath/"test.ps", :exist? end end
avg_line_length: 41.18
max_line_length: 94
alphanum_fraction: 0.663429
hexsha: 4ad9eb92d1e760038f92ae8d6cd6603bed5b1f83
size: 3,494
content:
require_relative '../../spec_helper' require 'bigdecimal' describe "BigDecimal#truncate" do before :each do @arr = ['3.14159', '8.7', "0.314159265358979323846264338327950288419716939937510582097494459230781640628620899862803482534211706798214808651328230664709384460955058223172535940812848111745028410270193852110555964462294895493038196442881097566593014782083152134043E1"] @big = BigDecimal("123456.789") @nan = BigDecimal('NaN') @infinity = BigDecimal('Infinity') @infinity_negative = BigDecimal('-Infinity') end it "returns value of type Integer." do @arr.each do |x| BigDecimal(x).truncate.kind_of?(Integer).should == true end end it "returns the integer part as a BigDecimal if no precision given" do BigDecimal(@arr[0]).truncate.should == 3 BigDecimal(@arr[1]).truncate.should == 8 BigDecimal(@arr[2]).truncate.should == 3 BigDecimal('0').truncate.should == 0 BigDecimal('0.1').truncate.should == 0 BigDecimal('-0.1').truncate.should == 0 BigDecimal('1.5').truncate.should == 1 BigDecimal('-1.5').truncate.should == -1 BigDecimal('1E10').truncate.should == BigDecimal('1E10') BigDecimal('-1E10').truncate.should == BigDecimal('-1E10') BigDecimal('1.8888E10').truncate.should == BigDecimal('1.8888E10') BigDecimal('-1E-1').truncate.should == 0 end it "returns value of given precision otherwise" do BigDecimal('-1.55').truncate(1).should == BigDecimal('-1.5') BigDecimal('1.55').truncate(1).should == BigDecimal('1.5') BigDecimal(@arr[0]).truncate(2).should == BigDecimal("3.14") BigDecimal('123.456').truncate(2).should == BigDecimal("123.45") BigDecimal('123.456789').truncate(4).should == BigDecimal("123.4567") BigDecimal('0.456789').truncate(10).should == BigDecimal("0.456789") BigDecimal('-1E-1').truncate(1).should == BigDecimal('-0.1') BigDecimal('-1E-1').truncate(2).should == BigDecimal('-0.1E0') BigDecimal('-1E-1').truncate.should == BigDecimal('0') BigDecimal('-1E-1').truncate(0).should == BigDecimal('0') BigDecimal('-1E-1').truncate(-1).should == BigDecimal('0') BigDecimal('-1E-1').truncate(-2).should == BigDecimal('0') BigDecimal(@arr[1]).truncate(1).should == BigDecimal("8.7") BigDecimal(@arr[2]).truncate(100).should == BigDecimal(\ "3.1415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679") end it "sets n digits left of the decimal point to 0, if given n < 0" do @big.truncate(-1).should == BigDecimal("123450.0") @big.truncate(-2).should == BigDecimal("123400.0") BigDecimal(@arr[2]).truncate(-1).should == 0 end it "returns NaN if self is NaN" do @nan.truncate(-1).should.nan? @nan.truncate(+1).should.nan? @nan.truncate(0).should.nan? end it "returns Infinity if self is infinite" do @infinity.truncate(-1).should == @infinity @infinity.truncate(+1).should == @infinity @infinity.truncate(0).should == @infinity @infinity_negative.truncate(-1).should == @infinity_negative @infinity_negative.truncate(+1).should == @infinity_negative @infinity_negative.truncate(0).should == @infinity_negative end it "returns the same value if self is special value" do -> { @nan.truncate }.should raise_error(FloatDomainError) -> { @infinity.truncate }.should raise_error(FloatDomainError) -> { @infinity_negative.truncate }.should raise_error(FloatDomainError) end end
avg_line_length: 42.609756
max_line_length: 273
alphanum_fraction: 0.691757
hexsha: 08ba5575714f60dd9a76c6712ccffe6a246829d3
size: 1,513
content:

title 'Tests to confirm renderproto library exists'

plan_origin = ENV['HAB_ORIGIN']
plan_name = input('plan_name', value: 'renderproto')

control 'core-plans-renderproto-library-exists' do
  impact 1.0
  title 'Ensure renderproto library exists'
  desc '
  Verify renderproto library by ensuring that
  (1) its installation directory exists;
  (2) the library exists;
  (3) its pkgconfig metadata contains the expected version
  '

  plan_installation_directory = command("hab pkg path #{plan_origin}/#{plan_name}")
  describe plan_installation_directory do
    its('exit_status') { should eq 0 }
    its('stdout') { should_not be_empty }
  end

  library_filename = input('library_filename', value: 'renderproto.h')
  library_full_path = File.join(plan_installation_directory.stdout.strip, 'include', 'X11', 'extensions', library_filename)
  describe file(library_full_path) do
    it { should exist }
  end

  plan_pkg_ident = ((plan_installation_directory.stdout.strip).match /(?<=pkgs\/)(.*)/)[1]
  plan_pkg_version = (plan_pkg_ident.match /^#{plan_origin}\/#{plan_name}\/(?<version>.*)\//)[:version]
  pkgconfig_filename = input('pkgconfig_filename', value: 'renderproto.pc')
  pkgconfig_full_path = File.join(plan_installation_directory.stdout.strip, 'lib', 'pkgconfig', pkgconfig_filename)
  describe command("cat #{pkgconfig_full_path}") do
    its('exit_status') { should eq 0 }
    its('stdout') { should_not be_empty }
    its('stdout') { should match /Version:\s+#{plan_pkg_version}/ }
  end
end

avg_line_length: 40.891892
max_line_length: 123
alphanum_fraction: 0.730998
hexsha: 1cb20ad768b2be62fa29d3bcb0f743349f1db6e9
size: 163
content:

shared_examples_for 'a worker' do
  let(:worker) { described_class }

  it 'responds to :handle_event' do
    expect(worker).to respond_to(:handle_event)
  end
end

avg_line_length: 23.285714
max_line_length: 47
alphanum_fraction: 0.736196
hexsha: 7aa6ced93f39cea1a2feb88dd8e1c2ff43f9361b
size: 296
content:

class CreateTripExpenses < ActiveRecord::Migration
  def change
    create_table :trip_expenses do |t|
      t.integer :schedule_id
      t.string :section
      t.boolean :round, null: false, default: true
      t.string :way
      t.integer :price

      t.timestamps
    end
  end
end

avg_line_length: 21.142857
max_line_length: 52
alphanum_fraction: 0.641892
hexsha: 03603055a13a213ae5844b97c781c6d627bc11b6
size: 676
content:

#!/usr/bin/env ruby1.9
# vim: filetype=ruby, fileencoding=UTF-8, tabsize=2, shiftwidth=2

#Copyright (c) 2008 Engine Yard, Inc. All rights reserved.
#This code was directly ported from <https://GitHub.Com/RubySpec/RubySpec/tree/master/language/not_spec.rb>

require File.expand_path File.join(File.dirname(__FILE__), 'spec_helper')
require 'b001e'

describe "The 'not' operator" do
  it 'turns false to true' do
    false.not.must_equal true
  end

  it 'turns nil to true' do
    nil.not.must_equal true
  end

  it 'turns true to false' do
    true.not.must_equal false
  end

  it 'turns anything not nil to false' do
    MiniTest::Mock.new.not.must_equal false
  end
end

avg_line_length: 24.142857
max_line_length: 107
alphanum_fraction: 0.727811
hexsha: 3844f77a402bc61d8614d311c808280a4759af86
size: 4,256
content:
# frozen_string_literal: true require 'spec_helper' require 'overcommit/hook_context/prepare_commit_msg' describe Overcommit::Hook::PrepareCommitMsg::ReplaceBranch do def checkout_branch(branch) allow(Overcommit::GitRepo).to receive(:current_branch).and_return(branch) end def new_config(opts = {}) default = Overcommit::ConfigurationLoader.default_configuration return default if opts.empty? default.merge( Overcommit::Configuration.new( 'PrepareCommitMsg' => { 'ReplaceBranch' => opts.merge('enabled' => true) } ) ) end def new_context(config, argv) Overcommit::HookContext::PrepareCommitMsg.new(config, argv, StringIO.new) end def hook_for(config, context) described_class.new(config, context) end def add_file(name, contents) File.open(name, 'w') { |f| f.puts contents } end def remove_file(name) File.delete(name) end before { allow(Overcommit::Utils).to receive_message_chain(:log, :debug) } let(:config) { new_config } let(:normal_context) { new_context(config, ['COMMIT_EDITMSG']) } let(:message_context) { new_context(config, %w[COMMIT_EDITMSG message]) } let(:commit_context) { new_context(config, %w[COMMIT_EDITMSG commit HEAD]) } let(:merge_context) { new_context(config, %w[MERGE_MSG merge]) } let(:squash_context) { new_context(config, %w[SQUASH_MSG squash]) } let(:template_context) { new_context(config, ['template.txt', 'template']) } subject(:hook) { hook_for(config, normal_context) } describe '#run' do before { add_file 'COMMIT_EDITMSG', '' } after { remove_file 'COMMIT_EDITMSG' } context 'when the checked out branch matches the pattern' do before { checkout_branch '123-topic' } before { hook.run } it { is_expected.to pass } it 'prepends the replacement text' do expect(File.read('COMMIT_EDITMSG')).to eq("[#123]\n") end end context "when the checked out branch doesn't matches the pattern" do before { checkout_branch 'topic-123' } before { hook.run } context 'with the default `skipped_commit_types`' do it { is_expected.to warn } end context 'when merging, and `skipped_commit_types` includes `merge`' do let(:config) { new_config('skipped_commit_types' => ['merge']) } subject(:hook) { hook_for(config, merge_context) } it { is_expected.to pass } end context 'when merging, and `skipped_commit_types` includes `template`' do let(:config) { new_config('skipped_commit_types' => ['template']) } subject(:hook) { hook_for(config, template_context) } it { is_expected.to pass } end context 'when merging, and `skipped_commit_types` includes `message`' do let(:config) { new_config('skipped_commit_types' => ['message']) } subject(:hook) { hook_for(config, message_context) } it { is_expected.to pass } end context 'when merging, and `skipped_commit_types` includes `commit`' do let(:config) { new_config('skipped_commit_types' => ['commit']) } subject(:hook) { hook_for(config, commit_context) } it { is_expected.to pass } end context 'when merging, and `skipped_commit_types` includes `squash`' do let(:config) { new_config('skipped_commit_types' => ['squash']) } subject(:hook) { hook_for(config, squash_context) } it { is_expected.to pass } end end context 'when the replacement text points to a valid filename' do before { checkout_branch '123-topic' } before { add_file 'replacement_text.txt', 'FOO' } after { remove_file 'replacement_text.txt' } let(:config) { new_config('replacement_text' => 'replacement_text.txt') } let(:normal_context) { new_context(config, ['COMMIT_EDITMSG']) } subject(:hook) { hook_for(config, normal_context) } before { hook.run } it { is_expected.to pass } let(:commit_msg) { 
File.read('COMMIT_EDITMSG') } it 'uses the file contents as the replacement text' do expect(commit_msg).to eq(File.read('replacement_text.txt')) end end end end
avg_line_length: 32.242424
max_line_length: 80
alphanum_fraction: 0.655075
hexsha: 28f0e37d743853bc069eba7fe14bd4f560fceb43
size: 624
content:

class SearchEngine
  class SearchRequest
    class BasePart
      attr_reader :name
      attr_reader :value
      attr_reader :exclude

      def initialize(name:, value:, exclude: false)
        self.name = name
        self.value = value
        self.exclude = exclude
      end

      def name=(value) ; @name = value&.to_s ; end
      def value=(value) ; @value = value&.to_s ; end
      def exclude=(value) ; @exclude = value == true ; end

      def ==(other)
        self.name == other&.name && self.value == other&.value
      end

      def eql?(other)
        self == other
      end
    end
  end
end

avg_line_length: 20.129032
max_line_length: 58
alphanum_fraction: 0.559295
bfddfa0eb96a0f5f8b41cf44d3b437361e70f16d
1,920
Pod::Spec.new do |s|
  s.name = "ReactiveCocoa"
  s.version = "2.2.4"
  s.summary = "A framework for composing and transforming streams of values."
  s.homepage = "https://github.com/blog/1107-reactivecocoa-is-now-open-source"
  s.author = { "Josh Abernathy" => "[email protected]" }
  s.source = { :git => "https://github.com/ReactiveCocoa/ReactiveCocoa.git", :tag => "v#{s.version}" }
  s.license = 'MIT'
  s.description = "ReactiveCocoa (RAC) is an Objective-C framework for Functional Reactive Programming. It provides APIs for composing and transforming streams of values."
  s.requires_arc = true

  s.ios.deployment_target = '5.0'
  s.osx.deployment_target = '10.7'

  s.compiler_flags = '-DOS_OBJECT_USE_OBJC=0'

  s.prepare_command = <<-'END'
    find . \( -regex '.*EXT.*\.[mh]$' -o -regex '.*metamacros\.[mh]$' \) -execdir mv {} RAC{} \;
    find . -regex '.*\.[hm]' -exec sed -i '' -E 's@"(EXT.*|metamacros)\.h"@"RAC\1.h"@' {} \;
    find . -regex '.*\.[hm]' -exec sed -i '' -E 's@<ReactiveCocoa/(EXT.*)\.h>@<ReactiveCocoa/RAC\1.h>@' {} \;
  END

  s.subspec 'no-arc' do |sp|
    sp.source_files = 'ReactiveCocoaFramework/ReactiveCocoa/RACObjCRuntime.{h,m}'
    sp.requires_arc = false
  end

  s.subspec 'Core' do |sp|
    sp.dependency 'ReactiveCocoa/no-arc'
    sp.source_files = 'ReactiveCocoaFramework/ReactiveCocoa/**/*.{d,h,m}'
    sp.private_header_files = '**/*Private.h', '**/*EXTRuntimeExtensions.h'
    sp.exclude_files = 'ReactiveCocoaFramework/ReactiveCocoa/RACObjCRuntime.{h,m}'
    sp.ios.exclude_files = '**/*{AppKit,NSControl,NSText}*'
    sp.osx.exclude_files = '**/*{UIActionSheet,UIAlertView,UIBarButtonItem,UIButton,UICollectionReusableView,UIControl,UIDatePicker,UIGestureRecognizer,UIRefreshControl,UISegmentedControl,UISlider,UIStepper,UISwitch,UITableViewCell,UITableViewHeaderFooterView,UIText}*'
    sp.header_dir = 'ReactiveCocoa'
  end
end
51.891892
269
0.674479
b93fc03b1b4c170b346e1cacf37e3cf178ab01be
106
# frozen_string_literal: true

class CreateAllTables < ActiveRecord::Migration[6.0]
  def change; end
end
17.666667
52
0.783019
1c6bfde5723211652f932788cdc9d9907cfccb64
321
# frozen_string_literal: true

module QryFilter
  module Generators
    class InstallGenerator < ::Rails::Generators::Base
      source_root File.expand_path('templates', __dir__)

      def copy_application_filter
        template 'application_filter.rb', 'app/filters/application_filter.rb'
      end
    end
  end
end
22.928571
77
0.728972
797c37e197e5ddeed50ddc7dbdedd5ca6e71cb5c
12,604
require "os/linux/glibc" class LlvmAT5 < Formula desc "Next-gen compiler infrastructure" homepage "https://llvm.org/" url "https://releases.llvm.org/5.0.2/llvm-5.0.2.src.tar.xz" sha256 "d522eda97835a9c75f0b88ddc81437e5edbb87dc2740686cb8647763855c2b3c" revision 1 bottle do cellar :any rebuild 1 sha256 "33c32271f2094e27473b54577b6c04e89fb457a09946b8ea1fe9df3bda8f6511" => :mojave sha256 "45b114bd1d3d652b679304cee5f405d1f856c8cc015a3f1c08764477c99310c2" => :high_sierra sha256 "8673a94fd59e891d0e26dc535944ed52a50c8074cfe88d6307cb05119c984b81" => :sierra end pour_bottle? do reason "The bottle needs to be installed into #{Homebrew::DEFAULT_PREFIX}." satisfy { OS.mac? || HOMEBREW_PREFIX.to_s == Homebrew::DEFAULT_PREFIX } end head do url "https://llvm.org/git/llvm.git", :branch => "release_50" resource "clang" do url "https://llvm.org/git/clang.git", :branch => "release_50" end resource "clang-extra-tools" do url "https://llvm.org/git/clang-tools-extra.git", :branch => "release_50" end resource "compiler-rt" do url "https://llvm.org/git/compiler-rt.git", :branch => "release_50" end resource "libcxx" do url "https://llvm.org/git/libcxx.git", :branch => "release_50" end resource "libcxxabi" do url "http://llvm.org/git/libcxxabi.git", :branch => "release_50" end resource "libunwind" do url "https://llvm.org/git/libunwind.git", :branch => "release_50" end resource "lld" do url "https://llvm.org/git/lld.git", :branch => "release_50" end resource "lldb" do url "https://llvm.org/git/lldb.git", :branch => "release_50" end resource "openmp" do url "https://llvm.org/git/openmp.git", :branch => "release_50" end resource "polly" do url "https://llvm.org/git/polly.git", :branch => "release_50" end end keg_only :versioned_formula depends_on "cmake" => :build depends_on "libffi" unless OS.mac? depends_on "gcc" # <atomic> is provided by gcc depends_on "glibc" => (Formula["glibc"].installed? || OS::Linux::Glibc.system_version < Formula["glibc"].version) ? 
:recommended : :optional depends_on "binutils" # needed for gold and strip depends_on "libedit" # llvm requires <histedit.h> depends_on "ncurses" depends_on "libxml2" depends_on "python" if build.with?("python") || build.with?("lldb") depends_on "zlib" depends_on "python@2" end resource "clang" do url "https://releases.llvm.org/5.0.2/cfe-5.0.2.src.tar.xz" sha256 "fa9ce9724abdb68f166deea0af1f71ca0dfa9af8f7e1261f2cae63c280282800" end resource "clang-extra-tools" do url "https://releases.llvm.org/5.0.2/clang-tools-extra-5.0.2.src.tar.xz" sha256 "a3362a854ba4a60336b21a95612f647f4b6de0afd88858f2420e41c5a31b0b05" end resource "compiler-rt" do url "https://releases.llvm.org/5.0.2/compiler-rt-5.0.2.src.tar.xz" sha256 "3efe9ddf3f69e0c0a45cde57ee93911f36f3ab5f2a7f6ab8c8efb3db9b24ed46" end resource "libcxx" do url "https://releases.llvm.org/5.0.2/libcxx-5.0.2.src.tar.xz" sha256 "6edf88e913175536e1182058753fff2365e388e017a9ec7427feb9929c52e298" end resource "libunwind" do url "https://releases.llvm.org/5.0.2/libunwind-5.0.2.src.tar.xz" sha256 "706e43c69c7be0fdeb55ebdf653cf47ca77e471d1584f1dbf12a568a93df9928" end resource "lld" do url "https://releases.llvm.org/5.0.2/lld-5.0.2.src.tar.xz" sha256 "46456d72ec411c6d5327ad3fea1358296f0dfe508caf1fa63ce4184f652e07aa" end resource "lldb" do url "https://releases.llvm.org/5.0.2/lldb-5.0.2.src.tar.xz" sha256 "78ba05326249b4d7577db56d16b2a7ffea43fc51e8592b0a1ac4d2ef87514216" # Fixes "error: no type named 'pid_t' in the global namespace" # https://github.com/Homebrew/homebrew-core/issues/17839 # Already fixed in upstream trunk patch do url "https://github.com/llvm-mirror/lldb/commit/324f93b5e30.patch?full_index=1" sha256 "f23fc92c2d61bf6c8bc6865994a75264fafba6ae435e4d2f4cc8327004523fb1" end end resource "openmp" do url "https://releases.llvm.org/5.0.2/openmp-5.0.2.src.tar.xz" sha256 "39ca542c540608d95d3299a474836a7b5f8377bcc5a68493379872738c28565c" end resource "polly" do url "https://releases.llvm.org/5.0.2/polly-5.0.2.src.tar.xz" sha256 "dda84e48b2195768c4ef25893edd5eeca731bed7e80a2376119dfbc3350e91b8" end def install # Apple's libstdc++ is too old to build LLVM ENV.libcxx if ENV.compiler == :clang (buildpath/"tools/clang").install resource("clang") unless OS.mac? # Add glibc to the list of library directories so that we won't have to do -L<path-to-glibc>/lib inreplace buildpath/"tools/clang/lib/Driver/ToolChains/Linux.cpp", "// Add the multilib suffixed paths where they are available.", "addPathIfExists(D, \"#{HOMEBREW_PREFIX}/opt/glibc/lib\", Paths);\n\n // Add the multilib suffixed paths where they are available." end (buildpath/"tools/clang/tools/extra").install resource("clang-extra-tools") (buildpath/"projects/openmp").install resource("openmp") (buildpath/"projects/libcxx").install resource("libcxx") (buildpath/"projects/libcxxabi").install resource("libcxxabi") unless OS.mac? (buildpath/"projects/libunwind").install resource("libunwind") (buildpath/"tools/lld").install resource("lld") (buildpath/"tools/polly").install resource("polly") (buildpath/"projects/compiler-rt").install resource("compiler-rt") # compiler-rt has some iOS simulator features that require i386 symbols # I'm assuming the rest of clang needs support too for 32-bit compilation # to work correctly, but if not, perhaps universal binaries could be # limited to compiler-rt. llvm makes this somewhat easier because compiler-rt # can almost be treated as an entirely different build from llvm. 
ENV.permit_arch_flags args = %W[ -DLIBOMP_ARCH=x86_64 -DLINK_POLLY_INTO_TOOLS=ON -DLLVM_BUILD_EXTERNAL_COMPILER_RT=ON -DLLVM_BUILD_LLVM_DYLIB=ON -DLLVM_ENABLE_EH=ON -DLLVM_ENABLE_FFI=ON -DLLVM_ENABLE_LIBCXX=ON -DLLVM_ENABLE_RTTI=ON -DLLVM_INCLUDE_DOCS=OFF -DLLVM_INSTALL_UTILS=ON -DLLVM_OPTIMIZED_TABLEGEN=ON -DLLVM_TARGETS_TO_BUILD=all -DWITH_POLLY=ON -DFFI_INCLUDE_DIR=#{Formula["libffi"].opt_lib}/libffi-#{Formula["libffi"].version}/include -DFFI_LIBRARY_DIR=#{Formula["libffi"].opt_lib} ] if OS.mac? args << "-DLLVM_CREATE_XCODE_TOOLCHAIN=OFF" else args << "-DLLVM_CREATE_XCODE_TOOLCHAIN=ON" args << "-DLLVM_ENABLE_LIBCXX=ON" if build_libcxx? args << "-DLLVM_ENABLE_LIBCXXABI=ON" if build_libcxx? && !OS.mac? end # Help just-built clang++ find <atomic> (and, possibly, other header files). Needed for compiler-rt unless OS.mac? gccpref = Formula["gcc"].opt_prefix.to_s args << "-DGCC_INSTALL_PREFIX=#{gccpref}" args << "-DCMAKE_C_COMPILER=#{gccpref}/bin/gcc" args << "-DCMAKE_CXX_COMPILER=#{gccpref}/bin/g++" args << "-DCMAKE_CXX_LINK_FLAGS=-L#{gccpref}/lib64 -Wl,-rpath,#{gccpref}/lib64" args << "-DCLANG_DEFAULT_CXX_STDLIB=#{build.with?("libcxx")?"libc++":"libstdc++"}" end mkdir "build" do system "cmake", "-G", "Unix Makefiles", "..", *(std_cmake_args + args) system "make" system "make", "install" system "make", "install-xcode-toolchain" if OS.mac? end (share/"cmake").install "cmake/modules" (share/"clang/tools").install Dir["tools/clang/tools/scan-{build,view}"] # scan-build is in Perl, so the @ in our path needs to be escaped inreplace "#{share}/clang/tools/scan-build/bin/scan-build", "$RealBin/bin/clang", "#{bin}/clang".gsub("@", "\\@") bin.install_symlink share/"clang/tools/scan-build/bin/scan-build", share/"clang/tools/scan-view/bin/scan-view" man1.install_symlink share/"clang/tools/scan-build/man/scan-build.1" # install llvm python bindings (lib/"python2.7/site-packages").install buildpath/"bindings/python/llvm" (lib/"python2.7/site-packages").install buildpath/"tools/clang/bindings/python/clang" # Remove conflicting libraries. # libgomp.so conflicts with gcc. # libunwind.so conflcits with libunwind. rm [lib/"libgomp.so", lib/"libunwind.so"] if OS.linux? # Strip executables/libraries/object files to reduce their size unless OS.mac? system("strip", "--strip-unneeded", "--preserve-dates", *(Dir[bin/"**/*", lib/"**/*"]).select do |f| f = Pathname.new(f) f.file? && (f.elf? 
|| f.extname == ".a") end) end end def caveats; <<~EOS To use the bundled libc++ please add the following LDFLAGS: LDFLAGS="-L#{opt_lib} -Wl,-rpath,#{opt_lib}" EOS end test do assert_equal prefix.to_s, shell_output("#{bin}/llvm-config --prefix").chomp (testpath/"omptest.c").write <<~EOS #include <stdlib.h> #include <stdio.h> #include <omp.h> int main() { #pragma omp parallel num_threads(4) { printf("Hello from thread %d, nthreads %d\\n", omp_get_thread_num(), omp_get_num_threads()); } return EXIT_SUCCESS; } EOS system "#{bin}/clang", "-L#{lib}", "-fopenmp", "-nobuiltininc", "-I#{lib}/clang/#{version}/include", *("-Wl,-rpath=#{lib}" unless OS.mac?), "omptest.c", "-o", "omptest" testresult = shell_output("./omptest") sorted_testresult = testresult.split("\n").sort.join("\n") expected_result = <<~EOS Hello from thread 0, nthreads 4 Hello from thread 1, nthreads 4 Hello from thread 2, nthreads 4 Hello from thread 3, nthreads 4 EOS assert_equal expected_result.strip, sorted_testresult.strip (testpath/"test.c").write <<~EOS #include <stdio.h> int main() { printf("Hello World!\\n"); return 0; } EOS (testpath/"test.cpp").write <<~EOS #include <iostream> int main() { std::cout << "Hello World!" << std::endl; return 0; } EOS # Testing Command Line Tools if OS.mac? && MacOS::CLT.installed? libclangclt = Dir["/Library/Developer/CommandLineTools/usr/lib/clang/#{MacOS::CLT.version.to_i}*"].last { |f| File.directory? f } system "#{bin}/clang++", "-v", "-nostdinc", "-I/Library/Developer/CommandLineTools/usr/include/c++/v1", "-I#{libclangclt}/include", "-I/usr/include", # need it because /Library/.../usr/include/c++/v1/iosfwd refers to <wchar.h>, which CLT installs to /usr/include "test.cpp", "-o", "testCLT++" assert_includes MachO::Tools.dylibs("testCLT++"), "/usr/lib/libc++.1.dylib" assert_equal "Hello World!", shell_output("./testCLT++").chomp system "#{bin}/clang", "-v", "-nostdinc", "-I/usr/include", # this is where CLT installs stdio.h "test.c", "-o", "testCLT" assert_equal "Hello World!", shell_output("./testCLT").chomp end # Testing Xcode if OS.mac? && MacOS::Xcode.installed? libclangxc = Dir["#{MacOS::Xcode.toolchain_path}/usr/lib/clang/#{DevelopmentTools.clang_version}*"].last { |f| File.directory? f } system "#{bin}/clang++", "-v", "-nostdinc", "-I#{MacOS::Xcode.toolchain_path}/usr/include/c++/v1", "-I#{libclangxc}/include", "-I#{MacOS.sdk_path}/usr/include", "test.cpp", "-o", "testXC++" assert_includes MachO::Tools.dylibs("testXC++"), "/usr/lib/libc++.1.dylib" assert_equal "Hello World!", shell_output("./testXC++").chomp system "#{bin}/clang", "-v", "-nostdinc", "-I#{MacOS.sdk_path}/usr/include", "test.c", "-o", "testXC" assert_equal "Hello World!", shell_output("./testXC").chomp end # link against installed libc++ # related to https://github.com/Homebrew/legacy-homebrew/issues/47149 system "#{bin}/clang++", "-v", "-nostdinc", "-std=c++11", "-stdlib=libc++", "-I#{MacOS::Xcode.toolchain_path}/usr/include/c++/v1", "-I#{libclangxc}/include", "-I#{MacOS.sdk_path}/usr/include", "-L#{lib}", "-Wl,-rpath,#{lib}", "test.cpp", "-o", "test" assert_includes MachO::Tools.dylibs("test"), "#{opt_lib}/libc++.1.dylib" assert_equal "Hello World!", shell_output("./test").chomp end end
36.853801
144
0.651142
614584ba59bdc38f2cc151fc265c03a33fcd57fa
533
cask 'creepy' do
  version '1.4.1'
  sha256 'c500216420cb32b7779f20726bc838868c4424d234b9dc7f076d083b317b5450'

  # github.com/jkakavas/creepy was verified as official when first introduced to the cask
  url "https://github.com/jkakavas/creepy/releases/download/v#{version}/cree.py_#{version}.dmg.zip"
  appcast 'https://github.com/jkakavas/creepy/releases.atom',
          checkpoint: '7e6a4adab2f72f4be25e9d15dd1a1d2a94d87e2276a6d6bd6c7384d152ba12e9'
  name 'Creepy'
  homepage 'http://www.geocreepy.com/'

  app 'cree.py.app'
end
38.071429
99
0.772983
d58d23d238f53fa22d5f05afb317d29d4d51bdc4
716
class EventSerializer < ActiveModel::Serializer
  include ActionView::Helpers::TextHelper
  attributes :guid, :title, :length, :scheduled_date, :language, :abstract, :speaker_ids, :type, :room, :track

  def scheduled_date
    t = object.time
    t.blank? ? '' : %( #{I18n.l t, format: :short}#{t.formatted_offset(false)} )
  end

  def speaker_ids
    speakers = object.event_users.select { |i| i.event_role == 'speaker' }
    speakers.map { |i| i.user.id }
  end

  def type
    object.event_type.try(:title)
  end

  def room
    object.room.try(:guid)
  end

  def track
    object.track.try(:guid)
  end

  def length
    object.event_type.try(:length) || object.event_type.program.schedule_interval
  end
end
22.375
110
0.678771
33251541e015776327d0b63bfc9700012b1b8fc2
1,842
require 'test_helper'

class CustomerConfigurationsControllerTest < ActionController::TestCase
  setup do
    @customer_configuration = customer_configurations(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:customer_configurations)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create customer_configuration" do
    assert_difference('CustomerConfiguration.count') do
      post :create, customer_configuration: { customer_id: @customer_configuration.customer_id, dailySlaEnd: @customer_configuration.dailySlaEnd, dailySlaStart: @customer_configuration.dailySlaStart, excludedDays: @customer_configuration.excludedDays, weeklySlaDays: @customer_configuration.weeklySlaDays }
    end

    assert_redirected_to customer_configuration_path(assigns(:customer_configuration))
  end

  test "should show customer_configuration" do
    get :show, id: @customer_configuration
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @customer_configuration
    assert_response :success
  end

  test "should update customer_configuration" do
    patch :update, id: @customer_configuration, customer_configuration: { customer_id: @customer_configuration.customer_id, dailySlaEnd: @customer_configuration.dailySlaEnd, dailySlaStart: @customer_configuration.dailySlaStart, excludedDays: @customer_configuration.excludedDays, weeklySlaDays: @customer_configuration.weeklySlaDays }
    assert_redirected_to customer_configuration_path(assigns(:customer_configuration))
  end

  test "should destroy customer_configuration" do
    assert_difference('CustomerConfiguration.count', -1) do
      delete :destroy, id: @customer_configuration
    end

    assert_redirected_to customer_configurations_path
  end
end
36.84
334
0.799131
1a82a655d1a512bdeaf0fcf621638206bb820ba7
2,119
# frozen_string_literal: true

module DocTemplate
  module Tags
    class HeadingTag < BaseTag
      TEMPLATE = 'heading.html.erb'

      def parse(node, opts = {})
        # we have to collect all the next siblings until next stop-tag
        params = {
          content: parse_nested(content_until_break(node), opts),
          heading: "<h3>#{heading(opts[:value])}</h3>",
          tag: self.class::TAG_NAME
        }
        @content = parse_template params, TEMPLATE
        replace_tag node
        self
      end

      private

      def prefix
        self.class::TITLE_PREFIX
      end

      def heading(value)
        value.include?(prefix + ':') ? value : "#{prefix}: #{value}"
      end
    end

    class RubricTag < HeadingTag
      TAG_NAME = 'rubric'
      TITLE_PREFIX = 'Rubric'
    end

    class JsTag < HeadingTag
      TAG_NAME = 'js'
      TITLE_PREFIX = 'Juicy Sentences'
    end

    class EtTag < HeadingTag
      TAG_NAME = 'et'
      TITLE_PREFIX = 'Exit Ticket'
    end

    class PhotoTag < HeadingTag
      TAG_NAME = 'photo'
      TITLE_PREFIX = 'Photograph'
    end

    class AssessTag < HeadingTag
      TAG_NAME = 'assess'
      TITLE_PREFIX = 'Assessment'
    end

    class ShTag < HeadingTag
      TAG_NAME = 'sh'

      def heading(value)
        value
      end
    end

    class KeyTag < HeadingTag
      TAG_NAME = 'key'

      def heading(value)
        "#{value}<br/>(For Teacher Reference)"
      end
    end

    class ThTag < HeadingTag
      TAG_NAME = 'th'

      def heading(value)
        "#{value}<br/>(For Teacher Reference)"
      end
    end
  end

  Template.register_tag(Tags::RubricTag::TAG_NAME, Tags::RubricTag)
  Template.register_tag(Tags::JsTag::TAG_NAME, Tags::JsTag)
  Template.register_tag(Tags::EtTag::TAG_NAME, Tags::EtTag)
  Template.register_tag(Tags::PhotoTag::TAG_NAME, Tags::PhotoTag)
  Template.register_tag(Tags::AssessTag::TAG_NAME, Tags::AssessTag)
  Template.register_tag(Tags::ShTag::TAG_NAME, Tags::ShTag)
  Template.register_tag(Tags::KeyTag::TAG_NAME, Tags::KeyTag)
  Template.register_tag(Tags::ThTag::TAG_NAME, Tags::ThTag)
end
23.285714
70
0.622463
ffde2accad65533abaf0bdc133f33ef188043200
12,335
class Menu attr_accessor :user def run welcome_banner self.user = User.prompt_for_user main_menu end def welcome_banner puts "\n\n" puts" ,gggg, " puts" ,88*Y8b, " puts" d8* `Y8 " puts" d8' 8b d8 gg " puts",8I *Y88P' * " puts"I8' ,gggg,gg ,ggg,,ggg,,ggg, gg,gggg, gg ,ggg,,ggg, ,gggg,gg " puts"d8 dP* *Y8I ,8* *8P* *8P* *8, I8P* *Yb 88 ,8* *8P* *8, dP* *Y8I " puts"Y8, i8' ,8I I8 8I 8I 8I I8' ,8i 88 I8 8I 8I i8' ,8I " puts"`Yba,,_____, ,d8, ,d8b,,dP 8I 8I Yb,,I8 _ ,d8' _,88,_,dP 8I Yb,,d8, ,d8I " puts" `*Y8888888 P*Y8888P*`Y88P' 8I 8I `Y8PI8 YY88888P8P*Y88P' 8I `Y8P*Y8888P*888" puts" I8 ,d8I'" puts" I8 ,dP'8I " puts" I8 ,8* 8I " puts" I8 I8 8I " puts" I8 `8, ,8I " puts" I8 `Y8P* " puts" ,gggg, ,ggg, " puts" ,88*Y8b, dP*8I " puts" d8* `Y8 dP 88 " puts" d8' 8b d8 dP 88 " puts" ,8I *Y88P' ,8' 88 " puts" Welcome to the Camping CA! I8' d88888888 " puts" d8 __ ,8* 88 " puts" Where you can explore all nine of Y8, dP* ,8P Y8 " puts" California's National Parks! `Yba,,_____, Yb,_,dP `8b, " puts" `*Y8888888 *Y8P* `Y8 " puts "\n\n" end def main_menu prompt = TTY::Prompt.new choices = [ {name: 'Explore National Parks', value: 1}, {name: 'Search for campgrounds', value: 2}, {name: 'Check availability', value: 3}, {name: 'Manage alerts', value: 4}, {name: 'Update profile', value: 5}, {name: 'Exit explorer', value: 6} ] choice = prompt.select("What would you like to do?", choices) puts "\n\n" if choice == 1 explore_rec_areas_menu elsif choice == 2 camp = campground_search_by_name_menu view_campground_details_menu(camp) elsif choice == 3 availability_menu elsif choice == 4 view_alerts_menu elsif choice == 5 user.update_profile(self) elsif choice == 6 puts "Thanks for exploring! Goodbye!\n\n" exit end end def explore_rec_areas_menu prompt = TTY::Prompt.new choices = RecArea.all_rec_areas choices << {name: "Return to main menu", value: 1} choice = prompt.select("Choose an area!", choices) puts "\n\n" if choice == 1 main_menu else view_rec_area_menu(choice) end end def view_rec_area_menu(area) prompt = TTY::Prompt.new area.view_area choices = [ {name: "View campgrounds", value: 1}, {name: "Return to view parks", value: 2}, {name: "Return to main menu", value: 3} ] choice = prompt.select("Select an option:", choices) puts "\n\n" if choice == 1 view_campgrounds_menu(area) elsif choice == 2 explore_rec_areas_menu else main_menu end end def view_campgrounds_menu(area) prompt = TTY::Prompt.new choices = area.view_campgrounds choices << {name: "Return to #{area.name}", value: 1} choices << {name: "Return to main menu", value: 2} choice = prompt.select("Select a campground to view details", choices) puts "\n\n" if choice == 1 view_rec_area_menu(area) elsif choice == 2 main_menu else view_campground_details_menu(choice) end end def view_campground_details_menu(camp) prompt = TTY::Prompt.new puts "#{camp.name}\n\n" choices = [ {name: "View description", value: 1}, {name: "View availability", value: 2}, {name: "View more campgrounds of #{camp.rec_area.name}", value: 3}, {name: "Search for campgrounds by name", value: 4}, {name: "Return to main menu", value: 5} ] choice = prompt.select("Select an option:", choices) puts "\n\n" if choice == 1 camp.view_description puts "\n\n" view_campground_details_menu(camp) elsif choice == 2 availability_menu(camp) elsif choice == 3 view_campgrounds_menu(camp.rec_area) elsif choice == 4 camp = campground_search_by_name_menu view_campground_details_menu(camp) else main_menu end end def availability_menu(camp = nil, date_array = []) if camp.nil? 
camp = campground_search_by_name_menu end if date_array.empty? date_array = get_dates end print_availability(camp, date_array[0], date_array[1]) prompt = TTY::Prompt.new choices = [ {name: "Change search dates", value: 1}, {name: "Change campground", value: 2}, {name: "Set an alert for this search", value: 3}, {name: "View all of #{camp.rec_area.name}'s campgrounds", value: 4}, {name: "Return to main menu", value: 5} ] choice = prompt.select("Select an option:", choices) puts "\n\n" if choice == 1 availability_menu(camp, []) elsif choice == 2 availability_menu(nil, date_array) elsif choice == 3 alert_menu(camp, date_array) elsif choice == 4 view_campgrounds_menu(camp.rec_area) else main_menu end end def campground_search_by_name_menu prompt = TTY::Prompt.new camp_name = prompt.ask("Enter a campground name to search, 'menu' to return to the main menu:") do |input| input.modify :trim, :up end puts "\n\n" if camp_name == "MENU" main_menu else camp = Campground.find_by_name(camp_name) if camp.nil? puts "Hmmm, I don't know that one. Please try again.\n\n" campground_search_by_name_menu else camp end end end def get_dates begin start_prompt = TTY::Prompt.new start_date = start_prompt.ask("Enter your arrival date: (within the next month, MM/DD)", convert: :date) end_prompt = TTY::Prompt.new end_date = end_prompt.ask("Enter your departure date: (within the next month, MM/DD", convert: :date) puts "\n\n" if end_date < start_date || end_date > Date.today.next_month puts "Invalid dates. Please try again." get_dates else [start_date, end_date] end rescue puts "Oops! There was a problem with the date that you entered. Please try again!" get_dates end end def print_availability(camp, start_date, end_date) avail_array = camp.check_availability(start_date, end_date) puts "Sites available for #{camp.name}:" table = TTY::Table.new header: ['Date','# Available'] avail_array.each do |hash| table << [hash[:date], hash[:avail]] end puts table.render :unicode, alignment: [:center] puts "\n\n" prompt = TTY::Prompt.new open_browser = prompt.yes?("Would you like to open recreation.gov to reserve?") puts "\n\n" if open_browser system("open https://www.recreation.gov/camping/campgrounds/#{camp.official_facility_id}/availability") end end def alert_menu(camp = nil, date_array = []) camp.set_alert(date_array, user) puts "An alert has been set. You will be emailed when sites become available.\n\n" prompt = TTY::Prompt.new choices = [ {name: "Set another alert", value: 1}, {name: "Manage alerts", value: 2}, {name: "Return to main menu", value: 3} ] choice = prompt.select("Select an option:", choices) puts "\n\n" if choice == 1 availability_menu elsif choice == 2 view_alerts_menu else main_menu end end def view_alerts_menu if user.alerts.empty? 
no_active_alerts else display_alerts prompt = TTY::Prompt.new choices = [ {name: "Update an alert", value: 1}, {name: "Delete an alert", value: 2}, {name: "Add an alert", value: 3}, {name: "Return to main menu", value: 4} ] choice = prompt.select("Select an option:", choices) puts "\n\n" if choice == 1 update_alert elsif choice == 2 delete_alert elsif choice == 3 availability_menu else main_menu end end end def no_active_alerts puts "You have no active alerts.\n\n" prompt = TTY::Prompt.new new_alert = prompt.yes?("Would you like to set an alert?") puts "\n\n" if new_alert availability_menu else main_menu end end def display_alerts num = user.alerts.count puts "You currently have #{num} alert(s) set:\n\n" user.alerts.each do |alert| alert.display_alert end puts "\n\n" end def choose_alert prompt = TTY::Prompt.new choices = user.selectable_alerts alert = prompt.select("Select an option:", choices) puts "\n\n" alert end def update_alert alert = choose_alert prompt = TTY::Prompt.new choices = [ {name: "Update dates", value: 1}, {name: "Update campground", value: 2}, {name: "Return to main menu", value: 3} ] choice = prompt.select("Select an option:", choices) puts "\n\n" if choice == 1 dates = get_dates alert.update_dates(dates) puts "Alert updated.\n\n" view_alerts_menu elsif choice == 2 camp = campground_search_by_name_menu alert.update_campground(camp) puts "Alert updated.\n\n" view_alerts_menu else main_menu end end def delete_alert alert = choose_alert alert.destroy user.alerts.reload puts "Alert deleted.\n\n" view_alerts_menu end end
36.067251
116
0.461775
e230262a052c71b48f6f25246b969cf0440607ec
456
class CreateAnimalPermits < ActiveRecord::Migration[5.2]
  def change
    create_table :animal_permits do |t|
      t.integer :permitter_id
      t.integer :permitted_id
      t.boolean :is_permit, default: false

      t.timestamps
    end
    add_index :animal_permits, :permitter_id
    add_index :animal_permits, :permitted_id
    add_index :animal_permits, :is_permit
    add_index :animal_permits, [:permitter_id, :permitted_id], unique: true
  end
end
28.5
75
0.730263
ffa1c69b91f88a38b35187e200b88e63e2380742
308
module Polymorph
  class OrderableController < Polymorph::ApplicationController
    def create
      klass.reorder(order)
      head :ok
    end

    private

    def klass
      @klass = params[:orderable_type].constantize
    end

    def order
      params[klass.table_name.singularize]
    end
  end
end
16.210526
62
0.672078
e2894cc74f406e9b0d58ff252db22d125f95d7d7
1,328
require "url_shortener" module Lita module Handlers class Bitly < Handler config :apikey config :username route(/(?:bitly|shorten)\s(.+)/i, :shorten_url, command: true, help: {"bitly | shorten URL" => "Shorten the URL using bitly"} ) def shorten_url(response) username = Lita.config.handlers.bitly.username Lita.logger.debug("Got Bitly Username: #{username}") apikey = Lita.config.handlers.bitly.apikey inputURL = response.matches[0][0] Lita.logger.debug("Bitly() - Input url - #{inputURL}") if not (/^https?:\/\/.+/i) =~ inputURL Lita.logger.debug("Bitly() - Input URL Does not start with http://. Appending ..") inputURL.prepend("http://") Lita.logger.debug(inputURL) end Lita.logger.debug("Authorizing") authorize = UrlShortener::Authorize.new username, apikey client = UrlShortener::Client.new authorize shorten = client.shorten(inputURL) response.reply(shorten.urls) rescue => e Lita.logger.error("Bitly raised #{e.class}: #{e.message}") response.reply "Chuck Norris has turned off the bitly service, #{e.class} is raising '#{e.message}'" end end Lita.register_handler(Bitly) end end
30.883721
108
0.606928
b9d254bd60d45361802efa80481d7d738d5dc51b
6,611
class ESIClient Faraday.register_middleware(:response, :logging => Echo::ClientMiddleware::LoggingMiddleware) def self.submit_esi_request(*args) new.submit_esi_request(*args) end def self.get_esi_request(*args) new.get_esi_request(*args) end def submit_esi_request(collection_id, params, method, request_url, client, token) service_url = get_service_url(collection_id, client, token) # FILE_IDS is a comma seperated list of granule_ur's granules = client.get_granules(params, token).body['feed']['entry'].map{|g| g['title']} options = {} options['FILE_IDS'] = granules.join(',') options['CLIENT_STRING'] = "To view the status of your request, please see: #{request_url}" @model = Nokogiri::XML(method['model'].gsub(/>\s+</,"><").strip) params_hash = build_params options.merge!(params_hash) Rails.logger.info " service_url: #{service_url}" Rails.logger.info " options: #{options.inspect}" post(service_url, options) end def get_esi_request(collection_id, service_order_id, client, token, header_value) service_url = get_service_url(collection_id, client, token) get(service_url + '/' + service_order_id.to_s, {}, header_value) end def connection @connection ||= build_connection end private def post(url, params={}) connection.post(url, params) end def get(url, params={}, header_value=nil) connection.get(url, params, {'X-EDSC-REQUEST' => header_value}) end def build_connection Faraday.new do |conn| conn.request :url_encoded conn.response :logging conn.response :json, :content_type => /\bjson$/ conn.adapter Faraday.default_adapter end end def get_service_url(collection_id, client, token) service_option_assignment = client.get_service_order_information(collection_id, token).body service_entry_id = service_option_assignment[0]['service_option_assignment']['service_entry_id'] service_url = client.get_service_entry(service_entry_id, token).body['service_entry']['url'] end def esi_fields @esi_fields ||= {} end def build_params add_top_level_fields add_switch_field(:INCLUDE_META) add_name_value_pairs_for_projections(:PROJECTION_PARAMETERS) add_name_value_pairs_for_resample(:RESAMPLE) add_subset_data_layers add_bounding_box add_parameter(:EMAIL, find_field_element("email").text.strip) esi_fields end def add_parameter(field_symbol, value) # send "#{field_symbol}=", value if !value.blank? esi_fields[field_symbol.to_s] = value if !value.blank? 
end def find_field_element(field_symbol, data_type = 'ecs') find_by_xpath("//#{data_type}:#{field_symbol.to_s}") end def find_by_xpath(xpath) @model.xpath(xpath, 'xmlns' => 'http://echo.nasa.gov/v9/echoforms', #'eesi' => "http://eosdis.nasa.gov/esi/req/e", 'ecs' => "http://ecs.nasa.gov/options", 'info' => "http://eosdis.nasa.gov/esi/info") end def add_top_level_fields [ :INTERPOLATION, :FORMAT, :PROJECTION, :CLIENT, :START, :END, :NATIVE_PROJECTION, :OUTPUT_GRID, :BBOX, :SUBAGENT_ID, :REQUEST_MODE, :META, :INCLUDE_META, ].each do |field| add_parameter(field, find_field_element(field).text.strip) end end TRANSLATE= { 'true' => 'Y', 'True' => 'Y', 'TRUE' => 'Y', 'y' => 'Y', 'Y' => 'Y', 'false' => 'N', 'False' => 'N', 'FALSE' => 'N', 'n' => 'N', 'N' => 'N' } def add_switch_field(field_symbol) add_parameter(field_symbol, TRANSLATE[find_field_element(field_symbol).text.strip] ) end def add_name_value_pairs_for_projections(field_symbol) field_element = find_field_element(field_symbol) projections = compact_nodes(field_element) items = projections.map do |projection| compact_nodes(projection.children).map do |project_parameter_lists| compact_nodes(project_parameter_lists.children).map do |project_parameter_field| "#{project_parameter_field.name}:#{project_parameter_field.text}" if project_parameter_field.text.present? end end end.flatten.compact.map(&:chomp).join(",") add_parameter(field_symbol, items) end def add_name_value_pairs_for_resample(field_symbol) sub_field_values = find_field_element(field_symbol).children add_parameter(field_symbol, build_resample_pairs(sub_field_values)) end def build_resample_pairs(sub_field_values) sub_fields = Hash[compact_nodes(sub_field_values).map do |nd| [nd.name, nd.text && nd.text.strip] end] value = sub_fields.keys.select {|x| x.include?('value')}.first if value.present? key = sub_fields.keys.select {|x| x.include?('dimension')}.first "#{sub_fields[key]}:#{sub_fields[value]}" end end def add_subset_data_layers data_layers = collect_subset_data_layers add_parameter(:SUBSET_DATA_LAYERS, data_layers) end def collect_subset_data_layers find_subset_banding.join(',') end def find_subset_banding() objects = find_by_xpath( "//ecs:SUBSET_DATA_LAYERS/*[ecs:subtreeSelected='true' and ecs:subtreeSelected='true']/@value" ).to_a fields = find_by_xpath( "//ecs:SUBSET_DATA_LAYERS/descendant::*[ecs:itemSelected='true' and ecs:subtreeSelected='true']/@value" ).to_a bands= find_by_xpath( "//ecs:SUBSET_DATA_LAYERS/descendant::*[ecs:itemSelected ='true']/*[ecs:value > 0]" ).map do |a_node| value_text = a_node['value'] ecs_value = a_node.xpath('ecs:value') "#{value_text}[#{ecs_value.text}]" end tree_style_bands = find_by_xpath("//ecs:SUBSET_DATA_LAYERS[@style='tree']/descendant::*/text()") objects + fields + bands + tree_style_bands end def add_bounding_box bboxes = [] #Find all bounding boxes in the option selections. There may be zero, one, or multiple find_by_xpath("//*[contains(name(),'ecs:boundingbox')]").map{|bbox_element| bbox = {} bbox_element.children.each do |item| text = item.text.strip bbox[item.name] = item.text.strip if item && !item.blank? && text.present? && item.name != "display" end if bbox.size >= 4 bboxes.push %w{ullon lrlat lrlon ullat}. map { |an_edge| bbox[an_edge] }. join(',') end } add_parameter :BBOX, bboxes end def compact_nodes(node_set) node_set.select { |sub_field_value| !sub_field_value.blank? } end end
28.619048
116
0.671759
1dabb26609bad998668c2e48693fe1653968fc4f
3,789
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Attempt to read encrypted secrets from `config/secrets.yml.enc`.
  # Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
  # `config/secrets.yml.key`.
  config.read_encrypted_secrets = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "iamstatsman_#{Rails.env}"
  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
41.184783
102
0.757192
1d616c3274588cb1e8198a9b4c9506a4de9eec55
1,675
require 'set'

module MARC

  # MARC records contain control fields, each of which has a
  # tag and value. Tags for control fields must be in the
  # 001-009 range or be specially added to the @@control_tags Set

  class ControlField

    # Initially, control tags are the numbers 1 through 9 or the string '000'
    @@control_tags = Set.new(%w{000 001 002 003 004 005 006 007 008 009})

    def self.control_tags
      return @@control_tags
    end

    # A tag is a control tag if tag.to_s is a member of the @@control_tags set.
    def self.control_tag?(tag)
      return @@control_tags.include? tag.to_s
    end

    # the tag value (007, 008, etc)
    attr_accessor :tag

    # the value of the control field
    attr_accessor :value

    # The constructor which must be passed a tag value and
    # an optional value for the field.
    def initialize(tag,value='')
      @tag = tag
      @value = value
      if not MARC::ControlField.control_tag?(@tag)
        raise MARC::Exception.new(), "tag must be in 001-009 or in the MARC::ControlField.control_tags set"
      end
    end

    # Two control fields are equal if their tags and values are equal.
    def ==(other)
      if @tag != other.tag
        return false
      elsif @value != other.value
        return false
      end
      return true
    end

    # turning it into a marc-hash element
    def to_marchash
      return [@tag, @value]
    end

    # Turn the control field into a hash for MARC-in-JSON
    def to_hash
      return {@tag=>@value}
    end

    def to_s
      return "#{tag} #{value}"
    end

    def =~(regex)
      return self.to_s =~ regex
    end

  end
end
23.263889
107
0.627463
2801fa834667b47ce6592995ff5a5f0076754164
3,395
# frozen_string_literal: true

# == Schema Information
#
# Table name: inboxes
#
#  id                            :integer          not null, primary key
#  allow_messages_after_resolved :boolean          default(TRUE)
#  channel_type                  :string
#  csat_survey_enabled           :boolean          default(FALSE)
#  email_address                 :string
#  enable_auto_assignment        :boolean          default(TRUE)
#  enable_email_collect          :boolean          default(TRUE)
#  greeting_enabled              :boolean          default(FALSE)
#  greeting_message              :string
#  name                          :string           not null
#  out_of_office_message         :string
#  timezone                      :string           default("UTC")
#  working_hours_enabled         :boolean          default(FALSE)
#  created_at                    :datetime         not null
#  updated_at                    :datetime         not null
#  account_id                    :integer          not null
#  channel_id                    :integer          not null
#
# Indexes
#
#  index_inboxes_on_account_id  (account_id)
#

class Inbox < ApplicationRecord
  include Reportable
  include Avatarable
  include OutOfOffisable

  validates :name, presence: true
  validates :account_id, presence: true
  validates :timezone, inclusion: { in: TZInfo::Timezone.all_identifiers }

  belongs_to :account
  belongs_to :channel, polymorphic: true, dependent: :destroy

  has_many :campaigns, dependent: :destroy_async
  has_many :contact_inboxes, dependent: :destroy_async
  has_many :contacts, through: :contact_inboxes
  has_many :inbox_members, dependent: :destroy_async
  has_many :members, through: :inbox_members, source: :user
  has_many :conversations, dependent: :destroy_async
  has_many :messages, through: :conversations
  has_one :agent_bot_inbox, dependent: :destroy_async
  has_one :agent_bot, through: :agent_bot_inbox
  has_many :webhooks, dependent: :destroy_async
  has_many :hooks, dependent: :destroy_async, class_name: 'Integrations::Hook'

  after_destroy :delete_round_robin_agents

  scope :order_by_name, -> { order('lower(name) ASC') }

  def add_member(user_id)
    member = inbox_members.new(user_id: user_id)
    member.save!
  end

  def remove_member(user_id)
    member = inbox_members.find_by!(user_id: user_id)
    member.try(:destroy)
  end

  def facebook?
    channel_type == 'Channel::FacebookPage'
  end

  def web_widget?
    channel_type == 'Channel::WebWidget'
  end

  def api?
    channel_type == 'Channel::Api'
  end

  def email?
    channel_type == 'Channel::Email'
  end

  def twilio?
    channel_type == 'Channel::TwilioSms'
  end

  def twitter?
    channel_type == 'Channel::TwitterProfile'
  end

  def whatsapp?
    channel_type == 'Channel::Whatsapp'
  end

  def inbox_type
    channel.name
  end

  def webhook_data
    {
      id: id,
      name: name
    }
  end

  def callback_webhook_url
    case channel_type
    when 'Channel::TwilioSms'
      "#{ENV['FRONTEND_URL']}/twilio/callback"
    when 'Channel::Sms'
      "#{ENV['FRONTEND_URL']}/webhooks/sms/#{channel.phone_number.delete_prefix('+')}"
    when 'Channel::Line'
      "#{ENV['FRONTEND_URL']}/webhooks/line/#{channel.line_channel_id}"
    end
  end

  private

  def delete_round_robin_agents
    ::RoundRobin::ManageService.new(inbox: self).clear_queue
  end
end
26.732283
86
0.648306
bf099ab3079c640045c0ab17c3ce856bf5384bad
363
class CreateRelationships < ActiveRecord::Migration[5.2]
  def change
    create_table :relationships do |t|
      t.integer :follower_id
      t.integer :followed_id

      t.timestamps
    end
    add_index :relationships, :follower_id
    add_index :relationships, :followed_id
    add_index :relationships, [:follower_id,:followed_id], unique: true
  end
end
25.928571
71
0.721763
62416ed17af5c89bc13dddfd571906e573b77147
13,299
=begin #Mailchimp Marketing API #No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) OpenAPI spec version: 3.0.22 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 2.4.12 =end require 'uri' module MailchimpMarketing class AutomationsApi attr_accessor :api_client def initialize(api_client) @api_client = api_client end # Archive automation def archive(workflow_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/actions/archive'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Delete workflow email def delete_workflow_email(workflow_id, workflow_email_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:DELETE, local_var_path, :query_params => query_params, :body => post_body) return data end # List automations def list(opts = {}) fail ArgumentError, 'invalid value for "opts[:"count"]", must be smaller than or equal to 1000.' if !opts[:'count'].nil? && opts[:'count'] > 1000 fail ArgumentError, 'invalid value for "status", must be one of save, paused, sending' if opts[:'status'] && !['save', 'paused', 'sending'].include?(opts[:'status']) query_params = {} query_params[:'count'] = opts[:'count'] if !opts[:'count'].nil? query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil? query_params[:'fields'] = @api_client.build_collection_param(opts[:'fields'], :csv) if !opts[:'fields'].nil? query_params[:'exclude_fields'] = @api_client.build_collection_param(opts[:'exclude_fields'], :csv) if !opts[:'exclude_fields'].nil? query_params[:'before_create_time'] = opts[:'before_create_time'] if !opts[:'before_create_time'].nil? query_params[:'since_create_time'] = opts[:'since_create_time'] if !opts[:'since_create_time'].nil? query_params[:'before_send_time'] = opts[:'before_send_time'] if !opts[:'before_send_time'].nil? query_params[:'since_send_time'] = opts[:'since_send_time'] if !opts[:'since_send_time'].nil? query_params[:'status'] = opts[:'status'] if !opts[:'status'].nil? post_body = nil local_var_path = '/automations' data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # Get automation info def get(workflow_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? query_params = {} query_params[:'fields'] = @api_client.build_collection_param(opts[:'fields'], :csv) if !opts[:'fields'].nil? query_params[:'exclude_fields'] = @api_client.build_collection_param(opts[:'exclude_fields'], :csv) if !opts[:'exclude_fields'].nil? 
post_body = nil local_var_path = '/automations/{workflow_id}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # List automated emails def list_all_workflow_emails(workflow_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # Get workflow email info def get_workflow_email(workflow_id, workflow_email_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # List automated email subscribers def get_workflow_email_subscriber_queue(workflow_id, workflow_email_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}/queue'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # Get automated email subscriber def get_workflow_email_subscriber(workflow_id, workflow_email_id, subscriber_hash, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? fail ArgumentError, "Missing required param: 'subscriber_hash'" if subscriber_hash.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}/queue/{subscriber_hash}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s).sub('{' + 'subscriber_hash' + '}', subscriber_hash.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # List subscribers removed from workflow def list_workflow_email_subscribers_removed(workflow_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/removed-subscribers'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # Get subscriber removed from workflow def get_removed_workflow_email_subscriber(workflow_id, subscriber_hash, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'subscriber_hash'" if subscriber_hash.nil? 
query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/removed-subscribers/{subscriber_hash}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'subscriber_hash' + '}', subscriber_hash.to_s) data = @api_client.call_api(:GET, local_var_path, :query_params => query_params, :body => post_body) return data end # Update workflow email def update_workflow_email(workflow_id, workflow_email_id, body, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? fail ArgumentError, "Missing required param: 'body'" if body.nil? query_params = {} post_body = @api_client.object_to_http_body(body) local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:PATCH, local_var_path, :query_params => query_params, :body => post_body) return data end # Update automation def update(workflow_id, body, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'body'" if body.nil? query_params = {} post_body = @api_client.object_to_http_body(body) local_var_path = '/automations/{workflow_id}'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:PATCH, local_var_path, :query_params => query_params, :body => post_body) return data end # Add automation def create(body, opts = {}) fail ArgumentError, "Missing required param: 'body'" if body.nil? query_params = {} post_body = @api_client.object_to_http_body(body) local_var_path = '/automations' data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Pause automation emails def pause_all_emails(workflow_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/actions/pause-all-emails'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Start automation emails def start_all_emails(workflow_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/actions/start-all-emails'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Pause automated email def pause_workflow_email(workflow_id, workflow_email_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}/actions/pause'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Start automated email def start_workflow_email(workflow_id, workflow_email_id, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? 
fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? query_params = {} post_body = nil local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}/actions/start'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Add subscriber to workflow email def add_workflow_email_subscriber(workflow_id, workflow_email_id, body, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'workflow_email_id'" if workflow_email_id.nil? fail ArgumentError, "Missing required param: 'body'" if body.nil? query_params = {} post_body = @api_client.object_to_http_body(body) local_var_path = '/automations/{workflow_id}/emails/{workflow_email_id}/queue'.sub('{' + 'workflow_id' + '}', workflow_id.to_s).sub('{' + 'workflow_email_id' + '}', workflow_email_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end # Remove subscriber from workflow def remove_workflow_email_subscriber(workflow_id, body, opts = {}) fail ArgumentError, "Missing required param: 'workflow_id'" if workflow_id.nil? fail ArgumentError, "Missing required param: 'body'" if body.nil? query_params = {} post_body = @api_client.object_to_http_body(body) local_var_path = '/automations/{workflow_id}/removed-subscribers'.sub('{' + 'workflow_id' + '}', workflow_id.to_s) data = @api_client.call_api(:POST, local_var_path, :query_params => query_params, :body => post_body) return data end end end
42.085443
269
0.675991
b927ce88a1dd033542dcdb7ce048493ff9e98284
2,750
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v6/services/ad_parameter_service.proto

require 'google/protobuf'

require 'google/ads/google_ads/v6/resources/ad_parameter_pb'
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/protobuf/field_mask_pb'
require 'google/rpc/status_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/ads/googleads/v6/services/ad_parameter_service.proto", :syntax => :proto3) do
    add_message "google.ads.googleads.v6.services.GetAdParameterRequest" do
      optional :resource_name, :string, 1
    end
    add_message "google.ads.googleads.v6.services.MutateAdParametersRequest" do
      optional :customer_id, :string, 1
      repeated :operations, :message, 2, "google.ads.googleads.v6.services.AdParameterOperation"
      optional :partial_failure, :bool, 3
      optional :validate_only, :bool, 4
    end
    add_message "google.ads.googleads.v6.services.AdParameterOperation" do
      optional :update_mask, :message, 4, "google.protobuf.FieldMask"
      oneof :operation do
        optional :create, :message, 1, "google.ads.googleads.v6.resources.AdParameter"
        optional :update, :message, 2, "google.ads.googleads.v6.resources.AdParameter"
        optional :remove, :string, 3
      end
    end
    add_message "google.ads.googleads.v6.services.MutateAdParametersResponse" do
      optional :partial_failure_error, :message, 3, "google.rpc.Status"
      repeated :results, :message, 2, "google.ads.googleads.v6.services.MutateAdParameterResult"
    end
    add_message "google.ads.googleads.v6.services.MutateAdParameterResult" do
      optional :resource_name, :string, 1
    end
  end
end

module Google
  module Ads
    module GoogleAds
      module V6
        module Services
          GetAdParameterRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v6.services.GetAdParameterRequest").msgclass
          MutateAdParametersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v6.services.MutateAdParametersRequest").msgclass
          AdParameterOperation = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v6.services.AdParameterOperation").msgclass
          MutateAdParametersResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v6.services.MutateAdParametersResponse").msgclass
          MutateAdParameterResult = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v6.services.MutateAdParameterResult").msgclass
        end
      end
    end
  end
end
48.245614
167
0.759636
61be8074ddc7bed4ec30076f56f081c8459a8bb3
1,705
class XercesC < Formula desc "Validating XML parser" homepage "https://xerces.apache.org/xerces-c/" url "https://www.apache.org/dyn/closer.lua?path=xerces/c/3/sources/xerces-c-3.2.2.tar.gz" mirror "https://archive.apache.org/dist/xerces/c/3/sources/xerces-c-3.2.2.tar.gz" sha256 "dd6191f8aa256d3b4686b64b0544eea2b450d98b4254996ffdfe630e0c610413" bottle do cellar :any rebuild 1 sha256 "8bcc9b20b0b3df89ec53900e0b3b09ea1bbc3159b4cffc4b8ef4f62413621924" => :catalina sha256 "fab62b22422c24b0218cae42f7f81ad736db316d9bde4218272cdf7b174c313f" => :mojave sha256 "e62fba2c06fd03edf0491b54f753d10c4ca9e73e97c24389b749e655f9199b50" => :high_sierra sha256 "8390cdf10fcc8b65a1f295eacf8b3fec34776d18219b8a8ce565592ee3b03372" => :sierra end depends_on "cmake" => :build def install ENV.cxx11 mkdir "build" do system "cmake", "..", *std_cmake_args system "make" system "ctest", "-V" system "make", "install" system "make", "clean" system "cmake", "..", "-DBUILD_SHARED_LIBS=OFF", *std_cmake_args system "make" lib.install Dir["src/*.a"] end # Remove a sample program that conflicts with libmemcached # on case-insensitive file systems (bin/"MemParse").unlink end test do (testpath/"ducks.xml").write <<~EOS <?xml version="1.0" encoding="iso-8859-1"?> <ducks> <person id="Red.Duck" > <name><family>Duck</family> <given>One</given></name> <email>[email protected]</email> </person> </ducks> EOS output = shell_output("#{bin}/SAXCount #{testpath}/ducks.xml") assert_match "(6 elems, 1 attrs, 0 spaces, 37 chars)", output end end
32.169811
93
0.679765
ab8b0e4327588049761fa0168e8b6464983b82aa
1,200
# frozen_string_literal: true require "sidekiq/extensions/generic_proxy" module Sidekiq module Extensions ## # Adds 'delay', 'delay_for' and `delay_until` methods to all Classes to offload class method # execution to Sidekiq. Examples: # # User.delay.delete_inactive # Wikipedia.delay.download_changes_for(Date.today) # class DelayedClass include Sidekiq::Worker def perform(yml) (target, method_name, args) = YAML.load(yml) target.__send__(method_name, *args) end end module Klass def sidekiq_delay(options = {}) Proxy.new(DelayedClass, self, options) end def sidekiq_delay_for(interval, options = {}) Proxy.new(DelayedClass, self, options.merge("at" => Time.now.to_f + interval.to_f)) end def sidekiq_delay_until(timestamp, options = {}) Proxy.new(DelayedClass, self, options.merge("at" => timestamp.to_f)) end alias_method :delay, :sidekiq_delay alias_method :delay_for, :sidekiq_delay_for alias_method :delay_until, :sidekiq_delay_until end end end Module.__send__(:include, Sidekiq::Extensions::Klass) unless defined?(::Rails)
27.906977
96
0.675833
d5e7211c8e40386fb9d24afcdc5327fdbcb9e80c
391
# frozen_string_literal: true class SessionDecorator < ApplicationDecorator delegate_all # Define presentation-specific methods here. Helpers are accessed through # `helpers` (aka `h`). You can override attributes, for example: # # def created_at # helpers.content_tag :span, class: 'time' do # object.created_at.strftime("%a %m/%d/%y") # end # end end
26.066667
75
0.685422
3996265f9813497b7300b5f74b3799d66b0f4d7f
749
=begin Insights Service Catalog API This is a API to fetch and order catalog items from different cloud sources OpenAPI spec version: 1.0.0 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git =end class ProgressMessage < ApplicationRecord acts_as_tenant(:tenant) after_initialize :set_defaults, unless: :persisted? NON_DATE_ATTRIBUTES = %w(level message) DATE_ATTRIBUTES = %w(received_at) def to_hash attributes.slice(*NON_DATE_ATTRIBUTES).tap do |hash| DATE_ATTRIBUTES.each do |attr| hash[attr] = self.send(attr.to_sym).iso8601 if self.send(attr.to_sym) end end.merge(:id => id.to_s) end def set_defaults self.received_at = DateTime.now end end
23.40625
77
0.735648
3362c71d8ff581659bc2167a7304fdb6e4596114
445
# # is_hash.rb # module Puppet::Parser::Functions newfunction(:is_hash, type: :rvalue, doc: <<-EOS Returns true if the variable passed to this function is a hash. EOS ) do |arguments| raise(Puppet::ParseError, "is_hash(): Wrong number of arguments given (#{arguments.size} for 1)") if arguments.size != 1 type = arguments[0] result = type.is_a?(Hash) return result end end # vim: set ts=2 sw=2 et :
21.190476
124
0.644944
d56e9974742590c2707a0adac931d89fceb3c318
36
require 'omniauth/strategies/origo'
18
35
0.833333
ff5e988c4399072d81c136041e69c2723f70ee40
1,624
# Fact: domain # # Purpose: # Return the host's primary DNS domain name. # # Resolution: # On UNIX (excluding Darwin), first try and use the hostname fact, # which uses the hostname system command, and then parse the output # of that. # Failing that it tries the dnsdomainname system command. # Failing that it uses /etc/resolv.conf and takes the domain from that, or as # a final resort, the search from that. # Otherwise returns nil. # # On Windows uses the win32ole gem and winmgmts to get the DNSDomain value # from the Win32 networking stack. # # Caveats: # Facter.add(:domain) do setcode do # Get the domain from various sources; the order of these # steps is important if name = Facter::Util::Resolution.exec('hostname') \ and name =~ /.*?\.(.+$)/ $1 elsif domain = Facter::Util::Resolution.exec('dnsdomainname') \ and domain =~ /.+\..+/ domain elsif FileTest.exists?("/etc/resolv.conf") domain = nil search = nil File.open("/etc/resolv.conf") { |file| file.each { |line| if line =~ /^\s*domain\s+(\S+)/ domain = $1 elsif line =~ /^\s*search\s+(\S+)/ search = $1 end } } next domain if domain next search if search end end end Facter.add(:domain) do confine :kernel => :windows setcode do require 'facter/util/wmi' domain = "" Facter::Util::WMI.execquery("select DNSDomain from Win32_NetworkAdapterConfiguration where IPEnabled = True").each { |nic| domain = nic.DNSDomain break } domain end end
25.375
126
0.617611
bb29e629fa027fa094ff0d0a3b513400a68b1ba4
3,925
require 'faraday'
require 'json'
require 'digest'
require 'wmp_sdk/error'

module WmpSdk
  class Api
    # Verify the login credential. After obtaining a temporary login code via wx.login,
    # send it to the developer server and call this endpoint to complete the login flow.
    def self.invoke_auth_session(code)
      response = Faraday.get('https://api.weixin.qq.com/sns/jscode2session', {
        appid: WmpSdk.configuration.app_id,
        secret: WmpSdk.configuration.secret,
        js_code: code,
        grant_type: :authorization_code,
      })
      raise WmpServerError, "Response Status: #{response.status}" unless response.success?

      JSON.parse(response.body, symbolize_names: true)
    end

    # Fetch the Mini Program's globally unique backend API credential (access_token).
    # Most backend APIs require an access_token, so developers should store it carefully.
    def self.invoke_access_token
      response = Faraday.get('https://api.weixin.qq.com/cgi-bin/token', {
        grant_type: :client_credential,
        appid: WmpSdk.configuration.app_id,
        secret: WmpSdk.configuration.secret,
      })
      raise WmpServerError, "Response Status: #{response.status}" unless response.success?

      JSON.parse(response.body, symbolize_names: true)
    end

    # Send a service message unified across the Mini Program and the Official Account.
    def self.invoke_uniform_message(access_token, openid, mp_template_msg, weapp_template_msg = nil)
      response = Faraday.post('https://api.weixin.qq.com/cgi-bin/message/wxopen/template/uniform_send', { access_token: access_token }) do |request|
        request_body = {
          touser: openid,
          mp_template_msg: mp_template_msg,
        }
        request_body.merge!(weapp_template_msg: weapp_template_msg) if weapp_template_msg
        request.body = request_body.to_json
      end
      raise WmpServerError, "Response Status: #{response.status}" unless response.success?

      JSON.parse(response.body, symbolize_names: true)
    end

    # Send a subscription message.
    def self.invoke_subscribe_message_send(access_token, openid, template_id, page, data, miniprogram_state = 'formal', lang = 'zh_CN')
      response = Faraday.post('https://api.weixin.qq.com/cgi-bin/message/subscribe/send', { access_token: access_token }) do |request|
        request.body = {
          touser: openid,
          template_id: template_id,
          page: page,
          data: data,
          miniprogram_state: miniprogram_state,
          lang: lang,
        }.to_json
      end
      raise WmpServerError, "Response Status: #{response.status}" unless response.success?

      JSON.parse(response.body, symbolize_names: true)
    end

    # Upload a media file to the WeChat server. Currently only images are supported.
    # Used when sending customer service messages or passive replies to user messages.
    def self.invoke_upload_temp_media(access_token, file_name)
      connection = Faraday.new('https://api.weixin.qq.com') do |conn|
        conn.request :multipart
        conn.request :url_encoded
        conn.adapter :net_http
      end
      file = Faraday::FilePart.new Rails.root.join(file_name).to_s, 'image/jpg'
      response = connection.post("/cgi-bin/media/upload?access_token=#{access_token}&type=image", { media: file })
      raise WmpServerError, "Response Status: #{response.status}" unless response.success?

      JSON.parse(response.body, symbolize_names: true)
    end

    # Send a customer service message to a user.
    def self.send_customer_service_message(access_token, openid, message_type, options = {})
      response = Faraday.post('https://api.weixin.qq.com/cgi-bin/message/custom/send', { access_token: access_token }) do |request|
        request.body = {
          touser: openid,
          msgtype: message_type,
        }.merge!(options).to_json
      end
      raise WmpServerError, "Response Status: #{response.status}" unless response.success?

      JSON.parse(response.body, symbolize_names: true)
    end

    # Verify a pushed message (webhook signature check).
    def self.validate_customer_service_message(signature, timestamp, nonce)
      Digest::SHA1.hexdigest(
        [WmpSdk.configuration.token, timestamp, nonce].sort.join
      ) == signature
    end
  end
end
40.885417
148
0.664713
5dcf5f28ed1133557e21e00f5ccb3953547d07d0
1,982
class Ncmpcpp < Formula desc "Ncurses-based client for the Music Player Daemon" homepage "https://rybczak.net/ncmpcpp/" url "https://rybczak.net/ncmpcpp/stable/ncmpcpp-0.9.2.tar.bz2" sha256 "faabf6157c8cb1b24a059af276e162fa9f9a3b9cd3810c43b9128860c9383a1b" license "GPL-2.0-or-later" revision 3 bottle do sha256 cellar: :any, arm64_big_sur: "589a36dfb83da7b7093605e58cdf6a9ae6f58e8bc915fc84a937742b17aafad6" sha256 cellar: :any, big_sur: "94f81334cde20719c43ff2e31cd89b89fe05b79e072f91e9ad5a9e8b104e7453" sha256 cellar: :any, catalina: "491b03399a3f52b0ae8bd5ffd4ccbe34bff8565f1a5898d60c0a6c04e1bc43db" sha256 cellar: :any, mojave: "228570b600da1e6001294be6761a84cf93f373a6d32aadbe38c7f239158835cd" sha256 cellar: :any_skip_relocation, x86_64_linux: "6544900ddf305c9b85a7dd0d44272caf80d7c5884c37ed6a6550d0a83c26169c" # linuxbrew-core end head do url "https://github.com/ncmpcpp/ncmpcpp.git" depends_on "autoconf" => :build depends_on "automake" => :build depends_on "libtool" => :build end depends_on "pkg-config" => :build depends_on "boost" depends_on "fftw" depends_on "libmpdclient" depends_on "ncurses" depends_on "readline" depends_on "taglib" uses_from_macos "curl" def install ENV.cxx11 on_macos do ENV.append "LDFLAGS", "-liconv" end ENV.append "BOOST_LIB_SUFFIX", "-mt" ENV.append "CXXFLAGS", "-D_XOPEN_SOURCE_EXTENDED" args = %W[ --disable-dependency-tracking --prefix=#{prefix} --enable-clock --enable-outputs --enable-unicode --enable-visualizer --with-curl --with-taglib ] system "./autogen.sh" if build.head? system "./configure", *args system "make" system "make", "install" end test do ENV.delete("LC_CTYPE") assert_match version.to_s, shell_output("#{bin}/ncmpcpp --version") end end
29.58209
139
0.688698
bbe14522ea14fcf0079cb82a59a1d08253a2a24e
1,986
#!/usr/bin/env ruby
#
# Check the size of a database queue
#

require 'rubygems'
require 'choice'
require 'mysql'

EXIT_OK = 0
EXIT_WARNING = 1
EXIT_CRITICAL = 2
EXIT_UNKNOWN = 3

Choice.options do
  header ''
  header 'Specific options:'

  option :warn do
    short '-w'
    long '--warning=VALUE'
    desc 'Warning threshold'
    cast Integer
  end

  option :crit do
    short '-c'
    long '--critical=VALUE'
    desc 'Critical threshold'
    cast Integer
  end

  option :host do
    short '-H'
    long '--host=VALUE'
    desc 'MySQL DB host'
  end

  option :port do
    short '-P'
    long '--port=VALUE'
    desc 'MySQL DB port'
  end

  option :username do
    short '-u'
    long '--username=VALUE'
    desc 'MySQL DB username'
  end

  option :password do
    short '-p'
    long '--password=VALUE'
    desc 'MySQL DB password'
  end

  option :database do
    short '-d'
    long '--database=VALUE'
    desc 'MySQL database'
  end

  option :query do
    short '-q'
    long '--query=VALUE'
    desc 'MySQL DB count query'
  end
end

c = Choice.choices

# nagios performance data format: 'label'=value[UOM];[warn];[crit];[min];[max]
# see http://nagiosplug.sourceforge.net/developer-guidelines.html#AEN203
perfdata = "query_count=%d;#{c[:warn]};#{c[:crit]}"
message = "Query '#{c[:query]}' result %d exceeds %d|#{perfdata}"

if c[:warn] && c[:crit]
  conn = Mysql::connect(c[:host], c[:username], c[:password], c[:database], c[:port].to_i)
  res = conn.query(c[:query])
  value = res.fetch_row
  value = value.first.to_i

  if value >= c[:crit]
    puts sprintf(message, value, c[:crit], value)
    exit(EXIT_CRITICAL)
  end

  if value >= c[:warn]
    puts sprintf(message, value, c[:warn], value)
    exit(EXIT_WARNING)
  end
else
  # exit UNKNOWN here so the final OK output (which needs `value`) never runs without thresholds
  puts "Please provide a warning and critical threshold"
  exit(EXIT_UNKNOWN)
end

# if neither warning nor critical triggered, say OK and return performance data
puts sprintf("Query '#{c[:query]}' result %d OK|#{perfdata}", value, value)
exit(EXIT_OK)
19.281553
90
0.632427
39bbb8e202d02cf4294a53366ffac1b3c3d5985e
1,923
require 'rails_helper' RSpec.describe Event, type: :model do describe '#outdated?' do context 'when starts_at is before today' do it 'should return true' do event = Event.new(starts_at: Date.new(2001, 2, 3)) expect(event.outdated?).to eq true end end context 'when starts_at is after today' do it 'should return false' do event = Event.new(starts_at: Date.new(3000, 2, 3)) expect(event.outdated?).to eq false end end end describe '#can_publish?' do subject(:event) { Event.create!(extended_html_description: 'somewhere', venue: Venue.create, category: Category.create, starts_at: Date.today) } context 'when event is already published' do it 'should return false' do event.published = true expect(event.can_publish?).to eq false end end context 'when event is not published' do context 'when event has no ticket type' do it 'should return false' do expect(event.can_publish?).to eq false end end context 'when event has at least 1 ticket type' do it 'should return true' do TicketType.create(event: event) expect(event.can_publish?).to eq true end end end end describe '#create' do context 'when an event is created' do it 'should add creator as admin' do event = Event.create!(extended_html_description: 'somewhere', venue: Venue.create, category: Category.create, starts_at: Date.today, creator: User.create!(email: 'test', password: 'abc', password_confirmation: 'abc')) expect(event.admins).to include(event.creator) end end end end
32.59322
114
0.572543
034f6c0a4803ad67d3005c5420a520f2ea0af92a
37
module Avaya VERSION = "0.0.6" end
9.25
19
0.648649
799884d7a7400e31964a0f758f50b50680858906
20,007
# frozen_string_literal: true require 'spec_helper' RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do include TrackingHelpers before do stub_const('Gitlab::Experimentation::EXPERIMENTS', { test_experiment: { tracking_category: 'Team', rollout_strategy: rollout_strategy }, my_experiment: { tracking_category: 'Team' } } ) allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage) end let(:enabled_percentage) { 10 } let(:rollout_strategy) { nil } let(:is_gitlab_com) { true } controller(ApplicationController) do include Gitlab::Experimentation::ControllerConcern def index head :ok end end describe '#set_experimentation_subject_id_cookie' do let(:do_not_track) { nil } let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] } let(:cookie_value) { nil } before do stub_do_not_track(do_not_track) if do_not_track.present? request.cookies[:experimentation_subject_id] = cookie_value if cookie_value get :index end context 'cookie is present' do let(:cookie_value) { 'test' } it 'does not change the cookie' do expect(cookies[:experimentation_subject_id]).to eq 'test' end end context 'cookie is not present' do it 'sets a permanent signed cookie' do expect(cookie).to be_present end context 'DNT: 0' do let(:do_not_track) { '0' } it 'sets a permanent signed cookie' do expect(cookie).to be_present end end context 'DNT: 1' do let(:do_not_track) { '1' } it 'does nothing' do expect(cookie).not_to be_present end end end context 'when not on gitlab.com' do let(:is_gitlab_com) { false } context 'when cookie was set' do let(:cookie_value) { 'test' } it 'cookie gets deleted' do expect(cookie).not_to be_present end end context 'when no cookie was set before' do it 'does nothing' do expect(cookie).not_to be_present end end end end describe '#push_frontend_experiment' do it 'pushes an experiment to the frontend' do gon = class_double('Gon') stub_experiment_for_subject(my_experiment: true) allow(controller).to receive(:gon).and_return(gon) expect(gon).to receive(:push).with({ experiments: { 'myExperiment' => true } }, true) controller.push_frontend_experiment(:my_experiment) end end describe '#experiment_enabled?' do def check_experiment(exp_key = :test_experiment, subject = nil) controller.experiment_enabled?(exp_key, subject: subject) end subject { check_experiment } context 'cookie is not present' do it { is_expected.to eq(false) } end context 'cookie is present' do before do cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234' get :index end it 'calls Gitlab::Experimentation.in_experiment_group? with the name of the experiment and the calculated experimentation_subject_index based on the uuid' do expect(Gitlab::Experimentation).to receive(:in_experiment_group?).with(:test_experiment, subject: 'abcd-1234') check_experiment(:test_experiment) end context 'when subject is given' do let(:rollout_strategy) { :user } let(:user) { build(:user) } it 'uses the subject' do expect(Gitlab::Experimentation).to receive(:in_experiment_group?).with(:test_experiment, subject: user) check_experiment(:test_experiment, user) end end end context 'do not track' do before do allow(Gitlab::Experimentation).to receive(:in_experiment_group?) 
{ true } end context 'when do not track is disabled' do before do controller.request.headers['DNT'] = '0' end it { is_expected.to eq(true) } end context 'when do not track is enabled' do before do controller.request.headers['DNT'] = '1' end it { is_expected.to eq(false) } end end context 'URL parameter to force enable experiment' do it 'returns true unconditionally' do get :index, params: { force_experiment: :test_experiment } is_expected.to eq(true) end end context 'Cookie parameter to force enable experiment' do it 'returns true unconditionally' do cookies[:force_experiment] = 'test_experiment,another_experiment' get :index expect(check_experiment(:test_experiment)).to eq(true) expect(check_experiment(:another_experiment)).to eq(true) end end end describe '#track_experiment_event', :snowplow do let(:user) { build(:user) } context 'when the experiment is enabled' do before do stub_experiment(test_experiment: true) allow(controller).to receive(:current_user).and_return(user) end context 'the user is part of the experimental group' do before do stub_experiment_for_subject(test_experiment: true) end it 'tracks the event with the right parameters' do controller.track_experiment_event(:test_experiment, 'start', 1) expect_snowplow_event( category: 'Team', action: 'start', property: 'experimental_group', value: 1, user: user ) end end context 'the user is part of the control group' do before do stub_experiment_for_subject(test_experiment: false) end it 'tracks the event with the right parameters' do controller.track_experiment_event(:test_experiment, 'start', 1) expect_snowplow_event( category: 'Team', action: 'start', property: 'control_group', value: 1, user: user ) end end context 'do not track is disabled' do before do stub_do_not_track('0') end it 'does track the event' do controller.track_experiment_event(:test_experiment, 'start', 1) expect_snowplow_event( category: 'Team', action: 'start', property: 'control_group', value: 1, user: user ) end end context 'do not track enabled' do before do stub_do_not_track('1') end it 'does not track the event' do controller.track_experiment_event(:test_experiment, 'start', 1) expect_no_snowplow_event end end context 'subject is provided' do before do stub_experiment_for_subject(test_experiment: false) end it "provides the subject's hashed global_id as label" do experiment_subject = double(:subject, to_global_id: 'abc') allow(Gitlab::Experimentation).to receive(:valid_subject_for_rollout_strategy?).and_return(true) controller.track_experiment_event(:test_experiment, 'start', 1, subject: experiment_subject) expect_snowplow_event( category: 'Team', action: 'start', property: 'control_group', value: 1, label: Digest::SHA256.hexdigest('abc'), user: user ) end it "provides the subject's hashed string representation as label" do experiment_subject = 'somestring' controller.track_experiment_event(:test_experiment, 'start', 1, subject: experiment_subject) expect_snowplow_event( category: 'Team', action: 'start', property: 'control_group', value: 1, label: Digest::SHA256.hexdigest('somestring'), user: user ) end end context 'no subject is provided but cookie is set' do before do get :index stub_experiment_for_subject(test_experiment: false) end it 'uses the experimentation_subject_id as fallback' do controller.track_experiment_event(:test_experiment, 'start', 1) expect_snowplow_event( category: 'Team', action: 'start', property: 'control_group', value: 1, label: cookies.permanent.signed[:experimentation_subject_id], user: user ) end end end context 'when the 
experiment is disabled' do before do stub_experiment(test_experiment: false) end it 'does not track the event' do controller.track_experiment_event(:test_experiment, 'start') expect_no_snowplow_event end end end describe '#frontend_experimentation_tracking_data' do context 'when the experiment is enabled' do before do stub_experiment(test_experiment: true) end context 'the user is part of the experimental group' do before do stub_experiment_for_subject(test_experiment: true) end it 'pushes the right parameters to gon' do controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id') expect(Gon.tracking_data).to eq( { category: 'Team', action: 'start', property: 'experimental_group', value: 'team_id' } ) end end context 'the user is part of the control group' do before do stub_experiment_for_subject(test_experiment: false) end it 'pushes the right parameters to gon' do controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id') expect(Gon.tracking_data).to eq( { category: 'Team', action: 'start', property: 'control_group', value: 'team_id' } ) end it 'does not send nil value to gon' do controller.frontend_experimentation_tracking_data(:test_experiment, 'start') expect(Gon.tracking_data).to eq( { category: 'Team', action: 'start', property: 'control_group' } ) end end context 'do not track disabled' do before do stub_do_not_track('0') end it 'pushes the right parameters to gon' do controller.frontend_experimentation_tracking_data(:test_experiment, 'start') expect(Gon.tracking_data).to eq( { category: 'Team', action: 'start', property: 'control_group' } ) end end context 'do not track enabled' do before do stub_do_not_track('1') end it 'does not push data to gon' do controller.frontend_experimentation_tracking_data(:test_experiment, 'start') expect(Gon.method_defined?(:tracking_data)).to eq(false) end end end context 'when the experiment is disabled' do before do stub_experiment(test_experiment: false) end it 'does not push data to gon' do expect(Gon.method_defined?(:tracking_data)).to eq(false) controller.track_experiment_event(:test_experiment, 'start') end end end describe '#record_experiment_user' do let(:user) { build(:user) } let(:context) { { a: 42 } } context 'when the experiment is enabled' do before do stub_experiment(test_experiment: true) allow(controller).to receive(:current_user).and_return(user) end context 'the user is part of the experimental group' do before do stub_experiment_for_subject(test_experiment: true) end it 'calls add_user on the Experiment model' do expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context) controller.record_experiment_user(:test_experiment, context) end context 'with a cookie based rollout strategy' do it 'calls tracking_group with a nil subject' do expect(controller).to receive(:tracking_group).with(:test_experiment, nil, subject: nil).and_return(:experimental) allow(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context) controller.record_experiment_user(:test_experiment, context) end end context 'with a user based rollout strategy' do let(:rollout_strategy) { :user } it 'calls tracking_group with a user subject' do expect(controller).to receive(:tracking_group).with(:test_experiment, nil, subject: user).and_return(:experimental) allow(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context) controller.record_experiment_user(:test_experiment, context) end end end context 'the user is part of 
the control group' do before do stub_experiment_for_subject(test_experiment: false) end it 'calls add_user on the Experiment model' do expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user, context) controller.record_experiment_user(:test_experiment, context) end end end context 'when the experiment is disabled' do before do stub_experiment(test_experiment: false) allow(controller).to receive(:current_user).and_return(user) end it 'does not call add_user on the Experiment model' do expect(::Experiment).not_to receive(:add_user) controller.record_experiment_user(:test_experiment, context) end end context 'when there is no current_user' do before do stub_experiment(test_experiment: true) end it 'does not call add_user on the Experiment model' do expect(::Experiment).not_to receive(:add_user) controller.record_experiment_user(:test_experiment, context) end end context 'do not track' do before do stub_experiment(test_experiment: true) allow(controller).to receive(:current_user).and_return(user) end context 'is disabled' do before do stub_do_not_track('0') stub_experiment_for_subject(test_experiment: false) end it 'calls add_user on the Experiment model' do expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user, context) controller.record_experiment_user(:test_experiment, context) end end context 'is enabled' do before do stub_do_not_track('1') end it 'does not call add_user on the Experiment model' do expect(::Experiment).not_to receive(:add_user) controller.record_experiment_user(:test_experiment, context) end end end end describe '#record_experiment_group' do let(:group) { 'a group object' } let(:experiment_key) { :some_experiment_key } let(:dnt_enabled) { false } let(:experiment_active) { true } let(:rollout_strategy) { :whatever } let(:variant) { 'variant' } before do allow(controller).to receive(:dnt_enabled?).and_return(dnt_enabled) allow(::Gitlab::Experimentation).to receive(:active?).and_return(experiment_active) allow(::Gitlab::Experimentation).to receive(:rollout_strategy).and_return(rollout_strategy) allow(controller).to receive(:tracking_group).and_return(variant) allow(::Experiment).to receive(:add_group) end subject(:record_experiment_group) { controller.record_experiment_group(experiment_key, group) } shared_examples 'exits early without recording' do it 'returns early without recording the group as an ExperimentSubject' do expect(::Experiment).not_to receive(:add_group) record_experiment_group end end shared_examples 'calls tracking_group' do |using_cookie_rollout| it "calls tracking_group with #{using_cookie_rollout ? 'a nil' : 'the group as the'} subject" do expect(controller).to receive(:tracking_group).with(experiment_key, nil, subject: using_cookie_rollout ? 
nil : group).and_return(variant) record_experiment_group end end shared_examples 'records the group' do it 'records the group' do expect(::Experiment).to receive(:add_group).with(experiment_key, group: group, variant: variant) record_experiment_group end end context 'when DNT is enabled' do let(:dnt_enabled) { true } include_examples 'exits early without recording' end context 'when the experiment is not active' do let(:experiment_active) { false } include_examples 'exits early without recording' end context 'when a nil group is given' do let(:group) { nil } include_examples 'exits early without recording' end context 'when the experiment uses a cookie-based rollout strategy' do let(:rollout_strategy) { :cookie } include_examples 'calls tracking_group', true include_examples 'records the group' end context 'when the experiment uses a non-cookie-based rollout strategy' do let(:rollout_strategy) { :group } include_examples 'calls tracking_group', false include_examples 'records the group' end end describe '#record_experiment_conversion_event' do let(:user) { build(:user) } before do allow(controller).to receive(:dnt_enabled?).and_return(false) allow(controller).to receive(:current_user).and_return(user) stub_experiment(test_experiment: true) end subject(:record_conversion_event) do controller.record_experiment_conversion_event(:test_experiment) end it 'records the conversion event for the experiment & user' do expect(::Experiment).to receive(:record_conversion_event).with(:test_experiment, user, {}) record_conversion_event end shared_examples 'does not record the conversion event' do it 'does not record the conversion event' do expect(::Experiment).not_to receive(:record_conversion_event) record_conversion_event end end context 'when DNT is enabled' do before do allow(controller).to receive(:dnt_enabled?).and_return(true) end include_examples 'does not record the conversion event' end context 'when there is no current user' do before do allow(controller).to receive(:current_user).and_return(nil) end include_examples 'does not record the conversion event' end context 'when the experiment is not enabled' do before do stub_experiment(test_experiment: false) end include_examples 'does not record the conversion event' end end describe '#experiment_tracking_category_and_group' do let_it_be(:experiment_key) { :test_something } subject { controller.experiment_tracking_category_and_group(experiment_key) } it 'returns a string with the experiment tracking category & group joined with a ":"' do expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category') expect(controller).to receive(:tracking_group).with(experiment_key, '_group', subject: nil).and_return('experimental_group') expect(subject).to eq('Experiment::Category:experimental_group') end end end
29.596154
163
0.639576
33f2c7349b73b2ddc40521102572885d1706dd91
3,452
# frozen_string_literal: true

module RubyEventStore
  module ROM
    module Memory
      module Relations
        class Events < ::ROM::Relation[:memory]
          schema(:events) do
            attribute(:id, ::ROM::Types::Strict::Integer.default { RubyEventStore::ROM::Memory.fetch_next_id })
            attribute :event_id, ::ROM::Types::Strict::String.meta(primary_key: true)
            attribute :event_type, ::ROM::Types::Strict::String
            attribute :metadata, ::ROM::Types::Strict::String.optional
            attribute :data, ::ROM::Types::Strict::String
            attribute :created_at, RubyEventStore::ROM::Types::DateTime
            attribute :valid_at, RubyEventStore::ROM::Types::DateTime
          end

          def create_changeset(tuples)
            events.changeset(Changesets::CreateEvents, tuples)
          end

          def update_changeset(tuples)
            events.changeset(Changesets::UpdateEvents, tuples)
          end

          def insert(tuple)
            verify_uniquness!(tuple)
            super
          end

          def offset(num)
            num.zero? ? self : new(dataset.slice(num..-1) || [])
          end

          def for_stream_entries(_assoc, stream_entries)
            restrict(event_id: stream_entries.map { |e| e[:event_id] })
          end

          def by_event_id(event_id)
            restrict(event_id: event_id)
          end

          def by_event_type(event_type)
            restrict(event_type: event_type)
          end

          def exist?
            one?
          end

          def pluck(name)
            map { |e| e[name] }
          end

          def newer_than(time)
            restrict { |tuple| tuple[:created_at] > time.localtime }
          end

          def newer_than_or_equal(time)
            restrict { |tuple| tuple[:created_at] >= time.localtime }
          end

          def older_than(time)
            events.restrict { |tuple| tuple[:created_at] < time.localtime }
          end

          def older_than_or_equal(time)
            restrict { |tuple| tuple[:created_at] <= time.localtime }
          end

          DIRECTION_MAP = {
            forward: [false, :>, :<],
            backward: [true, :<, :>]
          }.freeze

          def ordered(direction, offset_entry_id = nil, stop_entry_id = nil, time_sort_by = nil)
            reverse, operator_offset, operator_stop = DIRECTION_MAP[direction]

            # guard against an unknown direction: `reverse` is nil when the lookup misses
            raise ArgumentError, 'Direction must be :forward or :backward' if reverse.nil?

            event_order_columns = [:id]

            case time_sort_by
            when :as_at
              event_order_columns.unshift :created_at
            when :as_of
              event_order_columns.unshift :valid_at
            end

            query = self
            query = query.restrict { |tuple| tuple[:id].public_send(operator_offset, offset_entry_id) } if offset_entry_id
            query = query.restrict { |tuple| tuple[:id].public_send(operator_stop, stop_entry_id) } if stop_entry_id
            query = new(query.dataset.sort_by { |tuple| event_order_columns.map { |c| tuple[c] } })
            query = new(query.dataset.reverse) if reverse
            query
          end

          private

          def verify_uniquness!(tuple)
            return unless by_event_id(tuple[:event_id]).exist?

            raise TupleUniquenessError.for_event_id(tuple[:event_id])
          end
        end
      end
    end
  end
end
31.381818
122
0.573291
261e8eb3f2254d1643bf13d4925aaa0366de28c0
734
#-------------------------------------------------------------------- # @file include.rb # @author Johnny Willemsen # # @brief Include visitor dds4ccm # # @copyright (c) Remedy IT Expertise BV #-------------------------------------------------------------------- require 'ridlbe/c++11/visitors/include' module IDL module Cxx11 # Reopen visitor class to add dds include specifics class IncludeVisitor def ddsx11_typesupport_include? !%w{ccm_dds.idl Components.idl}.include?(File.basename(node.filename)) end def ddsx11_typesupport_include self.include_directory + '/' + File.basename(node.filename, '.*') + params[:dds_typesupport_pfx] + params[:hdr_ext] end end end end
29.36
123
0.561308
282c91467f0e40c9478c6d1255cfbe316ead191a
2,716
class Filebeat < Formula desc "File harvester to ship log files to Elasticsearch or Logstash" homepage "https://www.elastic.co/products/beats/filebeat" url "https://github.com/elastic/beats/archive/v6.1.1.tar.gz" sha256 "c69f0047644be919e42a1d8fa3383c894ca8e054d5b6f727f161ed4ce497ca84" head "https://github.com/elastic/beats.git" bottle do cellar :any_skip_relocation sha256 "b591db4508a251ae2f53db7f3f0f2ad0febf9520ed851aa5e5084b5f56f0b941" => :high_sierra sha256 "bfa3d9a81dc31b9f79c64e9bb29ff0bc26c3d5082ee8540cbc5a735a0d1531fb" => :sierra sha256 "643b99d29e78947269b9140b4956ee713cb7713aa7caea4820dd6c14924a99d6" => :el_capitan sha256 "7442e133b6efe009f4cf67a82338e11d3064bad604f65d92e1a40d3560bdf907" => :x86_64_linux end depends_on "go" => :build def install gopath = buildpath/"gopath" (gopath/"src/github.com/elastic/beats").install Dir["{*,.git,.gitignore}"] ENV["GOPATH"] = gopath cd gopath/"src/github.com/elastic/beats/filebeat" do system "make" system "make", "modules" libexec.install "filebeat" (prefix/"module").install Dir["_meta/module.generated/*"] (etc/"filebeat").install Dir["filebeat.*"] end prefix.install_metafiles gopath/"src/github.com/elastic/beats" (bin/"filebeat").write <<~EOS #!/bin/sh exec #{libexec}/filebeat -path.config #{etc}/filebeat -path.home #{prefix} -path.logs #{var}/log/filebeat -path.data #{var}/filebeat $@ EOS end plist_options :manual => "filebeat" def plist; <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>Label</key> <string>#{plist_name}</string> <key>Program</key> <string>#{opt_bin}/filebeat</string> <key>RunAtLoad</key> <true/> </dict> </plist> EOS end test do log_file = testpath/"test.log" touch log_file (testpath/"filebeat.yml").write <<~EOS filebeat: prospectors: - paths: - #{log_file} scan_frequency: 0.1s output: file: path: #{testpath} EOS (testpath/"log").mkpath (testpath/"data").mkpath filebeat_pid = fork { exec "#{bin}/filebeat -c #{testpath}/filebeat.yml -path.config #{testpath}/filebeat -path.home=#{testpath} -path.logs #{testpath}/log -path.data #{testpath}" } begin sleep 1 log_file.append_lines "foo bar baz" sleep 5 assert_predicate testpath/"filebeat", :exist? ensure Process.kill("TERM", filebeat_pid) end end end
30.177778
185
0.655376
1d1d8830df69d6fdc7805958a9abd35009abf5cc
1,303
require 'test_helper' class UsersEditTest < ActionDispatch::IntegrationTest def setup @user = users(:michael) end test "unsuccessful edit" do log_in_as(@user) get edit_user_path(@user) assert_template 'users/edit' patch user_path(@user), params: { user: { name: "", email: "foo@invalid", password: "foo", password_confirmation: "bar" } } assert_template 'users/edit' assert_select "div.alert", "The form contains 4 errors." end test "successful edit with friendly forwarding" do get edit_user_path(@user) assert_equal session[:forwarding_url], edit_user_url(@user) log_in_as(@user) assert_nil session[:forwarding_url] name = "Foo Bar" email = "[email protected]" patch user_path(@user), params: { user: { name: name, email: email, password: "", password_confirmation: "" } } assert_not flash.empty? assert_redirected_to @user @user.reload assert_equal name, @user.name assert_equal email, @user.email end end
33.410256
78
0.529547
e840a1bcfd80d6355b1f0d7e873b0b2e949d6d6f
5,641
module ActiveMerchant #:nodoc: module Billing #:nodoc: class PayJunctionV2Gateway < Gateway self.display_name = 'PayJunction' self.homepage_url = 'https://www.payjunction.com/' self.test_url = 'https://api.payjunctionlabs.com/transactions' self.live_url = 'https://api.payjunction.com/transactions' self.supported_countries = ['US'] self.default_currency = 'USD' self.money_format = :dollars self.supported_cardtypes = [:visa, :master, :american_express, :discover] def initialize(options={}) requires!(options, :api_login, :api_password, :api_key) super end def purchase(amount, payment_method, options={}) post = {} add_invoice(post, amount, options) add_payment_method(post, payment_method) commit('purchase', post) end def authorize(amount, payment_method, options={}) post = {} post[:status] = 'HOLD' add_invoice(post, amount, options) add_payment_method(post, payment_method) commit('authorize', post) end def capture(amount, authorization, options={}) post = {} post[:status] = 'CAPTURE' post[:transactionId] = authorization add_invoice(post, amount, options) commit('capture', post) end def void(authorization, options={}) post = {} post[:status] = 'VOID' post[:transactionId] = authorization commit('void', post) end def refund(amount, authorization, options={}) post = {} post[:action] = 'REFUND' post[:transactionId] = authorization add_invoice(post, amount, options) commit('refund', post) end def credit(amount, payment_method, options={}) post = {} post[:action] = 'REFUND' add_invoice(post, amount, options) add_payment_method(post, payment_method) commit('credit', post) end def verify(credit_card, options={}) MultiResponse.run(:use_first_response) do |r| r.process { authorize(100, credit_card, options) } r.process(:ignore_result) { void(r.authorization, options) } end end def store(payment_method, options = {}) verify(payment_method, options) end def supports_scrubbing? true end def scrub(transcript) transcript. gsub(%r((Authorization: Basic )\w+), '\1[FILTERED]'). gsub(%r((X-Pj-Application-Key: )[\w-]+), '\1[FILTERED]'). gsub(%r((cardNumber=)\d+), '\1[FILTERED]'). gsub(%r((cardCvv=)\d+), '\1[FILTERED]') end private def add_invoice(post, money, options) post[:amountBase] = amount(money) if money post[:invoiceNumber] = options[:order_id] if options[:order_id] end def add_payment_method(post, payment_method) if payment_method.is_a? Integer post[:transactionId] = payment_method else post[:cardNumber] = payment_method.number post[:cardExpMonth] = format(payment_method.month, :two_digits) post[:cardExpYear] = format(payment_method.year, :four_digits) post[:cardCvv] = payment_method.verification_value end end def commit(action, params) response = begin parse(ssl_invoke(action, params)) rescue ResponseError => e parse(e.response.body) end success = success_from(response) Response.new( success, message_from(response), response, authorization: success ? authorization_from(response) : nil, error_code: success ? nil : error_from(response), test: test? 
) end def ssl_invoke(action, params) if ['purchase', 'authorize', 'refund', 'credit'].include?(action) ssl_post(url(), post_data(params), headers) else ssl_request(:put, url(params), post_data(params), headers) end end def headers { 'Authorization' => 'Basic ' + Base64.encode64("#{@options[:api_login]}:#{@options[:api_password]}").strip, 'Content-Type' => 'application/x-www-form-urlencoded;charset=UTF-8', 'Accept' => 'application/json', 'X-PJ-Application-Key' => @options[:api_key].to_s } end def post_data(params) params.map { |k, v| "#{k}=#{CGI.escape(v.to_s)}" }.join('&') end def url(params={}) test? ? "#{test_url}/#{params[:transactionId]}" : "#{live_url}/#{params[:transactionId]}" end def parse(body) JSON.parse(body) rescue JSON::ParserError message = 'Invalid JSON response received from PayJunctionV2Gateway. Please contact PayJunctionV2Gateway if you continue to receive this message.' message += " (The raw response returned by the API was #{body.inspect})" { 'errors' => [{ 'message' => message }] } end def success_from(response) return response['response']['approved'] if response['response'] false end def message_from(response) return response['response']['message'] if response['response'] response['errors']&.inject('') { |message, error| error['message'] + '|' + message } end def authorization_from(response) response['transactionId'] end def error_from(response) response['response']['code'] if response['response'] end end end end
29.689474
154
0.586775
4a8002616250307b7155e6249d5a3568f2673f36
4,453
# frozen_string_literal: true require 'spec_helper' require Rails.root.join('config', 'object_store_settings.rb') RSpec.describe ObjectStoreSettings do describe '#parse!' do let(:settings) { Settingslogic.new(config) } subject { described_class.new(settings).parse! } context 'with valid config' do let(:connection) do { 'provider' => 'AWS', 'aws_access_key_id' => 'AWS_ACCESS_KEY_ID', 'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY', 'region' => 'us-east-1' } end let(:config) do { 'lfs' => { 'enabled' => true }, 'artifacts' => { 'enabled' => true }, 'external_diffs' => { 'enabled' => false }, 'object_store' => { 'enabled' => true, 'connection' => connection, 'proxy_download' => true, 'objects' => { 'artifacts' => { 'bucket' => 'artifacts', 'proxy_download' => false }, 'lfs' => { 'bucket' => 'lfs-objects' }, 'external_diffs' => { 'bucket' => 'external_diffs', 'enabled' => false } } } } end it 'sets correct default values' do subject expect(settings.artifacts['enabled']).to be true expect(settings.artifacts['object_store']['enabled']).to be true expect(settings.artifacts['object_store']['connection']).to eq(connection) expect(settings.artifacts['object_store']['direct_upload']).to be true expect(settings.artifacts['object_store']['background_upload']).to be false expect(settings.artifacts['object_store']['proxy_download']).to be false expect(settings.artifacts['object_store']['remote_directory']).to eq('artifacts') expect(settings.lfs['enabled']).to be true expect(settings.lfs['object_store']['enabled']).to be true expect(settings.lfs['object_store']['connection']).to eq(connection) expect(settings.lfs['object_store']['direct_upload']).to be true expect(settings.lfs['object_store']['background_upload']).to be false expect(settings.lfs['object_store']['proxy_download']).to be true expect(settings.lfs['object_store']['remote_directory']).to eq('lfs-objects') expect(settings.external_diffs['enabled']).to be false expect(settings.external_diffs['object_store']['enabled']).to be false expect(settings.external_diffs['object_store']['remote_directory']).to eq('external_diffs') end it 'raises an error when a bucket is missing' do config['object_store']['objects']['lfs'].delete('bucket') expect { subject }.to raise_error(/Object storage for lfs must have a bucket specified/) end context 'with legacy config' do let(:legacy_settings) do { 'enabled' => true, 'remote_directory' => 'some-bucket', 'direct_upload' => true, 'background_upload' => false, 'proxy_download' => false } end before do settings.lfs['object_store'] = described_class.legacy_parse(legacy_settings) end it 'does not alter config if legacy settings are specified' do subject expect(settings.artifacts['object_store']).to be_nil expect(settings.lfs['object_store']['remote_directory']).to eq('some-bucket') expect(settings.external_diffs['object_store']).to be_nil end end end end describe '.legacy_parse' do it 'sets correct default values' do settings = described_class.legacy_parse(nil) expect(settings['enabled']).to be false expect(settings['direct_upload']).to be false expect(settings['background_upload']).to be true expect(settings['remote_directory']).to be nil end it 'respects original values' do original_settings = Settingslogic.new({ 'enabled' => true, 'remote_directory' => 'artifacts' }) settings = described_class.legacy_parse(original_settings) expect(settings['enabled']).to be true expect(settings['direct_upload']).to be false expect(settings['background_upload']).to be true expect(settings['remote_directory']).to eq 'artifacts' end 
end end
34.789063
99
0.599371
33825081625b158802e4516d234451c9f92745fa
2,090
# frozen_string_literal: true require "spec_helper" describe GraphQL::Schema::InputObject do let(:input_object) { Jazz::EnsembleInput } describe "type info" do it "has it" do assert_equal "EnsembleInput", input_object.graphql_name assert_equal nil, input_object.description assert_equal 1, input_object.arguments.size end it "is the #owner of its arguments" do argument = input_object.arguments["name"] assert_equal input_object, argument.owner end it "inherits arguments" do base_class = Class.new(GraphQL::Schema::InputObject) do argument :arg1, String, required: true argument :arg2, String, required: true end subclass = Class.new(base_class) do argument :arg2, Integer, required: true argument :arg3, Integer, required: true end assert_equal 3, subclass.arguments.size assert_equal ["arg1", "arg2", "arg3"], subclass.arguments.keys assert_equal ["String!", "Int!", "Int!"], subclass.arguments.values.map { |a| a.type.to_s } end end describe ".to_graphql" do it "assigns itself as the arguments_class" do assert_equal input_object, input_object.to_graphql.arguments_class end it "accepts description: kwarg" do input_obj_class = Jazz::InspectableInput input_obj_type = input_obj_class.to_graphql assert_equal "Test description kwarg", input_obj_type.arguments["stringValue"].description end end describe "in queries" do it "is passed to the field method" do query_str = <<-GRAPHQL { inspectInput(input: { stringValue: "ABC", legacyInput: { intValue: 4 }, nestedInput: { stringValue: "xyz"} }) } GRAPHQL res = Jazz::Schema.execute(query_str, context: { message: "hi" }) expected_info = [ "Jazz::InspectableInput", "hi, ABC, 4, (hi, xyz, -, (-))", "ABC", "ABC", "true", "ABC", ] assert_equal expected_info, res["data"]["inspectInput"] end end end
29.027778
97
0.639713
bb62b8dece61a507796a88c6e43b1b6ab4a3378b
1,861
# frozen_string_literal: true # run once a day, overnight, to synchronize systems class NightlySyncsJob < CaseflowJob queue_with_priority :low_priority application_attr :queue # arbitrary def perform RequestStore.store[:current_user] = User.system_user sync_vacols_users sync_vacols_cases sync_decision_review_tasks sync_bgs_attorneys end private def sync_vacols_users user_cache_start = Time.zone.now CachedUser.sync_from_vacols datadog_report_time_segment(segment: "sync_users_from_vacols", start_time: user_cache_start) end def sync_vacols_cases start_time = Time.zone.now dangling_legacy_appeals.each do |legacy_appeal| next if legacy_appeal.case_record.present? # extra check # delete pure danglers if legacy_appeal.tasks.none? legacy_appeal.destroy! else # if we have tasks and no case_record, then we need to cancel all the tasks, # but we do not delete the dangling LegacyAppeal record. legacy_appeal.tasks.open.where(parent_id: nil).each(&:cancel_task_and_child_subtasks) end end datadog_report_time_segment(segment: "sync_cases_from_vacols", start_time: start_time) end def sync_decision_review_tasks # tasks that went unfinished while the case was completed should be cancelled checker = DecisionReviewTasksForInactiveAppealsChecker.new checker.call checker.buffer.map { |task_id| Task.find(task_id).cancelled! } end def sync_bgs_attorneys start_time = Time.zone.now BgsAttorney.sync_bgs_attorneys datadog_report_time_segment(segment: "sync_bgs_attorneys", start_time: start_time) end def dangling_legacy_appeals reporter = LegacyAppealsWithNoVacolsCase.new reporter.call reporter.buffer.map { |vacols_id| LegacyAppeal.find_by(vacols_id: vacols_id) } end end
30.016129
96
0.760344
61413e854014bf7386b1ae244e00f9a1c4cdc814
499
require 'spec_helper' describe PivotalTracker::Task do before do @project = PivotalTracker::Project.find(102622) @story = @project.stories.find(4459994) end context ".all" do it "should return an array of tasks" do @story.tasks.all.should be_a(Array) @story.tasks.all.first.should be_a(PivotalTracker::Task) end end context ".find" do it "should return a given task" do @story.tasks.find(468113).should be_a(PivotalTracker::Task) end end end
22.681818
65
0.687375
bffaf1a4059e5ed3d87f20735686d8eba810d948
1,060
module Fastlane module Actions class OptOutCrashReportingAction < Action def self.run(params) ENV['FASTLANE_OPT_OUT_CRASH_REPORTING'] = "YES" UI.message("Disabled crash reporting") end def self.description "This will prevent reports from being uploaded when _fastlane_ crashes" end def self.details [ "By default, fastlane will send a report when it crashes", "The stack trace is sanitized so no personal information is sent.", "Learn more at https://docs.fastlane.tools/actions/opt_out_crash_reporting/", "Add `opt_out_crash_reporting` at the top of your Fastfile to disable crash reporting" ].join(' ') end def self.authors ['mpirri', 'ohayon'] end def self.is_supported?(platform) true end def self.example_code [ 'opt_out_crash_reporting # add this to the top of your Fastfile' ] end def self.category :misc end end end end
25.238095
96
0.616981
f728155ff9ba5c02744fc38fc0d6af2aedbf6797
225
class CreateWidgetComments < ActiveRecord::Migration[5.2] def change create_table :widget_comments do |t| t.references :widget, foreign_key: true t.string :username t.string :comment end end end
22.5
57
0.697778
26f454fbc5ad6fb6489632de845ffed7090f5ee0
337
require 'fileutils' module Pod class Command class Setup < Command self.summary = 'Setup the CocoaPods environment' self.description = <<-DESC Setup the CocoaPods environment DESC def run # Right now, no setup is needed UI.puts 'Setup completed'.green end end end end
17.736842
54
0.623145
33a14669b6f996ad5c23f6f5a9d6d409c2bdcbf4
992
Gem::Specification.new do |s|
  s.name = 'ruby-search-engine'
  s.version = '0.0.1'
  s.executables = ['ruby_search_engine.rb']
  s.date = '2016-03-29'
  s.summary = "Search the web using Ruby"
  s.description = "Search the web using Ruby with this fast and lightweight gem, made to prioritize speed and efficiency. Compared to other search gems, this aims to be minimal, easy to use and, most importantly, quick. For any inquiries, please feel free to contact us via email or a pull request. I hope you enjoy using ruby-search-engine as much as we enjoyed making it!\nhttps://github.com/martinvelez/ruby-search-engine\n-Matt Le"
  s.authors = ["Martin Velez", "Michael Yen", "Mathew Le"]
  s.email = ['[email protected]', '[email protected]', '[email protected]']
  s.files = Dir["{lib,bin}/**/*"] + ["README.rdoc"]
  s.homepage = 'http://github.com/martinvelez/ruby-search-engine'
  s.require_paths = ["bin", "lib"]
  s.add_dependency('ruby_cli', '>=0.2.0')
  s.license = 'MIT'
end
58.352941
382
0.708669
62d33eac1f3002f58c7a7effa002f0dcdea4b228
1,092
# frozen_string_literal: true

RSpec.describe Loggi::Credential, type: :model do
  describe '#initialize' do
    subject { described_class.new(options) }

    context 'without options' do
      let(:options) { {} }

      it 'should not fill any field' do
        expect(subject.email).to be_nil
        expect(subject.password).to be_nil
        expect(subject.api_key).to be_nil
      end
    end

    context 'with options' do
      let(:email) { Faker::Internet.email }
      let(:password) { Faker::Internet.password }
      let(:api_key) { SecureRandom.uuid }
      let(:options) { { email: email, password: password, api_key: api_key } }

      it 'should fill all fields' do
        expect(subject.email).to eq(email)
        expect(subject.password).to eq(password)
        expect(subject.api_key).to eq(api_key)
      end
    end
  end

  describe '#authenticate!' do
    subject { build(:credential).authenticate! }

    after { subject }

    it 'should call authentication service' do
      expect(Loggi::Services::Authentication).to receive(:authenticate!).once
    end
  end
end
26.634146
78
0.649267
39e3a4f3d5316cf7b4fdb8b705fc75ec3536ac80
134
module Pixela class Configuration # @!attribute debug_logger # @return [Logger] attr_accessor :debug_logger end end
16.75
31
0.701493
5dcb6aceac94e23704782bcfe9c4f9134d370249
2,679
require File.expand_path('../../../spec_helper', __FILE__) # MRI magic to use built but not installed ruby $extmk = false require 'rbconfig' OBJDIR ||= File.expand_path("../../../ext/#{RUBY_ENGINE}/#{RUBY_VERSION}", __FILE__) mkdir_p(OBJDIR) def extension_path File.expand_path("../ext", __FILE__) end def object_path OBJDIR end def compile_extension(name) debug = false run_mkmf_in_process = RUBY_ENGINE == 'truffleruby' ext = "#{name}_spec" lib = "#{object_path}/#{ext}.#{RbConfig::CONFIG['DLEXT']}" ruby_header = "#{RbConfig::CONFIG['rubyhdrdir']}/ruby.h" return lib if File.exist?(lib) and File.mtime(lib) > File.mtime("#{extension_path}/rubyspec.h") and File.mtime(lib) > File.mtime("#{extension_path}/#{ext}.c") and File.mtime(lib) > File.mtime(ruby_header) and true # sentinel # Copy needed source files to tmpdir tmpdir = tmp("cext_#{name}") Dir.mkdir(tmpdir) begin ["rubyspec.h", "#{ext}.c"].each do |file| cp "#{extension_path}/#{file}", "#{tmpdir}/#{file}" end Dir.chdir(tmpdir) do if run_mkmf_in_process required = require 'mkmf' # Reinitialize mkmf if already required init_mkmf unless required create_makefile(ext, tmpdir) else File.write("extconf.rb", "require 'mkmf'\n" + "$ruby = ENV.values_at('RUBY_EXE', 'RUBY_FLAGS').join(' ')\n" + # MRI magic to consider building non-bundled extensions "$extout = nil\n" + "create_makefile(#{ext.inspect})\n") output = ruby_exe("extconf.rb") raise "extconf failed:\n#{output}" unless $?.success? $stderr.puts output if debug end make = ENV['MAKE'] make ||= (RbConfig::CONFIG['host_os'].include?("mswin") ? "nmake" : "make") if File.basename(make, ".*") == "nmake" # suppress logo of nmake.exe to stderr ENV["MAKEFLAGS"] = "l#{ENV["MAKEFLAGS"]}" end # Do not capture stderr as we want to show compiler warnings output = IO.popen([make, "V=1", "DESTDIR=", close_others: false], &:read) raise "#{make} failed:\n#{output}" unless $?.success? $stderr.puts output if debug cp File.basename(lib), lib end ensure rm_r tmpdir end File.chmod(0755, lib) lib end def load_extension(name) require compile_extension(name) rescue LoadError if %r{/usr/sbin/execerror ruby "\(ld 3 1 main ([/a-zA-Z0-9_\-.]+_spec\.so)"} =~ $!.message system('/usr/sbin/execerror', "#{RbConfig::CONFIG["bindir"]}/ruby", "(ld 3 1 main #{$1}") end raise end # Constants CAPI_SIZEOF_LONG = [0].pack('l!').size
29.43956
93
0.611049
f8d8fc3c01f4a935942843c833138256daca85fc
166
require 'sales_tax/rate' require 'sales_tax/rate_store' module SalesTax Data = RateStore.new def self.[](zip_code) Data.find_by_zip_code zip_code end end
15.090909
34
0.753012
622fbb84c02930cfd88e39bb58101ee5da24b4fb
730
class CategoriesController < ApplicationController before_action :authenticate_user!, only: [:new, :create, :edit, :update, :destroy] def new @category = Category.new end def create @category = Category.new category_params if @category.save redirect_to posts_path, flash: { notice: 'Your category has been created.' } else flash.now[:error] = @category.errors.full_messages render :new end end def show @category = Category.includes(posts: [:comment_threads]).find(params[:id]) @posts = @category.posts.page(params[:page]) end private def category_params params.require(:category).permit(:name, :title, :description, :sidebar, :submission_text) end end
25.172414
93
0.693151
ff4725eaf6489006aac09af4c396cb910fc74dd4
8,228
module Mutest
  module AST
    module Regexp
      class Transformer
        # Transformer for nodes which map directly to other domain
        #
        # A node maps "directly" to another domain if the node never
        # has children or text which needs to be preserved for a mapping
        #
        # @example direct mapping
        #
        #   input      = /\d/
        #   expression = Regexp::Parser.parse(input).first
        #   node       = Transformer::Direct.to_ast(expression)
        #
        #   # the digit type always has the same text and no children
        #   expression.text      # => "\\d"
        #   expression.terminal? # => true
        #
        #   # therefore the `Parser::AST::Node` is always the same
        #   node # => s(:regexp_digit_type)
        class Direct < self
          # Mapper from `Regexp::Expression` to `Parser::AST::Node`
          class ExpressionToAST < Transformer::ExpressionToAST
            # Transform expression into node
            #
            # @return [Parser::AST::Node]
            def call
              quantify(ast)
            end
          end

          # Mapper from `Parser::AST::Node` to `Regexp::Expression`
          class ASTToExpression < Transformer::ASTToExpression
            include LookupTable

            # rubocop:disable LineLength
            TABLE = Table.create(
              [:regexp_one_or_more_escape, [:escape, :one_or_more, '\+'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_zero_or_one_escape, [:escape, :zero_or_one, '\?'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_alternation_escape, [:escape, :alternation, '\|'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_group_open_escape, [:escape, :group_open, '\('], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_group_close_escape, [:escape, :group_close, '\)'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_interval_open_escape, [:escape, :interval_open, '\{'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_interval_close_escape, [:escape, :interval_close, '\}'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_newline_escape, [:escape, :newline, '\n'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_zero_or_more_escape, [:escape, :zero_or_more, '\*'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_carriage_escape, [:escape, :carriage, '\r'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_dot_escape, [:escape, :dot, '\.'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_set_open_escape, [:escape, :set_open, '\['], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_set_close_escape, [:escape, :set_close, '\]'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_eol_escape, [:escape, :eol, '\$'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_bol_escape, [:escape, :bol, '\^'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_bell_escape, [:escape, :bell, '\a'], ::Regexp::Expression::EscapeSequence::Literal],
              [:regexp_escape_escape, [:escape, :escape, '\e'], ::Regexp::Expression::EscapeSequence::AsciiEscape],
              [:regexp_form_feed_escape, [:escape, :form_feed, '\f'], ::Regexp::Expression::EscapeSequence::FormFeed],
              [:regexp_vertical_tab_escape, [:escape, :vertical_tab, '\v'], ::Regexp::Expression::EscapeSequence::VerticalTab],
              [:regexp_mark_keep, [:keep, :mark, '\K'], ::Regexp::Expression::Keep::Mark],
              [:regexp_bos_anchor, [:anchor, :bos, '\\A'], ::Regexp::Expression::Anchor::BeginningOfString],
              [:regexp_match_start_anchor, [:anchor, :match_start, '\\G'], ::Regexp::Expression::Anchor::MatchStart],
              [:regexp_word_boundary_anchor, [:anchor, :word_boundary, '\b'], ::Regexp::Expression::Anchor::WordBoundary],
              [:regexp_eos_ob_eol_anchor, [:anchor, :eos_ob_eol, '\\Z'], ::Regexp::Expression::Anchor::EndOfStringOrBeforeEndOfLine],
              [:regexp_eos_anchor, [:anchor, :eos, '\\z'], ::Regexp::Expression::Anchor::EndOfString],
              [:regexp_bol_anchor, [:anchor, :bol, '^'], ::Regexp::Expression::Anchor::BeginningOfLine],
              [:regexp_eol_anchor, [:anchor, :eol, '$'], ::Regexp::Expression::Anchor::EndOfLine],
              [:regexp_nonword_boundary_anchor, [:anchor, :nonword_boundary, '\\B'], ::Regexp::Expression::Anchor::NonWordBoundary],
              [:regexp_alpha_property, [:property, :alpha, '\p{Alpha}'], ::Regexp::Expression::UnicodeProperty::Alpha],
              [:regexp_script_arabic_property, [:property, :script_arabic, '\p{Arabic}'], ::Regexp::Expression::UnicodeProperty::Script],
              [:regexp_script_hangul_property, [:property, :script_hangul, '\p{Hangul}'], ::Regexp::Expression::UnicodeProperty::Script],
              [:regexp_script_han_property, [:property, :script_han, '\p{Han}'], ::Regexp::Expression::UnicodeProperty::Script],
              [:regexp_script_hiragana_property, [:property, :script_hiragana, '\p{Hiragana}'], ::Regexp::Expression::UnicodeProperty::Script],
              [:regexp_script_katakana_property, [:property, :script_katakana, '\p{Katakana}'], ::Regexp::Expression::UnicodeProperty::Script],
              [:regexp_letter_any_property, [:property, :letter_any, '\p{L}'], ::Regexp::Expression::UnicodeProperty::Letter::Any],
              [:regexp_hex_type, [:type, :hex, '\h'], ::Regexp::Expression::CharacterType::Hex],
              [:regexp_digit_type, [:type, :digit, '\d'], ::Regexp::Expression::CharacterType::Digit],
              [:regexp_space_type, [:type, :space, '\s'], ::Regexp::Expression::CharacterType::Space],
              [:regexp_word_type, [:type, :word, '\w'], ::Regexp::Expression::CharacterType::Word],
              [:regexp_hex_type, [:type, :hex, '\h'], ::Regexp::Expression::CharacterType::Hex],
              [:regexp_nonhex_type, [:type, :nonhex, '\H'], ::Regexp::Expression::CharacterType::NonHex],
              [:regexp_nondigit_type, [:type, :nondigit, '\D'], ::Regexp::Expression::CharacterType::NonDigit],
              [:regexp_nonspace_type, [:type, :nonspace, '\S'], ::Regexp::Expression::CharacterType::NonSpace],
              [:regexp_nonword_type, [:type, :nonword, '\W'], ::Regexp::Expression::CharacterType::NonWord],
              [:regexp_dot_meta, [:meta, :dot, '.'], ::Regexp::Expression::CharacterType::Any]
            )

            private

            # Transform ast into expression
            #
            # @return [Regexp::Expression::Base]
            def transform
              expression_class.new(expression_token)
            end
          end

          ASTToExpression::TABLE.types.each(&method(:register))
        end
      end
    end
  end
end
80.666667
157
0.514706
216b19dcf22cc89d9dbde8a9b2a73152518ae8ad
719
# frozen_string_literal: true

$LOAD_PATH.unshift(File.dirname(__FILE__) + '/../lib')
%w[
  bundler/setup
  signalwire
].each { |f| require f }

# Setup your ENV with:
# SIGNALWIRE_PROJECT_KEY=YOUR_SIGNALWIRE_ACCOUNT_ID
# SIGNALWIRE_TOKEN=YOUR_SIGNALWIRE_ACCOUNT_TOKEN
#
# Set logging to debug for testing
Signalwire::Logger.logger.level = ::Logger::DEBUG

class MyConsumer < Signalwire::Relay::Consumer
  contexts ['incoming']

  def on_incoming_call(call)
    call.answer
    call.play_tts text: 'connecting you to the clock service'
    call.connect [[{ type: 'phone', params: { to_number: '+12027621401', from_number: ENV['FROM_NUMBER'], timeout: 30 } }]]

    sleep 20

    call.hangup
  end
end

MyConsumer.new.run
24.793103
123
0.730181
e958687e86bc4a78ba32166ec25d75d47a8dce2a
417
class Eagle < Cask
  version '7.1.0'
  sha256 '95a721bae751ea210fad390c9b414ec5e317332133072f08247b552e125ab2d5'

  url "ftp://ftp.cadsoft.de/eagle/program/#{version.gsub(/\.\d$/, '')}/eagle-mac-#{version}.zip"
  homepage 'http://www.cadsoftusa.com/'
  license :unknown

  pkg "eagle-#{version}.pkg"

  uninstall :pkgutil => 'com.CadSoftComputerGmbH.EAGLE',
            :delete => "/Applications/EAGLE-#{version}"
end
32.076923
96
0.695444
e845714240ddce4b0f9da52e761a9c831adfbbcc
2,764
class NeovimRemote < Formula
  include Language::Python::Virtualenv

  desc "Control nvim processes using `nvr` command-line tool"
  homepage "https://github.com/mhinz/neovim-remote"
  url "https://files.pythonhosted.org/packages/cc/d8/82aec85fc7ad0853afca2c88e73ecc7d3a50c66988c44aa9748ccbc9b689/neovim-remote-2.4.0.tar.gz"
  sha256 "f199ebb61c3decf462feed4e7d467094ed38d8afaf43620736b5983a12fe2427"
  license "MIT"
  head "https://github.com/mhinz/neovim-remote.git", branch: "master"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "eb32806679e25729e2106e7ee35082d41b460a49305e51402121a21a013f8924"
    sha256 cellar: :any_skip_relocation, big_sur: "f94e3d59bc081404b883cd5fa4e8ddd48e33d060fc25099c0070ba52e8ac9682"
    sha256 cellar: :any_skip_relocation, catalina: "1bb77fdf6276df0833eed58107681ff725a0b0c6746a5e19c7c98428b903799d"
    sha256 cellar: :any_skip_relocation, mojave: "a08d1b3bbae29235c707c59a345ce343a6994e4f9460bbcf16f8279a2e13683e"
  end

  depends_on "neovim"
  depends_on "python@3.9"

  resource "greenlet" do
    url "https://files.pythonhosted.org/packages/47/6d/be10df2b141fcb1020c9605f7758881b5af706fb09a05b737e8eb7540387/greenlet-1.1.0.tar.gz"
    sha256 "c87df8ae3f01ffb4483c796fe1b15232ce2b219f0b18126948616224d3f658ee"
  end

  resource "msgpack" do
    url "https://files.pythonhosted.org/packages/59/04/87fc6708659c2ed3b0b6d4954f270b6e931def707b227c4554f99bd5401e/msgpack-1.0.2.tar.gz"
    sha256 "fae04496f5bc150eefad4e9571d1a76c55d021325dcd484ce45065ebbdd00984"
  end

  resource "psutil" do
    url "https://files.pythonhosted.org/packages/e1/b0/7276de53321c12981717490516b7e612364f2cb372ee8901bd4a66a000d7/psutil-5.8.0.tar.gz"
    sha256 "0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"
  end

  resource "pynvim" do
    url "https://files.pythonhosted.org/packages/7a/01/2d0898ba6cefbe2736283ee3155cba1c602de641ca5667ac55a0e4857276/pynvim-0.4.3.tar.gz"
    sha256 "3a795378bde5e8092fbeb3a1a99be9c613d2685542f1db0e5c6fd467eed56dff"
  end

  def install
    virtualenv_install_with_resources
  end

  test do
    socket = testpath/"nvimsocket"
    file = testpath/"test.txt"

    ENV["NVIM_LISTEN_ADDRESS"] = socket

    nvim = spawn(
      { "NVIM_LISTEN_ADDRESS" => socket },
      Formula["neovim"].opt_bin/"nvim",
      "--headless", "-i", "NONE", "-u", "NONE", file,
      [:out, :err] => "/dev/null"
    )
    sleep 5

    str = "Hello from neovim-remote!"
    system bin/"nvr", "--remote-send", "i#{str}<esc>:write<cr>"
    assert_equal str, file.read.chomp

    assert_equal Process.kill(0, nvim), 1
    system bin/"nvr", "--remote-send", ":quit<cr>"
    # Test will be terminated by the timeout
    # if `:quit` was not sent correctly
    Process.wait nvim
  end
end
40.057971
141
0.764472
1c84c408eba5e732edc0c0d25f586266b8f4eee9
233
# frozen_string_literal: true

module RuboCop
  module Cop
    module Style
      class AndOr < Cop
        YAYOI_MSG = "ζ*'ヮ')ζ<うっうー!" \
                    '`%<current>s` じゃなくて `%<prefer>s` をつかいましょうねー!'
      end
    end
  end
end
17.923077
66
0.553648
d545a41f21d09cc992d56d1c5dd78f312774f96d
2,670
require 'spec_helper'

describe Admin::RevisionsController do
  describe '#restore' do
    it 'does not allow account manager to restore revisions for other account' do
      account = create(:account)
      entry = create(:entry)
      earlier_revision = create(:revision, :frozen, :entry => entry)

      sign_in(create(:user, :account_manager))

      expect { post(:restore, :id => earlier_revision.id) }.not_to change { entry.revisions.count }
    end

    it 'allows account manager to restore revisions for own account' do
      user = create(:user, :account_manager)
      entry = create(:entry, :account => user.account)
      earlier_revision = create(:revision, :frozen, :entry => entry, :title => 'the way it used to be')

      sign_in(user)

      post(:restore, :id => earlier_revision.id)

      expect(entry.draft(true).title).to eq('the way it used to be')
    end

    it 'allows admin to restore revisions for other accounts' do
      account = create(:account)
      entry = create(:entry)
      earlier_revision = create(:revision, :frozen, :entry => entry, :title => 'the way it used to be')

      sign_in(create(:user, :admin))

      post(:restore, :id => earlier_revision.id)

      expect(entry.draft(true).title).to eq('the way it used to be')
    end

    it 'allows editor to restore revisions for entries he is member of' do
      user = create(:user, :editor)
      entry = create(:entry, :with_member => user, :account => user.account)
      earlier_revision = create(:revision, :frozen, :entry => entry, :title => 'the way it used to be')

      sign_in(user)

      post(:restore, :id => earlier_revision.id)

      expect(entry.draft(true).title).to eq('the way it used to be')
    end

    it 'does not allows editor to restore revisions for entries he is not member of' do
      user = create(:user, :editor)
      entry = create(:entry, :account => user.account)
      earlier_revision = create(:revision, :frozen, :entry => entry)

      sign_in(user)

      post(:restore, :id => earlier_revision.id)

      expect { post(:restore, :id => earlier_revision.id) }.not_to change { entry.revisions.count }
    end

    it 'needs to be able to aquire an edit lock' do
      user = create(:user, :editor)
      entry = create(:entry, :with_member => user, :account => user.account)
      earlier_revision = create(:revision, :frozen, :entry => entry)

      aquire_edit_lock(create(:user), entry)
      sign_in(user)
      request.env['HTTP_REFERER'] = admin_entry_path(entry)

      expect { post(:restore, :id => earlier_revision.id) }.not_to change { entry.revisions.count }
    end
  end
end
34.230769
103
0.643446
ab7c64c66c31d869059d474d6beaa9ca38bb2a99
2,352
class Projects::PathLocksController < Projects::ApplicationController
  include PathLocksHelper
  include ExtractsPath

  # Authorize
  before_action :require_non_empty_project
  before_action :authorize_push_code!, only: [:toggle]
  before_action :check_license
  before_action :assign_ref_vars, only: :toggle
  before_action :lfs_blob_ids, only: :toggle

  def index
    @path_locks = @project.path_locks.page(params[:page])
  end

  def toggle
    path_lock = @project.path_locks.find_by(path: params[:path])

    if path_lock
      unlock_file(path_lock)
    else
      lock_file
    end

    head :ok
  rescue PathLocks::UnlockService::AccessDenied, PathLocks::LockService::AccessDenied
    return access_denied!
  end

  def destroy
    path_lock = @project.path_locks.find(params[:id])

    begin
      PathLocks::UnlockService.new(project, current_user).execute(path_lock)
    rescue PathLocks::UnlockService::AccessDenied
      return access_denied!
    end

    respond_to do |format|
      format.html do
        redirect_to project_locks_path(@project), status: 302
      end
      format.js
    end
  end

  private

  def check_license
    unless @project.feature_available?(:file_locks)
      flash[:alert] = _('You need a different license to enable FileLocks feature')
      redirect_to admin_license_path
    end
  end

  def lock_file
    path_lock = PathLocks::LockService.new(project, current_user).execute(params[:path])

    if path_lock.persisted? && sync_with_lfs?
      Lfs::LockFileService.new(
        project,
        current_user,
        path: params[:path],
        create_path_lock: false
      ).execute
    end
  end

  def unlock_file(path_lock)
    PathLocks::UnlockService.new(project, current_user).execute(path_lock)

    if sync_with_lfs?
      Lfs::UnlockFileService.new(project, current_user, path: path_lock.path, force: true).execute
    end
  end

  # Override get_id from ExtractsPath.
  # We don't support file locking per branch, that's why we use the root branch.
  def get_id
    id = project.repository.root_ref
    id += "/#{params[:path]}" if params[:path].present?
    id
  end

  def lfs_file?
    blob = project.repository.blob_at_branch(@ref, @path)

    return false unless blob

    @lfs_blob_ids.include?(blob.id)
  end

  def sync_with_lfs?
    project.lfs_enabled? && lfs_file?
  end
end
24
98
0.705782
62a33f7ed975bffbf65879f9d0614c6faf4c40e7
673
#
# Cookbook:: build_cookbook
# Recipe:: deploy
#
# Copyright:: 2017, Corey Hemminger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

include_recipe 'delivery-truck::deploy'
33.65
74
0.757801
219161f7fe14edc6aec8f0cdafd7b4bb853224d7
711
Pod::Spec.new do |s|
  s.name = "Commercetools"
  s.version = "0.12.3"
  s.summary = "The e-commerce Swift SDK from commercetools"
  s.homepage = "https://github.com/commercetools/commercetools-ios-sdk"
  s.license = { :type => "Apache License, Version 2.0", :file => "LICENSE" }
  s.author = { "Commercetools GmbH" => "support@commercetools.com" }
  s.source = { :git => "https://github.com/commercetools/commercetools-ios-sdk.git", :tag => s.version.to_s }

  s.source_files = 'Source/*.swift'

  s.ios.deployment_target = '10.0'
  s.osx.deployment_target = '10.11'
  s.tvos.deployment_target = '9.0'
  s.watchos.deployment_target = '2.2'

  s.swift_version = '5.5'
end
37.421053
115
0.635724
26534fb4f0fc1c18996e4185739844c537707450
319
module HealthSeven::V2_5
class MfqM01 < ::HealthSeven::Message
  attribute :msh, Msh, position: "MSH", require: true
  attribute :sfts, Array[Sft], position: "SFT", multiple: true
  attribute :qrd, Qrd, position: "QRD", require: true
  attribute :qrf, Qrf, position: "QRF"
  attribute :dsc, Dsc, position: "DSC"
end
end
35.444444
62
0.708464
b90c350c6209c98b47b0c96e8b6201845278eba7
604
# frozen_string_literal: true

module VAOS
  module V0
    class BaseController < ::ApplicationController
      before_action :authorize

      protected

      def authorize
        raise_access_denied unless current_user.authorize(:vaos, :access?)
        raise_access_denied_no_icn if current_user.icn.blank?
      end

      def raise_access_denied
        raise Common::Exceptions::Forbidden, detail: 'You do not have access to online scheduling'
      end

      def raise_access_denied_no_icn
        raise Common::Exceptions::Forbidden, detail: 'No patient ICN found'
      end
    end
  end
end
24.16
98
0.701987
4a5e5feab4c15c8a80f2db45ca63a7fafb13e80c
132
# Preview all emails at http://localhost:3000/rails/mailers/auth/notifier
class Auth::NotifierPreview < ActionMailer::Preview
end
26.4
73
0.795455
2166dbed663382eacb6258415d1b8fa1356f3a64
7,184
# frozen_string_literal: true

require 'spec_helper'
require 'bolt/applicator'
require 'bolt/executor'
require 'bolt/inventory'
require 'bolt/pal'
require 'bolt/puppetdb'
require 'bolt/target'

describe Bolt::Applicator do
  let(:uri) { 'foobar' }
  let(:target) { Bolt::Target.new(uri) }
  let(:inventory) { Bolt::Inventory.new(nil) }
  let(:executor) { Bolt::Executor.new }
  let(:config) do
    Bolt::PuppetDB::Config.new('server_urls' => 'https://localhost:8081',
                               'cacert' => '/path/to/cacert',
                               'token' => 'token')
  end
  let(:pdb_client) { Bolt::PuppetDB::Client.new(config) }
  let(:modulepath) { [Bolt::PAL::BOLTLIB_PATH, Bolt::PAL::MODULES_PATH] }
  let(:applicator) { Bolt::Applicator.new(inventory, executor, modulepath, [], pdb_client, nil, 2) }

  let(:ast) { { 'resources' => [] } }

  let(:input) {
    {
      code_ast: ast,
      modulepath: modulepath,
      pdb_config: config.to_hash,
      hiera_config: nil,
      target: {
        name: uri,
        facts: { 'bolt' => true },
        variables: {},
        trusted: {
          authenticated: 'local',
          certname: uri,
          extensions: {},
          hostname: uri,
          domain: nil
        }
      },
      inventory: {
        data: {},
        target_hash: {
          target_vars: {},
          target_facts: {},
          target_features: {}
        },
        config: {
          transport: 'ssh',
          transports: {
            ssh: { 'connect-timeout' => 10, 'tty' => false, 'load-config' => true, 'disconnect-timeout' => 5 },
            winrm: { 'connect-timeout' => 10, ssl: true, 'ssl-verify' => true, 'file-protocol' => 'winrm' },
            pcp: { 'task-environment' => 'production' },
            local: {},
            docker: {},
            remote: { 'run-on': 'localhost' }
          }
        }
      }
    }
  }

  it 'instantiates' do
    expect(applicator).to be
  end

  it 'passes catalog input' do
    expect(Open3).to receive(:capture3)
      .with('ruby', /bolt_catalog/, 'compile', stdin_data: input.to_json)
      .and_return(['{}', '', double(:status, success?: true)])
    expect(applicator.compile(target, ast, {})).to eq({})
  end

  it 'logs messages returned on stderr' do
    logs = [{ debug: 'A message' }, { notice: 'Stuff happened' }]
    expect(Open3).to receive(:capture3)
      .with('ruby', /bolt_catalog/, 'compile', stdin_data: input.to_json)
      .and_return(['{}', logs.map(&:to_json).join("\n"), double(:status, success?: true)])

    expect(applicator.compile(target, ast, {})).to eq({})
    expect(@log_output.readlines).to eq(
      [
        " DEBUG Bolt::Executor : Started with 1 max thread(s)\n",
        " DEBUG Bolt::Applicator : #{target.uri}: A message\n",
        "NOTICE Bolt::Applicator : #{target.uri}: Stuff happened\n"
      ]
    )
  end

  context 'with Puppet mocked' do
    before(:each) do
      env = Puppet::Node::Environment.create(:testing, modulepath)
      allow(Puppet).to receive(:lookup).with(:pal_script_compiler).and_return(double(:script_compiler, type: nil))
      allow(Puppet).to receive(:lookup).with(:current_environment).and_return(env)
      allow(Puppet::Pal).to receive(:assert_type)
      allow(Puppet::Pops::Serialization::ToDataConverter).to receive(:convert).and_return(ast)
      allow(applicator).to receive(:count_statements)
    end

    it 'replaces failures to find Puppet' do
      expect(applicator).to receive(:compile).and_return(ast)
      result = Bolt::Result.new(target)
      allow_any_instance_of(Bolt::Transport::SSH).to receive(:batch_task).and_return(result)
      expect(Bolt::ApplyResult).to receive(:puppet_missing_error).with(result).and_return(nil)

      applicator.apply([target], :body, {})
    end

    it 'captures compile errors in a result set' do
      expect(applicator).to receive(:compile).and_raise('Something weird happened')

      resultset = applicator.apply([uri, '_catch_errors' => true], :body, {})
      expect(resultset).to be_a(Bolt::ResultSet)
      expect(resultset).not_to be_ok
      expect(resultset.count).to eq(1)
      expect(resultset.first).not_to be_ok
      expect(resultset.first.error_hash['msg']).to eq('Something weird happened')
    end

    it 'fails if the report signals failure' do
      expect(applicator).to receive(:compile).and_return(ast)
      allow_any_instance_of(Bolt::Transport::SSH).to receive(:batch_task).and_return(
        Bolt::Result.new(target, value: { 'status' => 'failed', 'resource_statuses' => {} })
      )

      resultset = applicator.apply([target, '_catch_errors' => true], :body, {})
      expect(resultset).to be_a(Bolt::ResultSet)
      expect(resultset).not_to be_ok
      expect(resultset.count).to eq(1)
      expect(resultset.first).not_to be_ok
      expect(resultset.first.error_hash['msg']).to match(/Resources failed to apply for #{uri}/)
    end

    it 'includes failed resource events for all failing nodes when errored' do
      resources = {
        '/tmp/does/not/exist' => [{ 'status' => 'failure', 'message' => 'It failed.' }],
        'C:/does/not/exist' => [{ 'status' => 'failure', 'message' => 'It failed.' }],
        '/tmp/sure' => []
      }.map { |name, events| { "File[#{name}]" => { 'failed' => !events.empty?, 'events' => events } } }

      targets = [Bolt::Target.new('node1'), Bolt::Target.new('node2'), Bolt::Target.new('node3')]
      results = targets.zip(resources, %w[failed failed success]).map do |target, res, status|
        Bolt::Result.new(target, value: { 'status' => status, 'resource_statuses' => res })
      end

      allow(applicator).to receive(:compile).and_return(ast)
      allow_any_instance_of(Bolt::Transport::SSH).to receive(:batch_task).and_return(*results)

      expect { applicator.apply([targets], :body, {}) }.to raise_error(Bolt::ApplyFailure, <<-MSG.chomp)
Resources failed to apply for node1
  File[/tmp/does/not/exist]: It failed.
Resources failed to apply for node2
  File[C:/does/not/exist]: It failed.
      MSG
    end

    it "only creates 2 threads" do
      running = Concurrent::AtomicFixnum.new
      promises = Concurrent::Array.new
      allow(applicator).to receive(:compile) do
        count = running.increment
        if count <= 2
          # Only first two will block, simplifying cleanup at the end
          delay = Concurrent::Promise.new { ast }
          promises << delay
          delay.value
        else
          ast
        end
      end

      targets = [Bolt::Target.new('node1'), Bolt::Target.new('node2'), Bolt::Target.new('node3')]
      allow_any_instance_of(Bolt::Transport::SSH).to receive(:batch_task) do |_, batch|
        Bolt::ApplyResult.new(batch.first)
      end

      t = Thread.new { applicator.apply([targets], :body, {}) }
      sleep(0.2)

      expect(running.value).to eq(2)

      # execute all the promises to release the threads
      expect(promises.count).to eq(2)
      promises.each(&:execute)
      t.join
    end
  end
end
34.047393
114
0.594655
796fb6286ca28ba4d912a25fa064a1f8c989b1a2
35
module Grandstand::UsersHelper
end
11.666667
30
0.857143
b9bd39e55754951c4f7b68d04012b955c0407497
581
BlCommons::Engine.routes.draw do
  post '/bl_resources/:resource_name/sync', to: 'resources#sync'
  post '/bl_resources/:resource_name/batch_sync', to: 'resources#batch_sync'
  post '/bl_resources/:resource_name/require_sync', to: 'resources#require_sync'

  get '/bl_resources/:resource_name', to: 'resources#index'
  post '/bl_resources/:resource_name', to: 'resources#create'
  get '/bl_resources/:resource_name/:id', to: 'resources#show'
  put '/bl_resources/:resource_name/:id', to: 'resources#update'
  delete '/bl_resources/:resource_name/:id', to: 'resources#destroy'
end
52.818182
80
0.748709
61ffc0e716d63a34b7dde6690581f419a279c201
1,332
#
# Cookbook Name:: ossec
# Recipe:: client
#
# Copyright 2010, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

node.set['ossec']['user']['install_type'] = "agent"
node.set['ossec']['user']['agent_server_ip'] = node['ossec']['server']['ip']

include_recipe "ossec"

user "ossecd" do
  comment "OSSEC Distributor"
  shell "/bin/bash"
  system true
  gid "ossec"
  home node['ossec']['user']['dir']
end

directory "#{node['ossec']['user']['dir']}/.ssh" do
  owner "ossecd"
  group "ossec"
  mode 0750
end

template "#{node['ossec']['user']['dir']}/.ssh/authorized_keys" do
  source "ssh_key.erb"
  owner "ossecd"
  group "ossec"
  mode 0600
  variables(:key => node['ossec']['server']['ssh_key']['public'])
end

file "#{node['ossec']['user']['dir']}/etc/client.keys" do
  owner "ossecd"
  group "ossec"
  mode 0660
end
25.615385
76
0.688438