hexsha (string, 40 chars) | size (int64, 2 to 1.01M) | content (string, 2 to 1.01M chars) | avg_line_length (float64, 1.5 to 100) | max_line_length (int64, 2 to 1k) | alphanum_fraction (float64, 0.25 to 1)
---|---|---|---|---|---|
1a173484a00fdde88bb261aebf1f9449b6b86e52 | 1,748 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Resources::Mgmt::V2018_02_01
module Models
#
# Tag information.
#
class TagValue
include MsRestAzure
# @return [String] The tag ID.
attr_accessor :id
# @return [String] The tag value.
attr_accessor :tag_value
# @return [TagCount] The tag value count.
attr_accessor :count
#
# Mapper for TagValue class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'TagValue',
type: {
name: 'Composite',
class_name: 'TagValue',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
tag_value: {
client_side_validation: true,
required: false,
serialized_name: 'tagValue',
type: {
name: 'String'
}
},
count: {
client_side_validation: true,
required: false,
serialized_name: 'count',
type: {
name: 'Composite',
class_name: 'TagCount'
}
}
}
}
}
end
end
end
end
| 24.619718 | 70 | 0.474256 |
1aed53715ca0d40ee56f741573414220147328b2 | 1,954 | # frozen_string_literal: true
require "rom/elasticsearch/relation"
RSpec.describe ROM::Elasticsearch::Relation, "#create_index" do
subject(:relation) { relations[:users] }
include_context "setup"
context "when custom :index is configured" do
after do
relation.delete_index
end
context "with default settings" do
before do
conf.relation(:users) do
schema do
attribute :id, ROM::Elasticsearch::Types::ID
attribute :name, ROM::Types::String
end
end
end
it "creates an index" do
relation.create_index
expect(gateway.index?(:users)).to be(true)
end
end
context "with customized settings" do
before do
conf.relation(:users) do
schema do
attribute :id, ROM::Types::Integer
attribute :name, ROM::Types::String
end
index_settings number_of_shards: 2
end
end
it "creates an index" do
relation.create_index
expect(gateway.index?(:users)).to be(true)
expect(relation.dataset.settings["number_of_shards"]).to eql("2")
end
end
context "with customized attribute mappings" do
before do
conf.relation(:users) do
schema do
attribute :id, ROM::Elasticsearch::Types::ID
attribute :name, ROM::Elasticsearch::Types.Keyword
attribute :desc, ROM::Elasticsearch::Types.Text(analyzer: "snowball")
end
index_settings number_of_shards: 2
end
end
it "creates an index" do
relation.create_index
expect(gateway.index?(:users)).to be(true)
expect(relation.dataset.mappings)
.to eql("properties" => {
"name" => {"type" => "keyword"},
"desc" => {"type" => "text", "analyzer" => "snowball"}
})
end
end
end
end
| 24.734177 | 81 | 0.576766 |
918b945ba701a527bb0575a4c31e1e82d03a015e | 167 | class Cat < ApplicationRecord
validates :name, presence: true
validates :age, presence: true
validates :sex, presence: true
validates :bio, presence: true
end
| 23.857143 | 33 | 0.748503 |
bf5c2db9aa3b8598d1e8761c43ea8222696c9603 | 1,798 | class Basket < ApplicationRecord
belongs_to :user, optional: true
has_many :line_items, dependent: :destroy
has_many :products, through: :line_items
paginates_per 10
def self.custom_sort(args)
category = args.fetch(:sortCategory, "sort_date")
desc = args.fetch(:desc, "true")
direction = desc == "true" ? 'desc' : 'asc'
send(category, direction)
end
def self.sort_date(direction)
order = ["baskets.transaction_date", direction].join(" ")
order(order)
end
def self.sort_items(direction)
order = ["baskets.line_item_count", direction].join(" ")
order(order)
end
def self.sort_total(direction)
order = ["baskets.total_cents", direction].join(" ")
order(order)
end
def self.within_date_range(args = {})
oldest_date = args.fetch(:oldestDate, order(:transaction_date).first.transaction_date.to_s)
newest_date = args.fetch(:newestDate, order(:transaction_date).last.transaction_date.to_s)
start_date = DateTime.parse(oldest_date)
end_date = DateTime.parse(newest_date)
where(transaction_date: start_date..end_date)
end
def self.group_baskets(args = {})
oldest_date = args.fetch(:oldestDate, self.last.transaction_date.to_s)
newest_date = args.fetch(:newestDate, self.first.transaction_date.to_s)
start_date = DateTime.parse(oldest_date)
end_date = DateTime.parse(newest_date)
unit = args.fetch(:unit, Basket.pick_unit(start_date, end_date))
data = self.group_by_period(unit, :transaction_date, range: start_date..end_date).sum('baskets.total_cents').to_a
{ data: data, unit: unit }
end
def self.pick_unit(start_date, end_date)
if end_date - start_date < (15 / 1)
"day"
elsif end_date - start_date < (30 / 1)
"week"
else
"month"
end
end
end
| 31.54386 | 117 | 0.700779 |
4a1d0809696931258e49687a70a8866f51306ed9 | 527 | require 'spec_helper'
require 'active_support/testing/time_helpers'
describe DogStatsd do
def send_metric(payload={})
ActiveSupport::Notifications.instrument('process_action.action_controller.duration', payload)
end
context 'with no configuration' do
subject { described_class }
context 'with base payload empty' do
it 'should call underlying client' do
expect_any_instance_of(Datadog::Statsd).to receive(:increment).with('test')
subject.increment 'test'
end
end
end
end
| 25.095238 | 97 | 0.73055 |
ab56a973c83b344238fc55a2aaf14aeb0a12bb1a | 2,568 | # frozen_string_literal: true
require 'spec_helper'
module Spree
describe Api::ZonesController, type: :request do
let!(:attributes) { [:id, :name, :zone_members] }
let!(:zone) { create(:zone, name: 'Europe') }
before do
stub_authentication!
end
it "gets list of zones" do
get spree.api_zones_path
expect(json_response['zones'].first).to have_attributes(attributes)
end
it 'can control the page size through a parameter' do
create(:zone)
get spree.api_zones_path, params: { per_page: 1 }
expect(json_response['count']).to eq(1)
expect(json_response['current_page']).to eq(1)
expect(json_response['pages']).to eq(2)
end
it 'can query the results through a parameter' do
expected_result = create(:zone, name: 'South America')
get spree.api_zones_path, params: { q: { name_cont: 'south' } }
expect(json_response['count']).to eq(1)
expect(json_response['zones'].first['name']).to eq expected_result.name
end
it "gets a zone" do
get spree.api_zone_path(zone)
expect(json_response).to have_attributes(attributes)
expect(json_response['name']).to eq zone.name
expect(json_response['zone_members'].size).to eq zone.zone_members.count
end
context "as an admin" do
sign_in_as_admin!
it "can create a new zone" do
params = {
zone: {
name: "North Pole",
zone_members: [
{
zoneable_type: "Spree::Country",
zoneable_id: 1
}
]
}
}
post spree.api_zones_path, params: params
expect(response.status).to eq(201)
expect(json_response).to have_attributes(attributes)
expect(json_response["zone_members"]).not_to be_empty
end
it "updates a zone" do
params = {
zone: {
name: "North Pole",
zone_members: [
{
zoneable_type: "Spree::Country",
zoneable_id: 1
}
]
}
}
put spree.api_zone_path(zone), params: params
expect(response.status).to eq(200)
expect(json_response['name']).to eq 'North Pole'
expect(json_response['zone_members']).not_to be_blank
end
it "can delete a zone" do
delete spree.api_zone_path(zone)
expect(response.status).to eq(204)
expect { zone.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
end
| 28.21978 | 78 | 0.591121 |
21cce5a8bb214ef44305c5346a98469aeb01cc96 | 351 | module SmartAnswer
module Question
class Salary < Base
def parse_input(raw_input)
SmartAnswer::Salary.new(raw_input)
end
def to_response(input)
salary = parse_input(input)
{
amount: salary.amount,
period: salary.period
}
rescue
nil
end
end
end
end
| 17.55 | 42 | 0.566952 |
392ed9a707f7ab9d9e2023ef58ad2fc71bf44fc7 | 1,426 | require 'rack'
module CartoDB
module Importer2
module UrlTranslator
class OSM
URL_REGEX = %r{openstreetmap.org.*lat.*}
TRANSLATED_URL_REGEX = /api.openstreetmap.org/
URL_TEMPLATE = 'http://api.openstreetmap.org/api/0.6/map?bbox='
DEFW = 1200.0/2.0
DEFH = 1000.0/2.0
def translate(url)
return url if !supported?(url) || translated?(url)
"#{URL_TEMPLATE}#{bounding_box_for(url)}"
end #translate
def bounding_box_for(url)
params = Rack::Utils.parse_query(url.split('?')[1])
#2h, 6w
lon = params['lon'].to_f
lat = params['lat'].to_f
zoom = params['zoom'].to_i
res = 180 / 256.0 / 2**zoom
py = (90 + lat) / res
px = (180 + lon) / res
lpx = px - DEFW
lpy = py - DEFH
upx = px + DEFW
upy = py + DEFH
lon1 = (res * lpx) - 180
lat1 = (res * lpy) - 90
lon2 = (res * upx) - 180
lat2 = (res * upy) - 90
[lon1, lat1, lon2, lat2].join(',')
end #bounding_box_for
def supported?(url)
!!(url =~ URL_REGEX)
end #supported?
def translated?(url)
!!(url =~ TRANSLATED_URL_REGEX)
end #translated?
end #OSM
end # UrlTranslator
end # Importer2
end # CartoDB
| 26.407407 | 72 | 0.493689 |
113d4015da97c3aa2a73aa48c10de6357d2e5553 | 900 | # -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "omniauth-odnoklassniki/version"
Gem::Specification.new do |s|
s.name = "omniauth-odnoklassniki"
s.version = Omniauth::Odnoklassniki::VERSION
s.authors = ["Alexander Logvinov"]
s.email = ["[email protected]"]
s.homepage = "https://github.com/incubus/omniauth-odnoklassniki"
s.summary = %q{OmniAuth strategy for Odnoklassniki.ru}
s.description = %q{OmniAuth strategy for Odnoklassniki.ru}
s.rubyforge_project = "omniauth-odnoklassniki"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
s.add_runtime_dependency "omniauth", "~> 1.0"
s.add_runtime_dependency "omniauth-oauth2", "~> 1.0"
end
| 37.5 | 83 | 0.656667 |
7ae763416e626e405baa0e8d716b2c782385db58 | 1,652 | # frozen_string_literal: true
namespace :patients do
get "mdms/:scope", to: "mdms#show", as: :mdms
resources :primary_care_physicians
resources :practices, only: [] do
collection do
get :search
end
resources :primary_care_physicians,
only: :index,
controller: "practices/primary_care_physicians"
end
resources :abridgements, only: :index
end
resources :bookmarks, controller: "patients/bookmarks", only: [:destroy, :index]
resource :dashboard, only: :show, controller: "dashboard/dashboards"
resource :worryboard, only: :show, controller: "patients/worryboard"
resources :deaths, only: :index, as: :patient_deaths
resources :patients, except: [:destroy], controller: "patients/patients" do
collection do
get :search
end
get "perspectives/bone",
to: "patients/perspectives#show",
id: :bone,
as: :bone_perspective
get "perspectives/anaemia",
to: "patients/perspectives#show",
id: :anaemia,
as: :anaemia_perspective
resource :clinical_summary, only: :show, controller: "patients/clinical_summaries"
resource :death, only: [:edit, :update]
resource :primary_care_physician,
controller: "patients/primary_care_physician",
only: [:edit, :update, :destroy]
resources :bookmarks, only: :create, controller: "patients/bookmarks"
resources :alerts, only: [:new, :create, :destroy], controller: "patients/alerts"
resource :worry, only: [:create, :destroy], controller: "patients/worry"
resources :attachments, controller: "patients/attachments"
namespace :surveys do
resource :dashboard, only: :show
end
end
| 32.392157 | 84 | 0.702179 |
f890c08c0f621c1224206fdce59242da2ed151f2 | 7,831 | #!/usr/bin/env ruby
class Builder
def initialize
@env = Environment.new()
@worker = CompositeWorker.new([Logger.new(), Executer.new()])
end
def makeRelease
createWorkingDirectories
downloadSource
copySource
buildModules
signFrameworks "[email protected]"
createPackage "ocmock-3.3.dmg", "OCMock 3.3"
sanityCheck
openPackageDir
end
def justBuild
createWorkingDirectories
downloadSource
buildModules
openPackageDir
end
def createWorkingDirectories
@worker.run("mkdir -p #{@env.sourcedir}")
@worker.run("mkdir -p #{@env.productdir}")
@worker.run("mkdir -p #{@env.packagedir}")
end
def downloadSource
@worker.run("git archive master | tar -x -v -C #{@env.sourcedir}")
end
def copySource
@worker.run("cp -R #{@env.sourcedir}/Source #{@env.productdir}")
end
def buildModules
@worker.chdir("#{@env.sourcedir}/Source")
@worker.run("xcodebuild -project OCMock.xcodeproj -target OCMock OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
osxproductdir = "#{@env.productdir}/OSX"
@worker.run("mkdir -p #{osxproductdir}")
@worker.run("cp -R #{@env.symroot}/Release/OCMock.framework #{osxproductdir}")
@worker.run("xcodebuild -project OCMock.xcodeproj -target OCMockLib -sdk iphoneos9.2 OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
@worker.run("xcodebuild -project OCMock.xcodeproj -target OCMockLib -sdk iphonesimulator9.2 OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
ioslibproductdir = "#{@env.productdir}/iOS\\ library"
@worker.run("mkdir -p #{ioslibproductdir}")
@worker.run("cp -R #{@env.symroot}/Release-iphoneos/OCMock #{ioslibproductdir}")
@worker.run("lipo -create -output #{ioslibproductdir}/libOCMock.a #{@env.symroot}/Release-iphoneos/libOCMock.a #{@env.symroot}/Release-iphonesimulator/libOCMock.a")
@worker.run("xcodebuild -project OCMock.xcodeproj -target 'OCMock iOS' -sdk iphoneos9.2 OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
@worker.run("xcodebuild -project OCMock.xcodeproj -target 'OCMock iOS' -sdk iphonesimulator9.2 OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
iosproductdir = "#{@env.productdir}/iOS\\ framework"
@worker.run("mkdir -p #{iosproductdir}")
@worker.run("cp -R #{@env.symroot}/Release-iphoneos/OCMock.framework #{iosproductdir}")
@worker.run("lipo -create -output #{iosproductdir}/OCMock.framework/OCMock #{@env.symroot}/Release-iphoneos/OCMock.framework/OCMock #{@env.symroot}/Release-iphonesimulator/OCMock.framework/OCMock")
@worker.run("xcodebuild -project OCMock.xcodeproj -target 'OCMock tvOS' -sdk appletvos9.1 OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
@worker.run("xcodebuild -project OCMock.xcodeproj -target 'OCMock tvOS' -sdk appletvsimulator9.1 OBJROOT=#{@env.objroot} SYMROOT=#{@env.symroot}")
tvosproductdir = "#{@env.productdir}/tvOS"
@worker.run("mkdir -p #{tvosproductdir}")
@worker.run("cp -R #{@env.symroot}/Release-appletvos/OCMock.framework #{tvosproductdir}")
@worker.run("lipo -create -output #{tvosproductdir}/OCMock.framework/OCMock #{@env.symroot}/Release-appletvos/OCMock.framework/OCMock #{@env.symroot}/Release-appletvsimulator/OCMock.framework/OCMock")
end
def signFrameworks(identity)
osxproductdir = "#{@env.productdir}/OSX"
iosproductdir = "#{@env.productdir}/iOS\\ framework"
tvosproductdir = "#{@env.productdir}/tvOS"
@worker.run("codesign -s 'Mac Developer: #{identity}' #{osxproductdir}/OCMock.framework")
@worker.run("codesign -s 'iPhone Developer: #{identity}' #{iosproductdir}/OCMock.framework")
@worker.run("codesign -s 'iPhone Developer: #{identity}' #{tvosproductdir}/OCMock.framework")
end
def createPackage(packagename, volumename)
@worker.chdir(@env.packagedir)
@worker.run("hdiutil create -size 5m temp.dmg -layout NONE")
disk_id = nil
@worker.run("hdid -nomount temp.dmg") { |hdid| disk_id = hdid.readline.split[0] }
@worker.run("newfs_hfs -v '#{volumename}' #{disk_id}")
@worker.run("hdiutil eject #{disk_id}")
@worker.run("hdid temp.dmg") { |hdid| disk_id = hdid.readline.split[0] }
@worker.run("cp -R #{@env.productdir}/* '/Volumes/#{volumename}'")
@worker.run("hdiutil eject #{disk_id}")
@worker.run("hdiutil convert -format UDZO temp.dmg -o #{@env.packagedir}/#{packagename} -imagekey zlib-level=9")
@worker.run("hdiutil internet-enable -yes #{@env.packagedir}/#{packagename}")
@worker.run("rm temp.dmg")
end
def openPackageDir
@worker.run("open #{@env.packagedir}")
end
def sanityCheck
osxproductdir = "#{@env.productdir}/OSX"
ioslibproductdir = "#{@env.productdir}/iOS\\ library"
iosproductdir = "#{@env.productdir}/iOS\\ framework"
tvosproductdir = "#{@env.productdir}/tvOS"
@worker.run("lipo -info #{osxproductdir}/OCMock.framework/OCMock")
@worker.run("lipo -info #{ioslibproductdir}/libOCMock.a")
@worker.run("lipo -info #{iosproductdir}/OCMock.framework/OCMock")
@worker.run("lipo -info #{tvosproductdir}/OCMock.framework/OCMock")
@worker.run("codesign -dvv #{osxproductdir}/OCMock.framework")
@worker.run("codesign -dvv #{iosproductdir}/OCMock.framework")
@worker.run("codesign -dvv #{tvosproductdir}/OCMock.framework")
end
def upload(packagename, dest)
@worker.run("scp #{@env.packagedir}/#{packagename} #{dest}")
end
def cleanup
@worker.run("chmod -R u+w #{@env.tmpdir}")
@worker.run("rm -rf #{@env.tmpdir}");
end
end
## Environment
## use attributes to configure manager for your environment
class Environment
def initialize()
@tmpdir = "/tmp/ocmock.#{Process.pid}"
@sourcedir = tmpdir + "/Source"
@productdir = tmpdir + "/Products"
@packagedir = tmpdir
@objroot = tmpdir + '/Build/Intermediates'
@symroot = tmpdir + '/Build'
end
attr_accessor :tmpdir, :sourcedir, :productdir, :packagedir, :objroot, :symroot
end
## Logger (Worker)
## prints commands
class Logger
def chdir(dir)
puts "## chdir #{dir}"
end
def run(cmd)
puts "## #{cmd}"
end
end
## Executer (Worker)
## actually runs commands
class Executer
def chdir(dir)
Dir.chdir(dir)
end
def run(cmd, &block)
if block == nil
system(cmd)
else
IO.popen(cmd, &block)
end
end
end
## Composite Worker (Worker)
## sends commands to multiple workers
class CompositeWorker
def initialize(workers)
@workers = workers
end
def chdir(dir)
@workers.each { |w| w.chdir(dir) }
end
def run(cmd)
@workers.each { |w| w.run(cmd) }
end
def run(cmd, &block)
@workers.each { |w| w.run(cmd, &block) }
end
end
if /Tools$/.match(Dir.pwd)
Dir.chdir("..")
end
if ARGV[0] == '-r'
Builder.new.makeRelease
else
Builder.new.justBuild
end
| 37.113744 | 208 | 0.592389 |
d5ee2b10d104a41a0c4106b80742539a7875b6e2 | 13,690 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module DataLabeling
module V1beta1
# Response used for ImportData longrunning operation.
# @!attribute [rw] dataset
# @return [::String]
# Output only. The name of imported dataset.
# @!attribute [rw] total_count
# @return [::Integer]
# Output only. Total number of examples requested to import
# @!attribute [rw] import_count
# @return [::Integer]
# Output only. Number of examples imported successfully.
class ImportDataOperationResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response used for ExportDataset longrunning operation.
# @!attribute [rw] dataset
# @return [::String]
# Output only. The name of dataset.
# "projects/*/datasets/*"
# @!attribute [rw] total_count
# @return [::Integer]
# Output only. Total number of examples requested to export
# @!attribute [rw] export_count
# @return [::Integer]
# Output only. Number of examples exported successfully.
# @!attribute [rw] label_stats
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelStats]
# Output only. Statistic infos of labels in the exported dataset.
# @!attribute [rw] output_config
# @return [::Google::Cloud::DataLabeling::V1beta1::OutputConfig]
# Output only. output_config in the ExportData request.
class ExportDataOperationResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata of an ImportData operation.
# @!attribute [rw] dataset
# @return [::String]
# Output only. The name of imported dataset.
# "projects/*/datasets/*"
# @!attribute [rw] partial_failures
# @return [::Array<::Google::Rpc::Status>]
# Output only. Partial failures encountered.
# E.g. single files that couldn't be read.
# Status details field will contain standard GCP error details.
# @!attribute [rw] create_time
# @return [::Google::Protobuf::Timestamp]
# Output only. Timestamp when import dataset request was created.
class ImportDataOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata of an ExportData operation.
# @!attribute [rw] dataset
# @return [::String]
# Output only. The name of dataset to be exported.
# "projects/*/datasets/*"
# @!attribute [rw] partial_failures
# @return [::Array<::Google::Rpc::Status>]
# Output only. Partial failures encountered.
# E.g. single files that couldn't be read.
# Status details field will contain standard GCP error details.
# @!attribute [rw] create_time
# @return [::Google::Protobuf::Timestamp]
# Output only. Timestamp when export dataset request was created.
class ExportDataOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata of a labeling operation, such as LabelImage or LabelVideo.
# Next tag: 20
# @!attribute [rw] image_classification_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelImageClassificationOperationMetadata]
# Details of label image classification operation.
# @!attribute [rw] image_bounding_box_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelImageBoundingBoxOperationMetadata]
# Details of label image bounding box operation.
# @!attribute [rw] image_bounding_poly_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelImageBoundingPolyOperationMetadata]
# Details of label image bounding poly operation.
# @!attribute [rw] image_oriented_bounding_box_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelImageOrientedBoundingBoxOperationMetadata]
# Details of label image oriented bounding box operation.
# @!attribute [rw] image_polyline_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelImagePolylineOperationMetadata]
# Details of label image polyline operation.
# @!attribute [rw] image_segmentation_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelImageSegmentationOperationMetadata]
# Details of label image segmentation operation.
# @!attribute [rw] video_classification_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelVideoClassificationOperationMetadata]
# Details of label video classification operation.
# @!attribute [rw] video_object_detection_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelVideoObjectDetectionOperationMetadata]
# Details of label video object detection operation.
# @!attribute [rw] video_object_tracking_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelVideoObjectTrackingOperationMetadata]
# Details of label video object tracking operation.
# @!attribute [rw] video_event_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelVideoEventOperationMetadata]
# Details of label video event operation.
# @!attribute [rw] text_classification_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelTextClassificationOperationMetadata]
# Details of label text classification operation.
# @!attribute [rw] text_entity_extraction_details
# @return [::Google::Cloud::DataLabeling::V1beta1::LabelTextEntityExtractionOperationMetadata]
# Details of label text entity extraction operation.
# @!attribute [rw] progress_percent
# @return [::Integer]
# Output only. Progress of label operation. Range: [0, 100].
# @!attribute [rw] partial_failures
# @return [::Array<::Google::Rpc::Status>]
# Output only. Partial failures encountered.
# E.g. single files that couldn't be read.
# Status details field will contain standard GCP error details.
# @!attribute [rw] create_time
# @return [::Google::Protobuf::Timestamp]
# Output only. Timestamp when labeling request was created.
class LabelOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata of a LabelImageClassification operation.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelImageClassificationOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelImageBoundingBox operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelImageBoundingBoxOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelImageOrientedBoundingBox operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config.
class LabelImageOrientedBoundingBoxOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of LabelImageBoundingPoly operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelImageBoundingPolyOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of LabelImagePolyline operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelImagePolylineOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelImageSegmentation operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config.
class LabelImageSegmentationOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelVideoClassification operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelVideoClassificationOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelVideoObjectDetection operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelVideoObjectDetectionOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelVideoObjectTracking operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelVideoObjectTrackingOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelVideoEvent operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelVideoEventOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelTextClassification operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelTextClassificationOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Details of a LabelTextEntityExtraction operation metadata.
# @!attribute [rw] basic_config
# @return [::Google::Cloud::DataLabeling::V1beta1::HumanAnnotationConfig]
# Basic human annotation config used in labeling request.
class LabelTextEntityExtractionOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata of a CreateInstruction operation.
# @!attribute [rw] instruction
# @return [::String]
# The name of the created Instruction.
# projects/\\{project_id}/instructions/\\{instruction_id}
# @!attribute [rw] partial_failures
# @return [::Array<::Google::Rpc::Status>]
# Partial failures encountered.
# E.g. single files that couldn't be read.
# Status details field will contain standard GCP error details.
# @!attribute [rw] create_time
# @return [::Google::Protobuf::Timestamp]
# Timestamp when create instruction request was created.
class CreateInstructionMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
| 48.892857 | 108 | 0.65851 |
abada4ce54eb08d5e365a6714ea98098776bfa52 | 1,935 | #
# Be sure to run `pod lib lint PublicModule.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'YQSPublicModule'
s.version = '1.0.2'
s.summary = 'PublicModule component'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
PublicModule component for use in the project
DESC
s.homepage = 'https://github.com/yeqingsong/YQSPublicModule'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'yeqingsong' => '[email protected]' }
s.source = { :git => 'https://github.com/yeqingsong/YQSPublicModule.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
# s.source_files = 'PublicModule/Classes/**/*'
# s.resource_bundles = {
# 'PublicModule' => ['PublicModule/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
s.subspec 'PeopleModel' do |c|
c.source_files = 'PublicModule/Classes/PeopleModel/*.{h,m}'
c.dependency 'SDWebImage'
end
s.subspec 'ManModel' do |c|
c.source_files = 'PublicModule/Classes/ManModel/*.{h,m}'
end
s.subspec 'WomenModel' do |t|
t.source_files = 'PublicModule/Classes/WomenModel/*.{h,m}'
end
end
| 32.79661 | 110 | 0.637209 |
4af65c5cb3c2dede29381280defcae67ab259d6c | 683 | class Atdtool < Formula
desc "Command-line interface for After the Deadline language checker"
homepage "https://github.com/lpenz/atdtool"
url "https://github.com/lpenz/atdtool/archive/upstream/1.3.tar.gz"
sha256 "eb634fd9e8a57d5d5e4d8d2ca0dd9692610aa952e28fdf24909fd678a8f39155"
depends_on "txt2tags" => :build
def install
# Change the PREFIX to match the homebrew one, since there is no way to
# pass it as an option; for now, edit the Makefile
# https://github.com/lpenz/atdtool/pull/8
inreplace "Makefile", "PREFIX=/usr/local", "PREFIX=#{prefix}"
system "make", "install"
end
test do
system "#{bin}/atdtool", "#{prefix}/AUTHORS"
end
end
| 32.52381 | 75 | 0.720351 |
ff83ca30fdd7531aa5782b3d5fb667a95a5c8680 | 2,406 | # frozen_string_literal: true
begin
require 'irb/color'
module IRB
module Color
DIM = 2 unless defined? DIM
end
end
require "irb/color_printer"
rescue LoadError
warn "DEBUGGER: can not load newer irb for coloring. Write 'gem \"debug\" in your Gemfile."
end
module DEBUGGER__
module Color
if defined? IRB::Color.colorize
begin
IRB::Color.colorize('', [:DIM], colorable: true)
SUPPORT_COLORABLE_OPTION = true
rescue ArgumentError
end
if defined? SUPPORT_COLORABLE_OPTION
def irb_colorize str, color
IRB::Color.colorize str, color, colorable: true
end
else
def irb_colorize str, color
IRB::Color.colorize str, color
end
end
def colorize str, color
if !CONFIG[:no_color]
irb_colorize str, color
else
str
end
end
else
def colorize str, color
str
end
end
if defined? IRB::ColorPrinter.pp
def color_pp obj, width
if !CONFIG[:no_color]
IRB::ColorPrinter.pp(obj, "".dup, width)
else
obj.pretty_inspect
end
end
else
def color_pp obj, width
obj.pretty_inspect
end
end
def colored_inspect obj, width: SESSION.width, no_color: false
if !no_color
color_pp obj, width
else
obj.pretty_inspect
end
rescue => ex
err_msg = "#{ex.inspect} rescued during inspection"
string_result = obj.to_s rescue nil
# don't colorize the string here because it's not from user's application
if string_result
%Q{"#{string_result}" from #to_s because #{err_msg}}
else
err_msg
end
end
if defined? IRB::Color.colorize_code
if SUPPORT_COLORABLE_OPTION
def colorize_code code
IRB::Color.colorize_code(code, colorable: true)
end
else
def colorize_code code
IRB::Color.colorize_code(code)
end
end
else
def colorize_code code
code
end
end
def colorize_cyan(str)
colorize(str, [:CYAN, :BOLD])
end
def colorize_blue(str)
colorize(str, [:BLUE, :BOLD])
end
def colorize_magenta(str)
colorize(str, [:MAGENTA, :BOLD])
end
def colorize_dim(str)
colorize(str, [:DIM])
end
end
end
| 21.105263 | 93 | 0.595179 |
f7057b1058357e3e989ab6e8480610318da37c88 | 415 | require 'ffi'
module FFI
module Mapscript
extend FFI::Library
class SymbolSetObj < FFI::Struct
layout :file_name, :string,
:image_cache_size, :int,
:num_symbols, :int,
:max_symbols, :int,
:ref_count, :int,
:symbol, :pointer, # array of SymbolObj
:map, MapObj.ptr,
:font_set, FontSetObj.ptr,
:image_cache, ImageCacheObj.ptr
end
end
end
| 20.75 | 49 | 0.612048 |
03d6815293d9b69ba2894c80697574bc71636b78 | 110 | class Stack < ApplicationRecord
has_many :project_stacks
has_many :projects, through: :project_stacks
end
| 22 | 46 | 0.809091 |
79a9ec31d7d7c3b5f06ed3433a7a742ea716df90 | 2,292 | require 'java'
require 'observer'
java.lang.System.setProperty('org.jpedal.jai', 'true')
require_relative './jars/jpedal_lgpl.jar'
java_import javax.imageio.ImageIO
java_import java.awt.image.BufferedImage
java_import java.awt.Image
java_import org.jpedal.PdfDecoder
java_import org.jpedal.fonts.FontMappings
class AbstractThumbnailGenerator
include Observable
def initialize(pdf_filename, output_directory, sizes=[2048, 560], pages=[])
raise Errno::ENOENT unless File.directory?(output_directory)
raise ArgumentError if sizes.empty?
@sizes = sizes.sort.reverse
@output_directory = output_directory
@pages = pages
end
def generate_thumbnails!
raise 'NotImplemented'
end
end
class JPedalThumbnailGenerator < AbstractThumbnailGenerator
def initialize(pdf_filename, output_directory, sizes=[2048, 560], pages=[])
super(pdf_filename, output_directory, sizes, pages)
@decoder = PdfDecoder.new(true)
FontMappings.setFontReplacements
@decoder.openPdfFile(pdf_filename)
@decoder.setExtractionMode(0, 1.0)
@decoder.useHiResScreenDisplay(true)
end
def generate_thumbnails!
total_pages = @decoder.getPageCount
total_pages.times do |i|
begin
image = @decoder.getPageAsImage(i+1);
image_w, image_h = image.getWidth, image.getHeight
@sizes.each do |s|
scale = s.to_f / image_w.to_f
bi = BufferedImage.new(s, image_h * scale, image.getType)
bi.getGraphics.drawImage(image.getScaledInstance(s, image_h * scale, Image::SCALE_SMOOTH), 0, 0, nil)
ImageIO.write(bi,
'png',
java.io.File.new(File.join(@output_directory,
"document_#{s}_#{i+1}.png")))
changed
notify_observers(i+1, total_pages, "generating page thumbnails...")
end
rescue java.lang.RuntimeException
# TODO What?
end
end
@decoder.closePdfFile
end
end
if __FILE__ == $0
class STDERRProgressReporter
def update(page, total_pages)
STDERR.puts "#{page}///#{total_pages}"
end
end
pdftg = JPedalThumbnailGenerator.new(ARGV[0], '/tmp', [560])
pdftg.add_observer(STDERRProgressReporter.new)
pdftg.generate_thumbnails!
end
| 28.296296 | 111 | 0.681501 |
26888d2c6fd39c9acdfb1916d1a6ebacb134c637 | 1,127 | class Nodebrew < Formula
desc "Node.js version manager"
homepage "https://github.com/hokaccha/nodebrew"
url "https://github.com/hokaccha/nodebrew/archive/v1.1.0.tar.gz"
sha256 "b2046d97392ed971254bee2026cfcf8fb59225f51b566ec4b77e9355a861c8a7"
license "MIT"
head "https://github.com/hokaccha/nodebrew.git"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, x86_64_linux: "dad75f41cddb514ed89c01ae75b38c18ab21e4cb0ee2b9135c986968975b058d" # linuxbrew-core
end
def install
bin.install "nodebrew"
bash_completion.install "completions/bash/nodebrew-completion" => "nodebrew"
zsh_completion.install "completions/zsh/_nodebrew"
end
def caveats
<<~EOS
You need to manually run setup_dirs to create directories required by nodebrew:
#{opt_bin}/nodebrew setup_dirs
Add path:
export PATH=$HOME/.nodebrew/current/bin:$PATH
To use Homebrew's directories rather than ~/.nodebrew add to your profile:
export NODEBREW_ROOT=#{var}/nodebrew
EOS
end
test do
assert_match "v0.10.0", shell_output("#{bin}/nodebrew ls-remote")
end
end
| 30.459459 | 138 | 0.734694 |
f797339ea6ab21e68020827dfc37199869d0c79d | 1,181 | module Autopilot
class Resource
class << self
attr_accessor :record_key, :plural_key
attr_writer :singleton_resource
def singleton_resource?
!!@singleton_resource
end
end
undef :id if method_defined?(:id)
attr_reader :attributes
private :attributes
def initialize(attributes = {})
@id = attributes[:id]
define_id_reader if @id
build_from_attributes(attributes)
end
# Attributes used for serialization
def to_hash
attributes.dup
end
alias_method :to_h, :to_hash
private
def build_from_attributes(attributes)
@attributes = Utils.hash_without_key(attributes, :id)
define_attribute_accessors(@attributes.keys)
end
def define_id_reader
Utils.eigenclass(self).instance_eval do
attr_reader :id
end
end
def define_attribute_accessors(keys)
Utils.eigenclass(self).instance_eval do
keys.each do |key|
define_method(key) do
attributes[key]
end
define_method("#{key}=") do |value|
attributes[key] = value
end
end
end
end
end
end
| 20.719298 | 59 | 0.634208 |
e2201c516bac82a11f40b492df0206f80a3fac45 | 2,820 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module AccountsExample
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
| 44.761905 | 100 | 0.735106 |
28cfad6020e84272fd6edb536ab53bdd40082a98 | 3,122 | require "formula"
class NoExpatFramework < Requirement
def expat_framework
"/Library/Frameworks/expat.framework"
end
satisfy :build_env => false do
not File.exist? expat_framework
end
def message; <<-EOS.undent
Detected #{expat_framework}
This will be picked up by CMake's build system and likely cause the
build to fail, trying to link to a 32-bit version of expat.
You may need to move this file out of the way to compile CMake.
EOS
end
end
class Cmake < Formula
homepage "http://www.cmake.org/"
url "http://www.cmake.org/files/v3.1/cmake-3.1.0.tar.gz"
sha1 "cc20c40f5480c83a0204f516a490b470bd3a963a"
head "http://cmake.org/cmake.git"
bottle do
cellar :any
sha1 "7a015c43f30830ffc722dbd548b014d725b1cc64" => :yosemite
sha1 "73716b458ef13282f84a870c9faf0cea52b0c508" => :mavericks
sha1 "d19131db9de47fa03ad08edce1c1c1b6eb6c3aa0" => :mountain_lion
end
option "without-docs", "Don't build man pages"
depends_on :python => :build if MacOS.version <= :snow_leopard && build.with?("docs")
depends_on "xz" # For LZMA
depends_on "qt" => :optional
resource "sphinx" do
url "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.2.3.tar.gz"
sha1 "3a11f130c63b057532ca37fe49c8967d0cbae1d5"
end
resource "docutils" do
url "https://pypi.python.org/packages/source/d/docutils/docutils-0.12.tar.gz"
sha1 "002450621b33c5690060345b0aac25bc2426d675"
end
resource "pygments" do
url "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz"
sha1 "b9e9236693ccf6e86414e8578bf8874181f409de"
end
resource "jinja2" do
url "https://pypi.python.org/packages/source/J/Jinja2/Jinja2-2.7.3.tar.gz"
sha1 "25ab3881f0c1adfcf79053b58de829c5ae65d3ac"
end
resource "markupsafe" do
url "https://pypi.python.org/packages/source/M/MarkupSafe/MarkupSafe-0.23.tar.gz"
sha1 "cd5c22acf6dd69046d6cb6a3920d84ea66bdf62a"
end
depends_on NoExpatFramework
def install
if build.with? "docs"
ENV.prepend_create_path "PYTHONPATH", buildpath+"sphinx/lib/python2.7/site-packages"
resources.each do |r|
r.stage do
system "python", *Language::Python.setup_install_args(buildpath/"sphinx")
end
end
# There is an existing issue around OS X & Python locale setting
# See http://bugs.python.org/issue18378#msg215215 for explanation
ENV["LC_ALL"] = "en_US.UTF-8"
end
args = %W[
--prefix=#{prefix}
--system-libs
--parallel=#{ENV.make_jobs}
--no-system-libarchive
--datadir=/share/cmake
--docdir=/share/doc/cmake
--mandir=/share/man
]
if build.with? "docs"
args << "--sphinx-man" << "--sphinx-build=#{buildpath}/sphinx/bin/sphinx-build"
end
args << "--qt-gui" if build.with? "qt"
system "./bootstrap", *args
system "make"
system "make", "install"
bin.install_symlink Dir["#{prefix}/CMake.app/Contents/bin/*"] if build.with? "qt"
end
test do
(testpath/"CMakeLists.txt").write("find_package(Ruby)")
system "#{bin}/cmake", "."
end
end
| 28.642202 | 90 | 0.688341 |
abe1435dd73048cab372ca4038733c2932f25962 | 1,711 | require 'spec_helper'
require_relative '../../lib/middleman-webp/pathname_matcher'
describe Middleman::WebP::PathnameMatcher do
describe '#matches?' do
it 'returns true when given file matches pattern' do
patterns = ['**/*.jpg', /jpg$/, ->(path) { path.end_with?('jpg') }]
files = [
'images/sample.jpg',
Pathname.new('images/another.jpg'),
File.new('spec/fixtures/dummy-build/empty.jpg')
]
patterns.each do |p|
matcher = Middleman::WebP::PathnameMatcher.new(p)
files.each do |f|
matcher.matches?(f).must_equal true, "Pattern: #{p.inspect}, "\
"file: #{f.inspect}"
end
end
end
it 'returns false when given file won\'t match pattern' do
patterns = ['**/*.jpg', /jpg$/, ->(path) { path.end_with?('jpg') }]
files = [
'images/sample.png',
Pathname.new('images/another.png'),
File.new('spec/fixtures/dummy-build/empty.png')
]
patterns.each do |p|
matcher = Middleman::WebP::PathnameMatcher.new(p)
files.each do |f|
matcher.matches?(f).must_equal false, "Pattern: #{p.inspect}, "\
"file: #{f.inspect}"
end
end
end
it 'defaults to pattern \'**/*\' and matches all when pattern is nil' do
paths = [
'something/anything.foo',
Pathname.new('images/another.png'),
'a/b/c/d/e/f/g'
]
matcher = Middleman::WebP::PathnameMatcher.new
paths.each do |p|
matcher.matches?(p).must_equal true
end
end
it 'handles nil path gracefully and returns false' do
Middleman::WebP::PathnameMatcher.new('*.jpg').matches? nil
end
end
end
| 29.5 | 76 | 0.581531 |
26dd3a5006023ce5b20548eb4ed0f67bb75c4bf2 | 1,454 | class Codequery < Formula
desc "Code-understanding, code-browsing or code-search tool."
homepage "https://github.com/ruben2020/codequery"
url "https://github.com/ruben2020/codequery/archive/v0.21.0.tar.gz"
sha256 "32c4fd9e7d1c05246b0b3866bb1ccb6eab5cf246b589ffcb9f1b32b07b5e6ff7"
bottle do
cellar :any
sha256 "948f1227721a3a1f35a7d61304018646806ac42b5604e3f6b8f35854b3977cd5" => :high_sierra
sha256 "cf8477f991795c39b7993c3eab785eb5b069c684db0c0d24d08777faee44993b" => :sierra
sha256 "f9c4cf1314b4c84622ea4dd3f8950e683cf2030f6bdb5b1ea63374ea7bc25a74" => :el_capitan
sha256 "5b80bb8794e765c0436ee8abe8ac667813f42810a12dccccabe909a8b0733496" => :yosemite
end
depends_on "cmake" => :build
depends_on "qt"
def install
args = std_cmake_args
args << "-DBUILD_QT5=ON"
share.install "test"
mkdir "build" do
system "cmake", "..", "-G", "Unix Makefiles", *args
system "make"
system "make", "install"
end
end
test do
# Copy test files as `cqmakedb` gets confused if we just symlink them.
test_files = (share/"test").children
cp test_files, testpath
system "#{bin}/cqmakedb", "-s", "./codequery.db",
"-c", "./cscope.out",
"-t", "./tags",
"-p"
output = shell_output("#{bin}/cqsearch -s ./codequery.db -t info_platform")
assert_match "info_platform", output
end
end
| 33.813953 | 93 | 0.671939 |
1dbf78e4058da485ceb78bb4309a5d0f057f405b | 301 | class AddCommentsCountToPages < ActiveRecord::Migration
def self.up
add_column :pages, :comments_count, :integer
add_column :pages, :show_comments, :boolean, :default => false
end
def self.down
remove_column :pages, :show_comments
remove_column :pages, :comments_count
end
end | 27.363636 | 66 | 0.744186 |
613b9b2b35f9cab92355e5db089af822f3b2d20d | 84 | class Workout < ActiveRecord::Base
belongs_to :user
has_many :exercises
end
| 16.8 | 34 | 0.738095 |
628cbc8592b2171a6ff39247223260abf272b4dd | 877 | cask 'firefox-beta' do
version '48.0b5'
sha256 '395f0b09c9f619937fb969c3c50b88fef226869a1a9ba395ff31cb70f6046004'
url "https://download.mozilla.org/?product=firefox-#{version}-SSL&os=osx&lang=en-US"
name 'Mozilla Firefox'
homepage 'https://www.mozilla.org/en-US/firefox/channel/#beta'
license :mpl
app 'Firefox.app'
zap delete: [
'~/Library/Application Support/Firefox',
'~/Library/Caches/Firefox',
]
caveats <<-EOS.undent
The Mac App Store version of 1Password won't work with a Homebrew-cask-linked Mozilla Firefox. To bypass this limitation, you need to either:
+ Move Mozilla Firefox to your /Applications directory (the app itself, not a symlink).
+ Install 1Password from outside the Mac App Store (licenses should transfer automatically, but you should contact AgileBits about it).
EOS
end
| 38.130435 | 143 | 0.714937 |
b9066d1b2e18fa5eb523ace518e4440f5c0f1f34 | 779 |
class PocketsphinxServer::Handler
attr_reader :recognizer, :config
def initialize(server, config={})
@config = config
@server = server
@recognizer = PocketsphinxServer::Recognizer.new(server, config.fetch('recognizer', {}))
end
# Can this handler handle this request?
def can_handle?(req)
true
end
# Prepare the recognizer for this request (switch LM, etc)
def prepare_rec(req)
end
# Postprocess an hypothesis string (e.g., make it prettyier)
def postprocess_hypothesis(hyp)
hyp
end
# Return a map of extra data for a hypothesis
def get_hyp_extras(req, hyp)
{}
end
def can_handle_fetch_lm?(req)
false
end
def handle_fetch_lm(req)
end
def log(str)
@server.logger.info str
end
end
| 16.934783 | 92 | 0.677792 |
1aaeeaf038b23e6c1fedc8bc93d1bd9238e1603e | 33,187 | #! /usr/bin/env ruby -S rspec
require 'spec_helper'
require 'puppet/resource'
describe Puppet::Resource do
include PuppetSpec::Files
let :basepath do make_absolute("/somepath") end
[:catalog, :file, :line].each do |attr|
it "should have an #{attr} attribute" do
resource = Puppet::Resource.new("file", "/my/file")
resource.should respond_to(attr)
resource.should respond_to(attr.to_s + "=")
end
end
it "should have a :title attribute" do
Puppet::Resource.new(:user, "foo").title.should == "foo"
end
it "should require the type and title" do
lambda { Puppet::Resource.new }.should raise_error(ArgumentError)
end
it "should canonize types to capitalized strings" do
Puppet::Resource.new(:user, "foo").type.should == "User"
end
it "should canonize qualified types so all strings are capitalized" do
Puppet::Resource.new("foo::bar", "foo").type.should == "Foo::Bar"
end
it "should tag itself with its type" do
Puppet::Resource.new("file", "/f").should be_tagged("file")
end
it "should tag itself with its title if the title is a valid tag" do
Puppet::Resource.new("user", "bar").should be_tagged("bar")
end
it "should not tag itself with its title if the title is a not valid tag" do
Puppet::Resource.new("file", "/bar").should_not be_tagged("/bar")
end
it "should allow setting of attributes" do
Puppet::Resource.new("file", "/bar", :file => "/foo").file.should == "/foo"
Puppet::Resource.new("file", "/bar", :exported => true).should be_exported
end
it "should set its type to 'Class' and its title to the passed title if the passed type is :component and the title has no square brackets in it" do
ref = Puppet::Resource.new(:component, "foo")
ref.type.should == "Class"
ref.title.should == "Foo"
end
it "should interpret the title as a reference and assign appropriately if the type is :component and the title contains square brackets" do
ref = Puppet::Resource.new(:component, "foo::bar[yay]")
ref.type.should == "Foo::Bar"
ref.title.should == "yay"
end
it "should set the type to 'Class' if it is nil and the title contains no square brackets" do
ref = Puppet::Resource.new(nil, "yay")
ref.type.should == "Class"
ref.title.should == "Yay"
end
it "should interpret the title as a reference and assign appropriately if the type is nil and the title contains square brackets" do
ref = Puppet::Resource.new(nil, "foo::bar[yay]")
ref.type.should == "Foo::Bar"
ref.title.should == "yay"
end
it "should interpret the title as a reference and assign appropriately if the type is nil and the title contains nested square brackets" do
ref = Puppet::Resource.new(nil, "foo::bar[baz[yay]]")
ref.type.should == "Foo::Bar"
ref.title.should =="baz[yay]"
end
it "should interpret the type as a reference and assign appropriately if the title is nil and the type contains square brackets" do
ref = Puppet::Resource.new("foo::bar[baz]")
ref.type.should == "Foo::Bar"
ref.title.should =="baz"
end
it "should be able to extract its information from a Puppet::Type instance" do
ral = Puppet::Type.type(:file).new :path => basepath+"/foo"
ref = Puppet::Resource.new(ral)
ref.type.should == "File"
ref.title.should == basepath+"/foo"
end
it "should fail if the title is nil and the type is not a valid resource reference string" do
expect { Puppet::Resource.new("resource-spec-foo") }.should raise_error(ArgumentError)
end
it 'should fail if strict is set and type does not exist' do
expect { Puppet::Resource.new('resource-spec-foo', 'title', {:strict=>true}) }.should raise_error(ArgumentError, 'Invalid resource type resource-spec-foo')
end
it 'should fail if strict is set and class does not exist' do
expect { Puppet::Resource.new('Class', 'resource-spec-foo', {:strict=>true}) }.should raise_error(ArgumentError, 'Could not find declared class resource-spec-foo')
end
it "should fail if the title is a hash and the type is not a valid resource reference string" do
expect { Puppet::Resource.new({:type => "resource-spec-foo", :title => "bar"}) }.
to raise_error ArgumentError, /Puppet::Resource.new does not take a hash/
end
it "should be taggable" do
Puppet::Resource.ancestors.should be_include(Puppet::Util::Tagging)
end
it "should have an 'exported' attribute" do
resource = Puppet::Resource.new("file", "/f")
resource.exported = true
resource.exported.should == true
resource.should be_exported
end
it "should support an environment attribute" do
Puppet::Resource.new("file", "/my/file", :environment => :foo).environment.name.should == :foo
end
describe "and munging its type and title" do
describe "when modeling a builtin resource" do
it "should be able to find the resource type" do
Puppet::Resource.new("file", "/my/file").resource_type.should equal(Puppet::Type.type(:file))
end
it "should set its type to the capitalized type name" do
Puppet::Resource.new("file", "/my/file").type.should == "File"
end
end
describe "when modeling a defined resource" do
describe "that exists" do
before do
@type = Puppet::Resource::Type.new(:definition, "foo::bar")
Puppet::Node::Environment.new.known_resource_types.add @type
end
it "should set its type to the capitalized type name" do
Puppet::Resource.new("foo::bar", "/my/file").type.should == "Foo::Bar"
end
it "should be able to find the resource type" do
Puppet::Resource.new("foo::bar", "/my/file").resource_type.should equal(@type)
end
it "should set its title to the provided title" do
Puppet::Resource.new("foo::bar", "/my/file").title.should == "/my/file"
end
end
describe "that does not exist" do
it "should set its resource type to the capitalized resource type name" do
Puppet::Resource.new("foo::bar", "/my/file").type.should == "Foo::Bar"
end
end
end
describe "when modeling a node" do
# Life's easier with nodes, because they can't be qualified.
it "should set its type to 'Node' and its title to the provided title" do
node = Puppet::Resource.new("node", "foo")
node.type.should == "Node"
node.title.should == "foo"
end
end
describe "when modeling a class" do
it "should set its type to 'Class'" do
Puppet::Resource.new("class", "foo").type.should == "Class"
end
describe "that exists" do
before do
@type = Puppet::Resource::Type.new(:hostclass, "foo::bar")
Puppet::Node::Environment.new.known_resource_types.add @type
end
it "should set its title to the capitalized, fully qualified resource type" do
Puppet::Resource.new("class", "foo::bar").title.should == "Foo::Bar"
end
it "should be able to find the resource type" do
Puppet::Resource.new("class", "foo::bar").resource_type.should equal(@type)
end
end
describe "that does not exist" do
it "should set its type to 'Class' and its title to the capitalized provided name" do
klass = Puppet::Resource.new("class", "foo::bar")
klass.type.should == "Class"
klass.title.should == "Foo::Bar"
end
end
describe "and its name is set to the empty string" do
it "should set its title to :main" do
Puppet::Resource.new("class", "").title.should == :main
end
describe "and a class exists whose name is the empty string" do # this was a bit tough to track down
it "should set its title to :main" do
@type = Puppet::Resource::Type.new(:hostclass, "")
Puppet::Node::Environment.new.known_resource_types.add @type
Puppet::Resource.new("class", "").title.should == :main
end
end
end
describe "and its name is set to :main" do
it "should set its title to :main" do
Puppet::Resource.new("class", :main).title.should == :main
end
describe "and a class exists whose name is the empty string" do # this was a bit tough to track down
it "should set its title to :main" do
@type = Puppet::Resource::Type.new(:hostclass, "")
Puppet::Node::Environment.new.known_resource_types.add @type
Puppet::Resource.new("class", :main).title.should == :main
end
end
end
end
end
it "should return nil when looking up resource types that don't exist" do
Puppet::Resource.new("foobar", "bar").resource_type.should be_nil
end
it "should not fail when an invalid parameter is used and strict mode is disabled" do
type = Puppet::Resource::Type.new(:definition, "foobar")
Puppet::Node::Environment.new.known_resource_types.add type
resource = Puppet::Resource.new("foobar", "/my/file")
resource[:yay] = true
end
it "should be considered equivalent to another resource if their type and title match and no parameters are set" do
Puppet::Resource.new("file", "/f").should == Puppet::Resource.new("file", "/f")
end
it "should be considered equivalent to another resource if their type, title, and parameters are equal" do
Puppet::Resource.new("file", "/f", :parameters => {:foo => "bar"}).should == Puppet::Resource.new("file", "/f", :parameters => {:foo => "bar"})
end
it "should not be considered equivalent to another resource if their type and title match but parameters are different" do
Puppet::Resource.new("file", "/f", :parameters => {:fee => "baz"}).should_not == Puppet::Resource.new("file", "/f", :parameters => {:foo => "bar"})
end
it "should not be considered equivalent to a non-resource" do
Puppet::Resource.new("file", "/f").should_not == "foo"
end
it "should not be considered equivalent to another resource if their types do not match" do
Puppet::Resource.new("file", "/f").should_not == Puppet::Resource.new("exec", "/f")
end
it "should not be considered equivalent to another resource if their titles do not match" do
Puppet::Resource.new("file", "/foo").should_not == Puppet::Resource.new("file", "/f")
end
describe "when setting default parameters" do
before do
@scope = mock "Scope"
@scope.stubs(:source).returns(nil)
end
it "should fail when asked to set default values and it is not a parser resource" do
Puppet::Node::Environment.new.known_resource_types.add(
Puppet::Resource::Type.new(:definition, "default_param", :arguments => {"a" => Puppet::Parser::AST::String.new(:value => "default")})
)
resource = Puppet::Resource.new("default_param", "name")
lambda { resource.set_default_parameters(@scope) }.should raise_error(Puppet::DevError)
end
it "should evaluate and set any default values when no value is provided" do
Puppet::Node::Environment.new.known_resource_types.add(
Puppet::Resource::Type.new(:definition, "default_param", :arguments => {"a" => Puppet::Parser::AST::String.new(:value => "a_default_value")})
)
resource = Puppet::Parser::Resource.new("default_param", "name", :scope => Puppet::Parser::Scope.new)
resource.set_default_parameters(@scope)
resource["a"].should == "a_default_value"
end
it "should skip attributes with no default value" do
Puppet::Node::Environment.new.known_resource_types.add(
Puppet::Resource::Type.new(:definition, "no_default_param", :arguments => {"a" => Puppet::Parser::AST::String.new(:value => "a_default_value")})
)
resource = Puppet::Parser::Resource.new("no_default_param", "name", :scope => Puppet::Parser::Scope.new)
lambda { resource.set_default_parameters(@scope) }.should_not raise_error
end
it "should return the list of default parameters set" do
Puppet::Node::Environment.new.known_resource_types.add(
Puppet::Resource::Type.new(:definition, "default_param", :arguments => {"a" => Puppet::Parser::AST::String.new(:value => "a_default_value")})
)
resource = Puppet::Parser::Resource.new("default_param", "name", :scope => Puppet::Parser::Scope.new)
resource.set_default_parameters(@scope).should == [:a]
end
describe "when the resource type is :hostclass" do
      let(:environment_name) { "testing env" }
let(:fact_values) { { :a => 1 } }
let(:port) { Puppet::Parser::AST::String.new(:value => '80') }
let(:apache) { Puppet::Resource::Type.new(:hostclass, 'apache', :arguments => { 'port' => port }) }
before do
        environment = Puppet::Node::Environment.new(environment_name)
environment.known_resource_types.add(apache)
@scope.stubs(:host).returns('host')
        @scope.stubs(:environment).returns(Puppet::Node::Environment.new(environment_name))
@scope.stubs(:facts).returns(Puppet::Node::Facts.new("facts", fact_values))
end
context "when no value is provided" do
let(:resource) do
Puppet::Parser::Resource.new("class", "apache", :scope => @scope)
end
it "should query the data_binding terminus using a namespaced key" do
Puppet::DataBinding.indirection.expects(:find).with(
            'apache::port', :host => 'host', :environment => environment_name, :facts => fact_values)
resource.set_default_parameters(@scope)
end
it "should use the value from the data_binding terminus" do
Puppet::DataBinding.indirection.expects(:find).returns('443')
resource.set_default_parameters(@scope).should == [:port]
resource[:port].should == '443'
end
it "should use the default value if the data_binding terminus returns nil" do
Puppet::DataBinding.indirection.expects(:find).returns(nil)
resource.set_default_parameters(@scope).should == [:port]
resource[:port].should == '80'
end
end
context "when a value is provided" do
let(:port_parameter) do
Puppet::Parser::Resource::Param.new(
{ :name => 'port', :value => '8080' }
)
end
let(:resource) do
Puppet::Parser::Resource.new("class", "apache", :scope => @scope,
:parameters => [port_parameter])
end
it "should not query the data_binding terminus" do
Puppet::DataBinding.indirection.expects(:find).never
resource.set_default_parameters(@scope)
end
it "should use the value provided" do
Puppet::DataBinding.indirection.expects(:find).never
resource.set_default_parameters(@scope).should == []
resource[:port].should == '8080'
end
end
end
end
describe "when validating all required parameters are present" do
it "should be able to validate that all required parameters are present" do
Puppet::Node::Environment.new.known_resource_types.add(
Puppet::Resource::Type.new(:definition, "required_param", :arguments => {"a" => nil})
)
lambda { Puppet::Resource.new("required_param", "name").validate_complete }.should raise_error(Puppet::ParseError)
end
it "should not fail when all required parameters are present" do
Puppet::Node::Environment.new.known_resource_types.add(
Puppet::Resource::Type.new(:definition, "no_required_param")
)
resource = Puppet::Resource.new("no_required_param", "name")
resource["a"] = "meh"
lambda { resource.validate_complete }.should_not raise_error
end
it "should not validate against builtin types" do
lambda { Puppet::Resource.new("file", "/bar").validate_complete }.should_not raise_error
end
end
describe "when referring to a resource with name canonicalization" do
it "should canonicalize its own name" do
res = Puppet::Resource.new("file", "/path/")
res.uniqueness_key.should == ["/path"]
res.ref.should == "File[/path/]"
end
end
describe "when running in strict mode" do
it "should be strict" do
Puppet::Resource.new("file", "/path", :strict => true).should be_strict
end
it "should fail if invalid parameters are used" do
expect { Puppet::Resource.new("file", "/path", :strict => true, :parameters => {:nosuchparam => "bar"}) }.should raise_error
end
it "should fail if the resource type cannot be resolved" do
expect { Puppet::Resource.new("nosuchtype", "/path", :strict => true) }.should raise_error
end
end
describe "when managing parameters" do
before do
@resource = Puppet::Resource.new("file", "/my/file")
end
it "should correctly detect when provided parameters are not valid for builtin types" do
Puppet::Resource.new("file", "/my/file").should_not be_valid_parameter("foobar")
end
it "should correctly detect when provided parameters are valid for builtin types" do
Puppet::Resource.new("file", "/my/file").should be_valid_parameter("mode")
end
it "should correctly detect when provided parameters are not valid for defined resource types" do
type = Puppet::Resource::Type.new(:definition, "foobar")
Puppet::Node::Environment.new.known_resource_types.add type
Puppet::Resource.new("foobar", "/my/file").should_not be_valid_parameter("myparam")
end
it "should correctly detect when provided parameters are valid for defined resource types" do
type = Puppet::Resource::Type.new(:definition, "foobar", :arguments => {"myparam" => nil})
Puppet::Node::Environment.new.known_resource_types.add type
Puppet::Resource.new("foobar", "/my/file").should be_valid_parameter("myparam")
end
it "should allow setting and retrieving of parameters" do
@resource[:foo] = "bar"
@resource[:foo].should == "bar"
end
it "should allow setting of parameters at initialization" do
Puppet::Resource.new("file", "/my/file", :parameters => {:foo => "bar"})[:foo].should == "bar"
end
it "should canonicalize retrieved parameter names to treat symbols and strings equivalently" do
@resource[:foo] = "bar"
@resource["foo"].should == "bar"
end
it "should canonicalize set parameter names to treat symbols and strings equivalently" do
@resource["foo"] = "bar"
@resource[:foo].should == "bar"
end
it "should set the namevar when asked to set the name" do
resource = Puppet::Resource.new("user", "bob")
Puppet::Type.type(:user).stubs(:key_attributes).returns [:myvar]
resource[:name] = "bob"
resource[:myvar].should == "bob"
end
it "should return the namevar when asked to return the name" do
resource = Puppet::Resource.new("user", "bob")
Puppet::Type.type(:user).stubs(:key_attributes).returns [:myvar]
resource[:myvar] = "test"
resource[:name].should == "test"
end
it "should be able to set the name for non-builtin types" do
resource = Puppet::Resource.new(:foo, "bar")
resource[:name] = "eh"
expect { resource[:name] = "eh" }.should_not raise_error
end
it "should be able to return the name for non-builtin types" do
resource = Puppet::Resource.new(:foo, "bar")
resource[:name] = "eh"
resource[:name].should == "eh"
end
it "should be able to iterate over parameters" do
@resource[:foo] = "bar"
@resource[:fee] = "bare"
params = {}
@resource.each do |key, value|
params[key] = value
end
params.should == {:foo => "bar", :fee => "bare"}
end
it "should include Enumerable" do
@resource.class.ancestors.should be_include(Enumerable)
end
it "should have a method for testing whether a parameter is included" do
@resource[:foo] = "bar"
@resource.should be_has_key(:foo)
@resource.should_not be_has_key(:eh)
end
it "should have a method for providing the list of parameters" do
@resource[:foo] = "bar"
@resource[:bar] = "foo"
keys = @resource.keys
keys.should be_include(:foo)
keys.should be_include(:bar)
end
it "should have a method for providing the number of parameters" do
@resource[:foo] = "bar"
@resource.length.should == 1
end
it "should have a method for deleting parameters" do
@resource[:foo] = "bar"
@resource.delete(:foo)
@resource[:foo].should be_nil
end
it "should have a method for testing whether the parameter list is empty" do
@resource.should be_empty
@resource[:foo] = "bar"
@resource.should_not be_empty
end
it "should be able to produce a hash of all existing parameters" do
@resource[:foo] = "bar"
@resource[:fee] = "yay"
hash = @resource.to_hash
hash[:foo].should == "bar"
hash[:fee].should == "yay"
end
it "should not provide direct access to the internal parameters hash when producing a hash" do
hash = @resource.to_hash
hash[:foo] = "bar"
@resource[:foo].should be_nil
end
it "should use the title as the namevar to the hash if no namevar is present" do
resource = Puppet::Resource.new("user", "bob")
Puppet::Type.type(:user).stubs(:key_attributes).returns [:myvar]
resource.to_hash[:myvar].should == "bob"
end
it "should set :name to the title if :name is not present for non-builtin types" do
krt = Puppet::Resource::TypeCollection.new("myenv")
krt.add Puppet::Resource::Type.new(:definition, :foo)
resource = Puppet::Resource.new :foo, "bar"
resource.stubs(:known_resource_types).returns krt
resource.to_hash[:name].should == "bar"
end
end
describe "when serializing" do
before do
@resource = Puppet::Resource.new("file", "/my/file")
@resource["one"] = "test"
@resource["two"] = "other"
end
it "should be able to be dumped to yaml" do
proc { YAML.dump(@resource) }.should_not raise_error
end
it "should produce an equivalent yaml object" do
text = YAML.dump(@resource)
newresource = YAML.load(text)
newresource.title.should == @resource.title
newresource.type.should == @resource.type
%w{one two}.each do |param|
newresource[param].should == @resource[param]
end
end
end
describe "when loading 0.25.x storedconfigs YAML" do
before :each do
@old_storedconfig_yaml = %q{--- !ruby/object:Puppet::Resource::Reference
builtin_type:
title: /tmp/bar
type: File
}
end
it "should deserialize a Puppet::Resource::Reference without exceptions" do
expect { YAML.load(@old_storedconfig_yaml) }.should_not raise_error
end
it "should deserialize as a Puppet::Resource::Reference as a Puppet::Resource" do
YAML.load(@old_storedconfig_yaml).class.should == Puppet::Resource
end
it "should to_hash properly" do
YAML.load(@old_storedconfig_yaml).to_hash.should == { :path => "/tmp/bar" }
end
end
describe "when converting to a RAL resource" do
it "should use the resource type's :new method to create the resource if the resource is of a builtin type" do
resource = Puppet::Resource.new("file", basepath+"/my/file")
result = resource.to_ral
result.should be_instance_of(Puppet::Type.type(:file))
result[:path].should == basepath+"/my/file"
end
it "should convert to a component instance if the resource type is not of a builtin type" do
resource = Puppet::Resource.new("foobar", "somename")
result = resource.to_ral
result.should be_instance_of(Puppet::Type.type(:component))
result.title.should == "Foobar[somename]"
end
end
describe "when converting to puppet code" do
before do
@resource = Puppet::Resource.new("one::two", "/my/file",
:parameters => {
:noop => true,
:foo => %w{one two},
:ensure => 'present',
}
)
end
it "should align, sort and add trailing commas to attributes with ensure first" do
@resource.to_manifest.should == <<-HEREDOC.gsub(/^\s{8}/, '').gsub(/\n$/, '')
one::two { '/my/file':
ensure => 'present',
foo => ['one', 'two'],
noop => 'true',
}
HEREDOC
end
end
describe "when converting to pson", :if => Puppet.features.pson? do
def pson_output_should
@resource.class.expects(:pson_create).with { |hash| yield hash }
end
it "should include the pson util module" do
Puppet::Resource.singleton_class.ancestors.should be_include(Puppet::Util::Pson)
end
# LAK:NOTE For all of these tests, we convert back to the resource so we can
# trap the actual data structure then.
it "should set its type to the provided type" do
Puppet::Resource.from_pson(PSON.parse(Puppet::Resource.new("File", "/foo").to_pson)).type.should == "File"
end
it "should set its title to the provided title" do
Puppet::Resource.from_pson(PSON.parse(Puppet::Resource.new("File", "/foo").to_pson)).title.should == "/foo"
end
it "should include all tags from the resource" do
resource = Puppet::Resource.new("File", "/foo")
resource.tag("yay")
Puppet::Resource.from_pson(PSON.parse(resource.to_pson)).tags.should == resource.tags
end
it "should include the file if one is set" do
resource = Puppet::Resource.new("File", "/foo")
resource.file = "/my/file"
Puppet::Resource.from_pson(PSON.parse(resource.to_pson)).file.should == "/my/file"
end
it "should include the line if one is set" do
resource = Puppet::Resource.new("File", "/foo")
resource.line = 50
Puppet::Resource.from_pson(PSON.parse(resource.to_pson)).line.should == 50
end
it "should include the 'exported' value if one is set" do
resource = Puppet::Resource.new("File", "/foo")
resource.exported = true
Puppet::Resource.from_pson(PSON.parse(resource.to_pson)).exported.should be_true
end
it "should set 'exported' to false if no value is set" do
resource = Puppet::Resource.new("File", "/foo")
Puppet::Resource.from_pson(PSON.parse(resource.to_pson)).exported.should be_false
end
it "should set all of its parameters as the 'parameters' entry" do
resource = Puppet::Resource.new("File", "/foo")
resource[:foo] = %w{bar eh}
resource[:fee] = %w{baz}
result = Puppet::Resource.from_pson(PSON.parse(resource.to_pson))
result["foo"].should == %w{bar eh}
result["fee"].should == %w{baz}
end
it "should serialize relationships as reference strings" do
resource = Puppet::Resource.new("File", "/foo")
resource[:requires] = Puppet::Resource.new("File", "/bar")
result = Puppet::Resource.from_pson(PSON.parse(resource.to_pson))
result[:requires].should == "File[/bar]"
end
it "should serialize multiple relationships as arrays of reference strings" do
resource = Puppet::Resource.new("File", "/foo")
resource[:requires] = [Puppet::Resource.new("File", "/bar"), Puppet::Resource.new("File", "/baz")]
result = Puppet::Resource.from_pson(PSON.parse(resource.to_pson))
result[:requires].should == [ "File[/bar]", "File[/baz]" ]
end
end
describe "when converting from pson", :if => Puppet.features.pson? do
def pson_result_should
Puppet::Resource.expects(:new).with { |hash| yield hash }
end
before do
@data = {
'type' => "file",
'title' => basepath+"/yay",
}
end
it "should set its type to the provided type" do
Puppet::Resource.from_pson(@data).type.should == "File"
end
it "should set its title to the provided title" do
Puppet::Resource.from_pson(@data).title.should == basepath+"/yay"
end
it "should tag the resource with any provided tags" do
@data['tags'] = %w{foo bar}
resource = Puppet::Resource.from_pson(@data)
resource.tags.should be_include("foo")
resource.tags.should be_include("bar")
end
it "should set its file to the provided file" do
@data['file'] = "/foo/bar"
Puppet::Resource.from_pson(@data).file.should == "/foo/bar"
end
it "should set its line to the provided line" do
@data['line'] = 50
Puppet::Resource.from_pson(@data).line.should == 50
end
it "should 'exported' to true if set in the pson data" do
@data['exported'] = true
Puppet::Resource.from_pson(@data).exported.should be_true
end
it "should 'exported' to false if not set in the pson data" do
Puppet::Resource.from_pson(@data).exported.should be_false
end
it "should fail if no title is provided" do
@data.delete('title')
expect { Puppet::Resource.from_pson(@data) }.should raise_error(ArgumentError)
end
it "should fail if no type is provided" do
@data.delete('type')
expect { Puppet::Resource.from_pson(@data) }.should raise_error(ArgumentError)
end
it "should set each of the provided parameters" do
@data['parameters'] = {'foo' => %w{one two}, 'fee' => %w{three four}}
resource = Puppet::Resource.from_pson(@data)
resource['foo'].should == %w{one two}
resource['fee'].should == %w{three four}
end
it "should convert single-value array parameters to normal values" do
@data['parameters'] = {'foo' => %w{one}}
resource = Puppet::Resource.from_pson(@data)
resource['foo'].should == %w{one}
end
end
describe "it should implement to_resource" do
resource = Puppet::Resource.new("file", "/my/file")
resource.to_resource.should == resource
end
describe "because it is an indirector model" do
it "should include Puppet::Indirector" do
Puppet::Resource.should be_is_a(Puppet::Indirector)
end
it "should have a default terminus" do
Puppet::Resource.indirection.terminus_class.should be
end
it "should have a name" do
Puppet::Resource.new("file", "/my/file").name.should == "File//my/file"
end
end
describe "when resolving resources with a catalog" do
it "should resolve all resources using the catalog" do
catalog = mock 'catalog'
resource = Puppet::Resource.new("foo::bar", "yay")
resource.catalog = catalog
catalog.expects(:resource).with("Foo::Bar[yay]").returns(:myresource)
resource.resolve.should == :myresource
end
end
describe "when generating the uniqueness key" do
it "should include all of the key_attributes in alphabetical order by attribute name" do
Puppet::Type.type(:file).stubs(:key_attributes).returns [:myvar, :owner, :path]
Puppet::Type.type(:file).stubs(:title_patterns).returns(
[ [ /(.*)/, [ [:path, lambda{|x| x} ] ] ] ]
)
res = Puppet::Resource.new("file", "/my/file", :parameters => {:owner => 'root', :content => 'hello'})
res.uniqueness_key.should == [ nil, 'root', '/my/file']
end
end
describe "#prune_parameters" do
before do
Puppet.newtype('blond') do
newproperty(:ensure)
newproperty(:height)
newproperty(:weight)
newproperty(:sign)
newproperty(:friends)
newparam(:admits_to_dying_hair)
newparam(:admits_to_age)
newparam(:name)
end
end
it "should strip all parameters and strip properties that are nil, empty or absent except for ensure" do
resource = Puppet::Resource.new("blond", "Bambi", :parameters => {
:ensure => 'absent',
:height => '',
:weight => 'absent',
:friends => [],
:admits_to_age => true,
:admits_to_dying_hair => false
})
pruned_resource = resource.prune_parameters
pruned_resource.should == Puppet::Resource.new("blond", "Bambi", :parameters => {:ensure => 'absent'})
end
it "should leave parameters alone if in parameters_to_include" do
resource = Puppet::Resource.new("blond", "Bambi", :parameters => {
:admits_to_age => true,
:admits_to_dying_hair => false
})
pruned_resource = resource.prune_parameters(:parameters_to_include => [:admits_to_dying_hair])
pruned_resource.should == Puppet::Resource.new("blond", "Bambi", :parameters => {:admits_to_dying_hair => false})
end
it "should leave properties if not nil, absent or empty" do
resource = Puppet::Resource.new("blond", "Bambi", :parameters => {
:ensure => 'silly',
:height => '7 ft 5 in',
:friends => ['Oprah'],
})
pruned_resource = resource.prune_parameters
pruned_resource.should ==
resource = Puppet::Resource.new("blond", "Bambi", :parameters => {
:ensure => 'silly',
:height => '7 ft 5 in',
:friends => ['Oprah'],
})
end
end
end
| 37.205157 | 167 | 0.643927 |
ed1563bd20132ff5659ff186b58bb3262e32b09f | 1,967 | # encoding: UTF-8
#
# Author: Stefano Harding <[email protected]>
# License: Apache License, Version 2.0
# Copyright: (C) 2014-2015 Stefano Harding
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Garcon
# Clock that cannot be set and represents monotonic time since
# some unspecified starting point.
# @!visibility private
GLOBAL_MONOTONIC_CLOCK = Class.new {
if defined?(Process::CLOCK_MONOTONIC)
# @!visibility private
def get_time
Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
else
require 'thread'
# @!visibility private
def initialize
@mutex = Mutex.new
@last_time = Time.now.to_f
end
# @!visibility private
def get_time
@mutex.synchronize do
now = Time.now.to_f
if @last_time < now
@last_time = now
else # clock has moved back in time
@last_time += 0.000_001
end
end
end
end
}.new
private_constant :GLOBAL_MONOTONIC_CLOCK
# @!macro [attach] monotonic_get_time
#
  # Returns the current time as tracked by the application monotonic clock.
#
# @return [Float] The current monotonic time when `since` not given else
# the elapsed monotonic time between `since` and the current time
#
# @!macro monotonic_clock_warning
def monotonic_time
GLOBAL_MONOTONIC_CLOCK.get_time
end
module_function :monotonic_time
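  # A minimal usage sketch (illustration only; `do_work` is a hypothetical
  # method): timing a block of work against the monotonic clock so the
  # measurement is unaffected by wall-clock adjustments.
  #
  #   started = Garcon.monotonic_time
  #   do_work
  #   elapsed = Garcon.monotonic_time - started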
end
| 28.1 | 76 | 0.680224 |
1c0e5842ac04772cf7e364e7db59f8da188e6b13 | 157 | class AddDiscountToErpOrdersOrderDetails < ActiveRecord::Migration[5.1]
def change
add_column :erp_orders_order_details, :discount, :decimal
end
end
| 26.166667 | 71 | 0.802548 |
e9cfa291ad3c4c8367a3fac6cb435aaf75cb5783 | 2,212 | require "logstash/filters/base"
require "logstash/namespace"
require "logstash/timestamp"
require "stud/interval"
require "socket" # for Socket.gethostname
class LogStash::Codecs::RFC822 < LogStash::Codecs::Base
config_name "rfc822"
config :lowercase_headers, :validate => :boolean, :default => true
config :strip_attachments, :validate => :boolean, :default => false
# For multipart messages, use the first part that has this
# content-type as the event message.
config :content_type, :validate => :string, :default => "text/plain"
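  # A hypothetical pipeline snippet showing where this setting is used (the
  # `imap` input is only an assumed host for the codec; option syntax follows
  # the usual Logstash config form):
  #
  #   input {
  #     imap {
  #       codec => rfc822 { content_type => "text/html" }
  #     }
  #   }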
public
def register
require "mail"
@content_type_re = Regexp.new("^" + @content_type)
end
public
def decode(payload, &block)
mail = Mail.read_from_string(payload)
if @strip_attachments
mail = mail.without_attachments
end
event = LogStash::Event.new("raw" => payload, "parts" => mail.parts.count)
# Use the 'Date' field as the timestamp
event.timestamp = LogStash::Timestamp.new(mail.date.to_time)
# Add fields: Add message.header_fields { |h| h.name=> h.value }
mail.header_fields.each do |header|
# 'header.name' can sometimes be a Mail::Multibyte::Chars, get it in String form
name = @lowercase_headers ? header.name.to_s.downcase : header.name.to_s
# Call .decoded on the header in case it's in encoded-word form.
# Details at:
# https://github.com/mikel/mail/blob/master/README.md#encodings
# http://tools.ietf.org/html/rfc2047#section-2
value = transcode_to_utf8(header.decoded.to_s)
case (field = event.get(name))
when String
# promote string to array if a header appears multiple times
# (like 'received')
event.set(name, [field, value])
when Array
field << value
event.set(name, field)
when nil
event.set(name, value)
end
end
yield event
end
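  # Illustrative result of the header handling above (a sketch, values are
  # made up): repeated headers such as "Received" are promoted to arrays,
  # single-valued headers stay plain strings, and names are lowercased when
  # `lowercase_headers` is true (the default).
  #
  #   event.get("received") #=> ["from a.example.com ...", "from b.example.com ..."]
  #   event.get("subject")  #=> "Hello"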
# transcode_to_utf8 is meant for headers transcoding.
# the mail gem will set the correct encoding on header strings decoding
# and we want to transcode it to utf8
def transcode_to_utf8(s)
unless s.nil?
s.encode(Encoding::UTF_8, :invalid => :replace, :undef => :replace)
end
end
end
| 30.30137 | 86 | 0.673599 |
bb2d162d8dfdcb9a0d67c6e49c11bd6bff65b171 | 227 | class NomenclaturalRank::Iczn::SpeciesGroup::Species < NomenclaturalRank::Iczn::SpeciesGroup
def self.parent_rank
NomenclaturalRank::Iczn::SpeciesGroup::Subsuperspecies
end
def self.abbreviation
'sp.'
end
end
| 20.636364 | 92 | 0.76652 |
79815dd3939063e35d72aa5ae637c12243d9816b | 2,255 | require 'spec_helper'
describe "directory environments" do
let(:args) { ['--configprint', 'modulepath', '--environment', 'direnv'] }
let(:puppet) { Puppet::Application[:apply] }
context "with a single directory environmentpath" do
before(:each) do
environmentdir = PuppetSpec::Files.tmpdir('envpath')
Puppet[:environmentpath] = environmentdir
FileUtils.mkdir_p(environmentdir + "/direnv/modules")
end
it "config prints the environments modulepath" do
Puppet.settings.initialize_global_settings(args)
expect {
puppet.run
}.to exit_with(0)
.and output(%r{/direnv/modules}).to_stdout
end
it "config prints the cli --modulepath despite environment" do
args << '--modulepath' << '/completely/different'
Puppet.settings.initialize_global_settings(args)
expect {
puppet.run
}.to exit_with(0)
.and output(%r{/completely/different}).to_stdout
end
it 'given an 8.3 style path on Windows, will config print an expanded path',
:if => Puppet::Util::Platform.windows? do
# ensure an 8.3 style path is set for environmentpath
shortened = Puppet::Util::Windows::File.get_short_pathname(Puppet[:environmentpath])
expanded = Puppet::FileSystem.expand_path(shortened)
Puppet[:environmentpath] = shortened
expect(Puppet[:environmentpath]).to match(/~/)
Puppet.settings.initialize_global_settings(args)
expect {
puppet.run
}.to exit_with(0)
.and output(a_string_matching(expanded)).to_stdout
end
end
context "with an environmentpath having multiple directories" do
let(:args) { ['--configprint', 'modulepath', '--environment', 'otherdirenv'] }
before(:each) do
envdir1 = File.join(Puppet[:confdir], 'env1')
envdir2 = File.join(Puppet[:confdir], 'env2')
Puppet[:environmentpath] = [envdir1, envdir2].join(File::PATH_SEPARATOR)
FileUtils.mkdir_p(envdir2 + "/otherdirenv/modules")
end
it "config prints a directory environment modulepath" do
Puppet.settings.initialize_global_settings(args)
expect {
puppet.run
}.to exit_with(0)
.and output(%r{otherdirenv/modules}).to_stdout
end
end
end
| 33.161765 | 90 | 0.673171 |
01034e866cd84cf6f0a83ea28c50485362bd894c | 346 | # frozen_string_literal: true
require 'capybara/rails'
require 'capybara-box'
Capybara.disable_animation = true
ci = Helper.true?(ENV['CI'])
CapybaraBox::Base.configure(
browser: ENV.fetch('BROWSER', :selenium_chrome_headless).to_sym,
log: !ci,
screenshot: { enabled: ci, s3: ci },
version: ENV['CHROMEDRIVER_VERSION']
)
| 21.625 | 69 | 0.702312 |
0329f0b43db35edd0b87d2d1e7106e42870fe041 | 6,971 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
require 'spec_helper.rb'
describe 'PhoneNumber' do
it "can create" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers.create(phone_number_sid: 'PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
}.to raise_exception(Twilio::REST::TwilioError)
values = {'PhoneNumberSid' => 'PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', }
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'post',
url: 'https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers',
data: values,
))).to eq(true)
end
it "receives create responses" do
@holodeck.mock(Twilio::Response.new(
201,
%q[
{
"sid": "PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "2015-07-30T20:12:31Z",
"date_updated": "2015-07-30T20:12:33Z",
"phone_number": "+987654321",
"country_code": "US",
"capabilities": [],
"url": "https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers/PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
))
actual = @client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers.create(phone_number_sid: 'PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
expect(actual).to_not eq(nil)
end
it "receives create_with_capabilities responses" do
@holodeck.mock(Twilio::Response.new(
201,
%q[
{
"sid": "PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "2015-07-30T20:12:31Z",
"date_updated": "2015-07-30T20:12:33Z",
"phone_number": "+987654321",
"country_code": "US",
"capabilities": [
"MMS",
"SMS",
"Voice"
],
"url": "https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers/PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
))
actual = @client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers.create(phone_number_sid: 'PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
expect(actual).to_not eq(nil)
end
it "can delete" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers('PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa').delete()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'delete',
url: 'https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers/PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
))).to eq(true)
end
it "receives delete responses" do
@holodeck.mock(Twilio::Response.new(
204,
nil,
))
actual = @client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers('PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa').delete()
expect(actual).to eq(true)
end
it "can read" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers.list()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'get',
url: 'https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers',
))).to eq(true)
end
it "receives read_full responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"meta": {
"page": 0,
"page_size": 50,
"first_page_url": "https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers?PageSize=50&Page=0",
"previous_page_url": null,
"next_page_url": null,
"key": "phone_numbers",
"url": "https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers?PageSize=50&Page=0"
},
"phone_numbers": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sid": "PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "2015-07-30T20:12:31Z",
"date_updated": "2015-07-30T20:12:33Z",
"phone_number": "+987654321",
"country_code": "US",
"capabilities": [],
"url": "https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers/PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
}
]
))
actual = @client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers.list()
expect(actual).to_not eq(nil)
end
it "can fetch" do
@holodeck.mock(Twilio::Response.new(500, ''))
expect {
@client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers('PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa').fetch()
}.to raise_exception(Twilio::REST::TwilioError)
values = {}
expect(
@holodeck.has_request?(Holodeck::Request.new(
method: 'get',
url: 'https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers/PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
))).to eq(true)
end
it "receives fetch responses" do
@holodeck.mock(Twilio::Response.new(
200,
%q[
{
"sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"date_created": "2015-07-30T20:12:31Z",
"date_updated": "2015-07-30T20:12:33Z",
"phone_number": "12345",
"country_code": "US",
"capabilities": [],
"url": "https://messaging.twilio.com/v1/Services/MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/PhoneNumbers/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
))
actual = @client.messaging.v1.services('MGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') \
.phone_numbers('PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa').fetch()
expect(actual).to_not eq(nil)
end
end | 35.030151 | 150 | 0.618276 |
bb3c3384b1d973299894bbbe4734dc4a302f057e | 881 | describe CampusSolutions::StudentTermGpa do
let(:user_id) { '61889' }
shared_examples 'a proxy that gets data' do
subject { proxy.get }
it_should_behave_like 'a simple proxy that returns errors'
it_behaves_like 'a proxy that properly observes the student success feature flag'
it_behaves_like 'a proxy that got data successfully'
it 'returns data with the expected structure' do
expect(subject[:feed][:ucAaTermData]).to be
expect(subject[:feed][:ucAaTermData][:ucAaTermGpa]).to be
end
end
context 'mock proxy' do
let(:proxy) { CampusSolutions::StudentTermGpa.new(user_id: user_id, fake: true) }
it_should_behave_like 'a proxy that gets data'
end
context 'real proxy', testext: true do
let(:proxy) { CampusSolutions::StudentTermGpa.new(user_id: user_id) }
it_should_behave_like 'a proxy that gets data'
end
end
| 32.62963 | 85 | 0.725312 |
1116ea8c7c4955eba7695993552010ebda08aeb6 | 210 | cask :v1 => 'sqwiggle' do
version :latest
sha256 :no_check
url 'https://www.sqwiggle.com/download/mac'
name 'Sqwiggle'
homepage 'https://www.sqwiggle.com'
license :gratis
app 'Sqwiggle.app'
end
| 17.5 | 45 | 0.690476 |
5d460ca14fc748a491e08f62c46f9ed87d1ffa62 | 188 | # Load the Rails application.
require File.expand_path('../application', __FILE__)
# Initialize the Rails application.
Rails.application.initialize!
Mime::Type.register "text/txt", :txt
| 23.5 | 52 | 0.771277 |
f7f3c351e0ab69e8c6335a48b2df3a0432309608 | 2,721 | class Telegraf < Formula
desc "Server-level metric gathering agent for InfluxDB"
homepage "https://www.influxdata.com/"
url "https://github.com/influxdata/telegraf/archive/v1.17.3.tar.gz"
sha256 "fea0a44a0dbbe9a506e695b41ffbbc065cf78acfa5b10d9da0678ce43340b238"
license "MIT"
head "https://github.com/influxdata/telegraf.git"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "2e274b39110c9cec062d724e00531043bc2ed2f1a9990c9722ef3e2653a7e352"
sha256 cellar: :any_skip_relocation, big_sur: "0895ff4a3277b65b34d9da6ffacf3363d0c5ca5f60fae84ba60b07d867d651d2"
sha256 cellar: :any_skip_relocation, catalina: "8217c64da0084d8a4e741e157f33f576e40c3c9e26c738f54bd6ba0cf00a56ad"
sha256 cellar: :any_skip_relocation, mojave: "387c98576c1b705e296057eb555d489feaa6865042c85e22eb682a577891be1a"
sha256 cellar: :any_skip_relocation, x86_64_linux: "83f0554b2f08ace75bdeb1a299ff22211b4c2b271eb64d634f219f9ae5ea5d85"
end
depends_on "go" => :build
def install
system "go", "build", *std_go_args, "-ldflags", "-X main.version=#{version}", "./cmd/telegraf"
etc.install "etc/telegraf.conf" => "telegraf.conf"
end
def post_install
# Create directory for additional user configurations
(etc/"telegraf.d").mkpath
end
plist_options manual: "telegraf -config #{HOMEBREW_PREFIX}/etc/telegraf.conf"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/telegraf</string>
<string>-config</string>
<string>#{etc}/telegraf.conf</string>
<string>-config-directory</string>
<string>#{etc}/telegraf.d</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/telegraf.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/telegraf.log</string>
</dict>
</plist>
EOS
end
test do
(testpath/"config.toml").write shell_output("#{bin}/telegraf -sample-config")
system "#{bin}/telegraf", "-config", testpath/"config.toml", "-test",
"-input-filter", "cpu:mem"
end
end
| 35.802632 | 122 | 0.649761 |
ac5a2850f80edacc0c42cf3c073963dd8d4d4e33 | 640 | # frozen_string_literal: true
require "spec_helper"
require "lib/subscribe_user"
require "spec/mocks/user_mock"
require "spec/mocks/subscription_mock"
RSpec.describe SubscribeUser, active_mocker: true do
let(:user) { User.create }
subject { described_class.new(user: user) }
describe "with_yearly" do
it do
subject.with_yearly
expect(
Subscription.where(user: user,
kind: "yearly").count,
).to eq 1
end
end
describe "with_monthly" do
it do
subject.with_monthly
expect(Subscription.where(user: user, kind: "monthly").count).to eq 1
end
end
end
| 22.068966 | 75 | 0.665625 |
bf65bf04f2554bd1432855095ed90bb03faa12bc | 2,083 | Pod::Spec.new do |s|
s.name = 'CorePlot'
s.version = '99.99.99'
s.license = 'BSD'
s.summary = 'Cocoa plotting framework for Mac OS X, iOS, and tvOS.'
s.homepage = 'https://github.com/core-plot'
s.social_media_url = 'https://twitter.com/CorePlot'
s.documentation_url = 'http://core-plot.github.io'
s.authors = { 'Drew McCormack' => '[email protected]',
'Brad Larson' => '[email protected]',
'Eric Skroch' => '[email protected]',
'Barry Wark' => '[email protected]' }
s.source = { :git => 'https://github.com/core-plot/core-plot.git' }
s.description = 'Core Plot is a plotting framework for OS X, iOS, and tvOS. It provides 2D visualization ' \
'of data, and is tightly integrated with Apple technologies like Core Animation, ' \
'Core Data, and Cocoa Bindings.'
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.8'
s.tvos.deployment_target = '9.0'
s.ios.header_dir = 'ios'
s.osx.header_dir = 'osx'
s.tvos.header_dir = 'tvos'
s.source_files = 'framework/Source/*.{h,m}', 'framework/CocoaPods/*.h', 'framework/TestResources/CorePlotProbes.d'
s.exclude_files = '**/*{TestCase,Tests}.{h,m}', '**/mainpage.h'
s.ios.source_files = 'framework/CorePlot-CocoaTouch.h', 'framework/iPhoneOnly/*.{h,m}'
s.tvos.source_files = 'framework/iPhoneOnly/*.{h,m}'
s.osx.source_files = 'framework/MacOnly/*.{h,m}'
s.private_header_files = '**/_*.h', '**/CorePlotProbes.h'
s.requires_arc = true
s.xcconfig = { 'ALWAYS_SEARCH_USER_PATHS' => 'YES' }
s.ios.xcconfig = { 'HEADER_SEARCH_PATHS' => '"${PODS_ROOT}/Headers/Private/CorePlot/ios"' }
s.osx.xcconfig = { 'HEADER_SEARCH_PATHS' => '"${PODS_ROOT}/Headers/Private/CorePlot/osx"' }
s.tvos.xcconfig = { 'HEADER_SEARCH_PATHS' => '"${PODS_ROOT}/Headers/Private/CorePlot/tvos"' }
s.frameworks = 'QuartzCore', 'Accelerate'
s.ios.frameworks = 'UIKit', 'Foundation'
s.tvos.frameworks = 'UIKit', 'Foundation'
s.osx.frameworks = 'Cocoa'
end | 45.282609 | 116 | 0.640422 |
0873a14d3cc8ba4012ad8139dd8e69f14384e7fc | 17,884 | require 'spec_helper'
require 'cloud_controller/diego/process_guid'
module VCAP::CloudController
RSpec.describe Runners do
subject(:runners) { Runners.new(config, message_bus, dea_pool) }
let(:config) do
{
staging: {
timeout_in_seconds: 90
}
}
end
let(:message_bus) { instance_double(CfMessageBus::MessageBus) }
let(:dea_pool) { instance_double(Dea::Pool) }
let(:package_hash) { 'fake-package-hash' }
let(:custom_buildpacks_enabled?) { true }
let(:buildpack) { instance_double(AutoDetectionBuildpack, custom?: false) }
let(:docker_image) { nil }
describe '#runner_for_app' do
subject(:runner) do
runners.runner_for_app(app)
end
context 'when the app is configured to run on Diego' do
let(:app) { AppFactory.make(diego: true) }
it 'finds a diego backend' do
expect(runners).to receive(:diego_runner).with(app).and_call_original
expect(runner).to be_a(Diego::Runner)
end
context 'when the app has a docker image' do
let(:app) { AppFactory.make(:docker, docker_image: 'foobar') }
it 'finds a diego backend' do
expect(runners).to receive(:diego_runner).with(app).and_call_original
expect(runner).to be_a(Diego::Runner)
end
end
end
context 'when the app is not configured to run on Diego' do
let(:app) { AppFactory.make }
it 'finds a DEA backend' do
expect(runners).to receive(:dea_runner).with(app).and_call_original
expect(runner).to be_a(Dea::Runner)
end
end
end
describe '#run_with_diego?' do
let(:diego_app) { AppFactory.make(diego: true) }
let(:dea_app) { AppFactory.make }
it 'returns true for a diego app' do
expect(runners.run_with_diego?(diego_app)).to be_truthy
end
it 'returns false for a dea app' do
expect(runners.run_with_diego?(dea_app)).to be_falsey
end
end
describe '#diego_apps' do
let!(:diego_app1) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app2) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app3) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app4) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app5) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:dea_app) { AppFactory.make(state: 'STARTED') }
it 'returns apps that have the desired data' do
last_app = AppFactory.make(diego: true, state: 'STARTED', version: 'app-version-6')
apps = runners.diego_apps(100, 0)
expect(apps.count).to eq(6)
expect(apps.last.to_json).to match_object(last_app.to_json)
end
it 'respects the batch_size' do
app_counts = [3, 5].map do |batch_size|
runners.diego_apps(batch_size, 0).count
end
expect(app_counts).to eq([3, 5])
end
it 'returns non-intersecting apps across subsequent batches' do
first_batch = runners.diego_apps(3, 0)
expect(first_batch.count).to eq(3)
second_batch = runners.diego_apps(3, first_batch.last.id)
expect(second_batch.count).to eq(2)
expect(second_batch & first_batch).to eq([])
end
it 'does not return unstaged apps' do
unstaged_app = AppFactory.make(diego: true, state: 'STARTED')
unstaged_app.current_droplet.destroy
batch = runners.diego_apps(100, 0)
expect(batch).not_to include(unstaged_app)
end
it "does not return apps which aren't expected to be started" do
stopped_app = AppFactory.make(diego: true, state: 'STOPPED')
batch = runners.diego_apps(100, 0)
expect(batch).not_to include(stopped_app)
end
it 'only includes apps that have the diego attribute set' do
batch = runners.diego_apps(100, 0)
expect(batch).not_to include(dea_app)
end
end
describe '#diego_apps_from_process_guids' do
let!(:diego_app1) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app2) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app3) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app4) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app5) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:dea_app) { AppFactory.make(state: 'STARTED') }
it 'does not return unstaged apps' do
unstaged_app = AppFactory.make(diego: true, state: 'STARTED')
unstaged_app.current_droplet.destroy
batch = runners.diego_apps_from_process_guids(Diego::ProcessGuid.from_process(unstaged_app))
expect(batch).not_to include(unstaged_app)
end
it 'does not return apps that are stopped' do
stopped_app = AppFactory.make(diego: true, state: 'STOPPED')
batch = runners.diego_apps_from_process_guids(Diego::ProcessGuid.from_process(stopped_app))
expect(batch).not_to include(stopped_app)
end
it 'only includes diego apps' do
batch = runners.diego_apps_from_process_guids(Diego::ProcessGuid.from_process(dea_app))
expect(batch).not_to include(dea_app)
end
it 'accepts a process guid or an array of process guids' do
app = App.where(diego: true).order(:id).first
process_guid = Diego::ProcessGuid.from_process(app)
expect(runners.diego_apps_from_process_guids(process_guid)).to eq([app])
expect(runners.diego_apps_from_process_guids([process_guid])).to eq([app])
end
it 'returns diego apps for each requested process guid' do
diego_apps = App.where(diego: true).all
diego_guids = diego_apps.map { |app| Diego::ProcessGuid.from_process(app) }
expect(runners.diego_apps_from_process_guids(diego_guids)).to match_array(diego_apps)
end
context 'when the process guid is not found' do
it 'does not return an app' do
app = App.where(diego: true).order(:id).first
process_guid = Diego::ProcessGuid.from_process(app)
expect {
app.set_new_version
app.save
}.to change {
runners.diego_apps_from_process_guids(process_guid)
}.from([app]).to([])
end
end
end
describe '#diego_apps_cache_data' do
let!(:diego_app1) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app2) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app3) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app4) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:diego_app5) { AppFactory.make(diego: true, state: 'STARTED') }
let!(:dea_app) { AppFactory.make(state: 'STARTED') }
it 'respects the batch_size' do
data_count = [3, 5].map do |batch_size|
runners.diego_apps_cache_data(batch_size, 0).count
end
expect(data_count).to eq([3, 5])
end
it 'returns data for non-intersecting apps across subsequent batches' do
first_batch = runners.diego_apps_cache_data(3, 0)
expect(first_batch.count).to eq(3)
last_id = first_batch.last[0]
second_batch = runners.diego_apps_cache_data(3, last_id)
expect(second_batch.count).to eq(2)
end
it 'does not return unstaged apps' do
unstaged_app = AppFactory.make(diego: true, state: 'STARTED')
unstaged_app.current_droplet.destroy
batch = runners.diego_apps_cache_data(100, 0)
app_ids = batch.map { |data| data[0] }
expect(app_ids).not_to include(unstaged_app.id)
end
it 'does not return apps that are stopped' do
stopped_app = AppFactory.make(diego: true, state: 'STOPPED')
batch = runners.diego_apps_cache_data(100, 0)
app_ids = batch.map { |data| data[0] }
expect(app_ids).not_to include(stopped_app.id)
end
it 'only includes diego apps' do
batch = runners.diego_apps_cache_data(100, 0)
app_ids = batch.map { |data| data[0] }
expect(app_ids).not_to include(dea_app.id)
end
it 'acquires the data in one select' do
expect {
runners.diego_apps_cache_data(100, 0)
}.to have_queried_db_times(/SELECT.*FROM.*processes.*/, 1)
end
context 'with Docker app' do
before do
FeatureFlag.create(name: 'diego_docker', enabled: true)
end
let!(:docker_app) do
AppFactory.make(:docker, docker_image: 'some-image', state: 'STARTED')
end
context 'when docker is enabled' do
before do
FeatureFlag.find(name: 'diego_docker').update(enabled: true)
end
it 'returns docker apps' do
batch = runners.diego_apps_cache_data(100, 0)
app_ids = batch.map { |data| data[0] }
expect(app_ids).to include(docker_app.id)
end
end
context 'when docker is disabled' do
before do
FeatureFlag.find(name: 'diego_docker').update(enabled: false)
end
it 'does not return docker apps' do
batch = runners.diego_apps_cache_data(100, 0)
app_ids = batch.map { |data| data[0] }
expect(app_ids).not_to include(docker_app.id)
end
end
end
end
describe '#dea_apps_hm9k' do
let!(:dea_app1) { AppFactory.make(state: 'STARTED') }
let!(:dea_app2) { AppFactory.make(state: 'STARTED') }
let!(:dea_app3) { AppFactory.make(state: 'STARTED') }
let!(:dea_app4) { AppFactory.make(state: 'STARTED') }
let!(:dea_app5) { AppFactory.make(state: 'STARTED') }
it 'returns apps that have the desired data' do
last_app = AppFactory.make(state: 'STARTED')
apps, _ = runners.dea_apps_hm9k
expect(apps.count).to eq(6)
expect(apps).to include(
{
'id' => last_app.guid,
'instances' => last_app.instances,
'state' => last_app.state,
'package_state' => 'STAGED',
'version' => last_app.version,
}
)
end
it 'does not return stopped apps' do
stopped_app = AppFactory.make(state: 'STOPPED')
batch, _ = runners.dea_apps_hm9k
guids = batch.map { |entry| entry['id'] }
expect(guids).not_to include(stopped_app.guid)
end
it 'does not return apps that failed to stage' do
staging_failed_app = dea_app1
DropletModel.make(package: dea_app1.latest_package, app: dea_app1.app, state: DropletModel::FAILED_STATE)
batch, _ = runners.dea_apps_hm9k
guids = batch.map { |entry| entry['id'] }
expect(guids).not_to include(staging_failed_app.guid)
end
it 'returns apps that have not yet been staged' do
staging_pending_app = dea_app1
PackageModel.make(app: dea_app1.app, state: PackageModel::PENDING_STATE)
batch, _ = runners.dea_apps_hm9k
expect(batch).to include(hash_including({
'id' => staging_pending_app.guid,
'package_state' => 'PENDING'
}))
end
it 'returns the largest process id from the query' do
_, process_id = runners.dea_apps_hm9k
expect(process_id).to equal(App.dataset.order(:id).last.id)
end
end
describe '#latest' do
context 'when the input hash includes a key app_guid' do
let(:input) do
[{
app_guid: 'app_guid_1',
id: 1,
}]
end
it 'is added to the hash' do
output_hash = runners.latest(input)
expect(output_hash['app_guid_1']).to eq(input[0])
end
context 'when the input hash has multiple values' do
let(:input) do
[
{
app_guid: 'app_guid_1',
id: 1,
},
{
app_guid: 'app_guid_2',
id: 2,
},
]
end
it 'adds all items to the hash' do
output_hash = runners.latest(input)
expect(output_hash.length).to eq(input.length)
expect(output_hash['app_guid_1']).to eq(input[0])
expect(output_hash['app_guid_2']).to eq(input[1])
end
end
context 'when multiple items have the same app_guid key' do
context 'when the created_at times are the same' do
let(:time) { Time.now }
let(:input) do
[
{
app_guid: 'app_guid_1',
id: 1,
created_at: time,
},
{
app_guid: 'app_guid_1',
id: 2,
created_at: time,
}
]
end
it "takes the last entry based off of the 'id'" do
output_hash = runners.latest(input)
expect(output_hash['app_guid_1']).to eq(input[1])
end
end
end
end
end
describe '#package_state' do
let(:parent_app) { AppModel.make }
subject(:app) { App.make(app: parent_app) }
context 'when no package exists' do
it 'is PENDING' do
expect(app.latest_package).to be_nil
expect(runners.package_state(app.guid, nil, app.latest_droplet, app.latest_package)).to eq('PENDING')
end
end
context 'when the package has no hash' do
before do
PackageModel.make(app: parent_app, package_hash: nil)
end
it 'is PENDING' do
expect(runners.package_state(app.guid, nil, app.latest_droplet, app.latest_package)).to eq('PENDING')
end
end
context 'when the package failed to upload' do
before do
PackageModel.make(app: parent_app, state: PackageModel::FAILED_STATE)
end
it 'is FAILED' do
expect(runners.package_state(app.guid, nil, app.latest_droplet, app.latest_package)).to eq('FAILED')
end
end
context 'when the package is available and there is no droplet' do
before do
PackageModel.make(app: parent_app, package_hash: 'hash')
end
it 'is PENDING' do
expect(runners.package_state(app.guid, nil, app.latest_droplet, app.latest_package)).to eq('PENDING')
end
end
context 'when the current droplet is the latest droplet' do
before do
package = PackageModel.make(app: parent_app, package_hash: 'hash', state: PackageModel::READY_STATE)
droplet = DropletModel.make(app: parent_app, package: package, state: DropletModel::STAGED_STATE)
parent_app.update(droplet: droplet)
end
it 'is STAGED' do
expect(runners.package_state(app.guid, app.current_droplet.guid, app.latest_droplet, app.latest_package)).to eq('STAGED')
end
end
context 'when the current droplet is not the latest droplet' do
before do
package = PackageModel.make(app: parent_app, package_hash: 'hash', state: PackageModel::READY_STATE)
DropletModel.make(app: parent_app, package: package, state: DropletModel::STAGED_STATE)
end
it 'is PENDING' do
expect(runners.package_state(app.guid, nil, app.latest_droplet, app.latest_package)).to eq('PENDING')
end
end
context 'when the latest droplet failed to stage' do
before do
package = PackageModel.make(app: parent_app, package_hash: 'hash', state: PackageModel::READY_STATE)
DropletModel.make(app: parent_app, package: package, state: DropletModel::FAILED_STATE)
end
it 'is FAILED' do
expect(runners.package_state(app.guid, nil, app.latest_droplet, app.latest_package)).to eq('FAILED')
end
end
context 'when there is a newer package than current droplet' do
before do
package = PackageModel.make(app: parent_app, package_hash: 'hash', state: PackageModel::READY_STATE)
droplet = DropletModel.make(app: parent_app, package: package, state: DropletModel::STAGED_STATE)
parent_app.update(droplet: droplet)
PackageModel.make(app: parent_app, package_hash: 'hash', state: PackageModel::READY_STATE, created_at: droplet.created_at + 10.seconds)
end
it 'is PENDING' do
expect(runners.package_state(app.guid, app.current_droplet.guid, app.latest_droplet, app.latest_package)).to eq('PENDING')
end
end
context 'when the latest droplet is the current droplet but it does not have a package' do
before do
droplet = DropletModel.make(app: parent_app, state: DropletModel::STAGED_STATE)
parent_app.update(droplet: droplet)
end
it 'is STAGED' do
expect(runners.package_state(app.guid, app.current_droplet.guid, app.latest_droplet, app.latest_package)).to eq('STAGED')
end
end
context 'when the latest droplet has no package but there is a previous package' do
before do
previous_package = PackageModel.make(app: parent_app, package_hash: 'hash', state: PackageModel::FAILED_STATE)
droplet = DropletModel.make(app: parent_app, state: DropletModel::STAGED_STATE, created_at: previous_package.created_at + 10.seconds)
parent_app.update(droplet: droplet)
end
it 'is STAGED' do
expect(runners.package_state(app.guid, app.current_droplet.guid, app.latest_droplet, app.latest_package)).to eq('STAGED')
end
end
end
end
end
| 34.326296 | 145 | 0.615522 |
6aca2b75fcd03b17563d8f348900470ea8aa01cf | 8,304 | module ActiveScaffold::Bridges
class DatePicker
module Helper
DATE_FORMAT_CONVERSION = {
/%a/ => 'D',
/%A/ => 'DD',
/%b/ => 'M',
/%B/ => 'MM',
/%d/ => 'dd',
/%e|%-d/ => 'd',
/%j/ => 'oo',
/%m/ => 'mm',
/%-m|%-m/ => 'm',
/%y/ => 'y',
/%Y/ => 'yy',
/%H/ => 'HH', # options ampm => false
/%I/ => 'hh', # options ampm => true
/%M/ => 'mm',
/%p/ => 'tt',
/%S/ => 'ss',
/%[cUWwxXZz]/ => ''
}.freeze
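      # Worked example of the mapping above (a sketch): the strftime tokens are
      # rewritten to jQuery UI datepicker tokens, and the unsupported ones
      # (%c, %U, %W, %w, %x, %X, %z, %Z) are stripped.
      #
      #   Helper.to_datepicker_format('%d.%m.%Y') # => "dd.mm.yy"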
def self.date_options_for_locales
I18n.available_locales.collect do |locale|
locale_date_options = date_options(locale)
if locale_date_options
"$.datepicker.regional['#{locale}'] = #{locale_date_options.to_json};"
end
end.compact.join('')
end
def self.date_options(locale)
date_picker_options = {
:closeText => as_(:close),
:prevText => as_(:previous),
:nextText => as_(:next),
:currentText => as_(:today),
:monthNames => I18n.translate!('date.month_names', :locale => locale)[1..-1],
:monthNamesShort => I18n.translate!('date.abbr_month_names', :locale => locale)[1..-1],
:dayNames => I18n.translate!('date.day_names', :locale => locale),
:dayNamesShort => I18n.translate!('date.abbr_day_names', :locale => locale),
:dayNamesMin => I18n.translate!('date.abbr_day_names', :locale => locale),
:changeYear => true,
:changeMonth => true
}
as_date_picker_options = I18n.translate! :date_picker_options, :scope => :active_scaffold, :locale => locale, :default => ''
date_picker_options.merge!(as_date_picker_options) if as_date_picker_options.is_a? Hash
Rails.logger.warn "ActiveScaffold: Missing date picker localization for your locale: #{locale}" if as_date_picker_options.blank?
js_format = to_datepicker_format(I18n.translate!('date.formats.default', :locale => locale, :default => ''))
date_picker_options[:dateFormat] = js_format if js_format.present?
date_picker_options
rescue StandardError
raise if locale == I18n.locale
end
def self.datetime_options_for_locales
I18n.available_locales.collect do |locale|
locale_datetime_options = datetime_options(locale)
if locale_datetime_options
"$.timepicker.regional['#{locale}'] = #{locale_datetime_options.to_json};"
end
end.compact.join('')
end
def self.datetime_options(locale)
rails_time_format = I18n.translate! 'time.formats.picker', :locale => locale, :default => '%a, %d %b %Y %H:%M:%S'
datetime_picker_options = {
:ampm => false,
:hourText => I18n.translate!('datetime.prompts.hour', :locale => locale),
:minuteText => I18n.translate!('datetime.prompts.minute', :locale => locale),
:secondText => I18n.translate!('datetime.prompts.second', :locale => locale)
}
as_datetime_picker_options = I18n.translate! :datetime_picker_options, :scope => :active_scaffold, :locale => locale, :default => ''
datetime_picker_options.merge!(as_datetime_picker_options) if as_datetime_picker_options.is_a? Hash
Rails.logger.warn "ActiveScaffold: Missing datetime picker localization for your locale: #{locale}" if as_datetime_picker_options.blank?
date_format, time_format = split_datetime_format(to_datepicker_format(rails_time_format))
datetime_picker_options[:dateFormat] = date_format unless date_format.nil?
unless time_format.nil?
datetime_picker_options[:timeFormat] = time_format
datetime_picker_options[:ampm] = true if rails_time_format.include?('%I')
end
datetime_picker_options
rescue StandardError
raise if locale == I18n.locale
end
def self.to_datepicker_format(rails_format)
return nil if rails_format.nil?
        if rails_format =~ /%[cUWwxXZz]/
          Rails.logger.warn("AS DatePicker::Helper: rails date format #{rails_format} includes options which can't be converted to jquery datepicker format. Options %c, %U, %W, %w, %x %X, %z, %Z are not supported by datepicker and will be removed")
        end
js_format = rails_format.dup
js_format.gsub!(/([ ]|^)([^% ]\S*)/, " '\\2'")
DATE_FORMAT_CONVERSION.each do |key, value|
js_format.gsub!(key, value)
end
js_format
end
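      # A worked example of the conversion above (sketch; actual output depends on the
      # locale's date.formats.default entry). Literal, non-% tokens are quoted first
      # (none in these formats), then each strftime directive is mapped through
      # DATE_FORMAT_CONVERSION:
      #
      #   to_datepicker_format('%Y-%m-%d') # => 'yy-mm-dd'
      #   to_datepicker_format('%d %b %Y') # => 'dd M yy'
      #   to_datepicker_format('%H:%M')    # => 'HH:mm'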
def self.split_datetime_format(datetime_format)
date_format = datetime_format
time_format = nil
time_start_indicators = %w[HH hh mm tt ss]
unless datetime_format.nil?
start_indicator = time_start_indicators.detect { |indicator| datetime_format.include?(indicator) }
unless start_indicator.nil?
pos_time_format = datetime_format.index(start_indicator)
date_format = datetime_format.to(pos_time_format - 1).strip
time_format = datetime_format.from(pos_time_format).strip
end
end
[date_format, time_format]
end
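      # Example (sketch): the split happens at the first occurrence of the first time
      # indicator detected (HH, hh, mm, tt or ss, checked in that order), so a combined
      # datetime picker format separates into date and time parts. String#to and
      # String#from are ActiveSupport extensions.
      #
      #   split_datetime_format('dd.mm.yy HH:mm') # => ['dd.mm.yy', 'HH:mm']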
module DatepickerColumnHelpers
def datepicker_split_datetime_format(datetime_format)
ActiveScaffold::Bridges::DatePicker::Helper.split_datetime_format(datetime_format)
end
def to_datepicker_format(rails_format)
ActiveScaffold::Bridges::DatePicker::Helper.to_datepicker_format(rails_format)
end
def datepicker_format_options(column, format, options)
unless format == :default
if column.form_ui == :date_picker
js_format = to_datepicker_format(I18n.translate!("date.formats.#{format}"))
options['data-dateFormat'] = js_format unless js_format.nil?
else
rails_time_format = I18n.translate!("time.formats.#{format}")
date_format, time_format = datepicker_split_datetime_format(to_datepicker_format(rails_time_format))
options['data-dateFormat'] = date_format unless date_format.nil?
unless time_format.nil?
options['data-timeFormat'] = time_format
options['data-ampm'] = true if rails_time_format.include?('%I')
end
end
end
end
end
module SearchColumnHelpers
def active_scaffold_search_date_bridge_calendar_control(column, options, current_search, name)
value = if current_search.is_a? Hash
controller.class.condition_value_for_datetime(column, current_search[name], column.search_ui == :date_picker ? :to_date : :to_time)
else
current_search
end
options = column.options.merge(options).except!(:include_blank, :discard_time, :discard_date, :value)
options = active_scaffold_input_text_options(options.merge(column.options))
options[:class] << " #{column.search_ui}"
options[:style] = (options[:show].nil? || options[:show]) ? nil : 'display: none'
format = options.delete(:format) || (column.search_ui == :date_picker ? :default : :picker)
datepicker_format_options(column, format, options)
text_field_tag("#{options[:name]}[#{name}]", value ? l(value, :format => format) : nil, options.merge(:id => "#{options[:id]}_#{name}", :name => "#{options[:name]}[#{name}]", :object => nil))
end
end
module FormColumnHelpers
def active_scaffold_input_date_picker(column, options)
record = options[:object]
options = active_scaffold_input_text_options(options.merge(column.options))
options[:class] << " #{column.form_ui}"
value = controller.class.condition_value_for_datetime(column, record.send(column.name), column.form_ui == :date_picker ? :to_date : :to_time)
format = options.delete(:format) || (column.form_ui == :date_picker ? :default : :picker)
datepicker_format_options(column, format, options)
options[:value] = (value ? l(value, :format => format) : nil)
text_field(:record, column.name, options)
end
end
end
end
end
| 45.878453 | 248 | 0.623916 |
f85945a5768deca0a17bbfe93885c3d5aa68d102 | 3,092 | # frozen_string_literal: true
module Mutations
module Snippets
class Create < BaseMutation
include ServiceCompatibility
include CanMutateSpammable
include Mutations::SpamProtection
authorize :create_snippet
graphql_name 'CreateSnippet'
field :snippet,
Types::SnippetType,
null: true,
description: 'The snippet after mutation.'
argument :title, GraphQL::Types::String,
required: true,
description: 'Title of the snippet.'
argument :description, GraphQL::Types::String,
required: false,
description: 'Description of the snippet.'
argument :visibility_level, Types::VisibilityLevelsEnum,
description: 'The visibility level of the snippet.',
required: true
argument :project_path, GraphQL::Types::ID,
required: false,
description: 'The project full path the snippet is associated with.'
argument :uploaded_files, [GraphQL::Types::String],
required: false,
description: 'The paths to files uploaded in the snippet description.'
argument :blob_actions, [Types::Snippets::BlobActionInputType],
description: 'Actions to perform over the snippet repository and blobs.',
required: false
def resolve(project_path: nil, **args)
if project_path.present?
project = authorized_find!(project_path)
else
authorize!(:global)
end
process_args_for_params!(args)
spam_params = ::Spam::SpamParams.new_from_request(request: context[:request])
service = ::Snippets::CreateService.new(project: project, current_user: current_user, params: args, spam_params: spam_params)
service_response = service.execute
# Only when the user is not an api user and the operation was successful
if !api_user? && service_response.success?
::Gitlab::UsageDataCounters::EditorUniqueCounter.track_snippet_editor_edit_action(author: current_user)
end
snippet = service_response.payload[:snippet]
check_spam_action_response!(snippet)
{
snippet: service_response.success? ? snippet : nil,
errors: errors_on_object(snippet)
}
end
private
def find_object(full_path)
Project.find_by_full_path(full_path)
end
# process_args_for_params!(args) -> nil
#
# Modifies/adds/deletes mutation resolve args as necessary to be passed as params to service layer.
def process_args_for_params!(args)
convert_blob_actions_to_snippet_actions!(args)
# We need to rename `uploaded_files` into `files` because
# it's the expected key param
args[:files] = args.delete(:uploaded_files)
# Return nil to make it explicit that this method is mutating the args parameter, and that
# the return value is not relevant and is not to be used.
nil
end
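      # Rough before/after sketch of the munging above (hypothetical input;
      # convert_blob_actions_to_snippet_actions! comes from a shared helper that is not
      # shown in this file and is assumed to translate :blob_actions for the service
      # layer):
      #
      #   args = { title: 'snip', uploaded_files: ['a.png'], blob_actions: [...] }
      #   process_args_for_params!(args)
      #   args[:files]          # => ['a.png']
      #   args[:uploaded_files] # => nil (renamed to :files)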
end
end
end
| 33.247312 | 133 | 0.647477 |
ff22bffc20d0695b29bff30ef15680166cff2073 | 2,077 | # -*- encoding: utf-8 -*-
# stub: middleware 0.1.0 ruby lib
Gem::Specification.new do |s|
s.name = "middleware".freeze
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Mitchell Hashimoto".freeze]
s.date = "2012-03-16"
s.description = "Generalized implementation of the middleware abstraction for Ruby.".freeze
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/mitchellh/middleware".freeze
s.rubygems_version = "3.0.3".freeze
s.summary = "Generalized implementation of the middleware abstraction for Ruby.".freeze
s.installed_by_version = "3.0.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<redcarpet>.freeze, ["~> 2.1.0"])
s.add_development_dependency(%q<rspec-core>.freeze, ["~> 2.8.0"])
s.add_development_dependency(%q<rspec-expectations>.freeze, ["~> 2.8.0"])
s.add_development_dependency(%q<rspec-mocks>.freeze, ["~> 2.8.0"])
s.add_development_dependency(%q<yard>.freeze, ["~> 0.7.5"])
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<redcarpet>.freeze, ["~> 2.1.0"])
s.add_dependency(%q<rspec-core>.freeze, ["~> 2.8.0"])
s.add_dependency(%q<rspec-expectations>.freeze, ["~> 2.8.0"])
s.add_dependency(%q<rspec-mocks>.freeze, ["~> 2.8.0"])
s.add_dependency(%q<yard>.freeze, ["~> 0.7.5"])
end
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<redcarpet>.freeze, ["~> 2.1.0"])
s.add_dependency(%q<rspec-core>.freeze, ["~> 2.8.0"])
s.add_dependency(%q<rspec-expectations>.freeze, ["~> 2.8.0"])
s.add_dependency(%q<rspec-mocks>.freeze, ["~> 2.8.0"])
s.add_dependency(%q<yard>.freeze, ["~> 0.7.5"])
end
end
| 44.191489 | 112 | 0.654309 |
e9078f1168f60678a6f5058507177437bc4d4167 | 1,098 | # frozen_string_literal: true
require "ripper_parser/commenting_ripper_parser"
require "ripper_parser/sexp_processor"
module RipperParser
# Main parser class. Brings together Ripper and our
# RipperParser::SexpProcessor.
class Parser
def parse(source, filename = "(string)", lineno = 1)
parser = CommentingRipperParser.new(source, filename, lineno)
exp = parser.parse
processor = SexpProcessor.new(filename: filename)
result = processor.process exp
if result.sexp_type == :void_stmt
nil
else
trickle_up_line_numbers result
trickle_down_line_numbers result
result
end
end
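    # Typical usage (sketch; exact node shapes are determined by SexpProcessor):
    #
    #   parser = RipperParser::Parser.new
    #   parser.parse("")        # => nil (empty input is a void statement)
    #   parser.parse("a = 1\n") # => Sexp for the assignment, with #line filled in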
private
def trickle_up_line_numbers(exp)
exp.each do |sub_exp|
if sub_exp.is_a? Sexp
trickle_up_line_numbers sub_exp
exp.line ||= sub_exp.line
end
end
end
def trickle_down_line_numbers(exp)
exp.each do |sub_exp|
if sub_exp.is_a? Sexp
sub_exp.line ||= exp.line
trickle_down_line_numbers sub_exp
end
end
end
end
end
| 23.361702 | 67 | 0.659381 |
62591bb545f0cec7c16d3f6fe1c11602d93a53db | 2,914 | require 'sentry-raven'
describe 'Exception', set_app: true do
class FixRaven < Struct.new(:app)
def call(env)
requested_at = env['requested_at']
env['requested_at'] = env['requested_at'].to_s if env.key?('requested_at')
app.call(env)
rescue Exception => e
env['requested_at'] = requested_at
raise e
end
end
class TestError < StandardError
end
before do
Raven.configure do |config|
config.silence_ready = true
end
set_app Raven::Rack.new(FixRaven.new(app))
Travis.config.sentry.dsn = 'https://fake:[email protected]/12345'
Travis::Api::App.setup_monitoring
Travis.testing = false
allow(Raven).to receive(:send_event)
end
after do
Travis.testing = true
end
it 'raises an error in testing mode' do
begin
Travis.testing = false
error = TestError.new('a test error')
allow_any_instance_of(Travis::Api::App::Endpoint::Repos).to receive(:service).and_raise(error)
res = get '/repos/1', nil, 'HTTP_X_REQUEST_ID' => '235dd08f-10d5-4fcc-9a4d-6b8e6a24f975'
rescue TestError => e
expect(e.message).to eq('a test error')
ensure
Travis.testing = true
end
end
it 'enqueues error into a thread' do
error = TestError.new('Konstantin broke all the thingz!')
allow_any_instance_of(Travis::Api::App::Endpoint::Repos).to receive(:service).and_raise(error)
expect(Raven).to receive(:send_event).with(
satisfy { |event| event['logentry']['message'] == "#{error.class}: #{error.message}" }
)
res = get '/repos/1'
expect(res.status).to eq(500)
expect(res.body).to eq("Sorry, we experienced an error.\n")
expect(res.headers).to eq({
'Content-Type' => 'text/plain',
'Content-Length' => '32',
'Access-Control-Allow-Origin' => '*',
'Access-Control-Allow-Credentials' => 'true',
'Access-Control-Expose-Headers' => 'Content-Type, Cache-Control, Expires, Etag, Last-Modified, X-Request-ID',
})
sleep 0.1
end
it 'returns request_id in body' do
error = TestError.new('Konstantin broke all the thingz!')
allow_any_instance_of(Travis::Api::App::Endpoint::Repos).to receive(:service).and_raise(error)
allow(Raven).to receive(:send_event)
res = get '/repos/1', nil, 'HTTP_X_REQUEST_ID' => '235dd08f-10d5-4fcc-9a4d-6b8e6a24f975'
expect(res.status).to eq(500)
expect(res.body).to eq("Sorry, we experienced an error.\n\nrequest_id:235dd08f-10d5-4fcc-9a4d-6b8e6a24f975\n")
expect(res.headers).to eq({
'Content-Type' => 'text/plain',
'Content-Length' => '81',
'X-Request-ID' => '235dd08f-10d5-4fcc-9a4d-6b8e6a24f975',
'Access-Control-Allow-Origin' => '*',
'Access-Control-Allow-Credentials' => 'true',
'Access-Control-Expose-Headers' => 'Content-Type, Cache-Control, Expires, Etag, Last-Modified, X-Request-ID',
})
sleep 0.1
end
end
| 33.883721 | 115 | 0.657515 |
bb0f8bb3f1e3d6aea448d2a40faec497e071c8dc | 801 | require 'spec_helper'
describe SmartRpc::Setting do
class << SmartRpc::Setting
def config
{'foo' => {
'v1' => {'base_uri' => 'http://example.com'},
'authentication' => {
'name_and_password' => {'name' => 'Foo', 'password' => 'BarBazBlah'}
}
}
}
end
end
describe ".request" do
it "should return the settings for the request" do
result = SmartRpc::Setting.request('foo', 'v1')
result.base_uri.should eq("http://example.com")
end
end
describe ".authentication" do
it "should return the settings for the authentication" do
result = SmartRpc::Setting.authentication('foo', 'name_and_password')
result.name.should eq('Foo')
result.password.should eq('BarBazBlah')
end
end
end
| 25.83871 | 80 | 0.600499 |
e8e5adaa2741f56a2836cff9dc0a673142fe57fb | 11,953 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IssuablesHelper do
let(:label) { build_stubbed(:label) }
let(:label2) { build_stubbed(:label) }
describe '#users_dropdown_label' do
let(:user) { build_stubbed(:user) }
let(:user2) { build_stubbed(:user) }
it 'returns unassigned' do
expect(users_dropdown_label([])).to eq('Unassigned')
end
it 'returns selected user\'s name' do
expect(users_dropdown_label([user])).to eq(user.name)
end
it 'returns selected user\'s name and counter' do
expect(users_dropdown_label([user, user2])).to eq("#{user.name} + 1 more")
end
end
describe '#group_dropdown_label' do
let(:group) { create(:group) }
let(:default) { 'default label' }
it 'returns default group label when group_id is nil' do
expect(group_dropdown_label(nil, default)).to eq('default label')
end
it 'returns "any group" when group_id is 0' do
expect(group_dropdown_label('0', default)).to eq('Any group')
end
it 'returns group full path when a group was found for the provided id' do
expect(group_dropdown_label(group.id, default)).to eq(group.full_name)
end
it 'returns default label when a group was not found for the provided id' do
expect(group_dropdown_label(non_existing_record_id, default)).to eq('default label')
end
end
describe '#issuables_state_counter_text' do
let(:user) { create(:user) }
describe 'state text' do
before do
allow(helper).to receive(:issuables_count_for_state).and_return(42)
end
it 'returns "Open" when state is :opened' do
expect(helper.issuables_state_counter_text(:issues, :opened, true))
.to eq('<span>Open</span> <span class="badge badge-pill">42</span>')
end
it 'returns "Closed" when state is :closed' do
expect(helper.issuables_state_counter_text(:issues, :closed, true))
.to eq('<span>Closed</span> <span class="badge badge-pill">42</span>')
end
it 'returns "Merged" when state is :merged' do
expect(helper.issuables_state_counter_text(:merge_requests, :merged, true))
.to eq('<span>Merged</span> <span class="badge badge-pill">42</span>')
end
it 'returns "All" when state is :all' do
expect(helper.issuables_state_counter_text(:merge_requests, :all, true))
.to eq('<span>All</span> <span class="badge badge-pill">42</span>')
end
end
end
describe '#issuable_reference' do
context 'when show_full_reference truthy' do
it 'display issuable full reference' do
assign(:show_full_reference, true)
issue = build_stubbed(:issue)
expect(helper.issuable_reference(issue)).to eql(issue.to_reference(full: true))
end
end
context 'when show_full_reference falsey' do
context 'when @group present' do
it 'display issuable reference to @group' do
project = build_stubbed(:project)
assign(:show_full_reference, nil)
assign(:group, project.namespace)
issue = build_stubbed(:issue)
expect(helper.issuable_reference(issue)).to eql(issue.to_reference(project.namespace))
end
end
context 'when @project present' do
it 'display issuable reference to @project' do
project = build_stubbed(:project)
assign(:show_full_reference, nil)
assign(:group, nil)
assign(:project, project)
issue = build_stubbed(:issue)
expect(helper.issuable_reference(issue)).to eql(issue.to_reference(project))
end
end
end
end
describe '#updated_at_by' do
let(:user) { create(:user) }
let(:unedited_issuable) { create(:issue) }
let(:edited_issuable) { create(:issue, last_edited_by: user, created_at: 3.days.ago, updated_at: 1.day.ago, last_edited_at: 2.days.ago) }
let(:edited_updated_at_by) do
{
updatedAt: edited_issuable.last_edited_at.to_time.iso8601,
updatedBy: {
name: user.name,
path: user_path(user)
}
}
end
it { expect(helper.updated_at_by(unedited_issuable)).to eq({}) }
it { expect(helper.updated_at_by(edited_issuable)).to eq(edited_updated_at_by) }
context 'when updated by a deleted user' do
let(:edited_updated_at_by) do
{
updatedAt: edited_issuable.last_edited_at.to_time.iso8601,
updatedBy: {
name: User.ghost.name,
path: user_path(User.ghost)
}
}
end
before do
user.destroy!
end
it 'returns "Ghost user" as edited_by' do
expect(helper.updated_at_by(edited_issuable.reload)).to eq(edited_updated_at_by)
end
end
end
describe '#issuable_initial_data' do
let(:user) { create(:user) }
before do
allow(helper).to receive(:current_user).and_return(user)
allow(helper).to receive(:can?).and_return(true)
stub_commonmark_sourcepos_disabled
end
it 'returns the correct data for an issue' do
issue = create(:issue, author: user, description: 'issue text')
@project = issue.project
expected_data = {
endpoint: "/#{@project.full_path}/-/issues/#{issue.iid}",
updateEndpoint: "/#{@project.full_path}/-/issues/#{issue.iid}.json",
canUpdate: true,
canDestroy: true,
issuableRef: "##{issue.iid}",
markdownPreviewPath: "/#{@project.full_path}/preview_markdown",
markdownDocsPath: '/help/user/markdown',
lockVersion: issue.lock_version,
projectPath: @project.path,
projectNamespace: @project.namespace.path,
initialTitleHtml: issue.title,
initialTitleText: issue.title,
initialDescriptionHtml: '<p dir="auto">issue text</p>',
initialDescriptionText: 'issue text',
initialTaskStatus: '0 of 0 tasks completed',
issueType: 'issue',
iid: issue.iid.to_s
}
expect(helper.issuable_initial_data(issue)).to match(hash_including(expected_data))
end
describe '#sentryIssueIdentifier' do
let(:issue) { create(:issue, author: user) }
before do
assign(:project, issue.project)
end
      it 'sets sentryIssueIdentifier to nil with no sentry issue' do
expect(helper.issuable_initial_data(issue)[:sentryIssueIdentifier])
.to be_nil
end
it 'sets sentryIssueIdentifier to sentry_issue_identifier' do
sentry_issue = create(:sentry_issue, issue: issue)
expect(helper.issuable_initial_data(issue)[:sentryIssueIdentifier])
.to eq(sentry_issue.sentry_issue_identifier)
end
end
describe '#zoomMeetingUrl in issue' do
let(:issue) { create(:issue, author: user) }
before do
assign(:project, issue.project)
end
shared_examples 'sets zoomMeetingUrl to nil' do
specify do
expect(helper.issuable_initial_data(issue)[:zoomMeetingUrl])
.to be_nil
end
end
      context 'with no "added" zoom meetings' do
it_behaves_like 'sets zoomMeetingUrl to nil'
context 'with multiple removed meetings' do
before do
create(:zoom_meeting, issue: issue, issue_status: :removed)
create(:zoom_meeting, issue: issue, issue_status: :removed)
end
it_behaves_like 'sets zoomMeetingUrl to nil'
end
end
context 'with "added" zoom meeting' do
before do
create(:zoom_meeting, issue: issue)
end
shared_examples 'sets zoomMeetingUrl to canonical meeting url' do
specify do
expect(helper.issuable_initial_data(issue))
.to include(zoomMeetingUrl: 'https://zoom.us/j/123456789')
end
end
it_behaves_like 'sets zoomMeetingUrl to canonical meeting url'
        context 'with multiple "removed" zoom meetings' do
before do
create(:zoom_meeting, issue: issue, issue_status: :removed)
create(:zoom_meeting, issue: issue, issue_status: :removed)
end
it_behaves_like 'sets zoomMeetingUrl to canonical meeting url'
end
end
end
end
describe '#assignee_sidebar_data' do
let(:user) { create(:user) }
let(:merge_request) { nil }
subject { helper.assignee_sidebar_data(user, merge_request: merge_request) }
it 'returns hash of assignee data' do
is_expected.to eql({
avatar_url: user.avatar_url,
name: user.name,
username: user.username
})
end
context 'with merge_request' do
let(:merge_request) { build_stubbed(:merge_request) }
where(can_merge: [true, false])
with_them do
before do
allow(merge_request).to receive(:can_be_merged_by?).and_return(can_merge)
end
it { is_expected.to include({ can_merge: can_merge })}
end
end
end
describe '#reviewer_sidebar_data' do
let(:user) { create(:user) }
subject { helper.reviewer_sidebar_data(user, merge_request: merge_request) }
context 'without merge_request' do
let(:merge_request) { nil }
it 'returns hash of reviewer data' do
is_expected.to eql({
avatar_url: user.avatar_url,
name: user.name,
username: user.username
})
end
end
context 'with merge_request' do
let(:merge_request) { build(:merge_request) }
where(can_merge: [true, false])
with_them do
before do
allow(merge_request).to receive(:can_be_merged_by?).and_return(can_merge)
end
it { is_expected.to include({ can_merge: can_merge })}
end
end
end
describe '#issuable_squash_option?' do
using RSpec::Parameterized::TableSyntax
where(:issuable_persisted, :squash, :squash_enabled_by_default, :expectation) do
true | true | true | true
true | false | true | false
false | false | false | false
false | false | true | true
false | true | false | false
false | true | true | true
end
with_them do
it 'returns the correct value' do
project = double(
squash_enabled_by_default?: squash_enabled_by_default
)
issuable = double(persisted?: issuable_persisted, squash: squash)
expect(helper.issuable_squash_option?(issuable, project)).to eq(expectation)
end
end
end
describe '#sidebar_milestone_tooltip_label' do
it 'escapes HTML in the milestone title' do
milestone = build(:milestone, title: '<img onerror=alert(1)>')
expect(helper.sidebar_milestone_tooltip_label(milestone)).to eq('<img onerror=alert(1)><br/>Milestone')
end
end
describe '#serialize_issuable' do
context 'when it is a merge request' do
let(:merge_request) { build(:merge_request) }
let(:user) { build(:user) }
before do
allow(helper).to receive(:current_user) { user }
end
it 'has suggest_pipeline experiment enabled' do
allow(helper).to receive(:experiment_enabled?).with(:suggest_pipeline) { true }
expect_next_instance_of(MergeRequestSerializer) do |serializer|
expect(serializer).to receive(:represent).with(merge_request, { serializer: 'widget', experiment_enabled: :suggest_pipeline })
end
helper.serialize_issuable(merge_request, serializer: 'widget')
end
it 'suggest_pipeline experiment disabled' do
allow(helper).to receive(:experiment_enabled?).with(:suggest_pipeline) { false }
expect_next_instance_of(MergeRequestSerializer) do |serializer|
expect(serializer).to receive(:represent).with(merge_request, { serializer: 'widget' })
end
helper.serialize_issuable(merge_request, serializer: 'widget')
end
end
end
end
| 30.886305 | 141 | 0.646281 |
01fe1eaa2bf058507d2c023c08afd8b17a22918d | 343 | module UsersHelper
  # Returns the Gravatar image for the given user.
  def gravatar_for(user, options = { size: 80 })
size = options[:size]
gravatar_id = Digest::MD5::hexdigest(user.email.downcase)
gravatar_url = "https://secure.gravatar.com/avatar/#{gravatar_id}?s=#{size}"
image_tag(gravatar_url, alt: user.name, class: "gravatar")
end
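  # Typical view usage (sketch), assuming the user responds to #email and #name:
  #
  #   <%= gravatar_for @user %>            <%# default 80px %>
  #   <%= gravatar_for @user, size: 50 %>  <%# explicit size %>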
end
| 31.181818 | 80 | 0.71137 |
79dc625f25ac0bf8943771325234af10046582e2 | 4,334 | require 'devise'
# Use this hook to configure devise mailer, warden hooks and so forth. The first
# four configuration values can also be set straight in your models.
Devise.setup do |config|
# Configure the e-mail address which will be shown in DeviseMailer.
config.mailer_sender = "[email protected]"
# ==> Configuration for :authenticatable
# Invoke `rake secret` and use the printed value to setup a pepper to generate
# the encrypted password. By default no pepper is used.
# config.pepper = "rake secret output"
# Configure how many times you want the password is reencrypted. Default is 10.
# config.stretches = 10
# Define which will be the encryption algorithm. Supported algorithms are :sha1
# (default), :sha512 and :bcrypt. Devise also supports encryptors from others
# authentication tools as :clearance_sha1, :authlogic_sha512 (then you should set
# stretches above to 20 for default behavior) and :restful_authentication_sha1
# (then you should set stretches to 10, and copy REST_AUTH_SITE_KEY to pepper)
# config.encryptor = :sha1
  # Configure which keys are used when authenticating a user. By default it is
  # just :email. You can configure it to use [:username, :subdomain], so for
  # authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# config.authentication_keys = [ :email ]
# ==> Configuration for :confirmable
  # The time you want to give your user to confirm his account. During this time
# he will be able to access your application without confirming. Default is nil.
# config.confirm_within = 2.days
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again.
# config.timeout_in = 10.minutes
# ==> Configuration for :lockable
# Number of authentication tries before locking an account.
# config.maximum_attempts = 20
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
  # :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = enables both strategies
# config.unlock_strategy = :both
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# ==> General configuration
# Load and configure the ORM. Supports :active_record (default), :mongo_mapper
# (requires mongo_ext installed) and :data_mapper (experimental).
# require 'devise/orm/mongo_mapper'
# config.orm = :mongo_mapper
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "sessions/users/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = true
# By default, devise detects the role accessed based on the url. So whenever
# accessing "/users/sign_in", it knows you are accessing an User. This makes
# routes as "/sign_in" not possible, unless you tell Devise to use the default
# scope, setting true below.
# config.use_default_scope = true
# Configure the default scope used by Devise. By default it's the first devise
# role declared in your routes.
# config.default_scope = :user
# If you want to use other strategies, that are not (yet) supported by Devise,
# you can configure them inside the config.warden block. The example below
# allows you to setup OAuth, using http://github.com/roman/warden_oauth
#
# config.warden do |manager|
# manager.oauth(:twitter) do |twitter|
# twitter.consumer_secret = <YOUR CONSUMER SECRET>
# twitter.consumer_key = <YOUR CONSUMER KEY>
# twitter.options :site => 'http://twitter.com'
# end
# manager.default_strategies.unshift :twitter_oauth
# end
# Configure default_url_options if you are using dynamic segments in :path_prefix
# for devise_for.
#
# config.default_url_options do
# { :locale => I18n.locale }
# end
end
| 43.777778 | 83 | 0.736502 |
870f204db6cad21bb0459520c0bec55d06bd9b96 | 1,399 | # pagarme_core_api
#
# This file was automatically generated by APIMATIC v2.0
# ( https://apimatic.io ).
module PagarmeCoreApi
# Response object for listing subscription cycles
class ListCyclesResponse < BaseModel
# The subscription cycles objects
# @return [List of GetPeriodResponse]
attr_accessor :data
# Paging object
# @return [PagingResponse]
attr_accessor :paging
# A mapping from model property names to API property names.
def self.names
@_hash = {} if @_hash.nil?
@_hash['data'] = 'data'
@_hash['paging'] = 'paging'
@_hash
end
def initialize(data = nil,
paging = nil)
@data = data
@paging = paging
end
# Creates an instance of the object from a hash.
def self.from_hash(hash)
return nil unless hash
# Extract variables from the hash.
# Parameter is an array, so we need to iterate through it
data = nil
unless hash['data'].nil?
data = []
hash['data'].each do |structure|
data << (GetPeriodResponse.from_hash(structure) if structure)
end
end
paging = PagingResponse.from_hash(hash['paging']) if hash['paging']
# Create object from extracted values.
ListCyclesResponse.new(data,
paging)
end
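    # Example (sketch, hypothetical payload): nested hashes are delegated to
    # GetPeriodResponse.from_hash / PagingResponse.from_hash, so only the outer keys
    # are shown here.
    #
    #   cycles = ListCyclesResponse.from_hash('data' => [], 'paging' => nil)
    #   cycles.data   # => []
    #   cycles.paging # => nil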
end
end
| 26.903846 | 74 | 0.598284 |
627bd9e59adb60c984ea6a19f7af3b6fd4a0885d | 86 | require "super_features/version"
module SuperFeatures
# Your code goes here...
end
| 14.333333 | 32 | 0.767442 |
f7c9a3d9a775f71b3a88a3624a8b81b2b76cd217 | 1,988 | require "formula"
class Guile < Formula
desc "GUILE: GNU Ubiquitous Intelligent Language for Extensions"
homepage "https://www.gnu.org/software/guile/"
url "http://ftpmirror.gnu.org/guile/guile-2.0.11.tar.gz"
mirror "https://ftp.gnu.org/gnu/guile/guile-2.0.11.tar.gz"
sha1 "3cdd1c4956414bffadea13e5a1ca08949016a802"
revision 1
bottle do
revision 1
sha1 "67bd9b8050bded7916db3622d7abd896e1376eac" => :yosemite
sha1 "818e7ac90634b60bcbf44509a512b542b0a87bd8" => :mavericks
sha1 "55790b96275804b2e5952b60e1071a318f3b1518" => :mountain_lion
end
head do
url "http://git.sv.gnu.org/r/guile.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "gettext" => :build
end
depends_on "pkg-config" => :build
depends_on "libtool" => :run
depends_on "libffi"
depends_on "libunistring"
depends_on "bdw-gc"
depends_on "gmp"
depends_on "readline"
fails_with :llvm do
build 2336
cause "Segfaults during compilation"
end
fails_with :clang do
build 211
cause "Segfaults during compilation"
end
def install
if build.head?
inreplace "autogen.sh", "libtoolize", "glibtoolize"
system "./autogen.sh"
end
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-libreadline-prefix=#{Formula["readline"].opt_prefix}",
"--with-libgmp-prefix=#{Formula["gmp"].opt_prefix}"
system "make install"
# A really messed up workaround required on OS X --mkhl
Pathname.glob("#{lib}/*.dylib") do |dylib|
lib.install_symlink dylib.basename => "#{dylib.basename(".dylib")}.so"
end
(share/"gdb/auto-load").install Dir["#{lib}/*-gdb.scm"]
end
test do
hello = testpath/"hello.scm"
hello.write <<-EOS.undent
(display "Hello World")
(newline)
EOS
ENV["GUILE_AUTO_COMPILE"] = "0"
system bin/"guile", hello
end
end
| 26.157895 | 88 | 0.652918 |
185a10a250c9083af11c29ff62c5c34fa11334e7 | 2,277 | # frozen_string_literal: true
module API
module Helpers
module IssuesHelpers
extend Grape::API::Helpers
params :negatable_issue_filter_params_ee do
end
params :optional_issue_params_ee do
end
params :issues_stats_params_ee do
end
def self.update_params_at_least_one_of
[
:assignee_id,
:assignee_ids,
:confidential,
:created_at,
:description,
:discussion_locked,
:due_date,
:labels,
:add_labels,
:remove_labels,
:milestone_id,
:state_event,
:title,
:issue_type
]
end
def self.sort_options
%w[
created_at
due_date
label_priority
milestone_due
popularity
priority
relative_position
title
updated_at
]
end
def issue_finder(args = {})
args = declared_params.merge(args)
args.delete(:id)
args[:not] ||= {}
args[:milestone_title] ||= args.delete(:milestone)
args[:milestone_wildcard_id] ||= args.delete(:milestone_id)
args[:not][:milestone_title] ||= args[:not].delete(:milestone)
args[:not][:milestone_wildcard_id] ||= args[:not].delete(:milestone_id)
args[:label_name] ||= args.delete(:labels)
args[:not][:label_name] ||= args[:not].delete(:labels)
args[:scope] = args[:scope].underscore if args[:scope]
args[:sort] = "#{args[:order_by]}_#{args[:sort]}"
args[:issue_types] ||= args.delete(:issue_type)
IssuesFinder.new(current_user, args)
end
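      # Sketch of the renaming done above (hypothetical declared_params; IssuesFinder
      # itself lives in the surrounding application). Given
      #   { labels: 'bug,urgent', milestone: 'v1.0', order_by: 'created_at', sort: 'desc', scope: 'assignedToMe' }
      # the finder receives roughly
      #   { label_name: 'bug,urgent', milestone_title: 'v1.0', sort: 'created_at_desc',
      #     scope: 'assigned_to_me', not: {}, ... }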
def find_issues(args = {})
finder = issue_finder(args)
finder.execute.with_api_entity_associations
end
def issues_statistics(args = {})
finder = issue_finder(args)
counter = Gitlab::IssuablesCountForState.new(finder)
{
statistics: {
counts: {
all: counter[:all],
closed: counter[:closed],
opened: counter[:opened]
}
}
}
end
end
end
end
API::Helpers::IssuesHelpers.prepend_mod_with('API::Helpers::IssuesHelpers')
| 24.75 | 79 | 0.557312 |
f7ab2c6cd983ef493e31585556ad5259aa09e4c2 | 886 | module Prawn::SVG::Extensions
module AdditionalGradientTransforms
def gradient_coordinates(gradient)
# As of Prawn 2.2.0, apply_transformations is used as purely a boolean.
#
# Here we're using it to optionally pass in a 6-tuple transformation matrix that gets applied to the
# gradient. This should be added to Prawn properly, and then this monkey patch will not be necessary.
if gradient.apply_transformations.is_a?(Array)
x1, y1, x2, y2, transformation = super
a, b, c, d, e, f = transformation
na, nb, nc, nd, ne, nf = gradient.apply_transformations
matrix = Matrix[[a, c, e], [b, d, f], [0, 0, 1]] * Matrix[[na, nc, ne], [nb, nd, nf], [0, 0, 1]]
new_transformation = matrix.to_a[0..1].transpose.flatten
[x1, y1, x2, y2, new_transformation]
else
super
end
end
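    # The 6-tuple [a, b, c, d, e, f] above is the usual PDF/SVG affine layout: it maps
    # to the matrix [[a, c, e], [b, d, f], [0, 0, 1]] built with Ruby's stdlib Matrix
    # (require 'matrix'), and to_a[0..1].transpose.flatten converts back to the same
    # tuple order. Sanity check (sketch): composing with the identity tuple
    # [1, 0, 0, 1, 0, 0] returns the original transformation unchanged.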
end
end
| 36.916667 | 108 | 0.639955 |
0172d6bbba38a2d60c503f1174e18ba27527cadd | 342 | # frozen_string_literal: true
class AddTextLimitToRequirementsDescription < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :requirements, :description, 10_000
end
def down
remove_text_limit :requirements, :description
end
end
| 19 | 74 | 0.780702 |
62d85b8cd4be26917e755ee95430827ca36d60d7 | 1,657 | require 'rails_helper'
RSpec.describe 'split decision flow' do
let(:split_page) { app.admin_split_show_page }
let(:split_decision_page) { app.admin_split_decision_new_page }
let(:user_id_values) { %w(4 8 15 16 23) }
let!(:split) { FactoryBot.create :split }
let!(:id_type_user_ids) { FactoryBot.create :identifier_type, name: "user_ids" }
let!(:existing_identifiers) do
user_id_values.map { |user_id| FactoryBot.create(:identifier, value: user_id, identifier_type: id_type_user_ids) }
end
let!(:existing_assignments) do
existing_identifiers.map do |identifier|
FactoryBot.create(:assignment, visitor: identifier.visitor, split: split, variant: "hammer_time")
end
end
before do
login
end
it 'allows an admin to decide a split' do
split_page.load split_id: split.id
expect(split_page).to be_displayed
expect(Assignment.where(variant: "hammer_time").count).to eq 5
expect(Assignment.where(variant: "touch_this").count).to eq 0
split_page.decide_split.click
expect(split_decision_page).to be_displayed
split_decision_page.create_form.tap do |form|
form.variant_options.select 'touch_this'
form.submit_button.click
end
expect(split_page).to be_displayed
expect(split_page).to have_content "Queued decision"
Delayed::Worker.new.work_off
split_page.load split_id: split.id
expect(split_page).to be_displayed
expect(Assignment.where(variant: "hammer_time").count).to eq 0
expect(Assignment.where(variant: "touch_this").count).to eq 5
split.reload
expect(split.registry).to eq("hammer_time" => 0, "touch_this" => 100)
end
end
| 31.264151 | 118 | 0.730839 |
017853a1faff6c32235f00362fbb0e10a3df73f2 | 6,235 | # =================================================================
# Copyright 2018 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =================================================================
#
# Cookbook Name::workflow
# Recipe::attributes
#
# <> The attributes file will define all attributes that may be overwritten by Chef Attribute Precedence
# <> The attributes defined in this file will be used internally
#
#
# <> Attributes defined for Business Automation Workflow installation
#
# temp folder, which used to store some tmp files
default['ibm']['temp_dir'] = '/tmp/ibm_cloud'
# If archives are secured, the credentials need to be provided; hidden for now.
# TODO: keep them internal, expose them later if needed.
default['ibm']['sw_repo_user'] = 'repouser'
default['ibm']['sw_repo_password'] = ''
# The prerequisite packages, which need to be installed ahead of time
force_default['workflow']['prereq_packages'] = []
case node['platform_family']
when 'debian'
case node['kernel']['machine']
when 'x86_64'
# for db2, was and workflow
    # Keep it as-is currently; theoretically there is no risk in allowing it to be edited if really needed. If it is not exposed, it is unclear whether the defaults cover every case.
force_default['workflow']['prereq_packages'] = %w(libxtst6 libgtk2.0-bin libxft2 cpp gcc ksh openssh-server rpm unzip binutils libaio1 libnuma1 libpam0g:i386 libx32stdc++6 nfs-common)
end
end
# Expand directory, unzip the archive files here
force_default['ibm']['expand_area'] = node['ibm']['temp_dir'] + '/expand_area'
# Workflow Edition
force_default['workflow']['edition'] = ''
case node['workflow']['features']
when 'WorkflowEnterprise.Production', 'WorkflowEnterprise.NonProduction'
force_default['workflow']['edition'] = 'Enterprise'
when 'EnterpriseServiceBus.Production', 'EnterpriseServiceBus.NonProduction'
force_default['workflow']['edition'] = 'ESB'
when 'WorkflowExpress.Production', 'WorkflowExpress.NonProduction'
force_default['workflow']['edition'] = 'Exp'
end
# Constants used to download & extract installation images; the archive list depends on the OS and workflow version
#
# 1. BAW_18_0_0_1_Linux_x86_1_of_3.tar.gz
# 2. BAW_18_0_0_1_Linux_x86_2_of_3.tar.gz
# 3. BAW_18_0_0_1_Linux_x86_3_of_3.tar.gz
#
force_override['workflow']['version'] = node['workflow']['version'].gsub('.', '_')
force_override['workflow']['archive_names'] = {
'was' => {
'filename' => "BAW_#{node['workflow']['version']}_Linux_x86_1_of_3.tar.gz" },
'workflow' => {
'filename' => "BAW_#{node['workflow']['version']}_Linux_x86_2_of_3.tar.gz" },
'db2' => {
'filename' => "BAW_#{node['workflow']['version']}_Linux_x86_3_of_3.tar.gz" }
}
# The runas user/group while doing 'execute'
# For admin mode, will use root/root as user and group name, same rule as was
case node['workflow']['install_mode']
when 'admin'
force_default['workflow']['runas_user'] = 'root'
force_default['workflow']['runas_group'] = 'root'
else
force_default['workflow']['runas_user'] = node['workflow']['os_users']['workflow']['name']
force_default['workflow']['runas_group'] = node['workflow']['os_users']['workflow']['gid']
end
# IM installation directory
force_default['workflow']['im_install_dir'] = ''
case node['workflow']['install_mode']
when 'admin'
force_default['workflow']['im_install_dir'] = '/opt/IBM/InstallationManager'
when 'nonAdmin'
force_default['workflow']['im_install_dir'] = '/home/' + node['workflow']['os_users']['workflow']['name'] + '/IBM/InstallationManager'
when 'group'
force_default['workflow']['im_install_dir'] = '/home/' + node['workflow']['os_users']['workflow']['name'] + '/IBM/InstallationManager_Group'
end
#
# <> Attributes defined for Business Automation Workflow configuration
#
# The name of the SharedDb database.
force_default['workflow']['config']['db2_shareddb_name'] = node['workflow']['config']['db2_cmndb_name']
# The name of the CellOnlyDb database.
force_default['workflow']['config']['db2_cellonlydb_name'] = node['workflow']['config']['db2_cmndb_name']
# For information about the restrictions that pertain to IBM Business Automation Workflow database schema names,
# see the IBM Business Automation Workflow topic "Configuration properties for the BPMConfig command"
# in the IBM Knowledge Center: http://www-01.ibm.com/support/knowledgecenter/SSFPJS/welcome
force_default['workflow']['config']['db2_schema'] = node['workflow']['config']['db_alias_user']
# The database data directory path.
force_default['workflow']['config']['db2_data_dir'] = '/home/' + node['workflow']['config']['db_alias_user'] + '/' + node['workflow']['config']['db_alias_user'] + '/NODE0000'
# The unified local case network shared directory; this attribute exists because the same directory must be used across multiple nodes
default['workflow']['config']['local_case_network_shared_dir'] = '/opt/IBM/Workflow/CaseManagement/properties'
# The local oracle driver directory, used to put oracle jdbc driver
default['workflow']['config']['oracle']['jdbc_driver_path'] = node['workflow']['install_dir'] + '/jdbcdrivers/Oracle'
# The database_type attribute
force_override['workflow']['config']['database_type'] = node['workflow']['config']['database_type'].strip.upcase if !node['workflow']['config']['database_type'].nil?
force_override['workflow']['config']['database_type'] = 'Oracle' if !node['workflow']['config']['database_type'].nil? && 'ORACLE'.eql?(node['workflow']['config']['database_type'].strip.upcase)
# <> Attributes defined for chef-vault
#
# TODO: enhance later to support
default['workflow']['vault']['name'] = node['ibm_internal']['vault']['name']
default['workflow']['vault']['encrypted_id'] = node['ibm_internal']['vault']['item'] | 46.529851 | 192 | 0.713873 |
e9fe147f5b0218c10125a347298eff916e8637c7 | 471 | class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
before_action :set_locale
def set_locale
I18n.locale = params[:locale] || I18n.default_locale
end
# Hijack un authorized error from cancancan
rescue_from CanCan::AccessDenied do |exception|
redirect_to root_url, :alert => exception.message
end
end
| 27.705882 | 56 | 0.762208 |
7908bfa0dd0b16396d9233bf8a9a391c7a02ff10 | 47 | module ActiveID
VERSION = "0.6.1".freeze
end
| 11.75 | 26 | 0.702128 |
616190750bce3d3fbce534abf680d6f408501c22 | 175 | json.data @feed_reviews do |review|
user = review.end_user
json.user_name user.name || user.email
json.comment review.message
json.profile_photo user.profile_photo
end | 29.166667 | 40 | 0.782857 |
1c0dce506576f4d1c9437ae74fe62adc12ef7481 | 1,905 | require_relative 'chromosome'
require_relative 'population'
require 'colorize'
WORD = 'badania'
NUM_GENERATIONS = 20000
CROSSOVER_RATE = 0.7
population = Population.new
population.seed!
first_fitness = population.average_fitness
first_max_fitness = population.max_fitness
1.upto(NUM_GENERATIONS).each do |generation|
offspring = Population.new
while offspring.count <= population.count-12
offspring.chromosomes.uniq!{|ch| ch.genes}
offspring.chromosomes.delete ""
parent1 = population.select
parent2 = population.select
if rand <= CROSSOVER_RATE
child1, child2 = parent1 & parent2
else
child1 = Chromosome.new
child2 = Chromosome.new
end
child1.mutate!
child2.mutate!
if offspring.count.even?
offspring.chromosomes << child1 << child2
else
offspring.chromosomes << [child1, child2].sample
end
end
population_fitness = population.average_fitness
population_max_fitness = population.max_fitness
average_color = population_fitness < first_fitness ? :red : :green
max_color = population_max_fitness < first_max_fitness ? :red : :green
# TERMINAL OUTPUT
system 'clear'
puts "TARGET: #{WORD.colorize(:green)}"
puts "Generation #{generation} - Average: #{population_fitness.round(2).to_s.colorize(average_color)} - Max: #{population_max_fitness.to_s.colorize(max_color)}\n\n"
population.chromosomes.take(10).each_with_index do |ch, i|
puts "Best word #{i+1}: #{ch.to_colored_word}"
puts "Code: #{ch.to_s}\n\n"
end
# FILE OUTPUT
f = File.open("populations/population_#{generation}.txt", 'w+')
f << population.to_file
offspring.chromosomes
population.best_chromosomes(10).each{|ch| offspring.chromosomes << ch }
population = offspring
population.sort!
if population.best_word == WORD
puts "\nFinal population:\n" + population.inspect
break
end
end
| 26.458333 | 166 | 0.71811 |
0840fbd5b0c3a88677eb7138bfbfbfc7d4c1348c | 2,592 | class QuotesApp::Scraper
def self.random_quote
random_quote_page = Nokogiri::HTML(open("https://blog.hubspot.com/sales/famous-quotes")) # Working quotes page without dynamic ads
random_quote_page.css("#hs_cos_wrapper_post_body p").collect do |i|
{:body => i.text.split("\"")[1], :author => i.css("em").text.strip}
end.delete_if {|i| i.values.include? (nil)}.uniq # Array contains a total of 100 qoutes
end
def self.categories_list
categories_list_page = Nokogiri::HTML(open("https://www.brainyquote.com/"))
categories_list_page.css(".homeGridBox #allTopics .bqLn").collect do |i|
{:name => i.text, :link => "https://www.brainyquotes.com#{i.css("a").attribute("href").value}"} # Webpage contains a total of 10 categories. I chose to display 5 categories only.
end.select{ |hash| hash[:name] == "Inspirational" || hash[:name] == "Motivational" || hash[:name] == "Life" || hash[:name] == "Wisdom" || hash[:name] == "Friendship"}
end
def self.category_quote(category_link)
category_quote_page = Nokogiri::HTML(open(category_link)) #category_quote_page = Nokogiri::HTML(open("https://www.brainyquote.com/topics/love-quotes"))
category_quote_page.css("#quotesList .grid-item").collect do |i|
{:body => i.css("a.b-qt").text, :author => i.css("a.bq-aut").text} # Each category webpage contains a total of 60 quotes
end.uniq.delete_if {|quote| quote[:body] == "" }.sample
end
def self.random_authors
random_authors_page = Nokogiri::HTML(open("https://www.brainyquote.com/authors"))
random_authors_page.css(".container .bqLn").collect do |i|
{:name => i.text.gsub("\n",""), :link => "https://www.brainyquote.com#{i.css("a").attribute("href").value}"} # The webpage contains a total of 448 authors
end
end
def self.author_quote(author_link)
author_quote_page = Nokogiri::HTML(open(author_link)) #author_quote_page = Nokogiri::HTML(open("https://www.brainyquote.com//authors/martin-luther-king-jr-quotes"))
author_quote_page.css("#quotesList .grid-item").collect do |i|
{:body => i.css("a.b-qt").text, :author => i.css("a.bq-aut").text} # The webpage for each author contains 60 quotes
end.uniq.delete_if {|quote| quote[:body] == "" }.sample
end
end
| 68.210526 | 259 | 0.596451 |
bbaa0c5fd2356a1456550ab4c6b053e8ea2412ce | 156 | class AddUserIdToJobApplications < ActiveRecord::Migration
def change
add_column :job_applications, :user_id, :string, :foreign_key => true
end
end
| 26 | 73 | 0.775641 |
ab7a25687dfb9ebbc2174b87b374ae31d24cdabe | 90 | require 'sinatra'
get '/' do
erb :welcome
end
get '/about' do
'A little about me.'
end | 12.857143 | 22 | 0.655556 |
629b34b34b1954827ac46a32735410987df71280 | 1,839 | require 'spec_helper'
describe WaxIiif::ImageVariant do
let(:config) {WaxIiif::Config.new}
let(:data) { WaxIiif::ImageRecord.new({
'path' => './spec/data/test.jpg',
'id' => 1})
}
context 'initialization errors' do
it 'raises if the image does not have an ID' do
data.id =nil
expect{WaxIiif::ImageVariant.new(data, config)}.to raise_error(WaxIiif::Error::InvalidImageData)
end
it 'raises if the image has a blank ID' do
data.id = ''
expect{WaxIiif::ImageVariant.new(data, config)}.to raise_error(WaxIiif::Error::InvalidImageData)
end
it 'raises if the image is not a valid image file' do
data.path = './spec/data/test.csv'
expect{WaxIiif::ImageVariant.new(data, config)}.to raise_error(WaxIiif::Error::InvalidImageData)
end
end
context 'basic data' do
before(:all) do
data = WaxIiif::ImageRecord.new({
'path' => './spec/data/test.jpg',
'id' => 1
})
config = WaxIiif::Config.new
@img = WaxIiif::ImageVariant.new(data, config, 100)
end
# it 'has a uri' do
# expect(@img.uri).to eq("#{@img.generate_image_id(1)}/full/100,/0/default.jpg")
# end
# it 'has an id' do
# expect(@img.id).to eq(@img.generate_image_id(1))
# end
# it 'has a width' do
# expect(@img.width).to eq(100)
# end
# it 'has a mime type' do
# expect(@img.mime_type).to eq('image/jpeg')
# end
end
context 'Full Image' do
before(:all) do
data = WaxIiif::ImageRecord.new({
'path' => './spec/data/test.jpg',
'id' => 1,
'page_number' => 1
})
config = WaxIiif::Config.new
@img = WaxIiif::FullImage.new(data, config)
end
# it 'has the default filestring' do
# expect(@img.uri).to include 'full/full'
# end
end
end
| 27.447761 | 102 | 0.599239 |
39af7b85e191e063f699a95a41037b2ee0223af1 | 9,628 | # encoding: utf-8
require File.expand_path(File.dirname(__FILE__)) + '/../test_helper'
class HolidaysTests < Test::Unit::TestCase
def setup
@date = Date.civil(2008,1,1)
end
def test_on
h = Holidays.on(Date.civil(2008,9,1), :ca)
assert_equal 'Labour Day', h[0][:name]
holidays = Holidays.on(Date.civil(2008,7,4), :ca)
assert_equal 0, holidays.length
end
def test_requires_valid_regions
assert_raises Holidays::InvalidRegion do
Holidays.on(Date.civil(2008,1,1), :xx)
end
assert_raises Holidays::InvalidRegion do
Holidays.on(Date.civil(2008,1,1), [:ca,:xx])
end
assert_raises Holidays::InvalidRegion do
Holidays.between(Date.civil(2008,1,1), Date.civil(2008,12,31), [:ca,:xx])
end
end
def test_requires_valid_regions_holiday_next
assert_raises Holidays::InvalidRegion do
Holidays.next_holidays(1, [:xx], Date.civil(2008,1,1))
end
assert_raises Holidays::InvalidRegion do
Holidays.next_holidays(1, [:ca,:xx], Date.civil(2008,1,1))
Holidays.on(Date.civil(2008,1,1), [:ca,:xx])
end
assert_raises Holidays::InvalidRegion do
Holidays.next_holidays(1, [:ca,:xx])
end
end
def test_region_params
holidays = Holidays.on(@date, :ca)
assert_equal 1, holidays.length
holidays = Holidays.on(@date, [:ca_bc,:ca])
assert_equal 1, holidays.length
end
def test_observed_dates
# Should fall on Tuesday the 1st
assert_equal 1, Holidays.on(Date.civil(2008,7,1), :ca, :observed).length
# Should fall on Monday the 2nd
assert_equal 1, Holidays.on(Date.civil(2007,7,2), :ca, :observed).length
end
def test_any_region
# Should return nothing(Victoria Day is not celebrated :ca wide anymore)
holidays = Holidays.between(Date.civil(2008,5,1), Date.civil(2008,5,31), :ca)
assert_equal 0, holidays.length
# Should return Victoria Day and National Patriotes Day.
#
# Should be 2 in the CA region but other regional files are loaded during the
# unit tests add to the :any count.
holidays = Holidays.between(Date.civil(2008,5,1), Date.civil(2008,5,31), [:any])
assert holidays.length >= 2
# Test blank region
holidays = Holidays.between(Date.civil(2008,5,1), Date.civil(2008,5,31))
assert holidays.length >= 3
end
def test_any_region_holiday_next
# Should return Victoria Day.
holidays = Holidays.next_holidays(1, [:ca], Date.civil(2008,5,1))
assert_equal 1, holidays.length
assert_equal ['2008-07-01','Canada Day'] , [holidays.first[:date].to_s, holidays.first[:name].to_s]
# Should return 2 holidays.
holidays = Holidays.next_holidays(2, [:ca], Date.civil(2008,5,1))
assert_equal 2, holidays.length
# Should return 1 holiday in July
holidays = Holidays.next_holidays(1, [:jp], Date.civil(2016, 5, 22))
assert_equal ['2016-07-18','海の日'] , [holidays.first[:date].to_s, holidays.first[:name].to_s]
# Must Region.If there is not region, raise ArgumentError.
assert_raises ArgumentError do
Holidays.next_holidays(2, '', Date.civil(2008,5,1))
end
# Options should be present.If they are empty, raise ArgumentError.
assert_raises ArgumentError do
Holidays.next_holidays(2, [], Date.civil(2008,5,1))
end
# Options should be Array.If they are not Array, raise ArgumentError.
assert_raises ArgumentError do
Holidays.next_holidays(2, :ca, Date.civil(2008,5,1))
end
end
def test_year_holidays
# Should return 7 holidays from February 23 to December 31
holidays = Holidays.year_holidays([:ca_on], Date.civil(2016, 2, 23))
assert_equal 7, holidays.length
# Must have options (Regions)
assert_raises ArgumentError do
Holidays.year_holidays([], Date.civil(2016, 2, 23))
end
# Options must be in the form of an array.
assert_raises ArgumentError do
Holidays.year_holidays(:ca_on, Date.civil(2016, 2, 23))
end
end
def test_year_holidays_with_specified_year
# Should return all 11 holidays for 2016 in Ontario, Canada
holidays = Holidays.year_holidays([:ca_on], Date.civil(2016, 1, 1))
assert_equal 9, holidays.length
# Should return all 5 holidays for 2016 in Australia
holidays = Holidays.year_holidays([:au], Date.civil(2016, 1, 1))
assert_equal 5, holidays.length
end
def test_year_holidays_without_specified_year
# Gets holidays for current year from today's date
holidays = Holidays.year_holidays([:de])
assert_equal holidays.first[:date].year, Date.today.year
end
def test_year_holidays_empty
# if remain holidays is nothing , method will return empty.
holidays = Holidays.year_holidays([:ca_on], Date.civil(2016, 12, 27))
assert_empty holidays
end
def test_year_holidays_feb_29_on_non_leap_year
assert_raises ArgumentError do
Holidays.year_holidays([:ca_on], Date.civil(2015, 2, 29))
end
assert_raises ArgumentError do
Holidays.year_holidays([:ca_on], Date.civil(2019, 2, 29))
end
assert_raises ArgumentError do
Holidays.year_holidays([:ca_on], Date.civil(2021, 2, 29))
end
assert_raises ArgumentError do
Holidays.year_holidays([:us], Date.civil(2023, 2, 29))
end
assert_raises ArgumentError do
Holidays.year_holidays([:ca_on], Date.civil(2025, 2, 29))
end
end
def test_year_holidays_random_years
# Should be 1 less holiday, as Family day didn't exist in Ontario in 1990
holidays = Holidays.year_holidays([:ca_on], Date.civil(1990, 1, 1))
assert_equal 8, holidays.length
# Family day still didn't exist in 2000
holidays = Holidays.year_holidays([:ca_on], Date.civil(2000, 1, 1))
assert_equal 8, holidays.length
holidays = Holidays.year_holidays([:ca_on], Date.civil(2020, 1, 1))
assert_equal 9, holidays.length
holidays = Holidays.year_holidays([:ca_on], Date.civil(2050, 1, 1))
assert_equal 9, holidays.length
holidays = Holidays.year_holidays([:jp], Date.civil(2070, 1, 1))
assert_equal 19, holidays.length
end
def test_sub_regions
# Should return nothing (Victoria Day is no longer :ca wide)
holidays = Holidays.between(Date.civil(2008,5,1), Date.civil(2008,5,31), :ca)
assert_equal 0, holidays.length
## Should return National Patriotes Day.
holidays = Holidays.between(Date.civil(2008,5,1), Date.civil(2008,5,31), :ca_qc)
assert_equal 1, holidays.length
# Should return Victoria Day and National Patriotes Day.
holidays = Holidays.between(Date.civil(2008,5,1), Date.civil(2008,5,31), :ca_)
assert_equal 3, holidays.length
end
def test_sub_regions_holiday_next
# Should return Victoria Day.
holidays = Holidays.next_holidays(2, [:ca_bc], Date.civil(2008,5,1))
assert_equal 2, holidays.length
assert_equal ['2008-05-19','Victoria Day'] , [holidays.first[:date].to_s, holidays.first[:name].to_s]
# Should return Victoria Da and National Patriotes Day.
holidays = Holidays.next_holidays(2, [:ca_qc], Date.civil(2008,5,1))
assert_equal 2, holidays.length
assert_equal ['2008-06-24','Fête Nationale'] , [holidays.last[:date].to_s, holidays.last[:name].to_s]
# Should return Victoria Day and National Patriotes Day.
holidays = Holidays.next_holidays(2, [:ca_], Date.civil(2008,5,1))
assert_equal 2, holidays.length
# Aparently something in jruby doesn't sort the same way as other rubies so....we'll just do it ourselves so
# we don't flap.
sorted_holidays = holidays.sort_by { |h| h[:name] }
assert_equal ['2008-05-19','National Patriotes Day'] , [sorted_holidays.first[:date].to_s, sorted_holidays.first[:name].to_s]
assert_equal ['2008-05-19','Victoria Day'] , [sorted_holidays.last[:date].to_s, sorted_holidays.last[:name].to_s]
end
def test_easter_lambda
[Date.civil(1800,4,11), Date.civil(1899,3,31), Date.civil(1900,4,13),
Date.civil(2008,3,21), Date.civil(2035,3,23)].each do |date|
assert_equal 'Good Friday', Holidays.on(date, :ca)[0][:name]
end
[Date.civil(1800,4,14), Date.civil(1899,4,3), Date.civil(1900,4,16),
Date.civil(2008,3,24), Date.civil(2035,3,26)].each do |date|
assert_equal 'Easter Monday', Holidays.on(date, :ca_qc, :informal)[0][:name]
end
end
def test_sorting
(1..10).each{|year|
(1..12).each{|month|
holidays = Holidays.between(Date.civil(year, month, 1), Date.civil(year, month, 28), :gb_)
holidays.each_with_index{|holiday, index|
assert holiday[:date] >= holidays[index - 1][:date] if index > 0
}
}
}
end
def test_caching
good_friday = Date.civil(2008, 3, 21)
easter_monday = Date.civil(2008, 3, 24)
cache_end_date = Date.civil(2008, 3, 25)
Holidays.cache_between(good_friday, cache_end_date, :ca, :informal)
# Test that correct results are returned outside the
# cache range, and with no caching
assert_equal 1, Holidays.on(Date.civil(2035, 1, 1), :ca, :informal).length
assert_equal 1, Holidays.on(Date.civil(2035, 1, 1), :us).length
# Make sure cache is hit for all successive calls
Holidays::Factory::Finder.expects(:between).never
# Test that cache has been set and it returns the same as before
assert_equal 1, Holidays.on(good_friday, :ca, :informal).length
assert_equal 1, Holidays.on(easter_monday, :ca, :informal).length
assert_equal 1, easter_monday.holidays(:ca, :informal).length
assert_equal true, easter_monday.holiday?(:ca, :informal)
end
def test_load_all
Holidays.load_all
assert_equal 242, Holidays.available_regions.count
end
end
| 35.397059 | 129 | 0.695368 |
0180002994edf8e0ffa1008a36e07fb9ba3ecdb8 | 2,067 | require 'rails_helper'
RSpec.describe Api::V0::Users::ToursController, :type => :controller, skip: true do
render_views
describe 'GET index' do
let!(:user) { FactoryBot.create(:pro_user) }
let!(:tour1) { FactoryBot.create(:tour, user: user, updated_at: Date.parse("10/10/2010")) }
let!(:tour2) { FactoryBot.create(:tour, user: user, updated_at: Date.parse("09/10/2010")) }
let!(:other_tours) { FactoryBot.create(:tour) }
context "without pagination params" do
before { get 'index', params: { user_id: user.id, token: user.token, format: :json } }
it { expect(response.status).to eq 200 }
it "responds with tours" do
Timecop.freeze(DateTime.parse("10/10/2010").at_beginning_of_day)
get 'index', params: { user_id: user.id, token: user.token, format: :json }
res = JSON.parse(response.body)
expect(res).to eq({"tours"=>[
{"id"=>tour1.id,
"tour_type"=>"medical",
"status"=>"ongoing",
"vehicle_type"=>"feet",
"distance"=>0,
"start_time"=>nil,
"end_time"=>nil,
"organization_name"=>tour1.user.organization.name,
"organization_description"=>"Association description",
"user_id"=>tour1.user_id,
"tour_points"=>[]},
{"id"=>tour2.id,
"tour_type"=>"medical",
"status"=>"ongoing",
"vehicle_type"=>"feet",
"distance"=>0,
"start_time"=>nil,
"end_time"=>nil,
"organization_name"=>tour2.user.organization.name,
"organization_description"=>"Association description",
"user_id"=>tour2.user_id,
"tour_points"=>[]}]})
end
end
context "with pagination params" do
before { get 'index', params: { user_id: user.id, token: user.token, format: :json, page: 1, per: 1 } }
it { expect(response.status).to eq 200 }
it { expect(JSON.parse(response.body)["tours"].count).to eq 1 }
end
end
end
| 37.581818 | 109 | 0.570876 |
33ef4509bb581ef4ec9a59f93061a476c39530e3 | 126 | require 'test_helper'
class SpellingTestTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.75 | 48 | 0.714286 |
e8ca54387fd715807aae7007b082febbd9609641 | 354 | module Newsletterable
module OrmAdapters
class ActiveModel < Adapter
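      # Finds the subscription matching +query+; when +initialize+ is true and no
      # record exists, builds one via first_or_initialize and marks it pending.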
def query_subscription(query, initialize = false)
scope = subscriptions_model.where(query)
if initialize
scope.first_or_initialize do |s|
s.pending!
end
else
scope.first
end
end
def save(record)
record.save!
end
end
end
end
| 16.090909 | 52 | 0.677966 |
21a45042fa545668c3ec79ad6a687321312b875a | 69,858 | require "cases/helper"
require 'models/developer'
require 'models/computer'
require 'models/project'
require 'models/company'
require 'models/contract'
require 'models/topic'
require 'models/reply'
require 'models/category'
require 'models/image'
require 'models/post'
require 'models/author'
require 'models/essay'
require 'models/comment'
require 'models/person'
require 'models/reader'
require 'models/tagging'
require 'models/tag'
require 'models/invoice'
require 'models/line_item'
require 'models/car'
require 'models/bulb'
require 'models/engine'
require 'models/categorization'
require 'models/minivan'
require 'models/speedometer'
require 'models/reference'
require 'models/job'
require 'models/college'
require 'models/student'
require 'models/pirate'
require 'models/ship'
require 'models/tyre'
require 'models/subscriber'
require 'models/subscription'
class HasManyAssociationsTestForReorderWithJoinDependency < ActiveRecord::TestCase
fixtures :authors, :posts, :comments
def test_should_generate_valid_sql
author = authors(:david)
# this can fail on adapters which require ORDER BY expressions to be included in the SELECT expression
# if the reorder clauses are not correctly handled
assert author.posts_with_comments_sorted_by_comment_id.where('comments.id > 0').reorder('posts.comments_count DESC', 'posts.tags_count DESC').last
end
end
class HasManyAssociationsTestPrimaryKeys < ActiveRecord::TestCase
fixtures :authors, :essays, :subscribers, :subscriptions, :people
def test_custom_primary_key_on_new_record_should_fetch_with_query
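    # The association is keyed on the subscriber's nick, so even an unsaved
    # record can load its subscriptions with a single query.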
subscriber = Subscriber.new(nick: 'webster132')
assert !subscriber.subscriptions.loaded?
assert_queries 1 do
assert_equal 2, subscriber.subscriptions.size
end
assert_equal subscriber.subscriptions, Subscription.where(subscriber_id: 'webster132')
end
def test_association_primary_key_on_new_record_should_fetch_with_query
author = Author.new(:name => "David")
assert !author.essays.loaded?
assert_queries 1 do
assert_equal 1, author.essays.size
end
assert_equal author.essays, Essay.where(writer_id: "David")
end
def test_has_many_custom_primary_key
david = authors(:david)
assert_equal david.essays, Essay.where(writer_id: "David")
end
def test_has_many_assignment_with_custom_primary_key
david = people(:david)
assert_equal ["A Modest Proposal"], david.essays.map(&:name)
david.essays = [Essay.create!(name: "Remote Work" )]
assert_equal ["Remote Work"], david.essays.map(&:name)
end
def test_blank_custom_primary_key_on_new_record_should_not_run_queries
author = Author.new
assert !author.essays.loaded?
assert_queries 0 do
assert_equal 0, author.essays.size
end
end
end
class HasManyAssociationsTest < ActiveRecord::TestCase
fixtures :accounts, :categories, :companies, :developers, :projects,
:developers_projects, :topics, :authors, :comments,
:posts, :readers, :taggings, :cars, :jobs, :tags,
:categorizations
def setup
Client.destroyed_client_ids.clear
end
def test_sti_subselect_count
tag = Tag.first
len = Post.tagged_with(tag.id).limit(10).size
assert_operator len, :>, 0
end
def test_anonymous_has_many
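    # Build throwaway anonymous model classes to verify has_many works without
    # named constants.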
developer = Class.new(ActiveRecord::Base) {
self.table_name = 'developers'
dev = self
developer_project = Class.new(ActiveRecord::Base) {
self.table_name = 'developers_projects'
belongs_to :developer, :class => dev
}
has_many :developer_projects, :class => developer_project, :foreign_key => 'developer_id'
}
dev = developer.first
named = Developer.find(dev.id)
assert_operator dev.developer_projects.count, :>, 0
assert_equal named.projects.map(&:id).sort,
dev.developer_projects.map(&:project_id).sort
end
def test_default_scope_on_relations_is_not_cached
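    # The default scope below increments a counter every time it is evaluated,
    # so two consecutive loads must generate different SQL rather than reuse a
    # cached relation.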
counter = 0
posts = Class.new(ActiveRecord::Base) {
self.table_name = 'posts'
self.inheritance_column = 'not_there'
post = self
comments = Class.new(ActiveRecord::Base) {
self.table_name = 'comments'
self.inheritance_column = 'not_there'
belongs_to :post, :class => post
default_scope -> {
counter += 1
where("id = :inc", :inc => counter)
}
}
has_many :comments, :class => comments, :foreign_key => 'post_id'
}
assert_equal 0, counter
post = posts.first
assert_equal 0, counter
sql = capture_sql { post.comments.to_a }
post.comments.reset
assert_not_equal sql, capture_sql { post.comments.to_a }
end
def test_has_many_build_with_options
college = College.create(name: 'UFMT')
Student.create(active: true, college_id: college.id, name: 'Sarah')
assert_equal college.students, Student.where(active: true, college_id: college.id)
end
def test_create_from_association_should_respect_default_scope
car = Car.create(:name => 'honda')
assert_equal 'honda', car.name
bulb = Bulb.create
assert_equal 'defaulty', bulb.name
bulb = car.bulbs.build
assert_equal 'defaulty', bulb.name
bulb = car.bulbs.create
assert_equal 'defaulty', bulb.name
bulb = car.bulbs.create(:name => 'exotic')
assert_equal 'exotic', bulb.name
end
def test_build_from_association_should_respect_scope
author = Author.new
post = author.thinking_posts.build
assert_equal 'So I was thinking', post.title
end
def test_create_from_association_with_nil_values_should_work
car = Car.create(:name => 'honda')
bulb = car.bulbs.new(nil)
assert_equal 'defaulty', bulb.name
bulb = car.bulbs.build(nil)
assert_equal 'defaulty', bulb.name
bulb = car.bulbs.create(nil)
assert_equal 'defaulty', bulb.name
end
def test_do_not_call_callbacks_for_delete_all
car = Car.create(:name => 'honda')
car.funky_bulbs.create!
assert_nothing_raised { car.reload.funky_bulbs.delete_all }
assert_equal 0, Bulb.count, "bulbs should have been deleted using :delete_all strategy"
end
def test_delete_all_on_association_is_the_same_as_not_loaded
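    # delete_all should produce the same SQL whether or not the association has
    # already been loaded.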
author = authors :david
author.thinking_posts.create!(:body => "test")
author.reload
expected_sql = capture_sql { author.thinking_posts.delete_all }
author.thinking_posts.create!(:body => "test")
author.reload
author.thinking_posts.inspect
loaded_sql = capture_sql { author.thinking_posts.delete_all }
assert_equal(expected_sql, loaded_sql)
end
def test_delete_all_on_association_with_nil_dependency_is_the_same_as_not_loaded
author = authors :david
author.posts.create!(:title => "test", :body => "body")
author.reload
expected_sql = capture_sql { author.posts.delete_all }
author.posts.create!(:title => "test", :body => "body")
author.reload
author.posts.to_a
loaded_sql = capture_sql { author.posts.delete_all }
assert_equal(expected_sql, loaded_sql)
end
def test_building_the_associated_object_with_implicit_sti_base_class
firm = DependentFirm.new
company = firm.companies.build
assert_kind_of Company, company, "Expected #{company.class} to be a Company"
end
def test_building_the_associated_object_with_explicit_sti_base_class
firm = DependentFirm.new
company = firm.companies.build(:type => "Company")
assert_kind_of Company, company, "Expected #{company.class} to be a Company"
end
def test_building_the_associated_object_with_sti_subclass
firm = DependentFirm.new
company = firm.companies.build(:type => "Client")
assert_kind_of Client, company, "Expected #{company.class} to be a Client"
end
def test_building_the_associated_object_with_an_invalid_type
firm = DependentFirm.new
assert_raise(ActiveRecord::SubclassNotFound) { firm.companies.build(:type => "Invalid") }
end
def test_building_the_associated_object_with_an_unrelated_type
firm = DependentFirm.new
assert_raise(ActiveRecord::SubclassNotFound) { firm.companies.build(:type => "Account") }
end
test "building the association with an array" do
speedometer = Speedometer.new(speedometer_id: "a")
data = [{name: "first"}, {name: "second"}]
speedometer.minivans.build(data)
assert_equal 2, speedometer.minivans.size
assert speedometer.save
assert_equal ["first", "second"], speedometer.reload.minivans.map(&:name)
end
def test_association_keys_bypass_attribute_protection
car = Car.create(:name => 'honda')
bulb = car.bulbs.new
assert_equal car.id, bulb.car_id
bulb = car.bulbs.new :car_id => car.id + 1
assert_equal car.id, bulb.car_id
bulb = car.bulbs.build
assert_equal car.id, bulb.car_id
bulb = car.bulbs.build :car_id => car.id + 1
assert_equal car.id, bulb.car_id
bulb = car.bulbs.create
assert_equal car.id, bulb.car_id
bulb = car.bulbs.create :car_id => car.id + 1
assert_equal car.id, bulb.car_id
end
def test_association_protect_foreign_key
invoice = Invoice.create
line_item = invoice.line_items.new
assert_equal invoice.id, line_item.invoice_id
line_item = invoice.line_items.new :invoice_id => invoice.id + 1
assert_equal invoice.id, line_item.invoice_id
line_item = invoice.line_items.build
assert_equal invoice.id, line_item.invoice_id
line_item = invoice.line_items.build :invoice_id => invoice.id + 1
assert_equal invoice.id, line_item.invoice_id
line_item = invoice.line_items.create
assert_equal invoice.id, line_item.invoice_id
line_item = invoice.line_items.create :invoice_id => invoice.id + 1
assert_equal invoice.id, line_item.invoice_id
end
# When creating objects on the association, we must not do it within a scope (even though it
# would be convenient), because this would cause that scope to be applied to any callbacks etc.
def test_build_and_create_should_not_happen_within_scope
car = cars(:honda)
scoped_count = car.foo_bulbs.where_values.count
bulb = car.foo_bulbs.build
assert_not_equal scoped_count, bulb.scope_after_initialize.where_values.count
bulb = car.foo_bulbs.create
assert_not_equal scoped_count, bulb.scope_after_initialize.where_values.count
bulb = car.foo_bulbs.create!
assert_not_equal scoped_count, bulb.scope_after_initialize.where_values.count
end
def test_no_sql_should_be_fired_if_association_already_loaded
Car.create(:name => 'honda')
bulbs = Car.first.bulbs
bulbs.to_a # to load all instances of bulbs
assert_no_queries do
bulbs.first()
bulbs.first({})
end
assert_no_queries do
bulbs.second()
bulbs.second({})
end
assert_no_queries do
bulbs.third()
bulbs.third({})
end
assert_no_queries do
bulbs.fourth()
bulbs.fourth({})
end
assert_no_queries do
bulbs.fifth()
bulbs.fifth({})
end
assert_no_queries do
bulbs.forty_two()
bulbs.forty_two({})
end
assert_no_queries do
bulbs.last()
bulbs.last({})
end
end
def test_create_resets_cached_counters
person = Person.create!(:first_name => 'tenderlove')
post = Post.first
assert_equal [], person.readers
assert_nil person.readers.find_by_post_id(post.id)
person.readers.create(:post_id => post.id)
assert_equal 1, person.readers.count
assert_equal 1, person.readers.length
assert_equal post, person.readers.first.post
assert_equal person, person.readers.first.person
end
def force_signal37_to_load_all_clients_of_firm
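    # Iterate the association so it is fully loaded before the test body runs.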
companies(:first_firm).clients_of_firm.each {|f| }
end
  # Sometimes tests on Oracle fail if ORDER BY is not provided, therefore always add :order when using :first.
def test_counting_with_counter_sql
assert_equal 3, Firm.all.merge!(:order => "id").first.clients.count
end
def test_counting
assert_equal 3, Firm.all.merge!(:order => "id").first.plain_clients.count
end
def test_counting_with_single_hash
assert_equal 1, Firm.all.merge!(:order => "id").first.plain_clients.where(:name => "Microsoft").count
end
def test_counting_with_column_name_and_hash
assert_equal 3, Firm.all.merge!(:order => "id").first.plain_clients.count(:name)
end
def test_counting_with_association_limit
firm = companies(:first_firm)
assert_equal firm.limited_clients.length, firm.limited_clients.size
assert_equal firm.limited_clients.length, firm.limited_clients.count
end
def test_finding
assert_equal 3, Firm.all.merge!(:order => "id").first.clients.length
end
def test_finding_array_compatibility
assert_equal 3, Firm.order(:id).find{|f| f.id > 0}.clients.length
end
def test_find_many_with_merged_options
assert_equal 1, companies(:first_firm).limited_clients.size
assert_equal 1, companies(:first_firm).limited_clients.to_a.size
assert_equal 3, companies(:first_firm).limited_clients.limit(nil).to_a.size
end
def test_find_should_append_to_association_order
ordered_clients = companies(:first_firm).clients_sorted_desc.order('companies.id')
assert_equal ['id DESC', 'companies.id'], ordered_clients.order_values
end
def test_dynamic_find_should_respect_association_order
assert_equal companies(:another_first_firm_client), companies(:first_firm).clients_sorted_desc.where("type = 'Client'").first
assert_equal companies(:another_first_firm_client), companies(:first_firm).clients_sorted_desc.find_by_type('Client')
end
def test_cant_save_has_many_readonly_association
authors(:david).readonly_comments.each { |c| assert_raise(ActiveRecord::ReadOnlyRecord) { c.save! } }
authors(:david).readonly_comments.each { |c| assert c.readonly? }
end
def test_finding_default_orders
assert_equal "Summit", Firm.all.merge!(:order => "id").first.clients.first.name
end
def test_finding_with_different_class_name_and_order
assert_equal "Apex", Firm.all.merge!(:order => "id").first.clients_sorted_desc.first.name
end
def test_finding_with_foreign_key
assert_equal "Microsoft", Firm.all.merge!(:order => "id").first.clients_of_firm.first.name
end
def test_finding_with_condition
assert_equal "Microsoft", Firm.all.merge!(:order => "id").first.clients_like_ms.first.name
end
def test_finding_with_condition_hash
assert_equal "Microsoft", Firm.all.merge!(:order => "id").first.clients_like_ms_with_hash_conditions.first.name
end
def test_finding_using_primary_key
assert_equal "Summit", Firm.all.merge!(:order => "id").first.clients_using_primary_key.first.name
end
def test_update_all_on_association_accessed_before_save
firm = Firm.new(name: 'Firm')
firm.clients << Client.first
firm.save!
assert_equal firm.clients.count, firm.clients.update_all(description: 'Great!')
end
def test_belongs_to_sanity
c = Client.new
assert_nil c.firm, "belongs_to failed sanity check on new object"
end
def test_find_ids
firm = Firm.all.merge!(:order => "id").first
assert_raise(ActiveRecord::RecordNotFound) { firm.clients.find }
client = firm.clients.find(2)
assert_kind_of Client, client
client_ary = firm.clients.find([2])
assert_kind_of Array, client_ary
assert_equal client, client_ary.first
client_ary = firm.clients.find(2, 3)
assert_kind_of Array, client_ary
assert_equal 2, client_ary.size
assert_equal client, client_ary.first
assert_raise(ActiveRecord::RecordNotFound) { firm.clients.find(2, 99) }
end
def test_find_ids_and_inverse_of
force_signal37_to_load_all_clients_of_firm
firm = companies(:first_firm)
client = firm.clients_of_firm.find(3)
assert_kind_of Client, client
client_ary = firm.clients_of_firm.find([3])
assert_kind_of Array, client_ary
assert_equal client, client_ary.first
end
def test_find_all
firm = Firm.all.merge!(:order => "id").first
assert_equal 3, firm.clients.where("#{QUOTED_TYPE} = 'Client'").to_a.length
assert_equal 1, firm.clients.where("name = 'Summit'").to_a.length
end
def test_find_each
firm = companies(:first_firm)
assert ! firm.clients.loaded?
assert_queries(4) do
firm.clients.find_each(:batch_size => 1) {|c| assert_equal firm.id, c.firm_id }
end
assert ! firm.clients.loaded?
end
def test_find_each_with_conditions
firm = companies(:first_firm)
assert_queries(2) do
firm.clients.where(name: 'Microsoft').find_each(batch_size: 1) do |c|
assert_equal firm.id, c.firm_id
assert_equal "Microsoft", c.name
end
end
assert ! firm.clients.loaded?
end
def test_find_in_batches
firm = companies(:first_firm)
assert ! firm.clients.loaded?
assert_queries(2) do
firm.clients.find_in_batches(:batch_size => 2) do |clients|
clients.each {|c| assert_equal firm.id, c.firm_id }
end
end
assert ! firm.clients.loaded?
end
def test_find_all_sanitized
    # Sometimes tests on Oracle fail if ORDER BY is not provided, therefore always add :order when using :first.
firm = Firm.all.merge!(:order => "id").first
summit = firm.clients.where("name = 'Summit'").to_a
assert_equal summit, firm.clients.where("name = ?", "Summit").to_a
assert_equal summit, firm.clients.where("name = :name", { :name => "Summit" }).to_a
end
def test_find_first
firm = Firm.all.merge!(:order => "id").first
client2 = Client.find(2)
assert_equal firm.clients.first, firm.clients.order("id").first
assert_equal client2, firm.clients.where("#{QUOTED_TYPE} = 'Client'").order("id").first
end
def test_find_first_sanitized
firm = Firm.all.merge!(:order => "id").first
client2 = Client.find(2)
assert_equal client2, firm.clients.merge!(:where => ["#{QUOTED_TYPE} = ?", 'Client'], :order => "id").first
assert_equal client2, firm.clients.merge!(:where => ["#{QUOTED_TYPE} = :type", { :type => 'Client' }], :order => "id").first
end
def test_find_all_with_include_and_conditions
assert_nothing_raised do
Developer.all.merge!(:joins => :audit_logs, :where => {'audit_logs.message' => nil, :name => 'Smith'}).to_a
end
end
def test_find_in_collection
assert_equal Client.find(2).name, companies(:first_firm).clients.find(2).name
assert_raise(ActiveRecord::RecordNotFound) { companies(:first_firm).clients.find(6) }
end
def test_find_grouped
all_clients_of_firm1 = Client.all.merge!(:where => "firm_id = 1").to_a
grouped_clients_of_firm1 = Client.all.merge!(:where => "firm_id = 1", :group => "firm_id", :select => 'firm_id, count(id) as clients_count').to_a
assert_equal 3, all_clients_of_firm1.size
assert_equal 1, grouped_clients_of_firm1.size
end
def test_find_scoped_grouped
assert_equal 1, companies(:first_firm).clients_grouped_by_firm_id.size
assert_equal 1, companies(:first_firm).clients_grouped_by_firm_id.length
assert_equal 3, companies(:first_firm).clients_grouped_by_name.size
assert_equal 3, companies(:first_firm).clients_grouped_by_name.length
end
def test_find_scoped_grouped_having
assert_equal 1, authors(:david).popular_grouped_posts.length
assert_equal 0, authors(:mary).popular_grouped_posts.length
end
def test_default_select
assert_equal Comment.column_names.sort, posts(:welcome).comments.first.attributes.keys.sort
end
def test_select_query_method
assert_equal ['id', 'body'], posts(:welcome).comments.select(:id, :body).first.attributes.keys
end
def test_select_with_block
assert_equal [1], posts(:welcome).comments.select { |c| c.id == 1 }.map(&:id)
end
def test_select_without_foreign_key
assert_equal companies(:first_firm).accounts.first.credit_limit, companies(:first_firm).accounts.select(:credit_limit).first.credit_limit
end
def test_adding
force_signal37_to_load_all_clients_of_firm
natural = Client.new("name" => "Natural Company")
companies(:first_firm).clients_of_firm << natural
assert_equal 3, companies(:first_firm).clients_of_firm.size # checking via the collection
assert_equal 3, companies(:first_firm).clients_of_firm(true).size # checking using the db
assert_equal natural, companies(:first_firm).clients_of_firm.last
end
def test_adding_using_create
first_firm = companies(:first_firm)
assert_equal 3, first_firm.plain_clients.size
first_firm.plain_clients.create(:name => "Natural Company")
assert_equal 4, first_firm.plain_clients.length
assert_equal 4, first_firm.plain_clients.size
end
def test_create_with_bang_on_has_many_when_parent_is_new_raises
error = assert_raise(ActiveRecord::RecordNotSaved) do
firm = Firm.new
firm.plain_clients.create! :name=>"Whoever"
end
assert_equal "You cannot call create unless the parent is saved", error.message
end
def test_regular_create_on_has_many_when_parent_is_new_raises
error = assert_raise(ActiveRecord::RecordNotSaved) do
firm = Firm.new
firm.plain_clients.create :name=>"Whoever"
end
assert_equal "You cannot call create unless the parent is saved", error.message
end
def test_create_with_bang_on_has_many_raises_when_record_not_saved
assert_raise(ActiveRecord::RecordInvalid) do
firm = Firm.all.merge!(:order => "id").first
firm.plain_clients.create!
end
end
def test_create_with_bang_on_habtm_when_parent_is_new_raises
error = assert_raise(ActiveRecord::RecordNotSaved) do
Developer.new("name" => "Aredridel").projects.create!
end
assert_equal "You cannot call create unless the parent is saved", error.message
end
def test_adding_a_mismatch_class
assert_raise(ActiveRecord::AssociationTypeMismatch) { companies(:first_firm).clients_of_firm << nil }
assert_raise(ActiveRecord::AssociationTypeMismatch) { companies(:first_firm).clients_of_firm << 1 }
assert_raise(ActiveRecord::AssociationTypeMismatch) { companies(:first_firm).clients_of_firm << Topic.find(1) }
end
def test_adding_a_collection
force_signal37_to_load_all_clients_of_firm
companies(:first_firm).clients_of_firm.concat([Client.new("name" => "Natural Company"), Client.new("name" => "Apple")])
assert_equal 4, companies(:first_firm).clients_of_firm.size
assert_equal 4, companies(:first_firm).clients_of_firm(true).size
end
def test_transactions_when_adding_to_persisted
good = Client.new(:name => "Good")
bad = Client.new(:name => "Bad", :raise_on_save => true)
begin
companies(:first_firm).clients_of_firm.concat(good, bad)
rescue Client::RaisedOnSave
end
assert !companies(:first_firm).clients_of_firm(true).include?(good)
end
def test_transactions_when_adding_to_new_record
assert_no_queries(ignore_none: false) do
firm = Firm.new
firm.clients_of_firm.concat(Client.new("name" => "Natural Company"))
end
end
def test_inverse_on_before_validate
firm = companies(:first_firm)
assert_queries(1) do
firm.clients_of_firm << Client.new("name" => "Natural Company")
end
end
def test_new_aliased_to_build
company = companies(:first_firm)
new_client = assert_no_queries(ignore_none: false) { company.clients_of_firm.new("name" => "Another Client") }
assert !company.clients_of_firm.loaded?
assert_equal "Another Client", new_client.name
assert !new_client.persisted?
assert_equal new_client, company.clients_of_firm.last
end
def test_build
company = companies(:first_firm)
new_client = assert_no_queries(ignore_none: false) { company.clients_of_firm.build("name" => "Another Client") }
assert !company.clients_of_firm.loaded?
assert_equal "Another Client", new_client.name
assert !new_client.persisted?
assert_equal new_client, company.clients_of_firm.last
end
def test_collection_size_after_building
company = companies(:first_firm) # company already has one client
company.clients_of_firm.build("name" => "Another Client")
company.clients_of_firm.build("name" => "Yet Another Client")
assert_equal 4, company.clients_of_firm.size
end
def test_collection_not_empty_after_building
company = companies(:first_firm)
assert_predicate company.contracts, :empty?
company.contracts.build
assert_not_predicate company.contracts, :empty?
end
def test_collection_size_twice_for_regressions
post = posts(:thinking)
assert_equal 0, post.readers.size
    # This test needs a post that has no readers; we assert that above to ensure it holds,
    # but we need to reload the post because the very call to #size hides the bug.
post.reload
post.readers.build
size1 = post.readers.size
size2 = post.readers.size
assert_equal size1, size2
end
def test_build_many
company = companies(:first_firm)
new_clients = assert_no_queries(ignore_none: false) { company.clients_of_firm.build([{"name" => "Another Client"}, {"name" => "Another Client II"}]) }
assert_equal 2, new_clients.size
end
def test_build_followed_by_save_does_not_load_target
companies(:first_firm).clients_of_firm.build("name" => "Another Client")
assert companies(:first_firm).save
assert !companies(:first_firm).clients_of_firm.loaded?
end
def test_build_without_loading_association
first_topic = topics(:first)
Reply.column_names
assert_equal 1, first_topic.replies.length
assert_no_queries do
first_topic.replies.build(:title => "Not saved", :content => "Superstars")
assert_equal 2, first_topic.replies.size
end
assert_equal 2, first_topic.replies.to_ary.size
end
def test_build_via_block
company = companies(:first_firm)
new_client = assert_no_queries(ignore_none: false) { company.clients_of_firm.build {|client| client.name = "Another Client" } }
assert !company.clients_of_firm.loaded?
assert_equal "Another Client", new_client.name
assert !new_client.persisted?
assert_equal new_client, company.clients_of_firm.last
end
def test_build_many_via_block
company = companies(:first_firm)
new_clients = assert_no_queries(ignore_none: false) do
company.clients_of_firm.build([{"name" => "Another Client"}, {"name" => "Another Client II"}]) do |client|
client.name = "changed"
end
end
assert_equal 2, new_clients.size
assert_equal "changed", new_clients.first.name
assert_equal "changed", new_clients.last.name
end
def test_create_without_loading_association
first_firm = companies(:first_firm)
Firm.column_names
Client.column_names
assert_equal 2, first_firm.clients_of_firm.size
first_firm.clients_of_firm.reset
assert_queries(1) do
first_firm.clients_of_firm.create(:name => "Superstars")
end
assert_equal 3, first_firm.clients_of_firm.size
end
def test_create
force_signal37_to_load_all_clients_of_firm
new_client = companies(:first_firm).clients_of_firm.create("name" => "Another Client")
assert new_client.persisted?
assert_equal new_client, companies(:first_firm).clients_of_firm.last
assert_equal new_client, companies(:first_firm).clients_of_firm(true).last
end
def test_create_many
companies(:first_firm).clients_of_firm.create([{"name" => "Another Client"}, {"name" => "Another Client II"}])
assert_equal 4, companies(:first_firm).clients_of_firm(true).size
end
def test_create_followed_by_save_does_not_load_target
companies(:first_firm).clients_of_firm.create("name" => "Another Client")
assert companies(:first_firm).save
assert !companies(:first_firm).clients_of_firm.loaded?
end
def test_deleting
force_signal37_to_load_all_clients_of_firm
companies(:first_firm).clients_of_firm.delete(companies(:first_firm).clients_of_firm.first)
assert_equal 1, companies(:first_firm).clients_of_firm.size
assert_equal 1, companies(:first_firm).clients_of_firm(true).size
end
def test_deleting_before_save
new_firm = Firm.new("name" => "A New Firm, Inc.")
new_client = new_firm.clients_of_firm.build("name" => "Another Client")
assert_equal 1, new_firm.clients_of_firm.size
new_firm.clients_of_firm.delete(new_client)
assert_equal 0, new_firm.clients_of_firm.size
end
def test_deleting_updates_counter_cache
topic = Topic.order("id ASC").first
assert_equal topic.replies.to_a.size, topic.replies_count
topic.replies.delete(topic.replies.first)
topic.reload
assert_equal topic.replies.to_a.size, topic.replies_count
end
def test_counter_cache_updates_in_memory_after_concat
topic = Topic.create title: "Zoom-zoom-zoom"
topic.replies << Reply.create(title: "re: zoom", content: "speedy quick!")
assert_equal 1, topic.replies_count
assert_equal 1, topic.replies.size
assert_equal 1, topic.reload.replies.size
end
def test_counter_cache_updates_in_memory_after_create
topic = Topic.create title: "Zoom-zoom-zoom"
topic.replies.create!(title: "re: zoom", content: "speedy quick!")
assert_equal 1, topic.replies_count
assert_equal 1, topic.replies.size
assert_equal 1, topic.reload.replies.size
end
def test_counter_cache_updates_in_memory_after_create_with_array
topic = Topic.create title: "Zoom-zoom-zoom"
topic.replies.create!([
{ title: "re: zoom", content: "speedy quick!" },
{ title: "re: zoom 2", content: "OMG lol!" },
])
assert_equal 2, topic.replies_count
assert_equal 2, topic.replies.size
assert_equal 2, topic.reload.replies.size
end
def test_pushing_association_updates_counter_cache
topic = Topic.order("id ASC").first
reply = Reply.create!
assert_difference "topic.reload.replies_count", 1 do
topic.replies << reply
end
end
def test_deleting_updates_counter_cache_without_dependent_option
post = posts(:welcome)
assert_difference "post.reload.tags_count", -1 do
post.taggings.delete(post.taggings.first)
end
end
def test_deleting_updates_counter_cache_with_dependent_delete_all
post = posts(:welcome)
post.update_columns(taggings_with_delete_all_count: post.tags_count)
assert_difference "post.reload.taggings_with_delete_all_count", -1 do
post.taggings_with_delete_all.delete(post.taggings_with_delete_all.first)
end
end
def test_deleting_updates_counter_cache_with_dependent_destroy
post = posts(:welcome)
post.update_columns(taggings_with_destroy_count: post.tags_count)
assert_difference "post.reload.taggings_with_destroy_count", -1 do
post.taggings_with_destroy.delete(post.taggings_with_destroy.first)
end
end
def test_calling_empty_with_counter_cache
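    # The counter cache lets empty? be answered without touching the database.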
post = posts(:welcome)
assert_queries(0) do
assert_not post.comments.empty?
end
end
def test_custom_named_counter_cache
topic = topics(:first)
assert_difference "topic.reload.replies_count", -1 do
topic.approved_replies.clear
end
end
def test_calling_update_attributes_on_id_changes_the_counter_cache
topic = Topic.order("id ASC").first
original_count = topic.replies.to_a.size
assert_equal original_count, topic.replies_count
first_reply = topic.replies.first
first_reply.update_attributes(:parent_id => nil)
assert_equal original_count - 1, topic.reload.replies_count
first_reply.update_attributes(:parent_id => topic.id)
assert_equal original_count, topic.reload.replies_count
end
def test_calling_update_attributes_changing_ids_doesnt_change_counter_cache
topic1 = Topic.find(1)
topic2 = Topic.find(3)
original_count1 = topic1.replies.to_a.size
original_count2 = topic2.replies.to_a.size
reply1 = topic1.replies.first
reply2 = topic2.replies.first
reply1.update_attributes(:parent_id => topic2.id)
assert_equal original_count1 - 1, topic1.reload.replies_count
assert_equal original_count2 + 1, topic2.reload.replies_count
reply2.update_attributes(:parent_id => topic1.id)
assert_equal original_count1, topic1.reload.replies_count
assert_equal original_count2, topic2.reload.replies_count
end
def test_deleting_a_collection
force_signal37_to_load_all_clients_of_firm
companies(:first_firm).clients_of_firm.create("name" => "Another Client")
assert_equal 3, companies(:first_firm).clients_of_firm.size
companies(:first_firm).clients_of_firm.delete([companies(:first_firm).clients_of_firm[0], companies(:first_firm).clients_of_firm[1], companies(:first_firm).clients_of_firm[2]])
assert_equal 0, companies(:first_firm).clients_of_firm.size
assert_equal 0, companies(:first_firm).clients_of_firm(true).size
end
def test_delete_all
force_signal37_to_load_all_clients_of_firm
companies(:first_firm).dependent_clients_of_firm.create("name" => "Another Client")
clients = companies(:first_firm).dependent_clients_of_firm.to_a
assert_equal 3, clients.count
assert_difference "Client.count", -(clients.count) do
companies(:first_firm).dependent_clients_of_firm.delete_all
end
end
def test_delete_all_with_not_yet_loaded_association_collection
force_signal37_to_load_all_clients_of_firm
companies(:first_firm).clients_of_firm.create("name" => "Another Client")
assert_equal 3, companies(:first_firm).clients_of_firm.size
companies(:first_firm).clients_of_firm.reset
companies(:first_firm).clients_of_firm.delete_all
assert_equal 0, companies(:first_firm).clients_of_firm.size
assert_equal 0, companies(:first_firm).clients_of_firm(true).size
end
def test_transaction_when_deleting_persisted
good = Client.new(:name => "Good")
bad = Client.new(:name => "Bad", :raise_on_destroy => true)
companies(:first_firm).clients_of_firm = [good, bad]
begin
companies(:first_firm).clients_of_firm.destroy(good, bad)
rescue Client::RaisedOnDestroy
end
assert_equal [good, bad], companies(:first_firm).clients_of_firm(true)
end
def test_transaction_when_deleting_new_record
assert_no_queries(ignore_none: false) do
firm = Firm.new
client = Client.new("name" => "New Client")
firm.clients_of_firm << client
firm.clients_of_firm.destroy(client)
end
end
def test_clearing_an_association_collection
firm = companies(:first_firm)
client_id = firm.clients_of_firm.first.id
assert_equal 2, firm.clients_of_firm.size
firm.clients_of_firm.clear
assert_equal 0, firm.clients_of_firm.size
assert_equal 0, firm.clients_of_firm(true).size
assert_equal [], Client.destroyed_client_ids[firm.id]
# Should not be destroyed since the association is not dependent.
assert_nothing_raised do
assert_nil Client.find(client_id).firm
end
end
def test_clearing_updates_counter_cache
topic = Topic.first
assert_difference 'topic.reload.replies_count', -1 do
topic.replies.clear
end
end
def test_clearing_updates_counter_cache_when_inverse_counter_cache_is_a_symbol_with_dependent_destroy
car = Car.first
car.engines.create!
assert_difference 'car.reload.engines_count', -1 do
car.engines.clear
end
end
def test_clearing_a_dependent_association_collection
firm = companies(:first_firm)
client_id = firm.dependent_clients_of_firm.first.id
assert_equal 2, firm.dependent_clients_of_firm.size
assert_equal 1, Client.find_by_id(client_id).client_of
    # :delete_all is called on each client since the dependent option is :destroy
firm.dependent_clients_of_firm.clear
assert_equal 0, firm.dependent_clients_of_firm.size
assert_equal 0, firm.dependent_clients_of_firm(true).size
assert_equal [], Client.destroyed_client_ids[firm.id]
# Should be destroyed since the association is dependent.
assert_nil Client.find_by_id(client_id)
end
def test_delete_all_with_option_delete_all
firm = companies(:first_firm)
client_id = firm.dependent_clients_of_firm.first.id
firm.dependent_clients_of_firm.delete_all(:delete_all)
assert_nil Client.find_by_id(client_id)
end
def test_delete_all_accepts_limited_parameters
firm = companies(:first_firm)
assert_raise(ArgumentError) do
firm.dependent_clients_of_firm.delete_all(:destroy)
end
end
def test_clearing_an_exclusively_dependent_association_collection
firm = companies(:first_firm)
client_id = firm.exclusively_dependent_clients_of_firm.first.id
assert_equal 2, firm.exclusively_dependent_clients_of_firm.size
assert_equal [], Client.destroyed_client_ids[firm.id]
# :exclusively_dependent means each client is deleted directly from
# the database without looping through them calling destroy.
firm.exclusively_dependent_clients_of_firm.clear
assert_equal 0, firm.exclusively_dependent_clients_of_firm.size
assert_equal 0, firm.exclusively_dependent_clients_of_firm(true).size
# no destroy-filters should have been called
assert_equal [], Client.destroyed_client_ids[firm.id]
# Should be destroyed since the association is exclusively dependent.
assert_nil Client.find_by_id(client_id)
end
def test_dependent_association_respects_optional_conditions_on_delete
firm = companies(:odegy)
Client.create(:client_of => firm.id, :name => "BigShot Inc.")
Client.create(:client_of => firm.id, :name => "SmallTime Inc.")
# only one of two clients is included in the association due to the :conditions key
assert_equal 2, Client.where(client_of: firm.id).size
assert_equal 1, firm.dependent_conditional_clients_of_firm.size
firm.destroy
# only the correctly associated client should have been deleted
assert_equal 1, Client.where(client_of: firm.id).size
end
def test_dependent_association_respects_optional_sanitized_conditions_on_delete
firm = companies(:odegy)
Client.create(:client_of => firm.id, :name => "BigShot Inc.")
Client.create(:client_of => firm.id, :name => "SmallTime Inc.")
# only one of two clients is included in the association due to the :conditions key
assert_equal 2, Client.where(client_of: firm.id).size
assert_equal 1, firm.dependent_sanitized_conditional_clients_of_firm.size
firm.destroy
# only the correctly associated client should have been deleted
assert_equal 1, Client.where(client_of: firm.id).size
end
def test_dependent_association_respects_optional_hash_conditions_on_delete
firm = companies(:odegy)
Client.create(:client_of => firm.id, :name => "BigShot Inc.")
Client.create(:client_of => firm.id, :name => "SmallTime Inc.")
# only one of two clients is included in the association due to the :conditions key
assert_equal 2, Client.where(client_of: firm.id).size
assert_equal 1, firm.dependent_sanitized_conditional_clients_of_firm.size
firm.destroy
# only the correctly associated client should have been deleted
assert_equal 1, Client.where(client_of: firm.id).size
end
def test_delete_all_association_with_primary_key_deletes_correct_records
firm = Firm.first
# break the vanilla firm_id foreign key
assert_equal 3, firm.clients.count
firm.clients.first.update_columns(firm_id: nil)
assert_equal 2, firm.clients(true).count
assert_equal 2, firm.clients_using_primary_key_with_delete_all.count
old_record = firm.clients_using_primary_key_with_delete_all.first
firm = Firm.first
firm.destroy
assert_nil Client.find_by_id(old_record.id)
end
def test_creation_respects_hash_condition
ms_client = companies(:first_firm).clients_like_ms_with_hash_conditions.build
assert ms_client.save
assert_equal 'Microsoft', ms_client.name
another_ms_client = companies(:first_firm).clients_like_ms_with_hash_conditions.create
assert another_ms_client.persisted?
assert_equal 'Microsoft', another_ms_client.name
end
def test_clearing_without_initial_access
firm = companies(:first_firm)
firm.clients_of_firm.clear
assert_equal 0, firm.clients_of_firm.size
assert_equal 0, firm.clients_of_firm(true).size
end
def test_deleting_a_item_which_is_not_in_the_collection
force_signal37_to_load_all_clients_of_firm
summit = Client.find_by_name('Summit')
companies(:first_firm).clients_of_firm.delete(summit)
assert_equal 2, companies(:first_firm).clients_of_firm.size
assert_equal 2, companies(:first_firm).clients_of_firm(true).size
assert_equal 2, summit.client_of
end
def test_deleting_by_fixnum_id
david = Developer.find(1)
assert_difference 'david.projects.count', -1 do
assert_equal 1, david.projects.delete(1).size
end
assert_equal 1, david.projects.size
end
def test_deleting_by_string_id
david = Developer.find(1)
assert_difference 'david.projects.count', -1 do
assert_equal 1, david.projects.delete('1').size
end
assert_equal 1, david.projects.size
end
def test_deleting_self_type_mismatch
david = Developer.find(1)
david.projects.reload
assert_raise(ActiveRecord::AssociationTypeMismatch) { david.projects.delete(Project.find(1).developers) }
end
def test_destroying
force_signal37_to_load_all_clients_of_firm
assert_difference "Client.count", -1 do
companies(:first_firm).clients_of_firm.destroy(companies(:first_firm).clients_of_firm.first)
end
assert_equal 1, companies(:first_firm).reload.clients_of_firm.size
assert_equal 1, companies(:first_firm).clients_of_firm(true).size
end
def test_destroying_by_fixnum_id
force_signal37_to_load_all_clients_of_firm
assert_difference "Client.count", -1 do
companies(:first_firm).clients_of_firm.destroy(companies(:first_firm).clients_of_firm.first.id)
end
assert_equal 1, companies(:first_firm).reload.clients_of_firm.size
assert_equal 1, companies(:first_firm).clients_of_firm(true).size
end
def test_destroying_by_string_id
force_signal37_to_load_all_clients_of_firm
assert_difference "Client.count", -1 do
companies(:first_firm).clients_of_firm.destroy(companies(:first_firm).clients_of_firm.first.id.to_s)
end
assert_equal 1, companies(:first_firm).reload.clients_of_firm.size
assert_equal 1, companies(:first_firm).clients_of_firm(true).size
end
def test_destroying_a_collection
force_signal37_to_load_all_clients_of_firm
companies(:first_firm).clients_of_firm.create("name" => "Another Client")
assert_equal 3, companies(:first_firm).clients_of_firm.size
assert_difference "Client.count", -2 do
companies(:first_firm).clients_of_firm.destroy([companies(:first_firm).clients_of_firm[0], companies(:first_firm).clients_of_firm[1]])
end
assert_equal 1, companies(:first_firm).reload.clients_of_firm.size
assert_equal 1, companies(:first_firm).clients_of_firm(true).size
end
def test_destroy_all
force_signal37_to_load_all_clients_of_firm
clients = companies(:first_firm).clients_of_firm.to_a
assert !clients.empty?, "37signals has clients after load"
destroyed = companies(:first_firm).clients_of_firm.destroy_all
assert_equal clients.sort_by(&:id), destroyed.sort_by(&:id)
assert destroyed.all?(&:frozen?), "destroyed clients should be frozen"
assert companies(:first_firm).clients_of_firm.empty?, "37signals has no clients after destroy all"
assert companies(:first_firm).clients_of_firm(true).empty?, "37signals has no clients after destroy all and refresh"
end
def test_dependence
firm = companies(:first_firm)
assert_equal 3, firm.clients.size
firm.destroy
assert Client.all.merge!(:where => "firm_id=#{firm.id}").to_a.empty?
end
def test_dependence_for_associations_with_hash_condition
david = authors(:david)
assert_difference('Post.count', -1) { assert david.destroy }
end
def test_destroy_dependent_when_deleted_from_association
    # Sometimes tests on Oracle fail if ORDER BY is not provided, therefore always add :order when using :first.
firm = Firm.all.merge!(:order => "id").first
assert_equal 3, firm.clients.size
client = firm.clients.first
firm.clients.delete(client)
assert_raise(ActiveRecord::RecordNotFound) { Client.find(client.id) }
assert_raise(ActiveRecord::RecordNotFound) { firm.clients.find(client.id) }
assert_equal 2, firm.clients.size
end
def test_three_levels_of_dependence
topic = Topic.create "title" => "neat and simple"
reply = topic.replies.create "title" => "neat and simple", "content" => "still digging it"
reply.replies.create "title" => "neat and simple", "content" => "ain't complaining"
assert_nothing_raised { topic.destroy }
end
uses_transaction :test_dependence_with_transaction_support_on_failure
def test_dependence_with_transaction_support_on_failure
firm = companies(:first_firm)
clients = firm.clients
assert_equal 3, clients.length
clients.last.instance_eval { def overwrite_to_raise() raise "Trigger rollback" end }
firm.destroy rescue "do nothing"
assert_equal 3, Client.all.merge!(:where => "firm_id=#{firm.id}").to_a.size
end
def test_dependence_on_account
num_accounts = Account.count
companies(:first_firm).destroy
assert_equal num_accounts - 1, Account.count
end
def test_depends_and_nullify
num_accounts = Account.count
core = companies(:rails_core)
assert_equal accounts(:rails_core_account), core.account
assert_equal companies(:leetsoft, :jadedpixel), core.companies
core.destroy
assert_nil accounts(:rails_core_account).reload.firm_id
assert_nil companies(:leetsoft).reload.client_of
assert_nil companies(:jadedpixel).reload.client_of
assert_equal num_accounts, Account.count
end
def test_restrict_with_exception
firm = RestrictedWithExceptionFirm.create!(:name => 'restrict')
firm.companies.create(:name => 'child')
assert !firm.companies.empty?
assert_raise(ActiveRecord::DeleteRestrictionError) { firm.destroy }
assert RestrictedWithExceptionFirm.exists?(:name => 'restrict')
assert firm.companies.exists?(:name => 'child')
end
def test_restrict_with_error
firm = RestrictedWithErrorFirm.create!(:name => 'restrict')
firm.companies.create(:name => 'child')
assert !firm.companies.empty?
firm.destroy
assert !firm.errors.empty?
assert_equal "Cannot delete record because dependent companies exist", firm.errors[:base].first
assert RestrictedWithErrorFirm.exists?(:name => 'restrict')
assert firm.companies.exists?(:name => 'child')
end
def test_included_in_collection
assert_equal true, companies(:first_firm).clients.include?(Client.find(2))
end
def test_included_in_collection_for_new_records
client = Client.create(:name => 'Persisted')
assert_nil client.client_of
assert_equal false, Firm.new.clients_of_firm.include?(client),
'includes a client that does not belong to any firm'
end
def test_adding_array_and_collection
assert_nothing_raised { Firm.first.clients + Firm.all.last.clients }
end
def test_replace_with_less
firm = Firm.all.merge!(:order => "id").first
firm.clients = [companies(:first_client)]
assert firm.save, "Could not save firm"
firm.reload
assert_equal 1, firm.clients.length
end
def test_replace_with_less_and_dependent_nullify
num_companies = Company.count
companies(:rails_core).companies = []
assert_equal num_companies, Company.count
end
def test_replace_with_new
firm = Firm.all.merge!(:order => "id").first
firm.clients = [companies(:second_client), Client.new("name" => "New Client")]
firm.save
firm.reload
assert_equal 2, firm.clients.length
assert_equal false, firm.clients.include?(:first_client)
end
def test_replace_failure
firm = companies(:first_firm)
account = Account.new
orig_accounts = firm.accounts.to_a
assert !account.valid?
assert !orig_accounts.empty?
error = assert_raise ActiveRecord::RecordNotSaved do
firm.accounts = [account]
end
assert_equal orig_accounts, firm.accounts
assert_equal "Failed to replace accounts because one or more of the " \
"new records could not be saved.", error.message
end
def test_replace_with_same_content
firm = Firm.first
firm.clients = []
firm.save
assert_queries(0, ignore_none: true) do
firm.clients = []
end
end
def test_transactions_when_replacing_on_persisted
good = Client.new(:name => "Good")
bad = Client.new(:name => "Bad", :raise_on_save => true)
companies(:first_firm).clients_of_firm = [good]
begin
companies(:first_firm).clients_of_firm = [bad]
rescue Client::RaisedOnSave
end
assert_equal [good], companies(:first_firm).clients_of_firm(true)
end
def test_transactions_when_replacing_on_new_record
assert_no_queries(ignore_none: false) do
firm = Firm.new
firm.clients_of_firm = [Client.new("name" => "New Client")]
end
end
def test_get_ids
assert_equal [companies(:first_client).id, companies(:second_client).id, companies(:another_first_firm_client).id], companies(:first_firm).client_ids
end
def test_get_ids_for_loaded_associations
company = companies(:first_firm)
company.clients(true)
assert_queries(0) do
company.client_ids
company.client_ids
end
end
def test_get_ids_for_unloaded_associations_does_not_load_them
company = companies(:first_firm)
assert !company.clients.loaded?
assert_equal [companies(:first_client).id, companies(:second_client).id, companies(:another_first_firm_client).id], company.client_ids
assert !company.clients.loaded?
end
def test_get_ids_ignores_include_option
assert_equal [readers(:michael_welcome).id], posts(:welcome).readers_with_person_ids
end
def test_get_ids_for_ordered_association
assert_equal [companies(:another_first_firm_client).id, companies(:second_client).id, companies(:first_client).id], companies(:first_firm).clients_ordered_by_name_ids
end
def test_get_ids_for_association_on_new_record_does_not_try_to_find_records
Company.columns # Load schema information so we don't query below
Contract.columns # if running just this test.
company = Company.new
assert_queries(0) do
company.contract_ids
end
assert_equal [], company.contract_ids
end
def test_set_ids_for_association_on_new_record_applies_association_correctly
contract_a = Contract.create!
contract_b = Contract.create!
Contract.create! # another contract
company = Company.new(:name => "Some Company")
company.contract_ids = [contract_a.id, contract_b.id]
assert_equal [contract_a.id, contract_b.id], company.contract_ids
assert_equal [contract_a, contract_b], company.contracts
company.save!
assert_equal company, contract_a.reload.company
assert_equal company, contract_b.reload.company
end
def test_assign_ids_ignoring_blanks
firm = Firm.create!(:name => 'Apple')
firm.client_ids = [companies(:first_client).id, nil, companies(:second_client).id, '']
firm.save!
assert_equal 2, firm.clients(true).size
assert_equal true, firm.clients.include?(companies(:second_client))
end
def test_get_ids_for_through
assert_equal [comments(:eager_other_comment1).id], authors(:mary).comment_ids
end
def test_modifying_a_through_a_has_many_should_raise
[
lambda { authors(:mary).comment_ids = [comments(:greetings).id, comments(:more_greetings).id] },
lambda { authors(:mary).comments = [comments(:greetings), comments(:more_greetings)] },
lambda { authors(:mary).comments << Comment.create!(:body => "Yay", :post_id => 424242) },
lambda { authors(:mary).comments.delete(authors(:mary).comments.first) },
].each {|block| assert_raise(ActiveRecord::HasManyThroughCantAssociateThroughHasOneOrManyReflection, &block) }
end
def test_dynamic_find_should_respect_association_order_for_through
assert_equal Comment.find(10), authors(:david).comments_desc.where("comments.type = 'SpecialComment'").first
assert_equal Comment.find(10), authors(:david).comments_desc.find_by_type('SpecialComment')
end
def test_has_many_through_respects_hash_conditions
assert_equal authors(:david).hello_posts, authors(:david).hello_posts_with_hash_conditions
assert_equal authors(:david).hello_post_comments, authors(:david).hello_post_comments_with_hash_conditions
end
def test_include_uses_array_include_after_loaded
firm = companies(:first_firm)
firm.clients.load_target
client = firm.clients.first
assert_no_queries do
assert firm.clients.loaded?
assert_equal true, firm.clients.include?(client)
end
end
def test_include_checks_if_record_exists_if_target_not_loaded
firm = companies(:first_firm)
client = firm.clients.first
firm.reload
assert ! firm.clients.loaded?
assert_queries(1) do
assert_equal true, firm.clients.include?(client)
end
assert ! firm.clients.loaded?
end
def test_include_returns_false_for_non_matching_record_to_verify_scoping
firm = companies(:first_firm)
client = Client.create!(:name => 'Not Associated')
assert ! firm.clients.loaded?
assert_equal false, firm.clients.include?(client)
end
def test_calling_first_nth_or_last_on_association_should_not_load_association
firm = companies(:first_firm)
firm.clients.first
firm.clients.second
firm.clients.last
assert !firm.clients.loaded?
end
def test_calling_first_or_last_on_loaded_association_should_not_fetch_with_query
firm = companies(:first_firm)
firm.clients.load_target
assert firm.clients.loaded?
assert_no_queries(ignore_none: false) do
firm.clients.first
assert_equal 2, firm.clients.first(2).size
firm.clients.last
assert_equal 2, firm.clients.last(2).size
end
end
def test_calling_first_or_last_on_existing_record_with_build_should_load_association
firm = companies(:first_firm)
firm.clients.build(:name => 'Foo')
assert !firm.clients.loaded?
assert_queries 1 do
firm.clients.first
firm.clients.second
firm.clients.last
end
assert firm.clients.loaded?
end
def test_calling_first_nth_or_last_on_existing_record_with_create_should_not_load_association
firm = companies(:first_firm)
firm.clients.create(:name => 'Foo')
assert !firm.clients.loaded?
assert_queries 3 do
firm.clients.first
firm.clients.second
firm.clients.last
end
assert !firm.clients.loaded?
end
def test_calling_first_nth_or_last_on_new_record_should_not_run_queries
firm = Firm.new
assert_no_queries do
firm.clients.first
firm.clients.second
firm.clients.last
end
end
def test_calling_first_or_last_with_integer_on_association_should_not_load_association
firm = companies(:first_firm)
firm.clients.create(:name => 'Foo')
assert !firm.clients.loaded?
assert_queries 2 do
firm.clients.first(2)
firm.clients.last(2)
end
assert !firm.clients.loaded?
end
def test_calling_many_should_count_instead_of_loading_association
firm = companies(:first_firm)
assert_queries(1) do
firm.clients.many? # use count query
end
assert !firm.clients.loaded?
end
def test_calling_many_on_loaded_association_should_not_use_query
firm = companies(:first_firm)
firm.clients.collect # force load
assert_no_queries { assert firm.clients.many? }
end
def test_calling_many_should_defer_to_collection_if_using_a_block
firm = companies(:first_firm)
assert_queries(1) do
firm.clients.expects(:size).never
firm.clients.many? { true }
end
assert firm.clients.loaded?
end
def test_calling_many_should_return_false_if_none_or_one
firm = companies(:another_firm)
assert !firm.clients_like_ms.many?
assert_equal 0, firm.clients_like_ms.size
firm = companies(:first_firm)
assert !firm.limited_clients.many?
assert_equal 1, firm.limited_clients.size
end
def test_calling_many_should_return_true_if_more_than_one
firm = companies(:first_firm)
assert firm.clients.many?
assert_equal 3, firm.clients.size
end
def test_joins_with_namespaced_model_should_use_correct_type
old = ActiveRecord::Base.store_full_sti_class
ActiveRecord::Base.store_full_sti_class = true
firm = Namespaced::Firm.create({ :name => 'Some Company' })
firm.clients.create({ :name => 'Some Client' })
stats = Namespaced::Firm.all.merge!(
:select => "#{Namespaced::Firm.table_name}.id, COUNT(#{Namespaced::Client.table_name}.id) AS num_clients",
:joins => :clients,
:group => "#{Namespaced::Firm.table_name}.id"
).find firm.id
assert_equal 1, stats.num_clients.to_i
ensure
ActiveRecord::Base.store_full_sti_class = old
end
def test_association_proxy_transaction_method_starts_transaction_in_association_class
Comment.expects(:transaction)
Post.first.comments.transaction do
# nothing
end
end
def test_sending_new_to_association_proxy_should_have_same_effect_as_calling_new
client_association = companies(:first_firm).clients
assert_equal client_association.new.attributes, client_association.send(:new).attributes
end
def test_respond_to_private_class_methods
client_association = companies(:first_firm).clients
assert !client_association.respond_to?(:private_method)
assert client_association.respond_to?(:private_method, true)
end
def test_creating_using_primary_key
firm = Firm.all.merge!(:order => "id").first
client = firm.clients_using_primary_key.create!(:name => 'test')
assert_equal firm.name, client.firm_name
end
def test_defining_has_many_association_with_delete_all_dependency_lazily_evaluates_target_class
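    # Merely defining the association must not resolve the target class name.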
ActiveRecord::Reflection::AssociationReflection.any_instance.expects(:class_name).never
class_eval(<<-EOF, __FILE__, __LINE__ + 1)
class DeleteAllModel < ActiveRecord::Base
has_many :nonentities, :dependent => :delete_all
end
EOF
end
def test_defining_has_many_association_with_nullify_dependency_lazily_evaluates_target_class
ActiveRecord::Reflection::AssociationReflection.any_instance.expects(:class_name).never
class_eval(<<-EOF, __FILE__, __LINE__ + 1)
class NullifyModel < ActiveRecord::Base
has_many :nonentities, :dependent => :nullify
end
EOF
end
def test_attributes_are_being_set_when_initialized_from_has_many_association_with_where_clause
new_comment = posts(:welcome).comments.where(:body => "Some content").build
assert_equal new_comment.body, "Some content"
end
def test_attributes_are_being_set_when_initialized_from_has_many_association_with_multiple_where_clauses
new_comment = posts(:welcome).comments.where(:body => "Some content").where(:type => 'SpecialComment').build
assert_equal new_comment.body, "Some content"
assert_equal new_comment.type, "SpecialComment"
assert_equal new_comment.post_id, posts(:welcome).id
end
def test_include_method_in_has_many_association_should_return_true_for_instance_added_with_build
post = Post.new
comment = post.comments.build
assert_equal true, post.comments.include?(comment)
end
def test_load_target_respects_protected_attributes
topic = Topic.create!
reply = topic.replies.create(:title => "reply 1")
reply.approved = false
reply.save!
# Save with a different object instance, so the instance that's still held
    # in topic.replies doesn't know about the changed attribute.
reply2 = Reply.find(reply.id)
reply2.approved = true
reply2.save!
# Force loading the collection from the db. This will merge the existing
# object (reply) with what gets loaded from the db (which includes the
# changed approved attribute). approved is a protected attribute, so if mass
# assignment is used, it won't get updated and will still be false.
first = topic.replies.to_a.first
assert_equal reply.id, first.id
assert_equal true, first.approved?
end
def test_to_a_should_dup_target
ary = topics(:first).replies.to_a
target = topics(:first).replies.target
assert_not_equal target.object_id, ary.object_id
end
def test_merging_with_custom_attribute_writer
bulb = Bulb.new(:color => "red")
assert_equal "RED!", bulb.color
car = Car.create!
car.bulbs << bulb
assert_equal "RED!", car.bulbs.to_a.first.color
end
def test_abstract_class_with_polymorphic_has_many
post = SubStiPost.create! :title => "fooo", :body => "baa"
tagging = Tagging.create! :taggable => post
assert_equal [tagging], post.taggings
end
def test_with_polymorphic_has_many_with_custom_columns_name
post = Post.create! :title => 'foo', :body => 'bar'
image = Image.create!
post.images << image
assert_equal [image], post.images
end
def test_build_with_polymorphic_has_many_does_not_allow_to_override_type_and_id
welcome = posts(:welcome)
tagging = welcome.taggings.build(:taggable_id => 99, :taggable_type => 'ShouldNotChange')
assert_equal welcome.id, tagging.taggable_id
assert_equal 'Post', tagging.taggable_type
end
def test_dont_call_save_callbacks_twice_on_has_many
firm = companies(:first_firm)
contract = firm.contracts.create!
assert_equal 1, contract.hi_count
assert_equal 1, contract.bye_count
end
def test_association_attributes_are_available_to_after_initialize
car = Car.create(:name => 'honda')
bulb = car.bulbs.build
assert_equal car.id, bulb.attributes_after_initialize['car_id']
end
def test_attributes_are_set_when_initialized_from_has_many_null_relationship
car = Car.new name: 'honda'
bulb = car.bulbs.where(name: 'headlight').first_or_initialize
assert_equal 'headlight', bulb.name
end
def test_attributes_are_set_when_initialized_from_polymorphic_has_many_null_relationship
post = Post.new title: 'title', body: 'bar'
tag = Tag.create!(name: 'foo')
tagging = post.taggings.where(tag: tag).first_or_initialize
assert_equal tag.id, tagging.tag_id
assert_equal 'Post', tagging.taggable_type
end
def test_replace
car = Car.create(:name => 'honda')
bulb1 = car.bulbs.create
bulb2 = Bulb.create
assert_equal [bulb1], car.bulbs
car.bulbs.replace([bulb2])
assert_equal [bulb2], car.bulbs
assert_equal [bulb2], car.reload.bulbs
end
def test_replace_returns_target
car = Car.create(:name => 'honda')
bulb1 = car.bulbs.create
bulb2 = car.bulbs.create
bulb3 = Bulb.create
assert_equal [bulb1, bulb2], car.bulbs
result = car.bulbs.replace([bulb3, bulb1])
assert_equal [bulb1, bulb3], car.bulbs
assert_equal [bulb1, bulb3], result
end
def test_collection_association_with_private_kernel_method
firm = companies(:first_firm)
assert_equal [accounts(:signals37)], firm.accounts.open
end
test "first_or_initialize adds the record to the association" do
firm = Firm.create! name: 'omg'
client = firm.clients_of_firm.first_or_initialize
assert_equal [client], firm.clients_of_firm
end
test "first_or_create adds the record to the association" do
firm = Firm.create! name: 'omg'
firm.clients_of_firm.load_target
client = firm.clients_of_firm.first_or_create name: 'lol'
assert_equal [client], firm.clients_of_firm
assert_equal [client], firm.reload.clients_of_firm
end
test "delete_all, when not loaded, doesn't load the records" do
post = posts(:welcome)
assert post.taggings_with_delete_all.count > 0
assert !post.taggings_with_delete_all.loaded?
# 2 queries: one DELETE and another to update the counter cache
assert_queries(2) do
post.taggings_with_delete_all.delete_all
end
end
test "has many associations on new records use null relations" do
post = Post.new
assert_no_queries(ignore_none: false) do
assert_equal [], post.comments
assert_equal [], post.comments.where(body: 'omg')
assert_equal [], post.comments.pluck(:body)
assert_equal 0, post.comments.sum(:id)
assert_equal 0, post.comments.count
end
end
test "collection proxy respects default scope" do
author = authors(:mary)
assert !author.first_posts.exists?
end
test "association with extend option" do
post = posts(:welcome)
assert_equal "lifo", post.comments_with_extend.author
assert_equal "hello", post.comments_with_extend.greeting
end
test "association with extend option with multiple extensions" do
post = posts(:welcome)
assert_equal "lifo", post.comments_with_extend_2.author
assert_equal "hello", post.comments_with_extend_2.greeting
end
test "delete record with complex joins" do
david = authors(:david)
post = david.posts.first
post.type = 'PostWithSpecialCategorization'
post.save
categorization = post.categorizations.first
categorization.special = true
categorization.save
assert_not_equal [], david.posts_with_special_categorizations
david.posts_with_special_categorizations = []
assert_equal [], david.posts_with_special_categorizations
end
test "does not duplicate associations when used with natural primary keys" do
speedometer = Speedometer.create!(id: '4')
    speedometer.minivans.create!(minivan_id: 'a-van-red', name: 'a van', color: 'red')
assert_equal 1, speedometer.minivans.to_a.size, "Only one association should be present:\n#{speedometer.minivans.to_a}"
assert_equal 1, speedometer.reload.minivans.to_a.size
end
test "can unscope the default scope of the associated model" do
car = Car.create!
bulb1 = Bulb.create! name: "defaulty", car: car
bulb2 = Bulb.create! name: "other", car: car
assert_equal [bulb1], car.bulbs
assert_equal [bulb1, bulb2], car.all_bulbs.sort_by(&:id)
end
test 'unscopes the default scope of associated model when used with include' do
car = Car.create!
bulb = Bulb.create! name: "other", car: car
assert_equal bulb, Car.find(car.id).all_bulbs.first
assert_equal bulb, Car.includes(:all_bulbs).find(car.id).all_bulbs.first
end
test "raises RecordNotDestroyed when replaced child can't be destroyed" do
car = Car.create!
original_child = FailedBulb.create!(car: car)
assert_raise(ActiveRecord::RecordNotDestroyed) do
car.failed_bulbs = [FailedBulb.create!]
end
assert_equal [original_child], car.reload.failed_bulbs
end
test 'updates counter cache when default scope is given' do
topic = DefaultRejectedTopic.create approved: true
assert_difference "topic.reload.replies_count", 1 do
topic.approved_replies.create!
end
end
test 'dangerous association name raises ArgumentError' do
[:errors, 'errors', :save, 'save'].each do |name|
assert_raises(ArgumentError, "Association #{name} should not be allowed") do
Class.new(ActiveRecord::Base) do
has_many name
end
end
end
end
test 'passes custom context validation to validate children' do
pirate = FamousPirate.new
pirate.famous_ships << ship = FamousShip.new
assert pirate.valid?
assert_not pirate.valid?(:conference)
assert_equal "can't be blank", ship.errors[:name].first
end
test 'association with instance dependent scope' do
bob = authors(:bob)
Post.create!(title: "signed post by bob", body: "stuff", author: authors(:bob))
Post.create!(title: "anonymous post", body: "more stuff", author: authors(:bob))
assert_equal ["misc post by bob", "other post by bob",
"signed post by bob"], bob.posts_with_signature.map(&:title).sort
assert_equal [], authors(:david).posts_with_signature.map(&:title)
end
  test 'associations autosave when object is already persisted' do
bulb = Bulb.create!
tyre = Tyre.create!
car = Car.create! do |c|
c.bulbs << bulb
c.tyres << tyre
end
assert_equal 1, car.bulbs.count
assert_equal 1, car.tyres.count
end
test 'associations replace in memory when records have the same id' do
bulb = Bulb.create!
car = Car.create!(bulbs: [bulb])
new_bulb = Bulb.find(bulb.id)
new_bulb.name = "foo"
car.bulbs = [new_bulb]
assert_equal "foo", car.bulbs.first.name
end
test 'in memory replacement executes no queries' do
bulb = Bulb.create!
car = Car.create!(bulbs: [bulb])
new_bulb = Bulb.find(bulb.id)
assert_no_queries do
car.bulbs = [new_bulb]
end
end
test 'in memory replacements do not execute callbacks' do
raise_after_add = false
klass = Class.new(ActiveRecord::Base) do
self.table_name = :cars
has_many :bulbs, after_add: proc { raise if raise_after_add }
def self.name
"Car"
end
end
bulb = Bulb.create!
car = klass.create!(bulbs: [bulb])
new_bulb = Bulb.find(bulb.id)
raise_after_add = true
assert_nothing_raised do
car.bulbs = [new_bulb]
end
end
test 'in memory replacements sets inverse instance' do
bulb = Bulb.create!
car = Car.create!(bulbs: [bulb])
new_bulb = Bulb.find(bulb.id)
car.bulbs = [new_bulb]
assert_same car, new_bulb.car
end
test 'in memory replacement maintains order' do
first_bulb = Bulb.create!
second_bulb = Bulb.create!
car = Car.create!(bulbs: [first_bulb, second_bulb])
same_bulb = Bulb.find(first_bulb.id)
car.bulbs = [second_bulb, same_bulb]
assert_equal [first_bulb, second_bulb], car.bulbs
end
end
| 33.265714 | 180 | 0.741003 |
7981b7cb356db855292f450726525664e29a79a5 | 137 | json.array!(@users) do |user|
json.extract! user, :id, :username, :avatar_url, :biography
json.url user_url(user, format: :json)
end
| 27.4 | 61 | 0.70073 |
3911540459dd148c177eeeefb75cc5b11abcbbee | 140 | module Serverspec
module Helper
module Debian
def commands
Serverspec::Commands::Debian.new
end
end
end
end
| 14 | 40 | 0.642857 |
d52d2b681d712015303a3af9e1d6de262dadd557 | 3,800 | class Timecop
# A data class for carrying around "time movement" objects. Makes it easy to keep track of the time
# movements on a simple stack.
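  #
  # Illustrative usage sketch (an assumption for clarity — in practice Timecop
  # builds these items internally when Timecop.freeze / .travel / .scale push
  # onto the stack):
  #
  #   item = Timecop::TimeStackItem.new(:travel, Time.local(2005, 3, 4))
  #   item.year # => 2005
  #   item.time # => the mocked Time, shifted by the stored travel offset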
class TimeStackItem #:nodoc:
attr_reader :mock_type
def initialize(mock_type, *args)
raise "Unknown mock_type #{mock_type}" unless [:freeze, :travel, :scale].include?(mock_type)
@scaling_factor = args.shift if mock_type == :scale
@mock_type = mock_type
@time = parse_time(*args)
@time_was = Time.now_without_mock_time
@travel_offset = compute_travel_offset
end
def year
time.year
end
def month
time.month
end
def day
time.day
end
def hour
time.hour
end
def min
time.min
end
def sec
time.sec
end
def utc_offset
time.utc_offset
end
def travel_offset
@travel_offset
end
def scaling_factor
@scaling_factor
end
def time(time_klass = Time) #:nodoc:
if @time.respond_to?(:in_time_zone)
time = time_klass.at(@time.dup.utc.to_r)
else
time = time_klass.at(@time)
end
if travel_offset.nil?
time
elsif scaling_factor.nil?
time_klass.at((Time.now_without_mock_time + travel_offset).to_f)
else
time_klass.at(scaled_time)
end
end
def scaled_time
(@time + (Time.now_without_mock_time - @time_was) * scaling_factor).to_f
end
def date(date_klass = Date)
date_klass.jd(time.__send__(:to_date).jd)
end
def datetime(datetime_klass = DateTime)
if Float.method_defined?(:to_r)
if !sec.zero?
fractions_of_a_second = time.to_f % 1
datetime_klass.new(year, month, day, hour, min, (fractions_of_a_second + sec), utc_offset_to_rational(utc_offset))
else
datetime_klass.new(year, month, day, hour, min, sec, utc_offset_to_rational(utc_offset))
end
else
datetime_klass.new(year, month, day, hour, min, sec, utc_offset_to_rational(utc_offset))
end
end
private
def rational_to_utc_offset(rational)
((24.0 / rational.denominator) * rational.numerator) * (60 * 60)
end
def utc_offset_to_rational(utc_offset)
Rational(utc_offset, 24 * 60 * 60)
end
def parse_time(*args)
arg = args.shift
if arg.is_a?(Time)
arg
elsif Object.const_defined?(:DateTime) && arg.is_a?(DateTime)
time_klass.at(arg.to_time.to_f).getlocal
elsif Object.const_defined?(:Date) && arg.is_a?(Date)
time_klass.local(arg.year, arg.month, arg.day, 0, 0, 0)
elsif args.empty? && arg.kind_of?(Integer)
Time.now + arg
elsif arg.nil?
Time.now
else
if arg.is_a?(String) && Time.respond_to?(:parse)
Time.parse(arg)
else
# we'll just assume it's a list of y/m/d/h/m/s
year = arg || 2000
month = args.shift || 1
day = args.shift || 1
hour = args.shift || 0
minute = args.shift || 0
second = args.shift || 0
time_klass.local(year, month, day, hour, minute, second)
end
end
end
def compute_travel_offset
return nil if mock_type == :freeze
time - Time.now_without_mock_time
end
def times_are_equal_within_epsilon t1, t2, epsilon_in_seconds
(t1 - t2).abs < epsilon_in_seconds
end
def time_klass
Time.respond_to?(:zone) && Time.zone ? Time.zone : Time
end
end
end
| 26.950355 | 126 | 0.571316 |
accdc7b3ec3d3b49506557521e89b0cdf100755a | 1,157 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'pundit/resources/version'
Gem::Specification.new do |spec|
spec.name = "pundit-resources"
spec.version = Pundit::Resources::VERSION
spec.authors = ["Ross Penman", "Sean Devine"]
spec.email = ["[email protected]", "[email protected]"]
spec.summary = %q{Integrate JSONAPI::Resources with Pundit}
spec.homepage = "https://github.com/togglepro/pundit-resources"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency "activesupport"
spec.add_dependency "jsonapi-resources"
spec.add_dependency "pundit"
spec.add_dependency "rails", ">= 4.2.1", "< 5.2"
spec.add_development_dependency "bundler", "~> 1.11"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec-rails", '>= 3.5.0.beta3', '< 4.0.0'
end
| 38.566667 | 104 | 0.654278 |
b9ce11c33dbd0825b984d1e0f90c183f8001c44a | 1,642 | # frozen_string_literal: true
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'boletosimples/version'
Gem::Specification.new do |gem|
gem.name = 'boletosimples'
gem.version = BoletoSimples::VERSION
gem.authors = ['Kivanio Barbosa', 'Rafael Lima', 'Thiago Belem']
gem.email = ['[email protected]', '[email protected]', '[email protected]']
gem.description = 'Boleto Simples API wrapper.'
gem.summary = 'Boleto Simples API wrapper.'
gem.homepage = 'https://github.com/BoletoSimples/boletosimples-ruby'
gem.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(spec|features)/})
end
gem.test_files = `git ls-files -- {spec,features}/*`.split("\n")
gem.require_paths = ['lib']
  # Gems that must be installed for boletosimples to work
gem.add_dependency 'faraday-http-cache', '~> 2.2.0'
gem.add_dependency 'faraday_middleware', '~> 1.0'
gem.add_dependency 'her', '~> 1.1'
# Gems that must be installed for boletosimples to compile and build
gem.add_development_dependency 'fuubar'
gem.add_development_dependency 'rake'
gem.add_development_dependency 'rspec'
gem.add_development_dependency 'rubocop'
gem.add_development_dependency 'rubocop-performance'
gem.add_development_dependency 'rubocop-rake'
gem.add_development_dependency 'rubocop-rspec'
gem.add_development_dependency 'solargraph'
gem.add_development_dependency 'stub_env'
gem.add_development_dependency 'test-prof'
gem.add_development_dependency 'vcr'
gem.add_development_dependency 'webmock'
end
| 39.095238 | 95 | 0.725944 |
e254fb4c68b21ad13164fa8b10e3bfeac7cc19c3 | 638 | class CreateMonsters < ActiveRecord::Migration[5.2]
def change
create_table :monsters do |t|
t.string :name
t.string :monster_type
t.integer :level, :default => 1
t.integer :exp_provide, :default => 100
t.integer :max_hp, :default => 1000
t.integer :hp, :default => 200
t.integer :atk, :default => 20
t.integer :def, :default => 20
t.integer :x, :default => 350
t.integer :y, :default => 350
t.integer :population, :default => 0
t.integer :population_cap, :default => 3
t.integer :respawn_cooldown, :default => 3000
t.timestamps
end
end
end
| 29 | 51 | 0.60815 |
ff8714f7b40f9499e75d4ed7a2c8e75f5a70fbda | 2,250 | class Libcapn < Formula
desc "C library to send push notifications to Apple devices"
homepage "https://web.archive.org/web/20181220090839/libcapn.org/"
license "MIT"
revision 1
head "https://github.com/adobkin/libcapn.git"
stable do
url "https://github.com/adobkin/libcapn/archive/v2.0.0.tar.gz"
sha256 "6a0d786a431864178f19300aa5e1208c6c0cbd2d54fadcd27f032b4f3dd3539e"
resource "jansson" do
url "https://github.com/akheron/jansson.git",
revision: "8f067962f6442bda65f0a8909f589f2616a42c5a"
end
end
bottle do
sha256 "e355824f9490a5bb90964a7b5bf4b69735ebe72560bf112e2f083111ca31550e" => :big_sur
sha256 "67b634beae31705b6664702473cb42a686c50d84f4d0ec530bbe4e360c292dba" => :catalina
sha256 "3b4b1f331e7e79c6a99826c5ffd385df3f199a7d72c897e9fd31150be26303cb" => :mojave
sha256 "a3cd6c452f96c9914f41fe22c1c0b5518c282569dffcebe7d6f38783ce2fb4d1" => :high_sierra
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "[email protected]"
# Compatibility with OpenSSL 1.1
# Original: https://github.com/adobkin/libcapn/pull/46.diff?full_index=1
patch do
url "https://github.com/adobkin/libcapn/commit/d5e7cd219b7a82156de74d04bc3668a07ec96629.patch?full_index=1"
sha256 "d027dc78f490c749eb04c36001d28ce6296c2716325f48db291ce8e62d56ff26"
end
patch do
url "https://github.com/adobkin/libcapn/commit/5fde3a8faa6ce0da0bfe67834bec684a9c6fc992.patch?full_index=1"
sha256 "caa70babdc4e028d398e844df461f97b0dc192d5c6cc5569f88319b4fcac5ff7"
end
def install
# head gets jansson as a git submodule
(buildpath/"src/third_party/jansson").install resource("jansson") if build.stable?
system "cmake", ".", "-DOPENSSL_ROOT_DIR=#{Formula["[email protected]"].opt_prefix}",
*std_cmake_args
system "make", "install"
pkgshare.install "examples"
end
test do
system ENV.cc, pkgshare/"examples/send_push_message.c",
"-o", "send_push_message",
"-I#{Formula["[email protected]"].opt_include}",
"-L#{lib}/capn", "-lcapn"
output = shell_output("./send_push_message", 255)
assert_match "unable to use specified PKCS12 file (errno: 9012)", output
end
end
| 38.793103 | 111 | 0.732 |
9167e61a423a0ca499dd6c7f48e4ef2234533785 | 1,489 | module GrapeV0_14_0
module Validations
class Base
attr_reader :attrs
# Creates a new Validator from options specified
# by a +requires+ or +optional+ directive during
# parameter definition.
# @param attrs [Array] names of attributes to which the Validator applies
# @param options [Object] implementation-dependent Validator options
# @param required [Boolean] attribute(s) are required or optional
# @param scope [ParamsScope] parent scope for this Validator
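      #
      # Illustrative sketch of a custom validator built on this class (the
      # OddNumberValidator name and the params DSL usage below are assumptions,
      # not something defined in this file):
      #
      #   class OddNumberValidator < GrapeV0_14_0::Validations::Base
      #     def validate_param!(attr_name, params)
      #       return if params[attr_name].to_i.odd?
      #       fail GrapeV0_14_0::Exceptions::Validation,
      #            params: [@scope.full_name(attr_name)], message: 'must be odd'
      #     end
      #   end
      #
      # Via .inherited/.convert_to_short_name below it would register as
      # `odd_number`, usable as `requires :count, odd_number: true`.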
def initialize(attrs, options, required, scope)
@attrs = Array(attrs)
@option = options
@required = required
@scope = scope
end
def validate!(params)
attributes = AttributesIterator.new(self, @scope, params)
attributes.each do |resource_params, attr_name|
if @required || (resource_params.respond_to?(:key?) && resource_params.key?(attr_name))
validate_param!(attr_name, resource_params)
end
end
end
def self.convert_to_short_name(klass)
ret = klass.name.gsub(/::/, '/')
.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
.gsub(/([a-z\d])([A-Z])/, '\1_\2')
.tr('-', '_')
.downcase
File.basename(ret, '_validator')
end
def self.inherited(klass)
short_name = convert_to_short_name(klass)
Validations.register_validator(short_name, klass)
end
end
end
end
| 33.088889 | 97 | 0.60779 |
bb156aa91b1fd6c847902aa00e1ff33b66f0d7eb | 181 | Rails.application.routes.draw do
resources :posts
get 'welcome/index'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
| 25.857143 | 101 | 0.767956 |
33f11ea3e63eb619df7aeeb6d1614bcad59799d6 | 168 | require 'multi_test'
# Now we create the world
MultiTest.extend_with_best_assertion_library(self)
# Now we execute a scenario and assert something
expect(1).to eq(1)
| 21 | 50 | 0.797619 |
79169d3ca6ae0feb036d6933aa6b8528961ac1b5 | 1,273 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2014 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
# add seeds specific for the test-environment here | 42.433333 | 91 | 0.76198 |
032dd6519893c6d13222bdea1bf17259f1107d9a | 264 | module Util
class Sigint
def initialize
@trapped = false
end
def trap
Signal.trap 'INT' do
puts 'intercept INT and stop ..'
@trapped = true
end
self
end
def trapped?
@trapped
end
end
end
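# Illustrative wiring (an assumption; not part of this file):
#
#   sigint = Util::Sigint.new.trap
#   do_one_unit_of_work until sigint.trapped?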
| 12.571429 | 40 | 0.534091 |
b917b898f86bc1c0b30cc58f22dfcf3fa59c8e6c | 338 | class CreateProducts < ActiveRecord::Migration[5.2]
def change
create_table :products do |t|
t.string :name, null: false
t.float :price, null: false
t.string :status, null: false, default: 'active'
t.references :category, foreign_key: true
t.integer :presentation
t.timestamps
end
end
end
| 24.142857 | 54 | 0.659763 |
876aab053434c756390fc05b2236a754312dc4ea | 6,440 | ##
# $Id: module.rb 12624 2011-05-15 23:51:53Z hdm $
##
module Msf
module RPC
class RPC_Module < RPC_Base
def rpc_exploits
{ "modules" => self.framework.exploits.keys }
end
def rpc_auxiliary
{ "modules" => self.framework.auxiliary.keys }
end
def rpc_payloads
{ "modules" => self.framework.payloads.keys }
end
def rpc_encoders
{ "modules" => self.framework.encoders.keys }
end
def rpc_nops
{ "modules" => self.framework.nops.keys }
end
def rpc_post
{ "modules" => self.framework.post.keys }
end
def rpc_info(mtype, mname)
m = _find_module(mtype,mname)
res = {}
res['name'] = m.name
res['description'] = m.description
res['license'] = m.license
res['filepath'] = m.file_path
res['version'] = m.version
res['rank'] = m.rank.to_i
res['references'] = []
m.references.each do |r|
res['references'] << [r.ctx_id, r.ctx_val]
end
res['authors'] = []
m.each_author do |a|
res['authors'] << a.to_s
end
if(m.type == "exploit")
res['targets'] = {}
m.targets.each_index do |i|
res['targets'][i] = m.targets[i].name
end
if (m.default_target)
res['default_target'] = m.default_target
end
end
if(m.type == "auxiliary")
res['actions'] = {}
m.actions.each_index do |i|
res['actions'][i] = m.actions[i].name
end
if (m.default_action)
res['default_action'] = m.default_action
end
end
res
end
def rpc_compatible_payloads(mname)
m = _find_module('exploit',mname)
res = {}
res['payloads'] = []
m.compatible_payloads.each do |k|
res['payloads'] << k[0]
end
res
end
def rpc_compatible_sessions(mname)
m = _find_module('post',mname)
res = {}
res['sessions'] = m.compatible_sessions
res
end
def rpc_target_compatible_payloads(mname, target)
m = _find_module('exploit',mname)
res = {}
res['payloads'] = []
m.datastore['TARGET'] = target.to_i
m.compatible_payloads.each do |k|
res['payloads'] << k[0]
end
res
end
def rpc_options(mtype, mname)
m = _find_module(mtype,mname)
res = {}
m.options.each_key do |k|
o = m.options[k]
res[k] = {
'type' => o.type,
'required' => o.required,
'advanced' => o.advanced,
'evasion' => o.evasion,
'desc' => o.desc
}
if(not o.default.nil?)
res[k]['default'] = o.default
end
if(o.enums.length > 1)
res[k]['enums'] = o.enums
end
end
res
end
def rpc_execute(mtype, mname, opts)
mod = _find_module(mtype,mname)
case mtype
when 'exploit'
_run_exploit(mod, opts)
when 'auxiliary'
_run_auxiliary(mod, opts)
when 'payload'
_run_payload(mod, opts)
when 'post'
_run_post(mod, opts)
end
end
def rpc_encode_formats
# Supported formats
Msf::Simple::Buffer.transform_formats + Msf::Util::EXE.to_executable_fmt_formats
end
def rpc_encode(data, encoder, options)
# Load supported formats
supported_formats = Msf::Simple::Buffer.transform_formats + Msf::Util::EXE.to_executable_fmt_formats
if (fmt = options['format'])
if not supported_formats.include?(fmt)
error(500, "Invalid Format: #{fmt}")
end
end
badchars = ''
if options['badchars']
badchars = options['badchars']
end
platform = nil
if options['platform']
platform = Msf::Module::PlatformList.transform(options['platform'])
end
arch = nil
if options['arch']
arch = options['arch']
end
ecount = 1
if options['ecount']
ecount = options['ecount'].to_i
end
exeopts = {
:inject => options['inject'],
:template => options['altexe'],
:template_path => options['exedir']
}
# If we were given addshellcode for a win32 payload,
# create a double-payload; one running in one thread, one running in the other
if options['addshellcode']
buf = Msf::Util::EXE.win32_rwx_exec_thread(buf,0,'end')
file = ::File.new(options['addshellcode'])
file.binmode
buf << file.read
file.close
end
enc = self.framework.encoders.create(encoder)
begin
# Imports options
enc.datastore.update(options)
raw = data.unpack("C*").pack("C*")
1.upto(ecount) do |iteration|
# Encode it up
raw = enc.encode(raw, badchars, nil, platform)
end
output = Msf::Util::EXE.to_executable_fmt(self.framework, arch, platform, raw, fmt, exeopts)
if not output
fmt ||= "ruby"
output = Msf::Simple::Buffer.transform(raw, fmt)
end
# How to warn?
#if exeopts[:fellback]
# $stderr.puts(OutError + "Warning: Falling back to default template: #{exeopts[:fellback]}")
#end
{ "encoded" => output.to_s }
rescue => e
error(500, "#{enc.refname} failed: #{e} #{e.backtrace}")
end
end
private
def _find_module(mtype,mname)
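	# e.g. mtype 'exploit' with mname 'windows/smb/some_module' (hypothetical)
	# becomes 'exploit/windows/smb/some_module' before the framework lookup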
if mname !~ /^(exploit|payload|nop|encoder|auxiliary|post)\//
mname = mtype + "/" + mname
end
mod = self.framework.modules.create(mname)
error(500, "Invalid Module") if not mod
mod
end
def _run_exploit(mod, opts)
s = Msf::Simple::Exploit.exploit_simple(mod, {
'Payload' => opts['PAYLOAD'],
'Target' => opts['TARGET'],
'RunAsJob' => true,
'Options' => opts
})
{
"job_id" => mod.job_id,
"uuid" => mod.uuid
}
end
def _run_auxiliary(mod, opts)
Msf::Simple::Auxiliary.run_simple(mod, {
'Action' => opts['ACTION'],
'RunAsJob' => true,
'Options' => opts
})
{
"job_id" => mod.job_id,
"uuid" => mod.uuid
}
end
def _run_post(mod, opts)
Msf::Simple::Post.run_simple(mod, {
'RunAsJob' => true,
'Options' => opts
})
{
"job_id" => mod.job_id,
"uuid" => mod.uuid
}
end
def _run_payload(mod, opts)
badchars = opts['BadChars'] || ''
fmt = opts['Format'] || 'raw'
force = opts['ForceEncode'] || false
template = opts['Template'] || nil
plat = opts['Platform'] || nil
keep = opts['KeepTemplateWorking'] || false
sled_size = opts['NopSledSize'].to_i || 0
iter = opts['Iterations'].to_i || 0
begin
res = Msf::Simple::Payload.generate_simple(mod, {
'BadChars' => badchars,
'Encoder' => opts['Encoder'],
'Format' => fmt,
'NoComment' => true,
'NopSledSize' => sled_size,
'Options' => opts,
'ForceEncode' => force,
'Template' => template,
'Platform' => plat,
'KeepTemplateWorking' => keep,
'Iterations' => iter
})
{ "payload" => res }
rescue ::Exception => e
error(500, "failed to generate: #{e.message}")
end
end
end
end
end
| 20.062305 | 102 | 0.623602 |
18af9a68e7f256dd14ab8be5f34582cef9a2558e | 3,711 | #---
# Excerpted from "Everyday Scripting in Ruby"
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/bmsft for more book information.
#---
require 'test/unit'
require 'exercise-7b'
class SubversionRepositoryTests < Test::Unit::TestCase
def setup
@repository = SubversionRepository.new('root')
end
def test_date
assert_equal('2005-03-04',
@repository.date(Time.local(2005, 3, 4)))
end
def test_subversion_log_can_have_no_changes
assert_equal(0, @repository.extract_change_count_from("------------------------------------------------------------------------\n"))
end
def test_subversion_log_with_changes
assert_equal(2, @repository.extract_change_count_from("------------------------------------------------------------------------\nr2531 | bem | 2005-07-01 01:11:44 -0500 (Fri, 01 Jul 2005) | 1 line\n\nrevisions up through ch 3 exercises\n------------------------------------------------------------------------\nr2524 | bem | 2005-06-30 18:45:59 -0500 (Thu, 30 Jun 2005) | 1 line\n\nresults of read-through; including renaming mistyping to snapshots\n------------------------------------------------------------------------\n"))
end
end
class FormatterNormalUseTests < Test::Unit::TestCase
def setup
formatter = Formatter.new
formatter.use_date('1960-02-19')
formatter.use_subsystem_with_change_count('sub1', 30)
formatter.use_subsystem_with_change_count('sub2', 39)
@output_lines = formatter.output.split("\n") #(1)
end
def test_header_comes_before_subsystem_lines #(2)
assert_match(/Changes since 1960-02-19/, @output_lines[0])
end
def test_both_lines_are_present_in_descending_change_count_order #(3)
assert_match(/sub2.*39/, @output_lines[1])
assert_match(/sub1.*30/, @output_lines[2])
end
def test_nothing_else_is_present #(4)
assert_equal(3, @output_lines.size)
end
end
class FormatterPrivateMethodTests < Test::Unit::TestCase
def setup
@formatter = Formatter.new
end
def test_header_format
@formatter.use_date('2005-08-05')
assert_equal("Changes since 2005-08-05:",
@formatter.header)
end
def test_normal_subsystem_line_format
assert_equal(' audit ********* (45)',
@formatter.subsystem_line("audit", 45))
end
def test_asterisks_for_divides_by_five
assert_equal('****', @formatter.asterisks_for(20))
end
def test_asterisks_for_rounds_up_and_down
assert_equal('****', @formatter.asterisks_for(18))
assert_equal('***', @formatter.asterisks_for(17))
end
def test_churn_line_to_int_extracts_parenthesized_change_count
assert_equal(19, @formatter.churn_line_to_int(" churn2 **** (19)"))
assert_equal(9, @formatter.churn_line_to_int(" churn ** (9)"))
end
def test_lines_are_ordered_by_descending_change_count
@formatter.use_subsystem_with_change_count("a count matters for sorting, not a name", 1)
@formatter.use_subsystem_with_change_count("inventory", 0)
@formatter.use_subsystem_with_change_count("churn", 12)
expected = [ " churn ** (12)",
"all that really matters is the number in parens - (1)",
" inventory (0)" ]
actual = @formatter.lines_ordered_by_descending_change_count
assert_match(/churn/, actual[0])
assert_match(/a count matters/, actual[1])
assert_match(/inventory/, actual[2])
end
end
class ChurnTests < Test::Unit::TestCase
def test_month_before_is_28_days
assert_equal(Time.local(2005, 1, 1),
month_before(Time.local(2005, 1, 29)))
end
end
| 32.269565 | 531 | 0.643223 |
26a634615e78b643b1e842c19658f5dafbfa79cf | 1,470 | class RoastersController < ApplicationController
get '/roasters' do
if logged_in?
@roasters = Roaster.all
erb :'/roasters/index'
else
redirect '/login'
end
end
get '/roasters/new' do
if logged_in?
erb :'/roasters/new'
else
redirect '/login'
end
end
post '/roasters' do
@roaster = Roaster.find_or_initialize_by(name: normalize(params[:name]))
@roaster.save
flash[:message] = "You successfully posted a new Coffee Roaster!"
redirect "/roasters/#{@roaster.slug}"
end
get '/roasters/:slug/edit' do
if logged_in?
@roaster = Roaster.find_by_slug(params[:slug])
@roasts = Coffee.roasts
erb :'/roasters/edit'
else
redirect '/login'
end
end
patch '/roasters/:slug' do
@roaster = Roaster.find_by_slug(params[:slug])
coffee = Coffee.find_or_initialize_by(name: normalize(params[:coffee][:name]), roast: params[:coffee][:roast])
@roaster.coffees << coffee
flash[:message] = "You successfully added a Coffee to #{@roaster.name}!"
redirect "/roasters/#{@roaster.slug}"
end
get '/roasters/:slug' do
if logged_in?
@roaster = Roaster.find_by_slug(params[:slug])
erb :'/roasters/show'
else
redirect '/login'
end
end
end | 28.269231 | 118 | 0.559184 |
03d5abf231d2da524b28df327e3f98d5963f919c | 1,492 | require_relative 'material'
module Raylib
class Model
# Model loading/unloading functions
ray_alias_static :LoadModel, :load # Load model from files (mesh and material)
ray_alias_object :UnloadModel, :unload # Unload model from memory (RAM and/or VRAM)
# Model drawing functions
ray_alias_object :DrawModel, :draw # Draw a model (with texture if set)
ray_alias_object :DrawModelEx, :draw_ex # Draw a model with extended parameters
ray_alias_object :DrawModelWires, :draw_wires # Draw a model wires (with texture if set)
ray_alias_object :DrawModelWiresEx, :draw_wires_ex # Draw a model wires (with texture if set) with extended parameters
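    # Illustrative usage sketch (the file name, position and tint below are
    # assumptions, not values defined in this binding):
    #
    #   model = Raylib::Model.load('resources/castle.obj')
    #   model.draw(position, 1.0, tint) # position/tint built elsewhere
    #   model.unload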
# TODO: There must be a better way of doing this…
def meshes
return @meshes if @meshes
@meshes = mesh_count.times.map do |i|
Mesh.new(self[:meshes] + i * Mesh.size)
end
end
# TODO: There must be a better way of doing this…
def materials
return @materials if @materials
@materials = material_count.times.map do |i|
Material.new(self[:materials] + i * Material.size)
end
end
# TODO: There must be a better way of doing this…
def bones
return @bones if @bones
@bones = bone_count.times.map do |i|
BoneInfo.new(self[:bones] + i * BoneInfo.size)
end
end
end
end
| 34.697674 | 135 | 0.618633 |
1a7067c9ea58c476fb1ea5fbc256670c31616d1b | 1,625 | class RatingsController < ApplicationController
def create
respond_to do |format|
@post = Post.find(params[:post_id])
if @post.topic.locked?
format.js { render js: '' }
else
        if (!@post.ratings.find_by(user_id: current_user.id))
@rating = @post.ratings.build(rating_params)
current_user.ratings << @rating
if @rating.save
path = topic_path(@post.topic, anchor: @post.id, page: find_post_page(@post))
format.html { redirect_to path }
format.js
else
format.html { render 'topics/show' }
end
end
end
end
end
def update
respond_to do |format|
@rating = Rating.find_by(id: params[:id])
if @rating.post.topic.locked?
format.js { render js: '' }
else
if @rating.update(rating_params)
path = topic_path(@rating.post.topic, anchor: @rating.post.id, page: find_post_page(@rating.post))
format.html { redirect_to path}
format.js
else
format.html { render 'topics/show' }
end
end
end
end
def destroy
respond_to do |format|
@rating = Rating.find_by(id: params[:id])
if @rating.post.topic.locked?
format.js { render js: '' }
else
post = @rating.post
@rating.destroy
path = topic_path(post.topic, anchor: post.id, page: find_post_page(post))
format.html { redirect_to path }
format.js
end
end
end
private
def rating_params
params.require(:rating).permit(:value)
end
end
| 26.209677 | 108 | 0.579077 |
26e91f62cfc7aff822941ce463ec75579cb15ba1 | 3,255 | # frozen_string_literal: true
require 'bitcoin'
require 'celluloid/current'
require_relative './stream_parser'
# NOTE(yu): Celluloid is unmaintained
#
# While really simple to use (literally just include the module, and add a single `async` call),
# the library quickly crashed with
#
# `E, [2018-09-24T00:35:59.347720 #83602] ERROR -- : Couldn't cleanly terminate all actors in 10 seconds!`
#
# I checked and it looked like the library is unmaintained. What a shame, seems a lot better than
# EventMachine.
#
# The `Engine` and `Peer` class are copy/pasta'd from `tcp.rb`
def log(str, tags = [])
out = str
if tags.count > 0
tag_str = tags.map { |t| "[#{t}]" }.join(" ")
out = "#{tag_str} #{out}"
end
puts "#{Time.now} #{out}"
end
class Engine_
attr_accessor :channel
def handle(messages)
messages.each do |m|
log "handling <- message: (#{m.type}, #{m.payload})"
send(:"handle_#{m.type}", m.payload)
end
end
def handle_version(version)
log "handling <- version: #{version.fields}"
end
def handle_verack(_)
log "handling <- verack"
# start = ("\x00" * 32)
# stop = ("\x00" * 32)
# pkt = Bitcoin::Protocol.pkt("getblocks", "\x00" + start + stop )
# log "-> getblocks (#{start}, #{stop})"
# channel.puts pkt
end
def handle_ping(nonce)
log "handling <- ping with nonce: #{nonce}"
pong = Bitcoin::Protocol.pong_pkt(nonce)
log "-> pong: #{pong}"
channel.puts pong
end
def handle_alert(_)
log "handling <- alert"
end
def handle_addr(address)
log "handling <- addr: #{address}"
end
def handle_getheaders(headers)
log "handling <- getheaders: #{headers}"
end
def handle_inv(inv)
log "handling <- inv: #{inv}"
end
def send_version(ip, port)
log "shaking hands..."
version = Bitcoin::Protocol::Version.new(
last_block: 127_953,
from: "127.0.0.1:8333",
to: "#{ip}:#{port}",
user_agent: "/rbtc:0.0.1/",
relay: true
)
log "-> version: #{version.fields}"
channel.puts version.to_pkt
end
# def method_missing(m, *args, &blk)
# if m.to_s.start_with?("handle")
# log "No handler defined: #{m}"
# else
# raise "BOOM: (#{m}, #{args}, #{blk}"
# end
#
# super
# end
end
class Peer_
include Celluloid
attr_reader :ip, :port
def initialize(ip, engine)
@socket = connect(ip)
@stream_parser = StreamParser.new
@engine = engine.tap { |e| e.channel = @socket }
end
def run_loop
engine.send_version(ip, port)
loop do
recv
end
end
private
attr_reader :socket, :stream_parser, :engine
def recv(_type = nil)
socket.gets.tap do |data|
if data.nil?
# log "<- nil"
else
log "<- data: #{data}"
messages = stream_parser.parse(data)
engine.handle(messages) unless messages.empty?
end
end
end
def connect(ip)
port = Bitcoin.network[:default_port]
log "Connecting to peer @ #{ip}:#{port}"
TCPSocket.open(ip, port).tap do
@ip = ip
@port = port
log "Connected!"
end
end
end
ip = "178.33.136.164" # Fast guy
engine = Engine_.new
peer = Peer_.new(ip, engine)
peer.async.run_loop
puts "DONE"
| 21.136364 | 106 | 0.60768 |
266335fa37b3d4b41b483f9d98eb612fa2835778 | 1,279 | $:.push File.join(File.dirname(__FILE__), 'lib')
require 'concurrent/version'
Gem::Specification.new do |s|
s.name = 'concurrent-ruby-ext'
s.version = Concurrent::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Jerry D'Antonio", 'The Ruby Concurrency Team']
s.email = ['[email protected]', '[email protected]']
s.homepage = 'http://www.concurrent-ruby.com'
s.summary = 'C extensions to optimize concurrent-ruby under MRI.'
s.license = 'MIT'
s.date = Time.now.strftime('%Y-%m-%d')
s.description = <<-EOF
C extensions to optimize the concurrent-ruby gem when running under MRI.
Please see http://concurrent-ruby.com for more information.
EOF
s.files = Dir['ext/**/*.{h,c,cpp}']
s.files += [
'lib/concurrent/atomic_reference/concurrent_update_error.rb',
'lib/concurrent/atomic_reference/direct_update.rb',
'lib/concurrent/atomic_reference/numeric_cas_wrapper.rb',
]
s.extra_rdoc_files = Dir['README*', 'LICENSE*', 'CHANGELOG*']
s.require_paths = ['lib', 'ext']
s.extensions = 'ext/concurrent/extconf.rb'
s.required_ruby_version = '>= 1.9.3'
s.add_runtime_dependency 'concurrent-ruby', "~> #{Concurrent::VERSION}"
end
| 36.542857 | 82 | 0.658327 |
03ed37a5703237596ccbafa00ae009e6cab2d027 | 505 | cask "eddie" do
version "2.18.9"
sha256 "4b6e233496f8d63d1cbab01a2f24e46c480741746c1b1b6e7468442d41c648c1"
# eddie.website/ was verified as official when first introduced to the cask
url "https://eddie.website/download/?platform=macos&arch=x64&ui=ui&format=disk.dmg&version=#{version}"
appcast "https://github.com/AirVPN/Eddie/releases.atom"
name "Air VPN"
name "Eddie"
desc "OpenVPN UI"
homepage "https://airvpn.org/macos/"
app "Eddie.app"
uninstall quit: "com.eddie.client"
end
| 29.705882 | 104 | 0.744554 |
ed7a8a417b8c2a6dd7d56cb88ccae81147dc2606 | 354 | cask 'melodics' do
version '2.0.2677'
sha256 '926241af674f116aeecaa2f298185006ec99e0c5951f7a6b37b4f1b842bd26f1'
url "https://web-cdn.melodics.com/download/MelodicsV#{version.major}.dmg"
appcast "https://api.melodics.com/download/osxupdatescastv#{version.major}.xml"
name 'Melodics'
homepage 'https://melodics.com/'
app 'Melodics.app'
end
| 29.5 | 81 | 0.765537 |
214f5a090bede014937d2c46c81d95cdb5a9f64b | 5,465 | # encoding: utf-8
require_relative './data_import_entity_builder'
=begin
= DataImportBadgeBuilder
- Goggles framework vers.: 6.075
- author: Steve A.
Specialized +DataImportEntityBuilder+ for searching (or adding brand new)
Badge entity rows.
=end
class DataImportBadgeBuilder < DataImportEntityBuilder
# Searches for an existing Badge given the parameters, or it adds a new one,
# if no matches are found.
#
# == Returns
# +nil+ in case of invalid parameters
# #result_id as:
# - positive (#id) for a freshly added row into DataImportBadge;
  # - negative (- #id) for a matching existing or committed row in Badge;
# - 0 on error/unable to process.
#
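  # Illustrative call (all argument values are hypothetical):
  #
  #   builder = DataImportBadgeBuilder.build_from_parameters(
  #     data_import_session, '04204788', season, team, swimmer,
  #     category_type, entry_time_type
  #   )
  #   builder.result_id   # => e.g. -123 when an existing Badge row was matched
  #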
def self.build_from_parameters( data_import_session, badge_code, season, team, swimmer,
category_type, entry_time_type )
# DEBUG
# puts "\r\nBadge - build_from_parameters: data_import_session ID: #{data_import_session.id}, number: #{badge_code}, season: #{season.inspect}"
self.build( data_import_session ) do
entity Badge
set_up do
set_result( nil ) and return if badge_code.nil? || badge_code.size < 1
end
# Do the search only if the badge code is not the placeholder for an unknown
# badge number ('?'). In that case, we have to add a new badge anyway.
if badge_code != '?'
search do
primary [
"(season_id = ?) AND (number = ?)",
( season.instance_of?(Season) ? season.id : 0 ),
badge_code
]
secondary [
"(data_import_session_id = ?) AND " <<
"(#{season.instance_of?(Season) ? '' : 'data_import_'}season_id = ?) AND " <<
"(number = ?)",
data_import_session.id,
season.id,
badge_code
]
default_search
# DEBUG
# puts "primary search by CODE ok!" if primary_search_ok?
# puts "secondary search by CODE ok!" if secondary_search_ok?
end
else
# Search for an existing badge, but use swimmer, team and season:
primary [
"(season_id = ?) AND (swimmer_id = ?) AND (team_id = ?)",
( season.instance_of?(Season) ? season.id : 0 ),
( swimmer.instance_of?(Swimmer) ? swimmer.id : 0 ),
( team.instance_of?(Team) ? team.id : 0 )
]
secondary [
"(data_import_session_id = ?) AND " <<
"(#{season.instance_of?(Season) ? '' : 'data_import_'}season_id = ?) AND " <<
"(#{swimmer.instance_of?(Swimmer) ? '' : 'data_import_'}swimmer_id = ?) AND " <<
"(#{team.instance_of?(Team) ? '' : 'data_import_'}team_id = ?)",
data_import_session.id,
season.id,
swimmer.id,
team.id
]
default_search
# DEBUG
# puts "primary search by Season, Swimmer & Team ok!" if primary_search_ok?
# puts "secondary search by Season, Swimmer & Team ok!" if secondary_search_ok?
end
if_not_found do # Still not found?
search do
# Search for an existing badge, but use just season & swimmer instead:
primary [
"(season_id = ?) AND (swimmer_id = ?)",
( season.instance_of?(Season) ? season.id : 0 ),
( swimmer.instance_of?(Swimmer) ? swimmer.id : 0 )
]
secondary [
"(data_import_session_id = ?) AND " <<
"(#{season.instance_of?(Season) ? '' : 'data_import_'}season_id = ?) AND " <<
"(#{swimmer.instance_of?(Swimmer) ? '' : 'data_import_'}swimmer_id = ?)",
data_import_session.id,
season.id,
swimmer.id
]
default_search
# DEBUG
# puts "primary search by Season & Swimmer ok!" if primary_search_ok?
# puts "secondary search by Season & Swimmer ok!" if secondary_search_ok?
end
end
if_not_found do
# DEBUG
# puts "NOT found!"
# Search or add a TeamAffiliation:
ta_builder = DataImportTeamAffiliationBuilder.build_from_parameters(
data_import_session,
team,
season
)
ta = ta_builder.result_row
attributes_for_creation(
data_import_session_id: data_import_session.id,
import_text: badge_code,
number: badge_code,
category_type_id: category_type.id,
entry_time_type_id: entry_time_type.id,
team_affiliation_id: ta.instance_of?(TeamAffiliation) ? ta.id : nil,
swimmer_id: swimmer.instance_of?(Swimmer) ? swimmer.id : nil,
data_import_swimmer_id: swimmer.instance_of?(DataImportSwimmer) ? swimmer.id : nil,
team_id: team.instance_of?(Team) ? team.id : nil,
data_import_team_id: team.instance_of?(DataImportTeam) ? team.id : nil,
season_id: season.instance_of?(Season) ? season.id : nil,
data_import_season_id: season.instance_of?(DataImportSeason) ? season.id : nil,
user_id: 1 # (don't care)
)
add_new
end
end
end
#-- -------------------------------------------------------------------------
#++
end
| 38.216783 | 146 | 0.551876 |
01684cf3bd0141c2b376b95e2b974bc9f964dc48 | 1,401 | require 'spec_helper'
describe Opener::LanguageIdentifier do
before do
@input = 'Hello world, how are you doing today?'
end
context '#initialize' do
example 'store the options' do
instance = described_class.new(:kaf => false)
instance.options[:kaf].should == false
end
end
context 'probabilities' do
before do
@probs = described_class.new(:probs => true).run(@input)
end
example 'return the probabilities' do
@probs.empty?.should == false
end
example 'return the language of the first probability' do
@probs[0].lang.should == 'en'
end
example 'return the probability score' do
@probs[0].prob.should > 0
end
end
context 'text output' do
example 'return the code for an English text' do
described_class.new(:kaf => false).run(@input).should == 'en'
end
example 'return "unknown" when a language could not be detected' do
described_class.new(:kaf => false).run('123').should == 'unknown'
end
end
context 'KAF output' do
before do
@document = Nokogiri::XML(described_class.new.run(@input))
end
example 'set the language as an attribute' do
@document.xpath('//KAF/@xml:lang')[0].value.should == 'en'
end
example 'include the raw language in the document' do
@document.xpath('//KAF/raw')[0].text.should == @input
end
end
end
| 24.155172 | 71 | 0.645967 |
3967868a13a01dda7650cead45e0cc90519a8c8c | 3,224 | # frozen_string_literal: true
class Jira
JIRA_KEY_REGEX = /[A-Z0-9]+-\d+/.freeze
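  # e.g. it matches issue keys such as "OPS-42" or "PROJ2-1337" embedded in free text.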
include ActionView::Helpers::SanitizeHelper
attr_accessor :client, :jira_data
def initialize
options = {
username: ENV['JIRA_USERNAME'],
password: ENV['JIRA_PASSWORD'],
site: ENV['JIRA_URL'],
context_path: '',
auth_type: :basic
}
@client = JIRA::Client.new(options)
@jira_data = {}
end
def jiraize_ir(incident_report)
list = find_jira_key(incident_report.action_item)
return incident_report if list.blank?
generate_issue_list(list)
incident_report.action_item = jiraize(incident_report.action_item)
incident_report
end
def jiraize_cr(change_request)
list = Set.new
list.merge(find_jira_key(change_request.business_justification))
list.merge(find_jira_key(change_request.os))
list.merge(find_jira_key(change_request.db))
list.merge(find_jira_key(change_request.net))
list.merge(find_jira_key(change_request.other_dependency))
list.merge(find_jira_key(change_request.analysis))
list.merge(find_jira_key(change_request.impact))
list.merge(find_jira_key(change_request.solution))
list.delete('')
generate_issue_list(list.flatten)
change_request.business_justification = jiraize(change_request.business_justification)
change_request.os = jiraize(change_request.os)
change_request.db = jiraize(change_request.db)
change_request.net = jiraize(change_request.net)
change_request.other_dependency = jiraize(change_request.other_dependency)
change_request.analysis = jiraize(change_request.analysis)
change_request.impact = jiraize(change_request.impact)
change_request.solution = jiraize(change_request.solution)
change_request
end
def jiraize(text)
return text if text.blank?
text.gsub(JIRA_KEY_REGEX) { |key| get_issue(key) }
end
def get_issue(key)
return key unless (issue = @jira_data[key])
summary = sanitize(issue.fields['summary'], tags: [])
issue_type_icon = issue.fields['issuetype']['iconUrl']
status_category = issue.fields['status']['statusCategory']
color_name = status_category['colorName']
name = status_category['name']
url = URI.join(ENV['JIRA_URL'], '/browse/', key)
"<span class='jira-button'>" \
" <a href='#{url}' target='_blank' data-toggle='popover' title='Summary' data-content='#{summary}'>" \
" <img class='icon' src='#{issue_type_icon}'> #{key} </a>" \
" <span class='jira-#{color_name.downcase}'>#{name}</span>" \
'</span>'
end
def generate_issue_list(issue_string)
return if issue_string.empty?
list_issue = issue_string.map { |item| "issueKey=#{item}" }.join(' OR ')
get_jira_data(list_issue)
end
def get_jira_data(list_issue)
raw_issues = @client.Issue.jql(list_issue,
fields: %w[summary status issuetype], fields_by_key: true, validate_query: false)
raw_issues.each do |issue|
@jira_data[issue.key] = issue
end
rescue JIRA::HTTPError
@jira_data
end
def find_jira_key(text)
return [] if text.blank?
jira_keys = text.scan(JIRA_KEY_REGEX)
jira_keys.present? ? jira_keys : []
end
end
| 33.583333 | 116 | 0.704404 |
1ca698551e723fe316b3c657c7cbf84554722aca | 4,549 | class Plugins::Ecommerce::CartService
def initialize(site, cart)
@site = site
@cart = cart
end
attr_reader :site, :cart
def pay_with_authorize_net(options={})
payment_method = options[:payment_method] || site.payment_method('authorize_net')
billing_address = cart.get_meta("billing_address")
details = cart.get_meta("details")
amount = Plugins::Ecommerce::UtilService.ecommerce_money_to_cents(cart.total_amount)
payment_params = {
order_id: cart.slug,
currency: site.currency_code,
email: cart.user.email,
billing_address: {name: "#{cart.user.fullname}",
address1: billing_address[:address1],
address2: billing_address[:address2],
city: billing_address[:city],
state: billing_address[:state],
country: billing_address[:country],
zip: billing_address[:zip]
},
description: 'Buy Products',
ip: options[:ip]
}
if options[:ip]
payment_params[:ip] = options[:ip]
end
authorize_net_options = {
login: payment_method.options[:authorize_net_login_id],
password: payment_method.options[:authorize_net_transaction_key]
}
ActiveMerchant::Billing::Base.mode = payment_method.options[:authorize_net_sandbox].to_s.to_bool ? :test : :production
credit_card = ActiveMerchant::Billing::CreditCard.new(
first_name: options[:first_name],
last_name: options[:last_name],
number: options[:number],
month: options[:exp_month],
year: "20#{options[:exp_year]}",
verification_value: options[:cvc]
)
if credit_card.validate.empty?
gateway = ActiveMerchant::Billing::AuthorizeNetGateway.new(authorize_net_options)
response = gateway.purchase(amount, credit_card, payment_params)
if response.success?
cart.set_meta('pay_authorize_net', payment_params)
return {}
else
return {error: response.message}
end
else
return {error: credit_card.validate.map{|k, v| "#{k}: #{v.join(', ')}"}.join('<br>')}
end
end
def pay_with_paypal(options={})
billing_address = cart.get_meta("billing_address")
gateway = cart.paypal_gateway
amount_in_cents = Plugins::Ecommerce::UtilService.ecommerce_money_to_cents(cart.total_amount)
gateway_request = {
brand_name: site.name,
items: [{
number: cart.slug,
name: "Buy Products from #{site.the_title}: #{cart.products_title}",
amount: amount_in_cents,
}],
order_id: cart.slug,
currency: site.currency_code,
email: cart.user.email,
billing_address: {name: "#{billing_address[:first_name]} #{billing_address[:last_name]}",
address1: billing_address[:address1],
address2: billing_address[:address2],
city: billing_address[:city],
state: billing_address[:state],
country: billing_address[:country],
zip: billing_address[:zip]
},
description: "Buy Products from #{site.the_title}: #{cart.total_amount}",
ip: options[:ip],
return_url: options[:return_url],
cancel_return_url: options[:cancel_return_url]
}
if options[:ip]
gateway_request[:ip] = options[:ip]
end
response = gateway.setup_purchase(amount_in_cents, gateway_request)
# TODO handle errors
{redirect_url: gateway.redirect_url_for(response.token)}
end
def pay_with_stripe(options)
require 'stripe'
payment_method = options[:payment_method] || site.payment_method('stripe')
Stripe.api_key = payment_method.options[:stripe_id]
customer = Stripe::Customer.create(
email: options[:email], source: options[:stripe_token])
amount_in_cents = Plugins::Ecommerce::UtilService.ecommerce_money_to_cents(cart.total_amount)
begin
charge = Stripe::Charge.create(
customer: customer.id,
amount: amount_in_cents,
description: "Payment Products: #{cart.products_title}",
currency: site.currency_code,
)
payment_data = {
email: options[:email],
customer: customer.id,
charge: charge.id,
}
cart.set_meta("payment_data", payment_data)
{}
rescue Stripe::CardError => e
{error: e.message, payment_error: true}
rescue => e
{error: e.message}
end
end
end
| 35.539063 | 122 | 0.632447 |
e2f385415ede6b6a617c71170d52bfbec62e4690 | 4,415 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Kusto::Mgmt::V2019_01_21
module Models
#
# Class representing an event hub data connection.
#
class EventHubDataConnection < DataConnection
include MsRestAzure
def initialize
@kind = "EventHub"
end
attr_accessor :kind
# @return [String] The resource ID of the event hub to be used to create
# a data connection.
attr_accessor :event_hub_resource_id
# @return [String] The event hub consumer group.
attr_accessor :consumer_group
# @return [String] The table where the data should be ingested.
# Optionally the table information can be added to each message.
attr_accessor :table_name
# @return [String] The mapping rule to be used to ingest the data.
# Optionally the mapping information can be added to each message.
attr_accessor :mapping_rule_name
# @return [DataFormat] The data format of the message. Optionally the
# data format can be added to each message. Possible values include:
# 'MULTIJSON', 'JSON', 'CSV', 'TSV', 'SCSV', 'SOHSV', 'PSV', 'TXT',
# 'RAW', 'SINGLEJSON', 'AVRO'
attr_accessor :data_format
#
# Mapper for EventHubDataConnection class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'EventHub',
type: {
name: 'Composite',
class_name: 'EventHubDataConnection',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: false,
serialized_name: 'location',
type: {
name: 'String'
}
},
kind: {
client_side_validation: true,
required: true,
serialized_name: 'kind',
type: {
name: 'String'
}
},
event_hub_resource_id: {
client_side_validation: true,
required: true,
serialized_name: 'properties.eventHubResourceId',
type: {
name: 'String'
}
},
consumer_group: {
client_side_validation: true,
required: true,
serialized_name: 'properties.consumerGroup',
type: {
name: 'String'
}
},
table_name: {
client_side_validation: true,
required: false,
serialized_name: 'properties.tableName',
type: {
name: 'String'
}
},
mapping_rule_name: {
client_side_validation: true,
required: false,
serialized_name: 'properties.mappingRuleName',
type: {
name: 'String'
}
},
data_format: {
client_side_validation: true,
required: false,
serialized_name: 'properties.dataFormat',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 30.034014 | 78 | 0.47769 |