hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e94971b3194041eeed201049f6f0243f5c17ea52 | 1,946 | Given /^(provider "[^\"]*") has page partials$/ do |provider|
FactoryBot.create :page_partial, :account => provider
end
# Creates a CMS partial (from the step's docstring body) and publishes it
# immediately so it is live for the scenario.
Given /^the partial "([^\"]*)" of (provider "[^\"]*") is$/ do |name, provider, body|
  partial = FactoryBot.create(:cms_partial, system_name: name, provider: provider, draft: body)
  partial.publish!
end
# Drives the CMS UI to create a new page partial named "name".
When /^I create a bcms page partial$/ do
  visit cms_page_partials_path
  click_link "Add"
  fill_in "Name", with: "name"
  # The submit control is an input, so click it via its DOM id.
  find(:xpath, ".//input[@id='page_partial_submit']").click
end
# Opens the first page partial of the current account and renames it
# through the CMS edit form.
When /^I update a page partial$/ do
  visit cms_page_partials_path
  partial_id = current_account.page_partials.first.id
  find(:xpath, ".//tr[@id='page_partial_#{partial_id}']").click
  find(:xpath, ".//a[@id='edit_button']").click
  fill_in "Name", with: "new page partial"
  find(:xpath, ".//input[@id='page_partial_submit']").click
  # Brief pause for the UI to process the submit (kept from the original).
  sleep(0.5)
end
# Selects the first page partial of the current account and deletes it
# through the CMS UI.
When /^I delete a page partial$/ do
  visit cms_page_partials_path
  partial_id = current_account.page_partials.first.id
  find(:xpath, ".//tr[@id='page_partial_#{partial_id}']").click
  find(:xpath, ".//a[@id='delete_button']").click
  # Brief pause for the deletion to take effect (kept from the original).
  sleep 0.5
end
# Asserts that a table row exists for every page partial owned by the
# current account.
Then /^I should see my page partials$/ do
  current_account.page_partials.each do |page_partial|
    row_xpath = ".//tr[@id='page_partial_#{page_partial.id}']"
    assert has_xpath?(row_xpath)
  end
end
# Asserts that a table row exists for the account's first page partial.
Then /^I should see my page partial$/ do
  assert has_xpath?(".//tr[@id='page_partial_#{current_account.page_partials.first.id}']")
end
# Asserts the rename performed in "When I update a page partial" persisted.
Then /^I should see the page partial changed$/ do
  # assert_equal produces an expected/actual failure message, unlike the
  # original bare boolean assert which only reports "false".
  assert_equal "new page partial", current_account.page_partials.first.name
end
#TODO: dry these two steps to a helper assert method
# Asserts the page partials table shows no rows.
# NOTE(review): this iterates PageTemplate records while checking for
# page_partial_<id> rows — presumably it should iterate the page-partial
# model instead (cf. the TODO above about drying these steps); confirm.
Then /^I should see no page partials$/ do
PageTemplate.all.each do |partial|
assert has_no_xpath?(".//tr[@id='page_partial_#{partial.id}']")
end
end
# Asserts no page-partial rows remain after deletion.
# NOTE(review): iterates PageTemplate records while checking for
# page_partial_<id> rows — likely the same copy/paste as the step above;
# confirm the intended model.
Then /^I should see the page partial was deleted$/ do
# asserting an empty page partials table
PageTemplate.all.each do |partial|
assert has_no_xpath?(".//tr[@id='page_partial_#{partial.id}']")
end
end
| 29.044776 | 103 | 0.692703 |
ffccc4b7f8432105b7652743033b12abca7720e3 | 407 | # Be sure to restart your server when you modify this file.
# Store session data client-side in a signed cookie named "_Wordy_session".
Wordy::Application.config.session_store :cookie_store, key: '_Wordy_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rails generate session_migration")
# Wordy::Application.config.session_store :active_record_store
| 45.222222 | 76 | 0.808354 |
01d7a7719b4bde768ca292ea3c3c22c819d257d3 | 108 | class AddColumn < ActiveRecord::Migration
def change
add_column :posts, :overtime, :integer
end
end
| 18 | 42 | 0.740741 |
26c84b5e5d0c91b7ea8510d9f30d0029760da66d | 234 | class Tagging < ApplicationRecord
belongs_to :tag, counter_cache: true
belongs_to :task
# Filter tasks by tag name
def self.task_ids_by_tag(tag_name)
joins(:tag).where(tags: { name: tag_name }).select(:task_id)
end
end
| 23.4 | 64 | 0.730769 |
1d5b44e7a23f428f800c151513a7b3f9afbcd69e | 57,906 | # Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
describe Google::Cloud::Storage::File, :mock_storage do
let(:bucket_gapi) { Google::Apis::StorageV1::Bucket.from_json random_bucket_hash("bucket").to_json }
let(:bucket) { Google::Cloud::Storage::Bucket.from_gapi bucket_gapi, storage.service }
let(:bucket_user_project) { Google::Cloud::Storage::Bucket.from_gapi bucket_gapi, storage.service, user_project: true }
let(:custom_time) { DateTime.new 2020, 2, 3, 4, 5, 6 }
let(:file_hash) { random_file_hash bucket.name, "file.ext", custom_time: custom_time }
let(:file_gapi) { Google::Apis::StorageV1::Object.from_json file_hash.to_json }
let(:file) { Google::Cloud::Storage::File.from_gapi file_gapi, storage.service }
let(:file_user_project) { Google::Cloud::Storage::File.from_gapi file_gapi, storage.service, user_project: true }
let(:generation) { 1234567890 }
let(:generations) { [1234567894, 1234567893, 1234567892, 1234567891] }
let(:file_gapis) do
generations.map { |g| Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket.name, file.name, g).to_json) }
end
let(:metageneration) { 6 }
let(:encryption_key) { "y\x03\"\x0E\xB6\xD3\x9B\x0E\xAB*\x19\xFAv\xDEY\xBEI\xF8ftA|[z\x1A\xFBE\xDE\x97&\xBC\xC7" }
let(:encryption_key_sha256) { "5\x04_\xDF\x1D\x8A_d\xFEK\e6p[XZz\x13s]E\xF6\xBB\x10aQH\xF6o\x14f\xF9" }
let(:key_headers) do {
"x-goog-encryption-algorithm" => "AES256",
"x-goog-encryption-key" => Base64.strict_encode64(encryption_key),
"x-goog-encryption-key-sha256" => Base64.strict_encode64(encryption_key_sha256)
}
end
let(:copy_key_headers) do {
"x-goog-copy-source-encryption-algorithm" => "AES256",
"x-goog-copy-source-encryption-key" => Base64.strict_encode64(encryption_key),
"x-goog-copy-source-encryption-key-sha256" => Base64.strict_encode64(encryption_key_sha256),
"x-goog-encryption-algorithm" => "AES256",
"x-goog-encryption-key" => Base64.strict_encode64(encryption_key),
"x-goog-encryption-key-sha256" => Base64.strict_encode64(encryption_key_sha256)
}
end
let(:key_options) { { header: key_headers } }
let(:copy_key_options) { { header: copy_key_headers } }
let(:source_encryption_key) { "T\x80\xC2}\x91R\xD2\x05\fTo\xD4\xB3+\xAE\xBCbd\xD1\x81|\xCD\x06%\xC8|\xA2\x17\xF6\xB4^\xD0" }
let(:source_encryption_key_sha256) { "\x03(M#\x1D(BF\x12$T\xD4\xDCP\xE6\x98\a\xEB'\x8A\xB9\x89\xEEM)\x94\xFD\xE3VR*\x86" }
let(:source_key_headers) do {
"x-goog-copy-source-encryption-algorithm" => "AES256",
"x-goog-copy-source-encryption-key" => Base64.strict_encode64(source_encryption_key),
"x-goog-copy-source-encryption-key-sha256" => Base64.strict_encode64(source_encryption_key_sha256)
}
end
let(:kms_key) { "path/to/encryption_key_name" }
it "knows its attributes" do
_(file.id).must_equal file_hash["id"]
_(file.name).must_equal file_hash["name"]
_(file.created_at).must_be_within_delta file_hash["timeCreated"].to_datetime
_(file.api_url).must_equal file_hash["selfLink"]
_(file.media_url).must_equal file_hash["mediaLink"]
_(file.public_url).must_equal "https://storage.googleapis.com/#{file.bucket}/#{file.name}"
_(file.public_url(protocol: :http)).must_equal "http://storage.googleapis.com/#{file.bucket}/#{file.name}"
_(file.url).must_equal file.public_url
_(file.md5).must_equal file_hash["md5Hash"]
_(file.crc32c).must_equal file_hash["crc32c"]
_(file.etag).must_equal file_hash["etag"]
_(file.cache_control).must_equal "public, max-age=3600"
_(file.content_disposition).must_equal "attachment; filename=filename.ext"
_(file.content_encoding).must_equal "gzip"
_(file.content_language).must_equal "en"
_(file.content_type).must_equal "text/plain"
_(file.custom_time).must_equal custom_time
_(file.metadata).must_be_kind_of Hash
_(file.metadata.size).must_equal 2
_(file.metadata.frozen?).must_equal true
_(file.metadata["player"]).must_equal "Alice"
_(file.metadata["score"]).must_equal "101"
_(file.temporary_hold?).must_equal true
_(file.event_based_hold?).must_equal true
_(file.retention_expires_at).must_be_within_delta Time.now.to_datetime
end
it "can delete itself" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name)
file.service.mocked_service = mock
file.delete
mock.verify
end
it "can delete itself with generation set to true" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name, generation: generation)
file.service.mocked_service = mock
_(file.generation).must_equal generation
file.delete generation: true
mock.verify
end
it "can delete itself with generation set to a generation" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name, generation: generation)
file.service.mocked_service = mock
file.delete generation: generation
mock.verify
end
it "can delete itself with if_generation_match set to a generation" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name, if_generation_match: generation)
file.service.mocked_service = mock
file.delete if_generation_match: generation
mock.verify
end
it "can delete itself with if_generation_not_match set to a generation" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name, if_generation_not_match: generation)
file.service.mocked_service = mock
file.delete if_generation_not_match: generation
mock.verify
end
it "can delete itself with if_metageneration_match set to a metageneration" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name, if_metageneration_match: metageneration)
file.service.mocked_service = mock
file.delete if_metageneration_match: metageneration
mock.verify
end
it "can delete itself with if_metageneration_not_match set to a metageneration" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file.name, if_metageneration_not_match: metageneration)
file.service.mocked_service = mock
file.delete if_metageneration_not_match: metageneration
mock.verify
end
it "can delete itself with user_project set to true" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file_user_project.name, user_project: "test")
file_user_project.service.mocked_service = mock
file_user_project.delete
mock.verify
end
it "can delete itself with generation set to true and user_project set to true" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file_user_project.name, generation: generation, user_project: "test")
file_user_project.service.mocked_service = mock
file_user_project.delete generation: true
mock.verify
end
it "can delete itself with generation set to a generation and user_project set to true" do
mock = Minitest::Mock.new
mock.expect :delete_object, nil, delete_object_args(bucket.name, file_user_project.name, generation: generation, user_project: "test")
file_user_project.service.mocked_service = mock
file_user_project.delete generation: generation
mock.verify
end
it "can download itself to a file" do
# Stub the md5 to match.
def file.md5
"X7A8HRvZUCT5gbq0KNDL8Q=="
end
Tempfile.open "google-cloud" do |tmpfile|
# write to the file since the mocked call won't
data = "yay!"
tmpfile.write data
tmpfile.rewind
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile
_(downloaded).must_be_kind_of Tempfile
_(tmpfile.read).must_equal data
mock.verify
end
end
it "can download and decompress itself to a file when Content-Encoding gzip response header" do
data = "Hello world!"
gzipped_data = gzip_data data
# Stub the md5 to match.
file.gapi.md5_hash = Digest::MD5.base64digest gzipped_data
Tempfile.open "google-cloud" do |tmpfile|
# write to the file since the mocked call won't
tmpfile.write gzipped_data
tmpfile.rewind
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp(gzip: true)],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile
_(downloaded).must_be_kind_of File
_(tmpfile.read).must_equal data
mock.verify
end
end
it "can download itself to a file when Content-Encoding gzip response header with skip_decompress" do
data = "Hello world!"
gzipped_data = gzip_data data
# Stub the md5 to match.
file.gapi.md5_hash = Digest::MD5.base64digest gzipped_data
Tempfile.open "google-cloud" do |tmpfile|
# write to the file since the mocked call won't
tmpfile.write gzipped_data
tmpfile.rewind
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp(gzip: true)],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile, skip_decompress: true
_(downloaded).must_be_kind_of Tempfile
_(tmpfile.read).must_equal gzipped_data
mock.verify
end
end
it "can download itself to a file by path" do
# Stub the md5 to match.
def file.md5
"1B2M2Y8AsgTpgAmY7PhCfg=="
end
Tempfile.open "google-cloud" do |tmpfile|
# write to the file since the mocked call won't
tmpfile.write "yay!"
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile.path, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile.path
_(downloaded).must_be_kind_of Tempfile
mock.verify
end
end
it "can download itself to a file with user_project set to true" do
# Stub the md5 to match.
def file_user_project.md5
"1B2M2Y8AsgTpgAmY7PhCfg=="
end
Tempfile.open "google-cloud" do |tmpfile|
# write to the file since the mocked call won't
tmpfile.write "yay!"
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file_user_project.name, download_dest: tmpfile, generation: generation, user_project: "test", options: {}]
bucket.service.mocked_service = mock
downloaded = file_user_project.download tmpfile
_(downloaded).must_be_kind_of Tempfile
mock.verify
end
end
it "can download itself to an IO" do
# Stub the md5 to match.
def file.md5
"X7A8HRvZUCT5gbq0KNDL8Q=="
end
data = "yay!"
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [StringIO.new(data), download_http_resp],
[bucket.name, file.name, Hash] # Can't match StringIO in mock...
bucket.service.mocked_service = mock
downloaded = file.download
_(downloaded).must_be_kind_of StringIO
_(downloaded.read).must_equal data
mock.verify
end
it "can download and decompress itself to an IO when Content-Encoding gzip response header" do
data = "Hello world!"
gzipped_data = gzip_data data
# Stub the md5 to match.
file.gapi.md5_hash = Digest::MD5.base64digest gzipped_data
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [StringIO.new(gzipped_data), download_http_resp(gzip: true)],
[bucket.name, file.name, Hash] # Can't match StringIO in mock...
bucket.service.mocked_service = mock
downloaded = file.download
_(downloaded).must_be_kind_of StringIO
_(downloaded.read).must_equal data
mock.verify
end
it "can download itself to an IO when Content-Encoding gzip response header with skip_decompress" do
data = "Hello world!"
gzipped_data = gzip_data data
# Stub the md5 to match.
file.gapi.md5_hash = Digest::MD5.base64digest gzipped_data
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [StringIO.new(gzipped_data), download_http_resp(gzip: true)],
[bucket.name, file.name, Hash] # Can't match StringIO in mock...
bucket.service.mocked_service = mock
downloaded = file.download skip_decompress: true
_(downloaded).must_be_kind_of StringIO
_(downloaded.read).must_equal gzipped_data
mock.verify
end
it "can download itself by specifying an IO" do
# Stub the md5 to match.
def file.md5
"X7A8HRvZUCT5gbq0KNDL8Q=="
end
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [StringIO.new("yay!"), download_http_resp],
[bucket.name, file.name, Hash] # Can't match StringIO in mock...
bucket.service.mocked_service = mock
downloadio = StringIO.new
downloaded = file.download downloadio
_(downloaded).must_be_kind_of StringIO
_(downloadio).must_equal downloadio # should be the same object
mock.verify
end
it "can download itself with customer-supplied encryption key" do
# Stub the md5 to match.
def file.md5
"1B2M2Y8AsgTpgAmY7PhCfg=="
end
Tempfile.open "google-cloud" do |tmpfile|
# write to the file since the mocked call won't
tmpfile.write "yay!"
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [nil, download_http_resp], # using encryption keys seems to return nil
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: key_options]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile, encryption_key: encryption_key
_(downloaded.path).must_equal tmpfile.path
mock.verify
end
end
it "can partially download itself with a range" do
Tempfile.open "google-cloud" do |tmpfile|
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: { header: { 'Range' => 'bytes=3-6' }}]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile, range: 3..6
_(downloaded.path).must_equal tmpfile.path
mock.verify
end
end
it "can partially download itself with a string" do
Tempfile.open "google-cloud" do |tmpfile|
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: { header: { 'Range' => 'bytes=-6' }}]
bucket.service.mocked_service = mock
downloaded = file.download tmpfile, range: 'bytes=-6'
_(downloaded.path).must_equal tmpfile.path
mock.verify
end
end
describe "verified downloads" do
# Fixes "m5d" -> "md5" typo in the test description.
it "verifies md5 by default" do
  # Stub these values
  def file.md5; "md5="; end
  def file.crc32c; "crc32c="; end
  Tempfile.open "google-cloud" do |tmpfile|
    mock = Minitest::Mock.new
    mock.expect :get_object_with_response, [tmpfile, download_http_resp],
      [bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
    bucket.service.mocked_service = mock
    mocked_md5 = Minitest::Mock.new
    mocked_md5.expect :md5_mock, file.md5
    stubbed_md5 = lambda { |_| mocked_md5.md5_mock }
    # crc32c must not be consulted when md5 is the (default) verify strategy
    stubbed_crc32c = lambda { |_| fail "Should not be called!" }
    Google::Cloud::Storage::File::Verifier.stub :md5_for, stubbed_md5 do
      Google::Cloud::Storage::File::Verifier.stub :crc32c_for, stubbed_crc32c do
        file.download tmpfile
      end
    end
    mocked_md5.verify
    mock.verify
  end
end
# Fixes "m5d" -> "md5" typo in the test description.
it "verifies md5 when specified" do
  # Stub these values
  def file.md5; "md5="; end
  def file.crc32c; "crc32c="; end
  Tempfile.open "google-cloud" do |tmpfile|
    mock = Minitest::Mock.new
    mock.expect :get_object_with_response, [tmpfile, download_http_resp],
      [bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
    bucket.service.mocked_service = mock
    mocked_md5 = Minitest::Mock.new
    mocked_md5.expect :md5_mock, file.md5
    stubbed_md5 = lambda { |_| mocked_md5.md5_mock }
    # crc32c must not be consulted when verify: :md5 is requested explicitly
    stubbed_crc32c = lambda { |_| fail "Should not be called!" }
    Google::Cloud::Storage::File::Verifier.stub :md5_for, stubbed_md5 do
      Google::Cloud::Storage::File::Verifier.stub :crc32c_for, stubbed_crc32c do
        file.download tmpfile, verify: :md5
      end
    end
    mocked_md5.verify
    mock.verify
  end
end
it "verifies crc32c when specified" do
# Stub these values
def file.md5; "md5="; end
def file.crc32c; "crc32c="; end
Tempfile.open "google-cloud" do |tmpfile|
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
stubbed_md5 = lambda { |_| fail "Should not be called!" }
mocked_crc32c = Minitest::Mock.new
mocked_crc32c.expect :crc32c_mock, file.crc32c
stubbed_crc32c = lambda { |_| mocked_crc32c.crc32c_mock }
Google::Cloud::Storage::File::Verifier.stub :md5_for, stubbed_md5 do
Google::Cloud::Storage::File::Verifier.stub :crc32c_for, stubbed_crc32c do
file.download tmpfile, verify: :crc32c
end
end
mocked_crc32c.verify
mock.verify
end
end
it "verifies crc32c downloading to an IO when specified" do
data = "yay!"
file.gapi.crc32c = Digest::CRC32c.base64digest data
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [StringIO.new(data), download_http_resp],
[bucket.name, file.name, Hash] # Can't match StringIO in mock...
bucket.service.mocked_service = mock
downloaded = file.download verify: :crc32c
_(downloaded).must_be_kind_of StringIO
_(downloaded.read).must_equal data
mock.verify
end
# Fixes "m5d" -> "md5" typo in the test description.
it "verifies md5 and crc32c when specified" do
  # Stub these values
  def file.md5; "md5="; end
  def file.crc32c; "crc32c="; end
  Tempfile.open "google-cloud" do |tmpfile|
    mock = Minitest::Mock.new
    mock.expect :get_object_with_response, [tmpfile, download_http_resp],
      [bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
    bucket.service.mocked_service = mock
    mocked_md5 = Minitest::Mock.new
    mocked_md5.expect :md5_mock, file.md5
    stubbed_md5 = lambda { |_| mocked_md5.md5_mock }
    mocked_crc32c = Minitest::Mock.new
    mocked_crc32c.expect :crc32c_mock, file.crc32c
    stubbed_crc32c = lambda { |_| mocked_crc32c.crc32c_mock }
    # verify: :all runs both digest checks
    Google::Cloud::Storage::File::Verifier.stub :md5_for, stubbed_md5 do
      Google::Cloud::Storage::File::Verifier.stub :crc32c_for, stubbed_crc32c do
        file.download tmpfile, verify: :all
      end
    end
    mocked_md5.verify
    mocked_crc32c.verify
    mock.verify
  end
end
it "doesn't verify at all when specified" do
# Stub these values
def file.md5; "md5="; end
def file.crc32c; "crc32c="; end
Tempfile.open "google-cloud" do |tmpfile|
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
stubbed_md5 = lambda { |_| fail "Should not be called!" }
stubbed_crc32c = lambda { |_| fail "Should not be called!" }
Google::Cloud::Storage::File::Verifier.stub :md5_for, stubbed_md5 do
Google::Cloud::Storage::File::Verifier.stub :crc32c_for, stubbed_crc32c do
file.download tmpfile, verify: :none
end
end
mock.verify
end
end
it "raises when verification fails" do
# Stub these values
def file.md5; "md5="; end
def file.crc32c; "crc32c="; end
Tempfile.open "google-cloud" do |tmpfile|
mock = Minitest::Mock.new
mock.expect :get_object_with_response, [tmpfile, download_http_resp],
[bucket.name, file.name, download_dest: tmpfile.path, generation: generation, user_project: nil, options: {}]
bucket.service.mocked_service = mock
mocked_md5 = Minitest::Mock.new
mocked_md5.expect :md5_mock, "NOPE="
stubbed_md5 = lambda { |_| mocked_md5.md5_mock }
stubbed_crc32c = lambda { |_| fail "Should not be called!" }
Google::Cloud::Storage::File::Verifier.stub :md5_for, stubbed_md5 do
Google::Cloud::Storage::File::Verifier.stub :crc32c_for, stubbed_crc32c do
assert_raises Google::Cloud::Storage::FileVerificationError do
file.download tmpfile.path
end
end
end
mocked_md5.verify
mock.verify
end
end
end
describe "File#copy" do
it "can copy itself in the same bucket" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext")
file.service.mocked_service = mock
file.copy "new-file.ext"
mock.verify
end
it "can copy itself in the same bucket with generation" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", source_generation: generation)
file.service.mocked_service = mock
file.copy "new-file.ext", generation: generation
mock.verify
end
it "can copy itself in the same bucket with predefined ACL" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", destination_predefined_acl: "private")
file.service.mocked_service = mock
file.copy "new-file.ext", acl: "private"
mock.verify
end
it "can copy itself in the same bucket with ACL alias" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", destination_predefined_acl: "publicRead")
file.service.mocked_service = mock
file.copy "new-file.ext", acl: :public
mock.verify
end
it "can copy itself with customer-supplied encryption key" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: nil, user_project: nil, options: copy_key_options)
file.service.mocked_service = mock
file.copy "new-file.ext", encryption_key: encryption_key
mock.verify
end
it "can copy itself with user_project set to true" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: nil, user_project: "test")
file_user_project.service.mocked_service = mock
copied = file_user_project.copy "new-file.ext"
_(copied.user_project).must_equal true
mock.verify
end
it "can copy itself to a different bucket" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext")
file.service.mocked_service = mock
file.copy "new-bucket", "new-file.ext"
mock.verify
end
it "can copy itself to a different bucket with generation" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", source_generation: generation)
file.service.mocked_service = mock
file.copy "new-bucket", "new-file.ext", generation: generation
mock.verify
end
it "can copy itself to a different bucket with predefined ACL" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", destination_predefined_acl: "private")
file.service.mocked_service = mock
file.copy "new-bucket", "new-file.ext", acl: "private"
mock.verify
end
it "can copy itself to a different bucket with ACL alias" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", destination_predefined_acl: "publicRead")
file.service.mocked_service = mock
file.copy "new-bucket", "new-file.ext", acl: :public
mock.verify
end
it "can copy itself to a different bucket with customer-supplied encryption key" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", rewrite_token: nil, user_project: nil, options: copy_key_options)
file.service.mocked_service = mock
file.copy "new-bucket", "new-file.ext", encryption_key: encryption_key
mock.verify
end
it "can copy itself calling rewrite multiple times" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, undone_rewrite("notyetcomplete"),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext")
mock.expect :rewrite_object, undone_rewrite("keeptrying"),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: "notyetcomplete")
mock.expect :rewrite_object, undone_rewrite("almostthere"),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: "keeptrying")
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: "almostthere")
file.service.mocked_service = mock
# mock out sleep to make the test run faster
def file.sleep *args
end
file.copy "new-file.ext"
mock.verify
end
it "can copy itself calling rewrite multiple times with user_project set to true" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, undone_rewrite("notyetcomplete"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: nil, user_project: "test")
mock.expect :rewrite_object, undone_rewrite("keeptrying"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: "notyetcomplete", user_project: "test")
mock.expect :rewrite_object, undone_rewrite("almostthere"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: "keeptrying", user_project: "test")
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: "almostthere", user_project: "test")
file_user_project.service.mocked_service = mock
# mock out sleep to make the test run faster
def file_user_project.sleep *args
end
copied = file_user_project.copy "new-file.ext"
_(copied.user_project).must_equal true
mock.verify
end
it "can copy itself while updating its attributes" do
mock = Minitest::Mock.new
update_file_gapi = Google::Apis::StorageV1::Object.new
update_file_gapi.cache_control = "private, max-age=0, no-cache"
update_file_gapi.content_disposition = "inline; filename=filename.ext"
update_file_gapi.content_encoding = "deflate"
update_file_gapi.content_language = "de"
update_file_gapi.content_type = "application/json"
update_file_gapi.metadata = { "player" => "Bob", "score" => "10" }
update_file_gapi.storage_class = "NEARLINE"
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", update_file_gapi)
file.service.mocked_service = mock
file.copy "new-file.ext" do |f|
f.cache_control = "private, max-age=0, no-cache"
f.content_disposition = "inline; filename=filename.ext"
f.content_encoding = "deflate"
f.content_language = "de"
f.content_type = "application/json"
f.metadata["player"] = "Bob"
f.metadata["score"] = "10"
f.storage_class = :nearline
end
mock.verify
end
it "can copy itself while updating its attributes with force_copy_metadata set to true" do
mock = Minitest::Mock.new
update_file_gapi = Google::Apis::StorageV1::Object.new
update_file_gapi.cache_control = "private, max-age=0, no-cache"
update_file_gapi.content_disposition = "inline; filename=filename.ext"
update_file_gapi.content_encoding = "deflate"
update_file_gapi.content_language = "de"
update_file_gapi.content_type = "application/json"
update_file_gapi.metadata = { "player" => "Bob", "score" => "10" }
update_file_gapi.storage_class = "NEARLINE"
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", update_file_gapi)
file.service.mocked_service = mock
file.copy "new-file.ext", force_copy_metadata: true do |f|
f.cache_control = "private, max-age=0, no-cache"
f.content_disposition = "inline; filename=filename.ext"
f.content_encoding = "deflate"
f.content_language = "de"
f.content_type = "application/json"
f.metadata["player"] = "Bob"
f.metadata["score"] = "10"
f.storage_class = :nearline
end
mock.verify
end
it "can copy itself while updating its attributes with user_project set to true" do
mock = Minitest::Mock.new
update_file_gapi = Google::Apis::StorageV1::Object.new
update_file_gapi.cache_control = "private, max-age=0, no-cache"
update_file_gapi.content_disposition = "inline; filename=filename.ext"
update_file_gapi.content_encoding = "deflate"
update_file_gapi.content_language = "de"
update_file_gapi.content_type = "application/json"
update_file_gapi.metadata = { "player" => "Bob", "score" => "10" }
update_file_gapi.storage_class = "NEARLINE"
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", update_file_gapi, rewrite_token: nil, user_project: "test")
file_user_project.service.mocked_service = mock
copied = file_user_project.copy "new-file.ext" do |f|
f.cache_control = "private, max-age=0, no-cache"
f.content_disposition = "inline; filename=filename.ext"
f.content_encoding = "deflate"
f.content_language = "de"
f.content_type = "application/json"
f.metadata["player"] = "Bob"
f.metadata["score"] = "10"
f.storage_class = :nearline
end
_(copied.user_project).must_equal true
mock.verify
end
end
describe "File#rewrite" do
it "can rewrite itself in the same bucket" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext")
file.service.mocked_service = mock
file.rewrite "new-file.ext"
mock.verify
end
it "can rewrite itself in the same bucket with generation" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", source_generation: generation)
file.service.mocked_service = mock
file.rewrite "new-file.ext", generation: generation
mock.verify
end
it "can rewrite itself with if_generation_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_generation_match: generation)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_generation_match: generation
mock.verify
end
it "can rewrite itself with if_generation_not_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_generation_not_match: generation)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_generation_not_match: generation
mock.verify
end
it "can rewrite itself with if_metageneration_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_metageneration_match: metageneration)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_metageneration_match: metageneration
mock.verify
end
it "can rewrite itself with if_metageneration_not_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_metageneration_not_match: metageneration)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_metageneration_not_match: metageneration
mock.verify
end
it "can rewrite itself with if_source_generation_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_source_generation_match: generation)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_source_generation_match: generation
mock.verify
end
it "can rewrite itself with if_source_generation_not_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_source_generation_not_match: generation)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_source_generation_not_match: generation
mock.verify
end
it "can rewrite itself with if_source_metageneration_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_source_metageneration_match: metageneration)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_source_metageneration_match: metageneration
mock.verify
end
it "can rewrite itself with if_source_metageneration_not_match" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", if_source_metageneration_not_match: metageneration)
file.service.mocked_service = mock
file.rewrite "new-file.ext", if_source_metageneration_not_match: metageneration
mock.verify
end
it "can rewrite itself in the same bucket with predefined ACL" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", destination_predefined_acl: "private")
file.service.mocked_service = mock
file.rewrite "new-file.ext", acl: "private"
mock.verify
end
it "can rewrite itself in the same bucket with ACL alias" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", destination_predefined_acl: "publicRead")
file.service.mocked_service = mock
file.rewrite "new-file.ext", acl: :public
mock.verify
end
it "can rewrite itself to a new customer-supplied encryption key (CSEK)" do
options = { header: source_key_headers.merge(key_headers) }
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: nil, user_project: nil, options: options)
file.service.mocked_service = mock
file.rewrite "new-file.ext", encryption_key: source_encryption_key, new_encryption_key: encryption_key
mock.verify
end
it "can rewrite itself from default service encryption to a new customer-managed encryption key (CMEK) with new_kms_key" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", destination_kms_key_name: kms_key)
file.service.mocked_service = mock
file.rewrite "new-file.ext", new_kms_key: kms_key
mock.verify
end
it "can rewrite itself from a customer-supplied encryption key (CSEK) to a new customer-managed encryption key (CMEK) with new_kms_key" do
options = { header: source_key_headers }
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", destination_kms_key_name: kms_key, rewrite_token: nil, user_project: nil, options: options)
file.service.mocked_service = mock
file.rewrite "new-file.ext", encryption_key: source_encryption_key, new_kms_key: kms_key
mock.verify
end
it "can rewrite itself with user_project set to true" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: nil, user_project: "test")
file_user_project.service.mocked_service = mock
copied = file_user_project.copy "new-file.ext"
_(copied.user_project).must_equal true
mock.verify
end
it "can rewrite itself to a different bucket" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext")
file.service.mocked_service = mock
file.rewrite "new-bucket", "new-file.ext"
mock.verify
end
it "can rewrite itself to a different bucket with generation" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", source_generation: generation)
file.service.mocked_service = mock
file.rewrite "new-bucket", "new-file.ext", generation: generation
mock.verify
end
it "can rewrite itself to a different bucket with predefined ACL" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", destination_predefined_acl: "private")
file.service.mocked_service = mock
file.rewrite "new-bucket", "new-file.ext", acl: "private"
mock.verify
end
it "can rewrite itself to a different bucket with ACL alias" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", destination_predefined_acl: "publicRead")
file.service.mocked_service = mock
file.rewrite "new-bucket", "new-file.ext", acl: :public
mock.verify
end
it "can rewrite itself to a different bucket with customer-supplied encryption key" do
options = { header: source_key_headers.merge(key_headers) }
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, "new-bucket", "new-file.ext", rewrite_token: nil, user_project: nil, options: options)
file.service.mocked_service = mock
file.rewrite "new-bucket", "new-file.ext", encryption_key: source_encryption_key, new_encryption_key: encryption_key
mock.verify
end
it "can rewrite itself calling rewrite multiple times" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, undone_rewrite("notyetcomplete"),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext")
mock.expect :rewrite_object, undone_rewrite("keeptrying"),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: "notyetcomplete")
mock.expect :rewrite_object, undone_rewrite("almostthere"),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: "keeptrying")
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", rewrite_token: "almostthere")
file.service.mocked_service = mock
# mock out sleep to make the test run faster
def file.sleep *args
end
file.rewrite "new-file.ext"
mock.verify
end
it "can rewrite itself calling rewrite multiple times with user_project set to true" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, undone_rewrite("notyetcomplete"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: nil, user_project: "test")
mock.expect :rewrite_object, undone_rewrite("keeptrying"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: "notyetcomplete", user_project: "test")
mock.expect :rewrite_object, undone_rewrite("almostthere"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: "keeptrying", user_project: "test")
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", rewrite_token: "almostthere", user_project: "test")
file_user_project.service.mocked_service = mock
# mock out sleep to make the test run faster
def file_user_project.sleep *args
end
copied = file_user_project.copy "new-file.ext"
_(copied.user_project).must_equal true
mock.verify
end
it "can rewrite itself while updating its attributes" do
mock = Minitest::Mock.new
update_file_gapi = Google::Apis::StorageV1::Object.new
update_file_gapi.cache_control = "private, max-age=0, no-cache"
update_file_gapi.content_disposition = "inline; filename=filename.ext"
update_file_gapi.content_encoding = "deflate"
update_file_gapi.content_language = "de"
update_file_gapi.content_type = "application/json"
update_file_gapi.metadata = { "player" => "Bob", "score" => "10" }
update_file_gapi.storage_class = "NEARLINE"
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", update_file_gapi)
file.service.mocked_service = mock
file.rewrite "new-file.ext" do |f|
f.cache_control = "private, max-age=0, no-cache"
f.content_disposition = "inline; filename=filename.ext"
f.content_encoding = "deflate"
f.content_language = "de"
f.content_type = "application/json"
f.metadata["player"] = "Bob"
f.metadata["score"] = "10"
f.storage_class = :nearline
end
mock.verify
end
it "can rewrite itself while updating its attributes with force_copy_metadata set to true" do
mock = Minitest::Mock.new
update_file_gapi = Google::Apis::StorageV1::Object.new
update_file_gapi.cache_control = "private, max-age=0, no-cache"
update_file_gapi.content_disposition = "inline; filename=filename.ext"
update_file_gapi.content_encoding = "deflate"
update_file_gapi.content_language = "de"
update_file_gapi.content_type = "application/json"
update_file_gapi.metadata = { "player" => "Bob", "score" => "10" }
update_file_gapi.storage_class = "NEARLINE"
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, "new-file.ext", update_file_gapi)
file.service.mocked_service = mock
file.rewrite "new-file.ext", force_copy_metadata: true do |f|
f.cache_control = "private, max-age=0, no-cache"
f.content_disposition = "inline; filename=filename.ext"
f.content_encoding = "deflate"
f.content_language = "de"
f.content_type = "application/json"
f.metadata["player"] = "Bob"
f.metadata["score"] = "10"
f.storage_class = :nearline
end
mock.verify
end
it "can rewrite itself while updating its attributes with user_project set to true" do
mock = Minitest::Mock.new
update_file_gapi = Google::Apis::StorageV1::Object.new
update_file_gapi.cache_control = "private, max-age=0, no-cache"
update_file_gapi.content_disposition = "inline; filename=filename.ext"
update_file_gapi.content_encoding = "deflate"
update_file_gapi.content_language = "de"
update_file_gapi.content_type = "application/json"
update_file_gapi.metadata = { "player" => "Bob", "score" => "10" }
update_file_gapi.storage_class = "NEARLINE"
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, "new-file.ext", update_file_gapi, rewrite_token: nil, user_project: "test")
file_user_project.service.mocked_service = mock
copied = file_user_project.rewrite "new-file.ext" do |f|
f.cache_control = "private, max-age=0, no-cache"
f.content_disposition = "inline; filename=filename.ext"
f.content_encoding = "deflate"
f.content_language = "de"
f.content_type = "application/json"
f.metadata["player"] = "Bob"
f.metadata["score"] = "10"
f.storage_class = :nearline
end
_(copied.user_project).must_equal true
mock.verify
end
end
describe "File#rotate" do
it "can rotate its customer-supplied encryption keys" do
mock = Minitest::Mock.new
options = { header: source_key_headers.merge(key_headers) }
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, file.name, options: options)
file.service.mocked_service = mock
updated = file.rotate encryption_key: source_encryption_key, new_encryption_key: encryption_key
_(updated.name).must_equal file.name
mock.verify
end
it "can rotate its customer-supplied encryption keys with user_project set to true" do
mock = Minitest::Mock.new
options = { header: source_key_headers.merge(key_headers) }
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, file_user_project.name, user_project: "test", options: options)
file_user_project.service.mocked_service = mock
updated = file_user_project.rotate encryption_key: source_encryption_key, new_encryption_key: encryption_key
_(updated.name).must_equal file_user_project.name
_(updated.user_project).must_equal true
mock.verify
end
it "can rotate to a customer-supplied encryption key if previously unencrypted with customer key" do
mock = Minitest::Mock.new
options = { header: key_headers }
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, file.name, options: options)
file.service.mocked_service = mock
updated = file.rotate new_encryption_key: encryption_key
_(updated.name).must_equal file.name
mock.verify
end
it "can rotate from a customer-supplied encryption key to default service encryption" do
mock = Minitest::Mock.new
options = { header: source_key_headers }
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, file.name, options: options)
file.service.mocked_service = mock
updated = file.rotate encryption_key: source_encryption_key
_(updated.name).must_equal file.name
mock.verify
end
it "can rotate from default service encryption to a new customer-managed encryption key (CMEK) with new_kms_key" do
mock = Minitest::Mock.new
mock.expect :rewrite_object, done_rewrite(file_gapi), rewrite_object_args(bucket.name, file.name, bucket.name, file.name, destination_kms_key_name: kms_key)
file.service.mocked_service = mock
updated = file.rotate new_kms_key: kms_key
_(updated.name).must_equal file.name
mock.verify
end
it "can rotate from a customer-supplied encryption key (CSEK) to a new customer-managed encryption key (CMEK) with new_kms_key" do
mock = Minitest::Mock.new
options = { header: source_key_headers }
mock.expect :rewrite_object, done_rewrite(file_gapi), rewrite_object_args(bucket.name, file.name, bucket.name, file.name, destination_kms_key_name: kms_key, options: options)
file.service.mocked_service = mock
updated = file.rotate encryption_key: source_encryption_key, new_kms_key: kms_key
_(updated.name).must_equal file.name
mock.verify
end
it "can rotate its customer-supplied encryption keys with multiple requests for large objects" do
mock = Minitest::Mock.new
options = { header: source_key_headers.merge(key_headers) }
mock.expect :rewrite_object, undone_rewrite("notyetcomplete"), rewrite_object_args(bucket.name, file.name, bucket.name, file.name, options: options)
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file.name, bucket.name, file.name, rewrite_token: "notyetcomplete", options: options)
file.service.mocked_service = mock
# mock out sleep to make the test run faster
def file.sleep *args
end
updated = file.rotate encryption_key: source_encryption_key, new_encryption_key: encryption_key
_(updated.name).must_equal file.name
mock.verify
end
it "can rotate its customer-supplied encryption keys with multiple requests for large objects with user_project set to true" do
mock = Minitest::Mock.new
options = { header: source_key_headers.merge(key_headers) }
mock.expect :rewrite_object, undone_rewrite("notyetcomplete"),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, file_user_project.name, user_project: "test", options: options)
mock.expect :rewrite_object, done_rewrite(file_gapi),
rewrite_object_args(bucket.name, file_user_project.name, bucket.name, file_user_project.name, rewrite_token: "notyetcomplete", user_project: "test", options: options)
file_user_project.service.mocked_service = mock
# mock out sleep to make the test run faster
def file_user_project.sleep *args
end
updated = file_user_project.rotate encryption_key: source_encryption_key, new_encryption_key: encryption_key
_(updated.name).must_equal file_user_project.name
_(updated.user_project).must_equal true
mock.verify
end
end
it "can reload itself" do
file_name = "file.ext"
mock = Minitest::Mock.new
mock.expect :get_object, Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket.name, file_name, generations[3]).to_json),
get_object_args(bucket.name, file_name)
mock.expect :get_object, Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket.name, file_name, generations[2]).to_json),
get_object_args(bucket.name, file_name)
bucket.service.mocked_service = mock
file.service.mocked_service = mock
file = bucket.file file_name
_(file.generation).must_equal generations[3]
file.reload!
_(file.generation).must_equal generations[2]
mock.verify
end
it "can reload itself with user_project set to true" do
file_name = "file.ext"
mock = Minitest::Mock.new
mock.expect :get_object, Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket_user_project.name, file_name, generations[3]).to_json),
get_object_args(bucket_user_project.name, file_name, user_project: "test")
mock.expect :get_object, Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket_user_project.name, file_name, generations[2]).to_json),
get_object_args(bucket_user_project.name, file_name, user_project: "test")
bucket_user_project.service.mocked_service = mock
file.service.mocked_service = mock
file = bucket_user_project.file file_name
_(file.generation).must_equal generations[3]
file.reload!
_(file.generation).must_equal generations[2]
mock.verify
end
it "can list its generations" do
file_name = "file.ext"
mock = Minitest::Mock.new
mock.expect :get_object, Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket.name, file_name, generations[0]).to_json),
get_object_args(bucket.name, file_name)
mock.expect :list_objects, Google::Apis::StorageV1::Objects.new(kind: "storage#objects", items: file_gapis),
[bucket.name, delimiter: nil, max_results: nil, page_token: nil, prefix: file_name, versions: true, user_project: nil]
bucket.service.mocked_service = mock
file.service.mocked_service = mock
file = bucket.file file_name
_(file.generation).must_equal generations[0]
file_generations = file.generations
_(file_generations.count).must_equal 4
file_generations.each do |f|
_(f).must_be_kind_of Google::Cloud::Storage::File
_(f.user_project).must_be :nil?
end
_(file_generations.map(&:generation)).must_equal generations
mock.verify
end
it "can list its generations with user_project set to true" do
file_name = "file.ext"
mock = Minitest::Mock.new
mock.expect :get_object, Google::Apis::StorageV1::Object.from_json(random_file_hash(bucket_user_project.name, file_name, generations[0]).to_json),
get_object_args(bucket_user_project.name, file_name, user_project: "test")
mock.expect :list_objects, Google::Apis::StorageV1::Objects.new(kind: "storage#objects", items: file_gapis),
[bucket.name, delimiter: nil, max_results: nil, page_token: nil, prefix: file_name, versions: true, user_project: "test"]
bucket_user_project.service.mocked_service = mock
file.service.mocked_service = mock
file = bucket_user_project.file file_name
_(file.generation).must_equal generations[0]
_(file.user_project).must_equal true
file_generations = file.generations
_(file_generations.count).must_equal 4
file_generations.each do |f|
_(f).must_be_kind_of Google::Cloud::Storage::File
_(f.user_project).must_equal true
end
_(file_generations.map(&:generation)).must_equal generations
mock.verify
end
it "knows its KMS encryption key" do
_(file.kms_key).must_equal kms_key
end
# Gzip-compresses +data+ in memory and returns the compressed bytes as a
# String (used by tests that need gzipped fixture content).
def gzip_data data
  buffer = StringIO.new("")
  # wrap closes the writer when the block returns, which also writes the
  # gzip footer; StringIO#string remains readable afterwards.
  Zlib::GzipWriter.wrap(buffer) do |gzip|
    gzip.write data
  end
  buffer.string
end
end
| 37.455369 | 184 | 0.701758 |
b96e922c3929a816190882595364655e0dae57c2 | 924 | class LoverInfo < ActiveRecord::Base
# Scrapeable supplies the scraping lifecycle, BetterJson the JSON
# serialization helpers (both defined elsewhere in the app).
include Scrapeable, BetterJson

# ASSOCIATIONS
belongs_to :mod
belongs_to :game, :inverse_of => 'lover_infos'

# VALIDATIONS
validates :game_id, :mod_name, :uploaded_by, :released, presence: true
# Returns the LoverInfo for +id+, building a new unsaved record when none
# exists. Raises ModExistsError if the record is already tied to a mod.
def self.prepare_for_mod(id)
  record = LoverInfo.find_or_initialize_by(id: id)
  raise Exceptions::ModExistsError.new(record.mod_id) if record.mod_id
  record
end
# Fetches this mod's data from Lovers Lab via the LoverHelper, applies it
# to this record, and persists it (raising on validation failure).
def scrape
  remote_attributes = LoverHelper.retrieve_mod(id)
  assign_attributes(remote_attributes)
  save!
end
# Public URL of this file's page on Lovers Lab.
def url
  "http://www.loverslab.com/files/file/" + id.to_s
end
# Credits the uploader as a mod author when a UserBio matches the Lovers
# Lab username and this record is linked to a mod; otherwise a no-op.
def link_uploader
  bio = UserBio.find_by(lover_username: uploaded_by)
  return if bio.blank? || mod_id.blank?
  ModAuthor.add_author(mod_id, bio.user_id)
end
# Statistics scraping is always supported for Lovers Lab records
# (presumably a hook required by the Scrapeable mixin — TODO confirm).
def can_scrape_statistics?
  true
end
end
| 22.536585 | 80 | 0.718615 |
e96e314b0f3f264bf9673f11a6749fbc7226db57 | 5,022 | module Podbay
class Components::Aws
# Wraps the AWS resources (CloudFormation stack, VPC, subnets, route
# tables, auto-scaling groups, S3 buckets) that make up one named Podbay
# cluster. Lookups are resolved through the cluster's CloudFormation
# stack and memoized per instance.
class Cluster
  attr_reader :name

  def initialize(name)
    @name = name
  end

  # Cluster name normalized for AWS identifiers: spaces/underscores become
  # hyphens, other non-alphanumerics are stripped, result is downcased.
  def formatted_name
    name.gsub(/[ _]/, '-').gsub(/[^a-zA-Z0-9-]/, '').downcase
  end

  # The cluster's VPC, resolved via the stack's 'VPC' resource.
  # Raises MissingResourceError when the VPC no longer exists. Memoized.
  def vpc
    @__vpc ||= Resources::EC2.vpc(
      stack.resource('VPC').physical_resource_id
    ).tap do |vpc|
      unless Resources::EC2.vpc_exists?(vpc.id)
        fail MissingResourceError, "VPC for cluster '#{name}' missing"
      end
    end
  end

  def vpc_cidr
    vpc.cidr_block
  end

  # Availability zones recorded as az1..azN tags on the VPC, sorted by tag
  # name. Raises MissingResourceError when no az tags exist. Memoized.
  def availability_zones
    @__azs ||= Resources::EC2.tags_of(vpc)
      .select { |k,_| k.to_s.match(/\Aaz\d\z/) }
      .sort.map(&:last).tap do |azs|
        if azs.empty?
          fail MissingResourceError, 'Availability Zone(s) VPC tags missing'
        end
      end
  end

  def public_route_table
    @__public_route_table ||= Resources::EC2.route_table(
      id_of('PublicRouteTable')
    )
  end

  # One private route table per availability zone (PrivateRouteTable1..N).
  def private_route_tables
    @__private_route_tables ||= Resources::EC2.route_tables(
      filters: [
        {
          name: 'route-table-id',
          values: (1..availability_zones.count).map do |i|
            stack.resource("PrivateRouteTable#{i}").physical_resource_id
          end
        }
      ]
    )
  end

  # The cluster's CloudFormation stack (stack name == cluster name).
  # Raises MissingResourceError when the stack does not exist. Memoized.
  def stack
    @__cf_stack ||= Resources::CloudFormation.stack(name).tap do |s|
      unless s.exists?
        fail MissingResourceError, "CloudFormation stack '#{name}' missing"
      end
    end
  end

  def config_bucket
    @__config_bucket ||= stack.resource('ConfigBucket').physical_resource_id
  end

  def config_key
    @__config_key ||= stack.resource('ConfigKey').physical_resource_id
  end

  def podbay_bucket
    @__podbay_bucket ||= stack.resource('PodbayBucket').physical_resource_id
  end

  def podbay_key
    @__podbay_key ||= stack.resource('PodbayKey').physical_resource_id
  end

  def region
    Resources::EC2.region
  end

  # True when at least one running EC2 instance is tagged as a server of
  # this cluster. Memoized (nil check so a false result is also cached).
  def servers_exist?
    if @__servers_exist.nil?
      @__servers_exist = !Resources::EC2.instances(
        filters: [
          { name: 'tag:podbay:cluster', values: [name] },
          { name: 'tag:podbay:role', values: ['server'] },
          { name: 'instance-state-name', values: ['running'] }
        ]
      ).to_a.empty?
    end
    @__servers_exist
  end

  # Creates +count+ subnets with the given netmask, assigning each to an
  # availability zone by index, and tags them with +tags+ plus the cluster
  # tag. Returns the created subnets.
  def create_subnets(count, dmz, mask, tags)
    number_of_azs = availability_zones.length
    # BUGFIX: the guard was inverted (`number_of_azs <= count`), which
    # rejected valid small counts and allowed count > number_of_azs,
    # where availability_zones[i] would be nil. Enforce what the message
    # states: count must not exceed the number of AZs.
    fail "count must be <= #{number_of_azs}" unless count <= number_of_azs

    cidrs = _pick_cidrs(count, dmz, mask)

    subnets = cidrs.each_with_index.map do |cidr, i|
      print "Creating subnet #{cidr}... "
      Resources::EC2.create_subnet(
        cidr_block: cidr,
        availability_zone: availability_zones[i],
        vpc_id: vpc.id
      ).tap do |subnet|
        # Poll until the subnet is visible before tagging/continuing.
        loop do
          break if Resources::EC2.subnet_exists?(subnet.id)
          sleep 1
        end

        puts 'Complete!'.green
      end
    end

    Resources::EC2.add_tags(subnets.map(&:id),
      tags.merge('podbay:cluster' => name)
    )

    subnets
  end

  # All auto-scaling groups tagged as belonging to this cluster.
  def groups
    Resources::AutoScaling.groups.select do |g|
      g.data.tags.any? { |t| t.key == 'podbay:cluster' && t.value == name }
    end
  end

  # The single auto-scaling group backing +group_name+, or nil when the
  # group has no running instances. Raises PodbayGroupError when the
  # instances span more than one ASG.
  def asg_of_group(group_name)
    ec2_instances = Resources::EC2.instances(
      filters: [
        { name: 'tag:podbay:cluster', values: [name] },
        { name: 'tag:podbay:group', values: [group_name] },
        { name: 'instance-state-name', values: ['running'] }
      ]
    ).to_a
    return nil if ec2_instances.empty?

    asg_instances = Resources::AutoScaling.instances(
      instance_ids: ec2_instances.map(&:id)
    )

    if (gnames = asg_instances.map(&:group_name).uniq).count > 1
      fail PodbayGroupError,
        "More than 1 AutoScaling Group found: #{gnames.inspect}."
    end

    asg_instances.first && asg_instances.first.group
  end

  def subnets_of_group(group_name)
    subnets([{ name: 'tag:podbay:group', values: [group_name] }])
  end

  def subnets(filters = [])
    vpc.subnets(filters: filters)
  end

  # Physical id of a CloudFormation resource by its logical id.
  def id_of(logical_id)
    stack.resource(logical_id).physical_resource_id
  end

  private

  # Picks +count+ unused CIDR blocks inside the VPC, avoiding existing
  # subnets and blocks chosen earlier in the same call.
  def _pick_cidrs(count, dmz, mask)
    used_subnet_cidrs = subnets.map(&:cidr_block)
    count.times.inject([]) do |c|
      cidr = Utils.pick_available_cidr(vpc_cidr, used_subnet_cidrs + c,
        dmz: dmz, mask: mask)
      fail 'no IPs available for subnet' unless cidr
      c << cidr
    end
  end
end # Cluster
end # Components::Aws
end # Podbay
| 27.745856 | 80 | 0.558542 |
ab81cf7442c1ee3bdcc4c8ce605f452e824abeef | 782 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'twitter_bro'
require 'rspec/its'
require 'rspec/collection_matchers'
#require 'faraday'

# Auto-load all shared helpers under spec/support.
Dir[File.expand_path("../support/**/*.rb", __FILE__)].each { |f| require f }

RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
    expectations.syntax = :expect
  end

  config.mock_with :rspec do |mocks|
    # Verify that stubbed methods actually exist on partial doubles.
    mocks.verify_partial_doubles = true
    mocks.syntax = :expect
  end

  # Run only examples tagged :focus when any exist; otherwise run all.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true

  config.disable_monkey_patching!

  # Use verbose output when running a single spec file.
  if config.files_to_run.one?
    config.default_formatter = 'doc'
  end

  # Randomize example order; seed is printed so failures are reproducible
  # with --seed.
  config.order = :random
  Kernel.srand config.seed
end
8760aa5f17d2992faf13f745a200e0a5ea9bf651 | 1,262 | require_relative 'lib/ruby_activerecord_class/version'
Gem::Specification.new do |spec|
  spec.name          = "ruby_activerecord_class"
  spec.version       = RubyDb::VERSION
  spec.authors       = ["Carlos Torrealba"]
  spec.email         = ["[email protected]"]

  # NOTE(review): summary/description are still the generator's
  # placeholder text and should be filled in before release.
  spec.summary       = %q{Write a short summary, because RubyGems requires one.}
  spec.description   = %q{Write a longer description or delete this line.}
  spec.homepage      = "http://github.com"
  spec.license       = "MIT"
  spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")

  # NOTE(review): placeholder push host — set to the real gem server (or
  # remove) before publishing.
  spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"

  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = "http://github.com"
  spec.metadata["changelog_uri"] = "http://github.com"

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
end
| 42.066667 | 87 | 0.662441 |
3837257bdcf90fdbb3c3066172e7013842b7d4fd | 359 | # frozen_string_literal: true
class ConnectNewReportWithVariables < ActiveRecord::Migration[5.0]
  # Data migration: copy every existing Variable onto each report other than
  # the current (default) one, so all reports own their own variable rows.
  def up
    current = Report.current
    other_reports = Report.where.not(id: current.id)
    Variable.find_each do |var|
      other_reports.find_each do |target_report|
        Variable.create!(key: var.key, value: var.value, report: target_report)
      end
    end
  end
end
| 27.615385 | 82 | 0.724234 |
class CreateLocales < ActiveRecord::Migration
  # Adds the bare `locales` table, carrying only the standard timestamp columns.
  def change
    create_table(:locales, &:timestamps)
  end
end
| 16.125 | 45 | 0.697674 |
d5d1150e559150c47436c61beadf26a71545e1a8 | 2,758 | # frozen_string_literal: true
require_relative '../modules/terminal'
# player class
class Player
  # Starting bankroll for every player.
  START_BALANCE = 100

  # Terminal-UI parameters, held as a class instance variable so subclasses
  # (e.g. a dealer) can supply their own @params.
  @params = {
    print_name_y_coord: 12,
    print_score_coords: [52, 20],
    print_score_text: 'Сумма очков вашей руки: %i',
    print_top_card_y_coord: 14,
    available_commands: {
      take: 'Взять карту',
      show: 'Открыть карты',
      skip: 'Пропустить ход'
    },
    hide_cards: false
  }

  class << self
    attr_reader :params
  end

  attr_accessor :name, :avail_commands, :balance, :hide_cards, :cards

  def initialize(name = 'Player')
    @balance = START_BALANCE
    @name = name
  end

  # Look up a UI parameter from this class's @params hash.
  def interface_param(param)
    self.class.params[param]
  end

  # Reset per-round state (hand, card visibility, available commands).
  # The balance is intentionally kept.
  def soft_reset
    self.cards = []
    self.hide_cards = interface_param(:hide_cards).dup
    self.avail_commands = interface_param(:available_commands).dup
  end

  # Full reset including the bankroll (used when starting a new game).
  def hard_reset
    self.balance = START_BALANCE
    soft_reset
  end

  # Draw one card immediately (no deal animation) and remove the :take
  # command so the player cannot take twice in one round.
  def take(deck)
    take_card(deck, false)
    delete_command(:take)
  end

  # Clear the name line, then print "<name>: $<balance>" centred on it.
  def print_name
    y_coord = interface_param(:print_name_y_coord)
    Terminal.print_text_with_origin(' ' * 78, 2, y_coord)
    Terminal.print_text_center_with_origin("#{name}: $#{balance}", 40, y_coord)
  end

  # Print the localized "hand score" message at the configured coordinates.
  def print_score
    x_coord, y_coord = interface_param(:print_score_coords)
    text = interface_param(:print_score_text)
    Terminal.print_text_with_origin(format(text, score), x_coord, y_coord)
  end

  def delete_command(command)
    avail_commands.delete(command)
  end

  # Blackjack-style hand value. A card whose score is an Array is an ace:
  # [low, high]. Cards are evaluated in ascending order of their high value
  # so aces are resolved last: an ace counts high unless that would bust (>21).
  def score
    score_sum = 0
    cards.sort_by { |card| card.score.is_a?(Array) ? card.score[1] : card.score }
         .each do |card|
      score = card.score
      if score.is_a?(Array)
        if score_sum + score[1] > 21
          score_sum += score[0]
        else
          score_sum += score[1]
        end
      else
        score_sum += score
      end
    end
    score_sum
  end

  # Draw the top border of the index-th card; cards are laid out 8 columns apart.
  def print_top_card(index)
    y_coord = interface_param(:print_top_card_y_coord)
    top_left_card = 8 * index + 7
    Terminal.cursor_goto(top_left_card, y_coord)
    print '┌────┐'
    Terminal.cursor_back_in(6)
    Terminal.goto_n_line_down(1)
  end

  # Draw the rank and suit rows of a card.
  # NOTE(review): `card.dig` is assumed to be Card's own rank accessor
  # (not Hash#dig) — confirm on the Card class.
  def print_middle_card(card)
    print "│ #{card.dig.to_s.ljust(2)} │"
    Terminal.cursor_back_in(6)
    Terminal.goto_n_line_down(1)
    print "│ #{card.suit} │"
  end

  def print_bottom_card
    Terminal.cursor_back_in(6)
    Terminal.goto_n_line_down(1)
    print '└────┘'
  end

  # Redraw the whole hand, one card frame at a time.
  def print_cards
    cards.each_with_index do |card, index|
      print_top_card(index)
      print_middle_card(card)
      print_bottom_card
    end
  end

  # Take one card from the deck and redraw; the short sleep animates the deal.
  # The score line is suppressed while this player's cards are hidden.
  def take_card(deck, delay = true)
    cards << deck.take_one
    sleep(1.fdiv(3)) if delay
    print_cards
    print_score unless hide_cards
  end
end
| 20.42963 | 81 | 0.664249 |
1870745f30eaf60b5b3cbc936734bb02adecbaea | 5,905 | # encoding: utf-8
require 'spec_helper'
describe Ably::Rest do
  # Verifies that the REST client selects the correct wire protocol
  # (MsgPack by default, JSON on request) and decodes responses accordingly.
  describe 'transport protocol' do
    include Ably::Modules::Conversions

    let(:client_options) { {} }
    let(:client) do
      Ably::Rest::Client.new(client_options.merge(key: 'appid.keyuid:keysecret', log_retries_as_info: true))
    end

    let(:now) { Time.now - 1000 }
    let(:body_value) { [as_since_epoch(now)] }

    before do
      # Stub GET /time so the chosen mime type and decoding can be asserted.
      stub_request(:get, "#{client.endpoint}/time").
        with(:headers => { 'Accept' => mime }).
        to_return(:status => 200, :body => request_body, :headers => { 'Content-Type' => mime })
    end

    context 'when protocol is not defined it defaults to :msgpack' do
      let(:client_options) { { } }
      let(:mime) { 'application/x-msgpack' }
      let(:request_body) { body_value.to_msgpack }

      it 'uses MsgPack', :webmock do
        expect(client.protocol).to eql(:msgpack)
        expect(client.time).to be_within(1).of(now)
      end
    end

    # NOTE(review): the `options` local below is never read — the array exists
    # only to be iterated; the assignment could be dropped.
    options = [
      { protocol: :json },
      { use_binary_protocol: false }
    ].each do |client_option|
      context "when option #{client_option} is used" do
        let(:client_options) { client_option }
        let(:mime) { 'application/json' }
        let(:request_body) { body_value.to_json }

        it 'uses JSON', :webmock do
          expect(client.protocol).to eql(:json)
          expect(client.time).to be_within(1).of(now)
        end
      end
    end

    # NOTE(review): same unused `options` assignment as above.
    options = [
      { protocol: :msgpack },
      { use_binary_protocol: true }
    ].each do |client_option|
      context "when option #{client_option} is used" do
        let(:client_options) { client_option }
        let(:mime) { 'application/x-msgpack' }
        let(:request_body) { body_value.to_msgpack }

        it 'uses MsgPack', :webmock do
          expect(client.protocol).to eql(:msgpack)
          expect(client.time).to be_within(1).of(now)
        end
      end
    end
  end

  vary_by_protocol do
    let(:client) do
      Ably::Rest::Client.new(key: api_key, environment: environment, protocol: protocol, log_retries_as_info: true)
    end

    describe 'failed requests' do
      context 'due to invalid Auth' do
        it 'should raise an InvalidRequest exception with a valid error message and code' do
          invalid_client = Ably::Rest::Client.new(key: 'appid.keyuid:keysecret', environment: environment)
          expect { invalid_client.channel('test').publish('foo', 'choo') }.to raise_error do |error|
            expect(error).to be_a(Ably::Exceptions::ResourceMissing)
            expect(error.message).to match(/No application found/)
            expect(error.code).to eql(40400)
            expect(error.status).to eql(404)
          end
        end
      end

      describe 'server error with JSON error response body', :webmock do
        let(:error_response) { '{ "error": { "statusCode": 500, "code": 50000, "message": "Internal error" } }' }

        before do
          stub_request(:get, "#{client.endpoint}/time").
            to_return(:status => 500, :body => error_response, :headers => { 'Content-Type' => 'application/json' })
        end

        it 'should raise a ServerError exception' do
          expect { client.time }.to raise_error(Ably::Exceptions::ServerError, /Internal error/)
        end
      end

      describe '500 server error without a valid JSON response body', :webmock do
        before do
          stub_request(:get, "#{client.endpoint}/time").
            to_return(:status => 500, :headers => { 'Content-Type' => 'application/json' })
        end

        it 'should raise a ServerError exception' do
          expect { client.time }.to raise_error(Ably::Exceptions::ServerError, /Unknown/)
        end
      end
    end

    # Simulates token expiry: the publish stub succeeds on attempts 1 and 3
    # and raises TokenExpired otherwise, so attempt 2 must trigger a renewal.
    describe 'token authentication failures', :webmock do
      let(:token_1) { { token: random_str } }
      let(:token_2) { { token: random_str } }
      let(:channel) { random_str }

      before do
        @token_requests = 0
        @publish_attempts = 0

        stub_request(:post, "#{client.endpoint}/keys/#{key_name}/requestToken").to_return do
          @token_requests += 1
          {
            :body => public_send("token_#{@token_requests}").merge(expires: (Time.now.to_i + 60) * 1000).to_json,
            :headers => { 'Content-Type' => 'application/json' }
          }
        end

        stub_request(:post, "#{client.endpoint}/channels/#{channel}/publish").to_return do
          @publish_attempts += 1
          if [1, 3].include?(@publish_attempts)
            { status: 201, :body => '[]', :headers => { 'Content-Type' => 'application/json' } }
          else
            raise Ably::Exceptions::TokenExpired.new('Authentication failure', 401, 40142)
          end
        end
      end

      context 'when auth#token_renewable?' do
        before do
          client.auth.authorize
        end

        it 'should automatically reissue a token' do
          client.channel(channel).publish('evt', 'msg')
          expect(@publish_attempts).to eql(1)
          expect(@token_requests).to eql(1)

          # Triggers an authentication 401 failure which should automatically request a new token
          client.channel(channel).publish('evt', 'msg')
          expect(@publish_attempts).to eql(3)
          expect(@token_requests).to eql(2)
        end
      end

      context 'when NOT auth#token_renewable?' do
        let(:client) { Ably::Rest::Client.new(token: 'token ID cannot be used to create a new token', environment: environment, protocol: protocol) }

        it 'should raise an TokenExpired exception' do
          client.channel(channel).publish('evt', 'msg')
          expect(@publish_attempts).to eql(1)
          expect { client.channel(channel).publish('evt', 'msg') }.to raise_error Ably::Exceptions::TokenExpired
          expect(@token_requests).to eql(0)
        end
      end
    end
  end
end
| 35.14881 | 149 | 0.606435 |
26d7b24ce23e6fdc3a993d6931edb39e1b3f1d54 | 781 | #
#
#
# Wraps a raw driver result set and exposes row-by-row fetch helpers
# (hash, array, single column, or hydrated entity object).
class GDO::DB::Result
  # result - a driver result set; it only needs to respond to #to_enum.
  def initialize(result)
    @table = nil
    @result = result
    @enum = result.to_enum
    @cached = false
  end

  def finalize
    # TODO: Free result
  end

  # Fluent setters used by the query builder: bind the owning table / toggle
  # the entity cache. Both return self for chaining.
  def table(table); @table = table; self; end
  def cached(cached); @cached = cached; self; end

  # Number of rows in the result set.
  # FIX: this previously contained a leftover `byebug` breakpoint, which would
  # raise NameError in production (or stop the process under a debugger).
  # Until the underlying driver exposes a row count, fail loudly instead.
  def num_rows
    raise NotImplementedError, 'num_rows is not implemented for this result type'
  end

  # Single column (default: the first) of the next row, or nil when exhausted.
  def fetch_var(i = 0)
    fetch_row[i]
  end

  # Next row as an array of values, or nil when the result set is exhausted.
  def fetch_row
    fetch_assoc.values rescue nil
  end

  # Next row as a column-name => value hash, or nil when exhausted.
  def fetch_assoc
    @enum.next rescue nil
  end

  # Next row hydrated as the bound table's entity object.
  def fetch_object
    fetch_as(@table)
  end

  # Next row hydrated as the given table's entity; honours the entity cache
  # when both this result and the table opt in.
  def fetch_as(table)
    return nil unless vars = fetch_assoc
    return table._cache.init_cached(vars) if @cached && table.gdo_cached
    return table.blank(vars).dirty(false).persisted
  end
end
| 16.617021 | 73 | 0.610755 |
d5186beff15d5fcd04153f844b4b2671e130e544 | 1,531 | =begin
#Custom Workflow Actions
#Create custom workflow actions
The version of the OpenAPI document: v4
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Hubspot::Automation::Actions::ActionFunctionIdentifier
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Smoke tests for the generated ActionFunctionIdentifier model: construction
# plus placeholder examples for each attribute.
describe 'ActionFunctionIdentifier' do
  before do
    # run before each test: fresh generated-model instance under test
    @instance = Hubspot::Automation::Actions::ActionFunctionIdentifier.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of ActionFunctionIdentifier' do
    it 'should create an instance of ActionFunctionIdentifier' do
      expect(@instance).to be_instance_of(Hubspot::Automation::Actions::ActionFunctionIdentifier)
    end
  end

  describe 'test attribute "function_type"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
      # validator = Petstore::EnumTest::EnumAttributeValidator.new('String', ["PRE_ACTION_EXECUTION", "PRE_FETCH_OPTIONS", "POST_FETCH_OPTIONS"])
      # validator.allowable_values.each do |value|
      #   expect { @instance.function_type = value }.not_to raise_error
      # end
    end
  end

  describe 'test attribute "id"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 29.442308 | 145 | 0.741999 |
915221e9728c012ae42458e9512189ab5267ebfa | 5,301 | require 'brakeman/processors/template_processor'
#Processes HAML templates.
class Brakeman::HamlTemplateProcessor < Brakeman::TemplateProcessor
  # Matches Haml's generated script-formatting helper; the boolean captures
  # encode its options — the 4th one ($4) indicates HTML escaping is on.
  HAML_FORMAT_METHOD = /format_script_(true|false)_(true|false)_(true|false)_(true|false)_(true|false)_(true|false)_(true|false)/

  # Sexp patterns for Haml::Helpers and Haml::Filters::Javascript constants.
  HAML_HELPERS = s(:colon2, s(:const, :Haml), :Helpers)
  JAVASCRIPT_FILTER = s(:colon2, s(:colon2, s(:const, :Haml), :Filters), :Javascript)

  #Processes call, looking for template output
  #
  # Rewrites calls on Haml's `_hamlout` buffer object into Brakeman's
  # :output / :escaped_output sexps so later checks can find template output.
  def process_call exp
    target = exp.target
    if sexp? target
      target = process target
    end

    method = exp.method

    if (call? target and target.method == :_hamlout)
      res = case method
            when :adjust_tabs, :rstrip!, :attributes #Check attributes, maybe?
              ignore
            when :options, :buffer
              exp
            when :open_tag
              process_call_args exp
            else
              arg = exp.first_arg

              if arg
                @inside_concat = true
                out = exp.first_arg = process(arg)
                @inside_concat = false
              else
                raise "Empty _hamlout.#{method}()?"
              end

              if string? out
                ignore
              else
                r = case method.to_s
                    when "push_text"
                      build_output_from_push_text(out)
                    when HAML_FORMAT_METHOD
                      # $4 captures the escape_html option of the generated helper.
                      if $4 == "true"
                        if string_interp? out
                          build_output_from_push_text(out, :escaped_output)
                        else
                          Sexp.new :format_escaped, out
                        end
                      else
                        if string_interp? out
                          build_output_from_push_text(out)
                        else
                          Sexp.new :format, out
                        end
                      end

                    else
                      raise "Unrecognized action on _hamlout: #{method}"
                    end

                @javascript = false
                r
              end
            end

      res.line(exp.line)
      res

    #_hamlout.buffer <<
    #This seems to be used rarely, but directly appends args to output buffer.
    #Has something to do with values of blocks?
    elsif sexp? target and method == :<< and is_buffer_target? target
      @inside_concat = true
      out = exp.first_arg = process(exp.first_arg)
      @inside_concat = false

      if out.node_type == :str #ignore plain strings
        ignore
      else
        s = Sexp.new(:output, out)
        @current_template.add_output s
        s.line(exp.line)
        s
      end

    elsif target == nil and method == :render
      #Process call to render()
      exp.arglist = process exp.arglist
      make_render_in_view exp

    elsif target == nil and method == :find_and_preserve
      process exp.first_arg

    elsif method == :render_with_options
      # Haml filters render through this; remember when it's the JS filter so
      # escape_javascript-style helpers are treated as escaping.
      if target == JAVASCRIPT_FILTER
        @javascript = true
      end

      process exp.first_arg

    else
      exp.target = target
      exp.arglist = process exp.arglist
      exp
    end
  end

  #If inside an output stream, only return the final expression
  def process_block exp
    exp = exp.dup
    exp.shift

    if @inside_concat
      @inside_concat = false
      exp[0..-2].each do |e|
        process e
      end
      @inside_concat = true
      process exp[-1]
    else
      exp.map! do |e|
        res = process e
        if res.empty?
          nil
        else
          res
        end
      end
      Sexp.new(:rlist).concat(exp).compact
    end
  end

  #Checks if the buffer is the target in a method call Sexp.
  #TODO: Test this
  def is_buffer_target? exp
    exp.node_type == :call and
      node_type? exp.target, :lvar and
      exp.target.value == :_hamlout and
      exp.method == :buffer
  end

  #HAML likes to put interpolated values into _hamlout.push_text
  #but we want to handle those individually
  #
  # Mutates the string-interpolation sexp in place, converting each embedded
  # expression into an :output/:escaped_output node.
  def build_output_from_push_text exp, default = :output
    if string_interp? exp
      exp.map! do |e|
        if sexp? e
          if node_type? e, :evstr and e[1]
            e = e.value
          end

          get_pushed_value e, default
        else
          e
        end
      end
    end
  end

  #Gets outputs from values interpolated into _hamlout.push_text
  def get_pushed_value exp, default = :output
    return exp unless sexp? exp

    case exp.node_type
    when :format
      exp.node_type = :output
      @current_template.add_output exp
      exp
    when :format_escaped
      exp.node_type = :escaped_output
      @current_template.add_output exp
      exp
    when :str, :ignore, :output, :escaped_output
      exp
    when :block, :rlist, :dstr
      exp.map! { |e| get_pushed_value e }
    else
      # html_escape (and escape_javascript inside a JS filter) count as escaped.
      if call? exp and exp.target == HAML_HELPERS and exp.method == :html_escape
        s = Sexp.new(:escaped_output, exp.first_arg)
      elsif @javascript and call? exp and (exp.method == :j or exp.method == :escape_javascript)
        s = Sexp.new(:escaped_output, exp.first_arg)
      else
        s = Sexp.new(default, exp)
      end

      s.line(exp.line)
      @current_template.add_output s
      s
    end
  end
end
| 28.196809 | 129 | 0.561215 |
# Rails test-environment configuration.
# NOTE(review): `serve_static_files` / `static_cache_control` are the
# Rails 4.2-era option names — confirm against the app's Rails version.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Configure static file server for tests with Cache-Control for performance.
  config.serve_static_files   = true
  config.static_cache_control = 'public, max-age=3600'

  # Show full error reports and disable caching.
  config.consider_all_requests_local       = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Randomize the order test cases are executed.
  config.active_support.test_order = :random

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
  # config.action_controller.action_on_unpermitted_parameters = :log
end
| 40.555556 | 85 | 0.777534 |
62c064cfbe28f8b62429af479ce4e95291e48cdd | 272 |
module EbayTrading # :nodoc:
  module Types # :nodoc:

    # == Attributes
    # Empty marker type generated from the eBay Trading API schema; all of its
    # behaviour comes from the XML mapping / initializer mixins.
    class AttributeConversionEnabledFeatureDefinition
      include XML::Mapping
      include Initializer
      root_element_name 'AttributeConversionEnabledFeatureDefinition'
    end
  end
end
| 19.428571 | 69 | 0.735294 |
33cb73649a6c125fe8327951eb5d1e7492b80614 | 18,613 | require_relative 'cspec_span_report_item'
# A "cspec set": one top-level commit plus the commits of its dependency
# components. Supports diffing two sets (files changed, commit comments,
# bug IDs) and JSON round-tripping.
class Cspec_set < Error_holder
        attr_accessor :top_commit
        attr_accessor :dependency_commits

        # top_commit may be a Cspec or a "repo;...;commit" string.
        # dependency_commits must be nil (=> []) or an Array-like of Cspec.
        def initialize(top_commit, dependency_commits)
                if top_commit.is_a?(String)
                        self.top_commit = Cspec.from_repo_and_commit_id(top_commit)
                else
                        self.top_commit = top_commit
                end
                if !dependency_commits
                        self.dependency_commits = []
                else
                        raise "bad dependency_commits=#{dependency_commits}" unless dependency_commits.respond_to?(:size)
                        self.dependency_commits = dependency_commits
                end
        end
        def eql?(other)
                self.top_commit.eql?(other.top_commit) && dependency_commits.eql?(other.dependency_commits)
        end
        # Serialize as {"cspec": ..., "cspec_deps": [...]}.
        # NOTE(review): this zero-arity override shadows the one-argument
        # to_json the JSON library calls when this object is nested in a
        # container — confirm that is intended.
        def to_json()
                h = Hash.new
                h["cspec"] = top_commit.repo_and_commit_id
                cspec_deps = []
                self.dependency_commits.each do | commit |
                        cspec_deps << commit.repo_and_commit_id
                end
                h["cspec_deps"] = cspec_deps
                JSON.pretty_generate(h)
        end
        # Top commit followed by all dependency commits.
        def commits()
                z = []
                z << self.top_commit
                if self.dependency_commits
                        z = z.concat(self.dependency_commits)
                end
                z
        end
        def list_files_changed_since(other_cspec_set)
                pairs = get_pairs_of_commits_with_matching_repo(other_cspec_set)
                report_item_set = Cspec_span_report_item_set.new
                pairs.each do | pair |
                        commit0 = pair[0]
                        commit1 = pair[1]
                        report_item_set.add(commit1.list_files_changed_since(commit0))
                end
                report_item_set
        end
        def list_changes_since(other_cspec_set)
                pairs = get_pairs_of_commits_with_matching_repo(other_cspec_set)
                report_item_set = Cspec_span_report_item_set.new
                pairs.each do | pair |
                        commit0 = pair[0]
                        commit1 = pair[1]
                        report_item_set.add(commit1.list_changes_since(commit0))
                end
                report_item_set
        end
        # [older, newer] commit pairs for every component present in both sets.
        def get_pairs_of_commits_with_matching_repo(other_cspec_set)
                pairs = []
                self.commits.each do | commit |
                        previous_commit_for_same_component = commit.find_commit_for_same_component(other_cspec_set)
                        if previous_commit_for_same_component
                                pairs << [ previous_commit_for_same_component, commit ]
                        end
                end
                pairs
        end
        # Extract bug IDs (per Cspec_set.bug_id_regexp) from the commit
        # comments of every change since other_cspec_set.
        def list_bug_IDs_since(other_cspec_set)
                report_item_set = list_changes_since(other_cspec_set)
                bug_ID_report_item_set = Cspec_span_report_item_set.new
                report_item_set.items.each do | report_item |
                        bug_IDs = Cspec.grep_group1(report_item.item, Cspec_set.bug_id_regexp)
                        if !bug_IDs.empty?
                                report_item.item = bug_IDs
                                bug_ID_report_item_set.add(report_item)
                        end
                end
                bug_ID_report_item_set
        end
        # Commits whose component exists here but not in other_cspec_set.
        def find_commits_for_components_that_were_added_since(other_cspec_set)
                commits_for_components_that_were_added = []
                self.commits.each do | commit |
                        if !commit.component_contained_by?(other_cspec_set)
                                commits_for_components_that_were_added << commit
                        end
                end
                commits_for_components_that_were_added
        end
        # Commits whose component exists in both sets (i.e. candidates for
        # having changed between the two sets).
        def find_commits_for_components_that_changed_since(other_cspec_set)
                commits_for_components_that_changed = []
                self.commits.each do | commit |
                        previous_commit_for_same_component = commit.find_commit_for_same_component(other_cspec_set)
                        if previous_commit_for_same_component
                                commits_for_components_that_changed << commit
                        end
                end
                commits_for_components_that_changed
        end
        # Full file list of newly-added components plus added/updated files of
        # components present in both sets.
        def list_files_added_or_updated_since(other_cspec_set)
                commits_for_components_that_were_added = self.find_commits_for_components_that_were_added_since(other_cspec_set)
                commits_which_were_updated = self.find_commits_for_components_that_changed_since(other_cspec_set)
                added_files = []
                commits_for_components_that_were_added.each do | commit |
                        added_files += commit.list_files()
                end
                updated_files = []
                commits_which_were_updated.each do | commit |
                        updated_files += commit.list_files_added_or_updated()
                end
                added_files + updated_files
        end
        def to_s()
                z = "Cspec_set(#{self.top_commit}/["
                self.dependency_commits.each do | commit |
                        z << " " << commit.to_s
                end
                z << "]"
                z
        end
        def Cspec_set.list_bug_IDs_between(cspec_set_s1, cspec_set_s2)
                cspec_set1 = Cspec_set.from_s(cspec_set_s1)
                cspec_set2 = Cspec_set.from_s(cspec_set_s2)
                return cspec_set2.list_bug_IDs_since(cspec_set1)
        end
        def Cspec_set.list_changes_between(cspec_set_s1, cspec_set_s2)
                cspec_set1 = Cspec_set.from_s(cspec_set_s1)
                cspec_set2 = Cspec_set.from_s(cspec_set_s2)
                return cspec_set2.list_changes_since(cspec_set1)
        end
        def Cspec_set.list_files_changed_between(cspec_set_s1, cspec_set_s2)
                cspec_set1 = Cspec_set.from_s(cspec_set_s1)
                cspec_set2 = Cspec_set.from_s(cspec_set_s2)
                return cspec_set2.list_files_changed_since(cspec_set1)
        end
        # Last n commits of the given repo, parsed into Cspec objects.
        def Cspec_set.list_last_changes(repo_spec, n)
                gr = Repo.from_spec(repo_spec)
                # Example log entry:
                #
                # "commit 22ab587dd9741430c408df1f40dbacd56c657c3f"
                # "Author: osnbt on socialdev Jenkins <[email protected]>"
                # "Date:   Tue Feb 20 09:28:24 2018 -0800"
                # ""
                # "    New version com.oracle.cecs.caas:manifest:1.0.3012, initiated by https://osnci.us.oracle.com/job/caas.build.pl.master/3012/"
                # "    and updated (consumed) by https://osnci.us.oracle.com/job/serverintegration.deptrigger.pl.master/484/"
                # "    "
                # "    The deps.gradle file, component.properties and any other @autoupdate files listed in deps.gradle"
                # "    have been automatically updated to consume these dynamic dependencies."
                commit_log_entries = gr.vcs.list_last_changes(n)
                commits = []
                commit_log_entries.each do | commit_log_entry |
                        if commit_log_entry !~ /^([a-f0-9]+):(.*)$/m
                                raise "could not understand #{commit_log_entry}"
                        else
                                commit_id, comment = $1, $2
                                commit = Cspec.new(gr, commit_id)
                                commit.comment = comment
                                commits << commit
                        end
                end
                commits
        end
        # Lazily-built, globally-configurable regexp whose group 1 captures a
        # bug ID inside a commit comment.
        def Cspec_set.bug_id_regexp()
                if !Cspec_set.bug_id_regexp_val
                        z = Global.get("bug_id_regexp_val", ".*Bug (.*).*")
                        Cspec_set.bug_id_regexp_val = Regexp.new(z, "m")
                end
                Cspec_set.bug_id_regexp_val
        end
        def Cspec_set.from_file(json_fn)
                from_s(IO.read(json_fn))
        end
        # v1 JSON shape: {"cspec": "...", "cspec_deps": ["...", ...]}
        def Cspec_set.from_json_obj_v1(z)
                deps = []
                if z.has_key?("cspec")
                        cs0 = Cspec_set.from_s(z["cspec"])
                else
                        cs0 = nil
                end
                if z.has_key?("cspec_deps")
                        Error_holder.raise("since there is a cspec_deps value, there should also be a cspec value, but it is missing in #{s}", 400) unless cs0
                        deps = []
                        z["cspec_deps"].each do | dep_cspec |
                                cs = Cspec_set.from_s(dep_cspec)
                                deps += cs.commits
                        end
                        cs0.dependency_commits = deps
                end
                cs0
        end
        # v2 JSON shape: an array of hashes, each with a "cspec" key plus
        # arbitrary extra properties stored onto the commit.
        def Cspec_set.from_json_obj_v2(z)
                cspec_set_array = z.map do | cspec_h |
                        if !cspec_h.has_key?("cspec")
                                Error_holder.raise("expected a cspec key in the hash #{cspec_h} (from #{z})", 400)
                        end
                        cs = Cspec_set.from_s(cspec_h["cspec"])
                        cspec_h.each_pair do | key, val |
                                if key != "cspec"
                                        cs.top_commit.add_prop(key, val)
                                end
                        end
                        cs
                end
                cs0 = cspec_set_array.shift
                cspec_set_array.each do | csx |
                        cs0.dependency_commits += csx.commits
                end
                cs0
        end
        # Try the v1 shape first, falling back to v2 on any error.
        def Cspec_set.from_json_obj(z)
                begin
                        return Cspec_set.from_json_obj_v1(z)
                rescue
                        return Cspec_set.from_json_obj_v2(z)
                end
        end
        # Parse from a URL (fetched via REST), a "repo;...;commit" string, or
        # a JSON document. A trailing "+" on the commit spec, or
        # autodiscover=true, triggers dependency auto-discovery.
        def Cspec_set.from_s(s, arg_name="Cspec_set.from_s", autodiscover=false)
                if s.start_with?('http')
                        url = s
                        return from_s(U.rest_get(url), "#{arg_name} => #{url}")
                end
                # NOTE(review): `deps` is never reassigned below, so the later
                # (!deps || deps.empty?) test is always true; likewise
                # `repo_and_commit_id` stays nil on the JSON branch — confirm
                # Cspec.auto_discover_requested_in__repo_and_commit_id(nil) is safe.
                deps = nil
                if Cspec.is_repo_and_commit_id?(s)
                        repo_and_commit_id = s
                        cs = Cspec_set.new(repo_and_commit_id, nil)
                else
                        if s !~ /\{/
                                Error_holder.raise("expecting JSON, but I see no hash in #{s}", 400)
                        end
                        begin
                                h = JSON.parse(s)
                        rescue JSON::ParserError => jpe
                                Error_holder.raise("trouble parsing #{arg_name} \"#{s}\": #{jpe.to_s}", 400)
                        end
                        cs = from_json_obj(h)
                end
                if Cspec.auto_discover_requested_in__repo_and_commit_id(repo_and_commit_id)
                        autodiscover = true
                end
                if autodiscover && (!deps || deps.empty?)
                        # executes auto-discovery in this case
                        cs.dependency_commits = cs.top_commit.unreliable_autodiscovery_of_dependencies_from_build_configuration
                end
                cs
        end
        def Cspec_set.from_repo_and_commit_id(repo_and_commit_id, dependency_commits=nil)
                if repo_and_commit_id =~ /\+$/
                        autodiscover = true
                elsif dependency_commits == Cspec::AUTODISCOVER
                        autodiscover = true
                else
                        autodiscover = false
                end
                top_commit = Cspec.from_repo_and_commit_id(repo_and_commit_id)
                if autodiscover
                        dependency_commits = top_commit.unreliable_autodiscovery_of_dependencies_from_build_configuration
                end
                Cspec_set.new(top_commit, dependency_commits)
        end
        def Cspec_set.test_list_changes_since()
                compound_spec1 = "git;git.osn.oraclecorp.com;osn/serverintegration;;6b5ed0226109d443732540fee698d5d794618b64"
                compound_spec2 = "git;git.osn.oraclecorp.com;osn/serverintegration;;06c85af5cfa00b0e8244d723517f8c3777d7b77e"
                cc1 = Cspec_set.from_repo_and_commit_id(compound_spec1)
                cc2 = Cspec_set.from_repo_and_commit_id(compound_spec2)
                gc2 = Cspec.from_repo_and_commit_id(compound_spec2)
                report_item_set1 = cc2.list_changes_since(cc1)
                report_item_set2 = Cspec_set.list_changes_between(compound_spec1, compound_spec2)
                changes1 = report_item_set1.all_items
                changes2 = report_item_set2.all_items
                U.assert_eq(changes1, changes2, "vfy same result from wrapper 2a")
                g1b = Cspec.from_repo_and_commit_id("git;git.osn.oraclecorp.com;osn/serverintegration;;22ab587dd9741430c408df1f40dbacd56c657c3f")
                g1a = Cspec.from_repo_and_commit_id("git;git.osn.oraclecorp.com;osn/serverintegration;;7dfff5f400b3011ae2c4aafac286d408bce11504")
                U.assert_eq(gc2, changes1[0], "test_list_changes_since.0")
                U.assert_eq(g1b, changes1[1], "test_list_changes_since.1")
                U.assert_eq(g1a, changes1[2], "test_list_changes_since.2")
        end
        def Cspec_set.test_list_files_changed_since_cs()
                compound_spec1 = "git;git.osn.oraclecorp.com;osn/serverintegration;;6b5ed0226109d443732540fee698d5d794618b64+"
                compound_spec2 = "git;git.osn.oraclecorp.com;osn/serverintegration;;06c85af5cfa00b0e8244d723517f8c3777d7b77e+"
                cc1 = Cspec_set.from_repo_and_commit_id(compound_spec1, Cspec::AUTODISCOVER)
                cc2 = Cspec_set.from_repo_and_commit_id(compound_spec2, Cspec::AUTODISCOVER)
                report_item_set = Cspec_set.list_files_changed_between(compound_spec1, compound_spec2)
                changed_files2 = report_item_set.all_items
                changed_files = cc2.list_files_changed_since(cc1).all_items
                U.assert_eq(changed_files, changed_files2, "vfy same result from wrapper 2b")
                expected_changed_files = [ "component.properties", "deps.gradle", "component.properties", "deps.gradle" ]
                U.assert_json_eq(expected_changed_files, changed_files, "Cspec_set.test_list_files_changed_since_cs")
        end
        def Cspec_set.test_list_bug_IDs_since()
                # I noticed that for the commits in this range, there is a recurring automated comment "caas.build.pl.master/3013/" -- so
                # I thought I would reset the pattern to treat that number like a bug ID for the purposes of the test.
                # (At some point, i'll need to go find a comment that really does refer to a bug ID.)
                saved_bug_id_regexp = Cspec_set.bug_id_regexp_val
                begin
                        compound_spec1 = "git;git.osn.oraclecorp.com;osn/serverintegration;;6b5ed0226109d443732540fee698d5d794618b64"
                        compound_spec2 = "git;git.osn.oraclecorp.com;osn/serverintegration;;06c85af5cfa00b0e8244d723517f8c3777d7b77e"
                        gc1 = Cspec_set.from_repo_and_commit_id(compound_spec1)
                        gc2 = Cspec_set.from_repo_and_commit_id(compound_spec2)
                        Cspec_set.bug_id_regexp_val = Regexp.new(".*caas.build.pl.master/(\\d+)/.*", "m")
                        bug_IDs = gc2.list_bug_IDs_since(gc1).all_items
                        U.assert_eq(["3013", "3012", "3011"], bug_IDs, "bug_IDs_since")
                ensure
                        Cspec_set.bug_id_regexp_val = saved_bug_id_regexp
                end
        end
        def Cspec_set.test_json_export()
                json = Cspec_set.from_s("git;git.osn.oraclecorp.com;osn/serverintegration;master;2bc0b1a58a9277e97037797efb93a2a94c9b6d99", "Cspec_set.test_json_export", true).to_json
                U.assert_json_eq_f(json, "Cspec_set.test_json_export")
                json = Cspec_set.from_s("git;git.osn.oraclecorp.com;osn/serverintegration;master;2bc0b1a58a9277e97037797efb93a2a94c9b6d99").to_json
                U.assert_json_eq_f(json, "Cspec_set.test_json_export__without_autodiscover")
        end
        def Cspec_set.test_full_cspec_set_as_dep()
                U.assert_json_eq_f(Cspec_set.from_s(Json_change_tracker.load_local("test_full_cspec_set_as_dep.json")).to_json, "test_full_cspec_set_as_dep")
        end
        def Cspec_set.test_reading_attributes()
                cs = Cspec_set.from_s(U.read_file("public/test_cspec_set1_v2.json"))
                U.assert_eq("first one", cs.top_commit.props["a1"], "test_reading_attributes.a1")
                U.assert_eq(2, cs.top_commit.props.size, "test_reading_attributes.size for #{cs.top_commit.props}")
        end
        # Entry point for the inline self-tests above.
        def Cspec_set.test()
                test_full_cspec_set_as_dep()
                test_reading_attributes()
                test_list_files_changed_since_cs()
                test_json_export()
                repo_spec = "git;git.osn.oraclecorp.com;osn/serverintegration;master"
                valentine_commit_id = "2bc0b1a58a9277e97037797efb93a2a94c9b6d99"
                cc = Cspec_set.from_repo_and_commit_id("#{repo_spec};#{valentine_commit_id}", Cspec::AUTODISCOVER)
                U.assert(cc.dependency_commits.size > 0, "cc.dependency_commits.size > 0")
                json = cc.to_json
                U.assert_json_eq_f(json, "dependency_gather1")
                cc2 = Cspec_set.from_s(json)
                U.assert_eq(cc, cc2, "json copy dependency_gather1")
                cc9 = Cspec_set.from_repo_and_commit_id("git;git.osn.oraclecorp.com;osn/serverintegration;;2bc0b1a58a9277e97037797efb93a2a94c9b6d99", Cspec::AUTODISCOVER)
                U.assert_json_eq_f(cc9.to_json, "cc9.to_json")
                test_list_changes_since()
                test_list_bug_IDs_since()
        end
        class << self
                # Cached regexp backing Cspec_set.bug_id_regexp.
                attr_accessor :bug_id_regexp_val
        end
end
| 51.846797 | 183 | 0.560092 |
182aa9443275af5ea8329b7dfeef818e70b05100 | 3,963 | require 'puppet/provider/package'
require 'puppet/util/windows'
require 'puppet/provider/package/windows/package'
Puppet::Type.type(:package).provide(:windows, :parent => Puppet::Provider::Package) do
desc "Windows package management.
This provider supports either MSI or self-extracting executable installers.
This provider requires a `source` attribute when installing the package.
It accepts paths paths to local files, mapped drives, or UNC paths.
If the executable requires special arguments to perform a silent install or
uninstall, then the appropriate arguments should be specified using the
`install_options` or `uninstall_options` attributes, respectively. Puppet
will automatically quote any option that contains spaces."
confine :operatingsystem => :windows
defaultfor :operatingsystem => :windows
has_feature :installable
has_feature :uninstallable
has_feature :install_options
has_feature :uninstall_options
has_feature :versionable
attr_accessor :package
# Return an array of provider instances
def self.instances
Puppet::Provider::Package::Windows::Package.map do |pkg|
provider = new(to_hash(pkg))
provider.package = pkg
provider
end
end
def self.to_hash(pkg)
{
:name => pkg.name,
:ensure => pkg.version || :installed,
:provider => :windows
}
end
# Query for the provider hash for the current resource. The provider we
# are querying, may not have existed during prefetch
def query
Puppet::Provider::Package::Windows::Package.find do |pkg|
if pkg.match?(resource)
return self.class.to_hash(pkg)
end
end
nil
end
def install
installer = Puppet::Provider::Package::Windows::Package.installer_class(resource)
command = [installer.install_command(resource), install_options].flatten.compact.join(' ')
execute(command, :failonfail => false, :combine => true)
check_result(exit_status)
end
def uninstall
command = [package.uninstall_command, uninstall_options].flatten.compact.join(' ')
execute(command, :failonfail => false, :combine => true)
check_result(exit_status)
end
def exit_status
$CHILD_STATUS.exitstatus
end
# http://msdn.microsoft.com/en-us/library/windows/desktop/aa368542(v=vs.85).aspx
self::ERROR_SUCCESS = 0
self::ERROR_SUCCESS_REBOOT_INITIATED = 1641
self::ERROR_SUCCESS_REBOOT_REQUIRED = 3010
# (Un)install may "fail" because the package requested a reboot, the system requested a
# reboot, or something else entirely. Reboot requests mean the package was installed
# successfully, but we warn since we don't have a good reboot strategy.
def check_result(hr)
operation = resource[:ensure] == :absent ? 'uninstall' : 'install'
case hr
when self.class::ERROR_SUCCESS
# yeah
when 194
warning("The package requested a reboot to finish the operation.")
when self.class::ERROR_SUCCESS_REBOOT_INITIATED
warning("The package #{operation}ed successfully and the system is rebooting now.")
when self.class::ERROR_SUCCESS_REBOOT_REQUIRED
warning("The package #{operation}ed successfully, but the system must be rebooted.")
else
raise Puppet::Util::Windows::Error.new("Failed to #{operation}", hr)
end
end
# Validate the package source. This is only invoked when a value is
# present, never when the parameter is absent.
def validate_source(value)
  return unless value.empty?
  fail("The source parameter cannot be empty when using the Windows provider.")
end
# Extra command-line options for install, normalized by join_options.
def install_options
  opts = resource[:install_options]
  join_options(opts)
end
# Extra command-line options for uninstall, normalized by join_options.
def uninstall_options
  opts = resource[:uninstall_options]
  join_options(opts)
end
# Normalize an array of (un)install options. Hash entries become a
# single space-joined "key=value" string with keys sorted; any other
# entry is passed through unchanged. Returns nil when no options were
# given.
def join_options(options)
  return unless options

  options.map do |opt|
    if opt.is_a?(Hash)
      opt.keys.sort.map { |key| "#{key}=#{opt[key]}" }.join(' ')
    else
      opt
    end
  end
end
end
| 30.484615 | 97 | 0.708049 |
b98cbab42738ed3814e8f77086fe5c58a106deeb | 26 | require 'core_ext/object'
| 13 | 25 | 0.807692 |
ac3834c32ff69d1969637e029e510d34ebb4ee24 | 5,909 | require 'spec_helper'
# Specs for Issues::CreateService: parameter handling, permission
# filtering for non-admin users, todo creation, label/milestone scoping,
# webhook execution, and creating issues that resolve merge-request
# discussions.
# Fix: corrected typo in an example description ('explicityly' -> 'explicitly').
describe Issues::CreateService, services: true do
  let(:project) { create(:empty_project) }
  let(:user) { create(:user) }

  describe '#execute' do
    let(:issue) { described_class.new(project, user, opts).execute }

    context 'when params are valid' do
      let(:assignee) { create(:user) }
      let(:milestone) { create(:milestone, project: project) }
      let(:labels) { create_pair(:label, project: project) }

      before do
        project.team << [user, :master]
        project.team << [assignee, :master]
      end

      let(:opts) do
        { title: 'Awesome issue',
          description: 'please fix',
          assignee_id: assignee.id,
          label_ids: labels.map(&:id),
          milestone_id: milestone.id,
          due_date: Date.tomorrow }
      end

      it 'creates the issue with the given params' do
        expect(issue).to be_persisted
        expect(issue.title).to eq('Awesome issue')
        expect(issue.assignee).to eq assignee
        expect(issue.labels).to match_array labels
        expect(issue.milestone).to eq milestone
        expect(issue.due_date).to eq Date.tomorrow
      end

      context 'when current user cannot admin issues in the project' do
        let(:guest) { create(:user) }

        before do
          project.team << [guest, :guest]
        end

        it 'filters out params that cannot be set without the :admin_issue permission' do
          issue = described_class.new(project, guest, opts).execute

          expect(issue).to be_persisted
          expect(issue.title).to eq('Awesome issue')
          expect(issue.assignee).to be_nil
          expect(issue.labels).to be_empty
          expect(issue.milestone).to be_nil
          expect(issue.due_date).to be_nil
        end
      end

      it 'creates a pending todo for new assignee' do
        attributes = {
          project: project,
          author: user,
          user: assignee,
          target_id: issue.id,
          target_type: issue.class.name,
          action: Todo::ASSIGNED,
          state: :pending
        }

        expect(Todo.where(attributes).count).to eq 1
      end

      context 'when label belongs to project group' do
        let(:group) { create(:group) }
        let(:group_labels) { create_pair(:group_label, group: group) }

        let(:opts) do
          {
            title: 'Title',
            description: 'Description',
            label_ids: group_labels.map(&:id)
          }
        end

        before do
          project.update(group: group)
        end

        it 'assigns group labels' do
          expect(issue.labels).to match_array group_labels
        end
      end

      context 'when label belongs to different project' do
        let(:label) { create(:label) }

        let(:opts) do
          { title: 'Title',
            description: 'Description',
            label_ids: [label.id] }
        end

        it 'does not assign label' do
          expect(issue.labels).not_to include label
        end
      end

      context 'when milestone belongs to different project' do
        let(:milestone) { create(:milestone) }

        let(:opts) do
          { title: 'Title',
            description: 'Description',
            milestone_id: milestone.id }
        end

        it 'does not assign milestone' do
          expect(issue.milestone).not_to eq milestone
        end
      end

      it 'executes issue hooks when issue is not confidential' do
        opts = { title: 'Title', description: 'Description', confidential: false }

        expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
        expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)

        described_class.new(project, user, opts).execute
      end

      it 'executes confidential issue hooks when issue is confidential' do
        opts = { title: 'Title', description: 'Description', confidential: true }

        expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
        expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)

        described_class.new(project, user, opts).execute
      end
    end

    it_behaves_like 'issuable create service'

    it_behaves_like 'new issuable record that supports slash commands'

    context 'for a merge request' do
      let(:discussion) { Discussion.for_diff_notes([create(:diff_note_on_merge_request)]).first }
      let(:merge_request) { discussion.noteable }
      let(:project) { merge_request.source_project }
      let(:opts) { { merge_request_for_resolving_discussions: merge_request } }

      before do
        project.team << [user, :master]
      end

      it 'resolves the discussion for the merge request' do
        described_class.new(project, user, opts).execute
        discussion.first_note.reload

        expect(discussion.resolved?).to be(true)
      end

      it 'added a system note to the discussion' do
        described_class.new(project, user, opts).execute

        reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first

        expect(reloaded_discussion.last_note.system).to eq(true)
      end

      it 'assigns the title and description for the issue' do
        issue = described_class.new(project, user, opts).execute

        expect(issue.title).not_to be_nil
        expect(issue.description).not_to be_nil
      end

      it 'can set nil explicitly to the title and description' do
        issue = described_class.new(project, user,
                                    merge_request_for_resolving_discussions: merge_request,
                                    description: nil,
                                    title: nil).execute

        expect(issue.description).to be_nil
        expect(issue.title).to be_nil
      end
    end
  end
end
| 31.768817 | 107 | 0.617533 |
1824ba112bc22b792f0dc86a192749714733a35f | 982 | require "language/haskell"
# Homebrew formula for dhall-json, built from Hackage with cabal.
class DhallJson < Formula
  include Language::Haskell::Cabal

  desc "Dhall to JSON compiler and a Dhall to YAML compiler"
  homepage "https://github.com/Gabriel439/Haskell-Dhall-JSON-Library"
  url "https://hackage.haskell.org/package/dhall-json-1.2.1/dhall-json-1.2.1.tar.gz"
  sha256 "999cd25e03d9c859a7df53b535ca59a1a2cdc1b728162c87d23f3b08fc45c87d"
  head "https://github.com/Gabriel439/Haskell-Dhall-JSON-Library.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "25736b0e9d678c5a3dc6c477f15527cfe9761bf85398fc3bf6e9cbe13e391b58" => :high_sierra
    sha256 "d935eb5cf5a22930e6bdc5f7bd4b3fc5ba391bd243d83018c46307c23b550a03" => :sierra
    sha256 "661985f1086344de54a417d327dece699ec97dc21dc814bd3f926ccc2e9d12ef" => :el_capitan
  end

  # GHC/cabal are only needed to build; the installed binaries are standalone.
  depends_on "cabal-install" => :build
  depends_on "ghc" => :build

  def install
    install_cabal_package
  end

  test do
    # "1" is valid Dhall and converts to the JSON literal 1.
    assert_match "1", pipe_output("#{bin}/dhall-to-json", "1", 0)
  end
end
| 32.733333 | 93 | 0.773931 |
2110da9aa220ece3d68925655924cdb84787ecb8 | 14,030 | require 'spec_helper'
# rspec-puppet specs for the puppet_agent class on Windows, run for both
# x86 and x64 architectures. Covers PE version negotiation, install
# options, package sources, and the various installer wait/workaround
# parameters.
RSpec.describe 'puppet_agent', tag: 'win' do
  package_version = '5.10.100.1'
  collection = 'puppet5'
  global_params = {
    :package_version => package_version,
    :collection => collection
  }

  ['x86', 'x64'].each do |arch|
    context "Windows arch #{arch}" do
      facts = {
        :architecture => arch,
        :env_temp_variable => 'C:/tmp',
        :osfamily => 'windows',
        :puppetversion => '4.10.100',
        :puppet_confdir => "C:\\ProgramData\\Puppetlabs\\puppet\\etc",
        :puppet_agent_pid => 42,
        :system32 => 'C:\windows\sysnative',
        :puppet_agent_appdata => "C:\\ProgramData",
      }

      let(:facts) { facts }
      let(:params) { global_params }

      context 'is_pe' do
        before(:each) do
          # Need to mock the PE functions
          Puppet::Parser::Functions.newfunction(:pe_build_version, :type => :rvalue) do |args|
            '4.10.100'
          end

          Puppet::Parser::Functions.newfunction(:pe_compiling_server_aio_build, :type => :rvalue) do |args|
            package_version
          end
        end

        let(:facts) { facts.merge({:is_pe => true}) }

        context 'with up to date aio_agent_version matching server' do
          let(:facts) { facts.merge({
            :is_pe => true,
            :aio_agent_version => package_version
          })}

          it { is_expected.not_to contain_file('c:\tmp\install_puppet.bat') }
          it { is_expected.not_to contain_exec('prerequisites_check.ps1') }
          it { is_expected.not_to contain_exec('fix inheritable SYSTEM perms') }
        end

        # A trailing git sha on the requested version should still be
        # treated as "already at this version".
        context 'with equal package_version containing git sha' do
          let(:facts) { facts.merge({
            :is_pe => true,
            :aio_agent_version => package_version
          })}

          let(:params) {
            global_params.merge(:package_version => "#{package_version}.g886c5ab")
          }

          it { is_expected.not_to contain_file('c:\tmp\install_puppet.bat') }
          it { is_expected.not_to contain_exec('install_puppet.bat') }
          it { is_expected.not_to contain_exec('prerequisites_check.ps1') }
        end

        context 'with out of date aio_agent_version' do
          let(:facts) { facts.merge({
            :is_pe => true,
            :aio_agent_version => '1.10.0'
          })}

          it { is_expected.to contain_class('puppet_agent::install::windows') }
          it { is_expected.to contain_exec('prerequisites_check.ps1').with_command(/\ #{package_version} C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-#{arch}.msi C:\\tmp\\puppet-\d+_\d+_\d+-\d+_\d+-installer.log/) }
          it { is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source \'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-#{arch}.msi\'/) }
          it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
        end
      end

      context 'package_version =>' do
        describe '5.6.7' do
          let(:params) { global_params.merge(
            {:package_version => '5.6.7'})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_unless(/\-Command {\$CurrentVersion = \[string\]\(facter.bat \-p aio_agent_version\);/)
            is_expected.to contain_exec('install_puppet.ps1').with_unless(/\-Command.*if \(\$CurrentVersion \-eq '5\.6\.7'\) { +exit 0; *} *exit 1; }\.Invoke\(\)/)
          }
        end
      end

      context 'install_options =>' do
        describe 'OPTION1=value1 OPTION2=value2' do
          let(:params) { global_params.merge(
            {:install_options => ['OPTION1=value1','OPTION2="value2"'],})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-InstallArgs 'OPTION1=value1 OPTION2="""value2"""'/)
          }

          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-InstallArgs 'REINSTALLMODE="""amus"""'/)
          }
        end
      end

      context 'Default INSTALLMODE Option' do
        describe 'REINSTALLMODE=amus' do
          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-InstallArgs 'REINSTALLMODE="""amus"""'/)
          }
        end
      end

      context 'absolute_source =>' do
        describe 'https://alterernate.com/puppet-agent-999.1-x64.msi' do
          let(:params) { global_params.merge(
            {:absolute_source => 'https://alternate.com/puppet-agent-999.1-x64.msi',})
          }

          it {
            is_expected.to contain_file('C:\ProgramData\Puppetlabs\packages\puppet-agent-999.1-x64.msi').with_source('https://alternate.com/puppet-agent-999.1-x64.msi')
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source 'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-999.1-x64\.msi'/)
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Logfile 'C:\\tmp\\puppet-\d+_\d+_\d+-\d+_\d+-installer.log'/)
          }

          it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
        end

        describe 'C:/tmp/puppet-agent-999.2-x64.msi' do
          let(:params) { global_params.merge(
            {:absolute_source => 'C:/tmp/puppet-agent-999.2-x64.msi',})
          }

          it {
            is_expected.to contain_file('C:\ProgramData\Puppetlabs\packages\puppet-agent-999.2-x64.msi').with_source('C:/tmp/puppet-agent-999.2-x64.msi')
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source 'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-999.2-x64\.msi'/)
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Logfile 'C:\\tmp\\puppet-\d+_\d+_\d+-\d+_\d+-installer.log'/)
          }

          it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
        end

        describe '\\\\garded\c$\puppet-agent-999.3-x64.msi' do
          let(:params) { global_params.merge(
            {:absolute_source => "\\\\garded\\c$\\puppet-agent-999.3-x64.msi",})
          }

          it {
            is_expected.to contain_file('C:\ProgramData\Puppetlabs\packages\puppet-agent-999.3-x64.msi').with_source('\\\\garded\c$\puppet-agent-999.3-x64.msi')
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source 'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-999.3-x64\.msi'/)
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Logfile 'C:\\tmp\\puppet-\d+_\d+_\d+-\d+_\d+-installer.log'/)
          }

          it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
        end

        describe 'default source' do
          it {
            is_expected.to contain_file("C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-#{package_version}-#{arch}\.msi")
              .with_source("https://downloads.puppet.com/windows/#{collection}/puppet-agent-#{package_version}-#{arch}.msi")
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source 'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-#{package_version}-#{arch}\.msi'/)
          }

          it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
        end

        describe 'puppet:///puppet_agent/puppet-agent-999.4-x86.msi' do
          let(:params) { global_params.merge(
            {:absolute_source => 'puppet:///puppet_agent/puppet-agent-999.4-x86.msi'})
          }

          it {
            is_expected.to contain_file('C:\ProgramData\Puppetlabs\packages\puppet-agent-999.4-x86.msi').with_source('puppet:///puppet_agent/puppet-agent-999.4-x86.msi')
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source 'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-999.4-x86\.msi'/)
          }

          it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
        end
      end

      context 'arch =>' do
        describe 'specify x86' do
          let(:params) { global_params.merge(
            {:arch => 'x86'})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-Source 'C:\\ProgramData\\Puppetlabs\\packages\\puppet-agent-#{package_version}-x86.msi'/
            )
          }
        end

        describe 'try x64 on x86 system' do
          let(:facts) { {
            :osfamily => 'windows',
            :puppetversion => '4.10.100',
            :tmpdir => 'C:\tmp',
            :architecture => 'x86',
            :system32 => 'C:\windows\sysnative',
            :puppet_confdir => "C:\\ProgramData\\Puppetlabs\\puppet\\etc",
          } }

          let(:params) { global_params.merge(
            {:arch => 'x64'})
          }

          it {
            expect { catalogue }.to raise_error(Puppet::Error, /Unable to install x64 on a x86 system/)
          }
        end
      end

      # The locked-files workaround only applies to agent versions below
      # 5.5.17 (5.x series) or below 6.8.0 (6.x series).
      context 'msi_move_locked_files =>' do
        describe 'default' do
          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-UseLockedFilesWorkaround/)
          }
        end

        describe 'specify false' do
          let(:params) { global_params.merge(
            {:msi_move_locked_files => false,})
          }

          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-UseLockedFilesWorkaround/)
          }
        end

        describe 'specify true with puppet 5.5.16' do
          let(:params) { global_params.merge(
            {:msi_move_locked_files => true,
             :package_version => '5.5.16',})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-UseLockedFilesWorkaround/)
          }
        end

        describe 'specify true with puppet 5.5.17' do
          let(:params) { global_params.merge(
            {:msi_move_locked_files => true,
             :package_version => '5.5.17',})
          }

          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-UseLockedFilesWorkaround/)
          }
        end

        describe 'specify true with puppet 6.7.0' do
          let(:params) { global_params.merge(
            {:msi_move_locked_files => true,
             :package_version => '6.7.0',})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-UseLockedFilesWorkaround/)
          }
        end

        describe 'specify true with puppet 6.8.0' do
          let(:params) { global_params.merge(
            {:msi_move_locked_files => true,
             :package_version => '6.8.0',})
          }

          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-UseLockedFilesWorkaround/)
          }
        end
      end

      context 'wait_for_pxp_agent_exit =>' do
        describe 'default' do
          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-WaitForPXPAgentExit/)
          }
        end

        describe 'specify timeout value of 5 minutes' do
          let(:params) { global_params.merge(
            {:wait_for_pxp_agent_exit => 300000,})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-WaitForPXPAgentExit 300000/)
          }
        end
      end

      context 'wait_for_puppet_run =>' do
        describe 'default' do
          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-WaitForPuppetRun/)
          }
        end

        describe 'specify timeout of 10 minutes' do
          let(:params) { global_params.merge(
            {:wait_for_puppet_run => 600000,})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-WaitForPuppetRun 600000/)
          }
        end
      end

      # NOTE(review): this second 'wait_for_puppet_run =>' context repeats
      # the 'default' example from the context above — consider merging the
      # two contexts.
      context 'wait_for_puppet_run =>' do
        describe 'default' do
          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-WaitForPuppetRun/)
          }
        end

        describe 'specify false' do
          let(:params) { global_params.merge(
            {:wait_for_puppet_run => false,})
          }

          it {
            is_expected.not_to contain_exec('install_puppet.ps1').with_command(/\-WaitForPuppetRun/)
          }
        end

        describe 'specify true' do
          let(:params) { global_params.merge(
            {:wait_for_puppet_run => true,})
          }

          it {
            is_expected.to contain_exec('install_puppet.ps1').with_command(/\-WaitForPuppetRun/)
          }
        end
      end
    end

    # NOTE(review): this context sits inside the ['x86', 'x64'].each loop
    # (but outside the per-arch context), so it is defined twice with
    # identical, arch-independent facts — probably meant to live outside
    # the loop; confirm.
    context 'rubyplatform' do
      facts = {
        :architecture => 'x64',
        :env_temp_variable => 'C:/tmp',
        :osfamily => 'windows',
        :puppetversion => '3.8.0',
        :puppet_confdir => "C:\\ProgramData/PuppetLabs/puppet/etc",
        :puppet_agent_pid => 42,
        :system32 => 'C:\windows\sysnative',
        :tmpdir => 'C:\tmp',
      }

      describe 'i386-ming32' do
        let(:facts) { facts.merge({:rubyplatform => 'i386-ming32'}) }
        let(:params) { global_params }

        it {
          is_expected.to contain_exec('install_puppet.ps1').with { {
            'command' => 'C:\windows\sysnative\cmd.exe /c start /b C:\windows\system32\cmd.exe /c "C:\tmp\install_puppet.ps1" 42',
          } }
        }

        it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
      end

      describe 'x86' do
        let(:facts) { facts.merge({:rubyplatform => 'x86_64'}) }
        let(:params) { global_params }

        it {
          is_expected.to contain_exec('install_puppet.ps1').with { {
            'command' => 'C:\windows\sysnative\cmd.exe /c start /b C:\windows\sysnative\cmd.exe /c "C:\tmp\install_puppet.ps1" 42',
          } }
        }

        it { is_expected.to contain_exec('fix inheritable SYSTEM perms') }
      end
    end
  end
end
| 38.125 | 222 | 0.576123 |
3949a8755edc9f685e01836a17e8f47b87e228f4 | 259 | # frozen_string_literal: true
# Generated via
# `rails generate curation_concerns:work Exhibition`
module CurationConcerns
  # Form object backing the Exhibition work type; adds CITI-specific
  # form behaviors on top of the standard CurationConcerns work form.
  class ExhibitionForm < CurationConcerns::Forms::WorkForm
    self.model_class = ::Exhibition
    include CitiFormBehaviors
  end
end
| 25.9 | 58 | 0.795367 |
6121173c748b737e572950fedc806fbab266ac04 | 656 | # This migration comes from acts_as_taggable_on_engine (originally 2)
# Replaces the non-unique taggings indexes with a single unique composite
# index ('taggings_idx') and enforces unique tag names.
class AddMissingUniqueIndices < ActiveRecord::Migration[4.2]
  def self.up
    add_index :tags, :name, unique: true

    # The composite unique index supersedes the two single-purpose ones.
    remove_index :taggings, :tag_id
    remove_index :taggings, [:taggable_id, :taggable_type, :context]
    add_index :taggings,
              [:tag_id, :taggable_id, :taggable_type, :context, :tagger_id, :tagger_type],
              unique: true, name: 'taggings_idx'
  end

  def self.down
    remove_index :tags, :name

    remove_index :taggings, name: 'taggings_idx'

    add_index :taggings, :tag_id
    add_index :taggings, [:taggable_id, :taggable_type, :context]
  end
end
| 28.521739 | 82 | 0.716463 |
e95b3a2f014dea670b2ca981fef43b224211d066 | 75 | module TwilioSegment
class Analytics
  # Gem version string (pre-release).
  VERSION = '2.2.8.pre'
end
end
| 12.5 | 25 | 0.693333 |
38801214f4927cd5008b9c800ddec714e833d9a3 | 1,553 | require 'test_helper'
# Validation tests for the Subscribership join model: both foreign keys
# must be present and reference existing records.
class SubscribershipTest < ActiveSupport::TestCase
  def setup
    @s = subscriptions(:one)
    @subscribership = @s.subscriberships.build user: users(:two)
  end

  test "fixture subscription should be valid" do
    assert_valid @subscribership,
      "Initial fixture subscribership should be valid."
    assert_valid subscriberships(:subscribership_1),
      "Fixtures subscribership should be valid."
  end

  test "subscription_id should be present" do
    assert_required_attribute @subscribership, :subscription_id
    @subscribership.subscription_id = nil; assert_not_valid @subscribership
    @subscribership.subscription_id = " "; assert_not_valid @subscribership
  end

  test "subscription_id should be valid" do
    # -1 is assumed to never match an existing subscription fixture.
    @subscribership.subscription_id = -1; assert_not_valid @subscribership
    @subscribership.subscription_id = @s.id; assert_valid @subscribership
    @subscribership.subscription = @s; assert_valid @subscribership
  end

  test "user_id should be present" do
    assert_required_attribute @subscribership, :user_id
    @subscribership.user_id = nil; assert_not_valid @subscribership
    @subscribership.user_id = " "; assert_not_valid @subscribership
  end

  test "user_id should be valid" do
    @subscribership.user_id = -1; assert_not_valid @subscribership
    @subscribership.user_id = users(:two).id; assert_valid @subscribership
    @subscribership.user = users(:two); assert_valid @subscribership
  end
end
| 37.878049 | 79 | 0.721185 |
5d753601ce4b94139b1d94c7aca49360cf285b3f | 1,916 | require 'formula'
# Homebrew formula for PyQt5, built against qt5/sip for each selected
# Python interpreter.
class Pyqt5 < Formula
  homepage 'http://www.riverbankcomputing.co.uk/software/pyqt/download5'
  url 'https://downloads.sf.net/project/pyqt/PyQt5/PyQt-5.3/PyQt-gpl-5.3.tar.gz'
  sha1 '087d75be63351cfb7965075f448df218f688fa75'

  option 'enable-debug', "Build with debug symbols"
  option 'with-docs', "Install HTML documentation and python examples"

  depends_on :python3 => :recommended
  depends_on :python => :optional

  # At least one Python must be selected or the build has no target.
  if build.without?("python3") && build.without?("python")
    odie "pyqt5: --with-python3 must be specified when using --without-python"
  end

  depends_on 'qt5'

  if build.with? 'python3'
    depends_on 'sip' => 'with-python3'
  else
    depends_on 'sip'
  end

  def install
    # Configure/build/install once per selected Python version.
    Language::Python.each_python(build) do |python, version|
      args = [ "--confirm-license",
               "--bindir=#{bin}",
               "--destdir=#{lib}/python#{version}/site-packages",
               # To avoid conflicts with PyQt (for Qt4):
               "--sipdir=#{share}/sip/Qt5/",
               # sip.h could not be found automatically
               "--sip-incdir=#{Formula["sip"].opt_include}",
               # Make sure the qt5 version of qmake is found.
               # If qt4 is linked it will pickup that version otherwise.
               "--qmake=#{Formula["qt5"].bin}/qmake",
               # Force deployment target to avoid libc++ issues
               "QMAKE_MACOSX_DEPLOYMENT_TARGET=#{MacOS.version}" ]
      args << '--debug' if build.include? 'enable-debug'

      system python, "configure.py", *args
      system "make"
      system "make", "install"
      system "make", "clean"
    end
    doc.install 'doc/html', 'examples' if build.with? "docs"
  end

  test do
    system "pyuic5", "--version"
    system "pylupdate5", "-version"
    Language::Python.each_python(build) do |python, version|
      system python, "-c", "import PyQt5"
    end
  end
end
| 33.034483 | 80 | 0.623173 |
018d21bce9fd8b4246472898ddd7856eccf0a426 | 4,170 | # Author:: Eric Crane (mailto:[email protected])
# Copyright:: Copyright (c) 2020 Eric Crane. All rights reserved.
#
# A Sqlite3 database connection.
#
# https://www.rubydoc.info/gems/sqlite3/1.3.11
# https://www.devdungeon.com/content/ruby-sqlite-tutorial
#
# db.results_as_hash = true
# Set results to return as Hash object.
# This is slower but offers a huge convenience.
# Consider turning it off for high performance situations.
# Each row will have the column name as the hash key.
#
# # Alternatively, to only get one row and discard the rest,
# replace `db.query()` with `db.get_first_value()`.
#
require 'sqlite3'
module Gloo
  module Objs
    # A SQLite database connection object. The database file path lives
    # in a 'database' child object; this object can verify connectivity
    # and run queries against that file.
    #
    # Fixes:
    # - msg_verify no longer crashes with NoMethodError when the
    #   'database' child is missing (db_value returns nil).
    # - connects? now closes its probe connection instead of leaking it.
    class Sqlite < Gloo::Core::Obj
      KEYWORD = 'sqlite'.freeze
      KEYWORD_SHORT = 'sqlite'.freeze
      DB = 'database'.freeze
      DEFAULT_DB = 'test.db'.freeze
      DB_REQUIRED_ERR = 'The database name is required!'.freeze
      DB_NOT_FOUND_ERR = 'The database file was not found!'.freeze

      #
      # The name of the object type.
      #
      def self.typename
        return KEYWORD
      end

      #
      # The short name of the object type.
      #
      def self.short_typename
        return KEYWORD_SHORT
      end

      # ---------------------------------------------------------------------
      # Children
      # ---------------------------------------------------------------------

      #
      # Does this object have children to add when an object
      # is created in interactive mode?
      # This does not apply during obj load, etc.
      #
      def add_children_on_create?
        return true
      end

      #
      # Add children to this object.
      # This is used by containers to add children needed
      # for default configurations.
      #
      def add_default_children
        fac = $engine.factory
        fac.create_string DB, DEFAULT_DB, self
      end

      # ---------------------------------------------------------------------
      # Messages
      # ---------------------------------------------------------------------

      #
      # Get a list of message names that this object receives.
      #
      def self.messages
        return super + [ 'verify' ]
      end

      #
      # Verify access to the Sqlite database specified.
      # Sets 'it' to true only when the file exists and a trivial
      # query succeeds.
      #
      def msg_verify
        name = db_value

        # db_value is nil when the 'database' child is missing; treat
        # that the same as an empty name instead of crashing on nil.
        if name.nil? || name.empty?
          $engine.err DB_REQUIRED_ERR
          $engine.heap.it.set_to false
          return
        end

        unless File.exist? name
          $engine.err DB_NOT_FOUND_ERR
          $engine.heap.it.set_to false
          return
        end

        return unless connects?

        $engine.heap.it.set_to true
      end

      # ---------------------------------------------------------------------
      # DB functions (all database connections)
      # ---------------------------------------------------------------------

      #
      # Open a connection and execute the SQL statement.
      # Return the resulting data.
      # NOTE: the connection is intentionally left open because the
      # returned result set streams rows from it; callers are expected
      # to consume the data promptly.
      #
      def query( sql, params = nil )
        name = db_value
        unless name
          $engine.err DB_REQUIRED_ERR
          return
        end

        db = SQLite3::Database.open name
        db.results_as_hash = true
        return db.query( sql, params )
      end

      # ---------------------------------------------------------------------
      # Private functions
      # ---------------------------------------------------------------------

      private

      #
      # Get the Database file from the child object.
      # Returns nil if there is none.
      #
      def db_value
        o = find_child DB
        return nil unless o

        return o.value
      end

      #
      # Try the connection and make sure it works.
      # Returns true if we can connect and do a query.
      #
      def connects?
        begin
          db = SQLite3::Database.open db_value
          sql = "SELECT COUNT(name) FROM sqlite_master WHERE type='table'"
          db.get_first_value sql
        rescue => e
          $engine.err e.message
          $engine.heap.it.set_to false
          return false
        ensure
          # Always release the probe connection (was previously leaked).
          db.close if db
        end
        return true
      end
    end
  end
end
| 26.0625 | 77 | 0.502638 |
4a8f789d7f9e4c8214b84de97d1413a25a3f58f3 | 244 | # frozen_string_literal: true
# Base mailer: sets the CodeHarbor sender and marks outgoing mail as
# automated bulk mail so receivers suppress auto-replies.
class ApplicationMailer < ActionMailer::Base
  default from: 'CodeHarbor <[email protected]>'
  default 'Precedence' => 'bulk'
  default 'Auto-Submitted' => 'auto-generated'
  layout 'mailer'
end
| 27.111111 | 65 | 0.745902 |
1cc1efa178582fc8235048c63e3f32dfcf75abef | 790 | class ServiceRequestSerializer < ActiveModel::Serializer
# Serializes a service request under the 'request' root using
# Open311-style field names.
self.root = 'request'

attributes :service_request_id, :status, :status_notes, :service_name,
           :service_code, :description, :requested_datetime, :updated_datetime,
           :address, :lat, :long, :media_url

# Record id exposed under the Open311 field name.
def service_request_id
  object.id
end

def status
  object.status.name
end

# Content of the first status message scoped to this request's service,
# or nil when there are none.
def status_notes
  messages = object.status.messages.where(service_id: object.service_id)
  messages.first.try(:content)
end

def service_name
  object.service.name
end

def service_code
  object.service_id
end

def requested_datetime
  object.created_at
end

def updated_datetime
  object.updated_at
end

# Coordinates are stored as decimals; emit plain floats.
def long
  object.lng.to_f
end

def lat
  object.lat.to_f
end
end
| 17.555556 | 81 | 0.706329 |
ffb62135375c26fff6f1c122608e5872bfa4044e | 1,538 | # frozen_string_literal: true
# Drains the discovery-service SQS queue: decrypts each message (nested
# JWE/JWT), records the events it carries, forwards response events to a
# Redis queue, and deletes the SQS message once processed.
class ReceiveEventsFromDiscoveryService
  def perform
    sqs_results.each do |result|
      result.messages.each do |message|
        process_message(message)
        # Only delete after successful processing so failures are retried.
        sqs_client.delete_message(queue_url: queue_url,
                                  receipt_handle: message.receipt_handle)
      end
    end
  end

  private

  # Lazily yields receive_message batches until the queue is empty.
  def sqs_results
    Enumerator.new do |y|
      loop do
        result = sqs_client.receive_message(queue_url: queue_url)
        break if result.messages.empty?
        y << result
      end
    end
  end

  # Decrypt the doubly-wrapped payload and persist each event exactly
  # once (keyed on unique_id + phase).
  # NOTE(review): data['events'] is read with a string key while each
  # event is sliced/indexed with symbol keys — presumably the decoded
  # JWT yields indifferent-access hashes; confirm.
  def process_message(message)
    jwe = JSON::JWT.decode(message.body, key)
    data = JSON::JWT.decode(jwe.plain_text, key)

    DiscoveryServiceEvent.transaction do
      data['events'].each do |event|
        push_to_fr_queue(event)
        DiscoveryServiceEvent
          .create_with(event)
          .find_or_create_by(event.slice(:unique_id, :phase))
      end
    end
  end

  # Response-phase events are also pushed to the WAYF access-record list.
  def push_to_fr_queue(event)
    return unless event[:phase] == 'response'
    redis.lpush('wayf_access_record', JSON.generate(event))
  end

  def sqs_client
    @sqs_client ||= Aws::SQS::Client.new(endpoint: sqs_config[:endpoint],
                                         region: sqs_config[:region])
  end

  def sqs_config
    Rails.application.config.reporting_service.sqs
  end

  def queue_url
    sqs_config[:queues][:discovery]
  end

  def key
    @key ||= OpenSSL::PKey::RSA.new(File.read(sqs_config[:encryption_key]))
  end

  def redis
    @redis ||= Redis.new
  end
end
| 22.289855 | 75 | 0.645644 |
39e7adb1f64e744299b7d9151a301ed7266a5992 | 3,107 | module ScoutApm
class Agent
  # Start-up checks the agent must pass before it is allowed to start.
  class Preconditions
    # Each precondition is a hash with three keys: :message, :check and :severity.
    # message: Proc that takes the environment, and returns a string
    # check: Proc that takes an AgentContext and returns true if precondition was met, if false, we shouldn't start.
    # severity: Severity of the log message (one of: :debug, :info, :warn, :error or :fatal)
    PRECONDITIONS = [
      PRECONDITION_ENABLED = {
        :message => proc {|environ| "Monitoring isn't enabled for the [#{environ.env}] environment." },
        :check => proc { |context| context.config.value('monitor') },
        :severity => :info,
      },

      PRECONDITION_APP_NAME = {
        :message => proc {|environ| "An application name could not be determined. Specify the :name value in scout_apm.yml." },
        :check => proc { |context| context.environment.application_name },
        :severity => :warn,
      },

      PRECONDITION_INTERACTIVE = {
        :message => proc {|environ| "Agent attempting to load in interactive mode." },
        :check => proc { |context| ! context.environment.interactive? },
        :severity => :info,
      },

      PRECONDITION_DETECTED_SERVER = {
        :message => proc {|environ| "Deferring agent start. Standing by for first request" },
        :check => proc { |context|
          app_server_missing = !context.environment.app_server_integration(true).found?
          background_job_missing = context.environment.background_job_integrations.length == 0
          !app_server_missing && !background_job_missing
        },
        :severity => :info,
      },

      PRECONDITION_ALREADY_STARTED = {
        :message => proc {|environ| "Already started agent." },
        :check => proc { |context| !context.started? },
        :severity => :info,
      },

      PRECONDITION_OLD_SCOUT_RAILS = {
        :message => proc {|environ| "ScoutAPM is incompatible with the old Scout Rails plugin. Please remove scout_rails from your Gemfile" },
        :check => proc { !defined?(::ScoutRails) },
        :severity => :warn,
      },
    ]

    # Run all preconditions, logging each failure at its severity.
    # Returns true when all pass (or force? overrides failures).
    # NOTE(review): `||=` only caches truthy results, so a failing check
    # is re-evaluated (and re-logged) on every call — confirm that is
    # intentional.
    def check?(context)
      @check_result ||=
        begin
          failed_preconditions = PRECONDITIONS.inject(Array.new) { |errors, condition|
            unless condition[:check].call(context)
              errors << {
                :severity => condition[:severity],
                :message => condition[:message].call(context.environment),
              }
            end

            errors
          }

          if failed_preconditions.any?
            failed_preconditions.each {|error| context.logger.send(error[:severity], error[:message]) }
            force? # if forced, return true anyway
          else
            # No errors, we met preconditions
            true
          end
        end
    end

    # XXX: Wire up options here and below in the appserver & bg server detections
    def force?
      false
    end
  end
end
end
| 37.890244 | 144 | 0.579015 |
f7c3a895f6de6a30f8798237598d97eb15be22b6 | 5,101 | class PostgresqlAT96 < Formula
# Formula metadata for the versioned (and now EOL) PostgreSQL 9.6 line.
desc "Object-relational database system"
homepage "https://www.postgresql.org/"
url "https://ftp.postgresql.org/pub/source/v9.6.20/postgresql-9.6.20.tar.bz2"
sha256 "3d08cba409d45ab62d42b24431a0d55e7537bcd1db2d979f5f2eefe34d487bb6"
license "PostgreSQL"

livecheck do
  url "https://ftp.postgresql.org/pub/source/"
  regex(%r{href=["']?v?(9\.6(?:\.\d+)*)/?["' >]}i)
end

bottle do
  sha256 arm64_big_sur: "3ffef993065857ae631e04efa4a1e6c2b998940acf11dabd15622924c8be8a8d"
  sha256 big_sur: "1d2e78a45b69aa887935849a2b2a663825ef1e78e6eb8df2692f1642d51fb073"
  sha256 catalina: "1406b0e42667227867dc5cdb4ecf316387b7ade1789837969f8aa2295afe22b4"
  sha256 mojave: "e0d93fe31b16a638b34c4e381e4d631f3cba32abbc9728f447e2960a829d235d"
end

# Versioned formulae are never linked into the default prefix.
keg_only :versioned_formula

# https://www.postgresql.org/support/versioning/
deprecate! date: "2021-11-11", because: :unsupported

depends_on arch: :x86_64
depends_on "[email protected]"
depends_on "readline"

uses_from_macos "libxslt"
uses_from_macos "perl"

on_linux do
  depends_on "util-linux"
end
# Configure and build PostgreSQL from source with Homebrew paths and
# the usual feature flags (SSL, LDAP, XML, Perl/Tcl PLs, etc.).
def install
  # avoid adding the SDK library directory to the linker search path
  ENV["XML2_CONFIG"] = "xml2-config --exec-prefix=/usr"

  ENV.prepend "LDFLAGS", "-L#{Formula["[email protected]"].opt_lib} -L#{Formula["readline"].opt_lib}"
  ENV.prepend "CPPFLAGS", "-I#{Formula["[email protected]"].opt_include} -I#{Formula["readline"].opt_include}"

  args = %W[
    --disable-debug
    --prefix=#{prefix}
    --datadir=#{pkgshare}
    --libdir=#{lib}
    --sysconfdir=#{prefix}/etc
    --docdir=#{doc}
    --enable-thread-safety
    --with-bonjour
    --with-gssapi
    --with-ldap
    --with-openssl
    --with-pam
    --with-libxml
    --with-libxslt
    --with-perl
    --with-tcl
    --with-uuid=e2fs
  ]

  # PostgreSQL by default uses xcodebuild internally to determine this,
  # which does not work on CLT-only installs.
  args << "PG_SYSROOT=#{MacOS.sdk_path}" if MacOS.sdk_root_needed?

  system "./configure", *args
  system "make"

  dirs = %W[datadir=#{pkgshare} libdir=#{lib} pkglibdir=#{lib}]

  # Temporarily disable building/installing the documentation.
  # Postgresql seems to "know" the build system has been altered and
  # tries to regenerate the documentation when using `install-world`.
  # This results in the build failing:
  #   `ERROR: `osx' is missing on your system.`
  # Attempting to fix that by adding a dependency on `open-sp` doesn't
  # work and the build errors out on generating the documentation, so
  # for now let's simply omit it so we can package Postgresql for Mojave.
  if DevelopmentTools.clang_build_version >= 1000
    system "make", "all"
    system "make", "-C", "contrib", "install", "all", *dirs
    system "make", "install", "all", *dirs
  else
    system "make", "install-world", *dirs
  end
end
# Creates runtime directories after install and bootstraps the database
# cluster via initdb, unless running on CI or a cluster already exists.
#
# Fix: the original called `postgresql_datadir.mkpath` twice (before and
# after the CI early-return); the redundant second call is removed —
# mkpath is idempotent, so behavior is unchanged.
def post_install
  (var/"log").mkpath
  postgresql_datadir.mkpath

  # Don't initialize database, it clashes when testing other PostgreSQL versions.
  return if ENV["HOMEBREW_GITHUB_ACTIONS"]

  system "#{bin}/initdb", postgresql_datadir unless pg_version_exists?
end
# Path of this formula's database cluster: var/<formula name>, so that
# each versioned PostgreSQL formula keeps its own cluster directory.
def postgresql_datadir
  var/name
end
# Log file used by the launchd service definition (see #plist).
def postgresql_log_path
  var/"log/#{name}.log"
end
# True when a cluster has already been initialized in the data
# directory (initdb writes a PG_VERSION marker file there).
def pg_version_exists?
  (postgresql_datadir/"PG_VERSION").exist?
end
# User-facing notes printed after install / `brew info`.
def caveats
  <<~EOS
    If builds of PostgreSQL 9 are failing and you have version 8.x installed,
    you may need to remove the previous version first. See:
    https://github.com/Homebrew/legacy-homebrew/issues/2510
    This formula has created a default database cluster with:
    initdb #{postgresql_datadir}
    For more details, read:
    https://www.postgresql.org/docs/#{version.major}/app-initdb.html
  EOS
end
# Command shown to users who prefer starting the server by hand.
plist_options manual: "pg_ctl -D #{HOMEBREW_PREFIX}/var/[email protected] start"

# launchd job definition: keep `postgres` running against this
# formula's data directory, logging stderr to #postgresql_log_path.
def plist
  <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>KeepAlive</key>
    <true/>
    <key>Label</key>
    <string>#{plist_name}</string>
    <key>ProgramArguments</key>
    <array>
    <string>#{opt_bin}/postgres</string>
    <string>-D</string>
    <string>#{postgresql_datadir}</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    <key>WorkingDirectory</key>
    <string>#{HOMEBREW_PREFIX}</string>
    <key>StandardErrorPath</key>
    <string>#{postgresql_log_path}</string>
    </dict>
    </plist>
  EOS
end
# Smoke test: pg_config must report this keg's paths. initdb is skipped
# on CI because clusters clash across PostgreSQL versions (see
# post_install's comment).
test do
  system "#{bin}/initdb", testpath/"test" unless ENV["HOMEBREW_GITHUB_ACTIONS"]
  assert_equal pkgshare.to_s, shell_output("#{bin}/pg_config --sharedir").chomp
  assert_equal lib.to_s, shell_output("#{bin}/pg_config --libdir").chomp
  assert_equal lib.to_s, shell_output("#{bin}/pg_config --pkglibdir").chomp
end
end
| 31.68323 | 108 | 0.666928 |
ab1cd689eeb68ea3836c89646781a05e63375470 | 1,033 | module Brightbox
# CLI definition for `cloudips update <cloudip-id>`: rename a Cloud IP,
# set or delete its reverse DNS, and configure port translators.
command [:cloudips] do |cmd|
  cmd.desc I18n.t("cloudips.update.desc")
  cmd.arg_name "cloudip-id"

  cmd.command [:update] do |c|
    c.desc "Set reverse DNS for this Cloud IP"
    c.flag [:r, "reverse-dns"]

    c.desc "Delete the reverse DNS for this Cloud IP"
    c.switch ["delete-reverse-dns"], :negatable => false

    c.desc I18n.t("options.name.desc")
    c.flag [:n, :name]

    c.desc I18n.t("cloudips.options.port_translators.desc")
    c.flag [:t, :"port-translators"]

    c.action do |global_options, options, args|
      # First positional argument must look like a Cloud IP identifier.
      cip_id = args.shift
      raise "You must specify the Cloud IP id as the first argument" unless cip_id =~ /^cip-/

      # Setting and deleting reverse DNS in the same call is ambiguous.
      if options[:r] && options[:r] != "" && options[:"delete-reverse-dns"]
        raise "You must either specify a reverse DNS record or --delete-reverse-dns"
      end

      cip = CloudIP.find cip_id
      cip.update(options)
      # Reload so the rendered table reflects server-side state.
      cip.reload
      render_table([cip], global_options)
    end
  end
end
end
| 27.184211 | 95 | 0.610842 |
1c6271f0955867ef8fa2f397c2052de3dd3bab0f | 2,587 | class Pango < Formula
desc "Framework for layout and rendering of i18n text"
homepage "http://www.pango.org/"
url "https://download.gnome.org/sources/pango/1.36/pango-1.36.8.tar.xz"
sha256 "18dbb51b8ae12bae0ab7a958e7cf3317c9acfc8a1e1103ec2f147164a0fc2d07"
revision 1
head do
url "https://git.gnome.org/browse/pango"
depends_on "automake" => :build
depends_on "autoconf" => :build
depends_on "libtool" => :build
depends_on "gtk-doc" => :build
end
option :universal
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "cairo"
depends_on "harfbuzz"
depends_on "fontconfig"
depends_on "gobject-introspection"
fails_with :llvm do
build 2326
cause "Undefined symbols when linking"
end
# Standard autotools build. Xft is explicitly disabled
# (--without-xft) — presumably to avoid an X11 dependency on macOS;
# TODO confirm against upstream rationale.
def install
  ENV.universal_binary if build.universal?

  args = %W[
    --disable-dependency-tracking
    --disable-silent-rules
    --prefix=#{prefix}
    --enable-man
    --with-html-dir=#{share}/doc
    --enable-introspection=yes
    --without-xft
  ]

  # HEAD checkouts ship no configure script; generate it first.
  system "./autogen.sh" if build.head?
  system "./configure", *args
  system "make"
  system "make", "install"
end
# Compiles and runs a minimal pangocairo program to prove that
# installed headers, include paths and library linkage all resolve.
test do
  system "#{bin}/pango-querymodules", "--version"
  (testpath/"test.c").write <<-EOS.undent
    #include <pango/pangocairo.h>
    int main(int argc, char *argv[]) {
    PangoFontMap *fontmap;
    int n_families;
    PangoFontFamily **families;
    fontmap = pango_cairo_font_map_get_default();
    pango_font_map_list_families (fontmap, &families, &n_families);
    g_free(families);
    return 0;
    }
  EOS

  # Transitive dependencies whose include/lib dirs the compile needs.
  cairo = Formula["cairo"]
  fontconfig = Formula["fontconfig"]
  freetype = Formula["freetype"]
  gettext = Formula["gettext"]
  glib = Formula["glib"]
  libpng = Formula["libpng"]
  pixman = Formula["pixman"]

  # Start from whatever flags the build environment already set, then
  # append each dependency's include and lib paths plus link libraries.
  flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
  flags += %W[
    -I#{cairo.opt_include}/cairo
    -I#{fontconfig.opt_include}
    -I#{freetype.opt_include}/freetype2
    -I#{gettext.opt_include}
    -I#{glib.opt_include}/glib-2.0
    -I#{glib.opt_lib}/glib-2.0/include
    -I#{include}/pango-1.0
    -I#{libpng.opt_include}/libpng16
    -I#{pixman.opt_include}/pixman-1
    -D_REENTRANT
    -L#{cairo.opt_lib}
    -L#{gettext.opt_lib}
    -L#{glib.opt_lib}
    -L#{lib}
    -lcairo
    -lglib-2.0
    -lgobject-2.0
    -lintl
    -lpango-1.0
    -lpangocairo-1.0
  ]
  system ENV.cc, "test.c", "-o", "test", *flags
  system "./test"
end
end
| 26.131313 | 93 | 0.622342 |
1df375565ab19c531bfa44fdaf5ad999c27cee31 | 888 | # encoding: utf-8
require 'rubygems'
require 'mongo'
require 'rest_client'

include Mongo

# One-off maintenance script: for every non-region place under parent
# 2043686 in the local "gazetteer" MongoDB, find places elsewhere whose
# preferred or alternate name matches its preferred name. When exactly
# one candidate exists, merge the subject into it via the HTTP API.
client = MongoClient.new
db = client['gazetteer']
places = db['place']

# Counter for reporting how many merges were triggered.
i = 0

# Subjects: direct children of place 2043686 that are not regions.
subjects = places.find(
  "$and" => [
    { "parent" => "2043686" },
    { "type" => { "$ne" => "region" } }
  ]
)

subjects.each do |subject|
  # Candidates: any *other* place carrying the subject's preferred
  # title, either as preferred name or among its alternate names.
  candidates = places.find(
    "$and" => [
      {
        "$or" => [
          { "prefName.title" => subject["prefName"]["title"] },
          { "names.title" => subject["prefName"]["title"] }
        ]
      },
      {
        "_id" => { "$ne" => subject["_id"] }
      }
    ]
  )
  # Only merge when the match is unambiguous (exactly one candidate).
  if candidates.count == 1 then
    candidate = candidates.to_a.first
    puts "#{candidate["_id"]} #{candidate["prefName"]["title"]} - #{subject["_id"]} #{subject["prefName"]["title"]}"
    # NOTE(review): credentials are hard-coded in the URL (admin:admin)
    # — move to configuration/environment before reusing this script.
    RestClient.post "http://admin:[email protected]/merge/#{subject["_id"]}/#{candidate["_id"]}", {}
    i += 1
  end
end
puts "merged #{i} places" | 20.181818 | 114 | 0.572072 |
1160dcae2cd3d24741566c5f3b7bfb5cc884fa3a | 25,238 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# require "google/ads/google_ads/error"
require "google/ads/googleads/v7/services/customer_feed_service_pb"
module Google
module Ads
module GoogleAds
module V7
module Services
module CustomerFeedService
##
# Client for the CustomerFeedService service.
#
# Service to manage customer feeds.
#
class Client
include Paths
# @private
attr_reader :customer_feed_service_stub
##
# Configure the CustomerFeedService Client class.
#
# See {::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client::Configuration}
# for a description of the configuration fields.
#
# @example
#
# # Modify the configuration for all CustomerFeedService clients
# ::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
  # Memoize one class-level default configuration shared by every
  # client that does not override it at construction time.
  @configure ||= begin
    default_config = Client::Configuration.new
    default_config.timeout = 3600.0
    # retry_codes are numeric gRPC statuses — 14 (UNAVAILABLE) and
    # 4 (DEADLINE_EXCEEDED) per the gRPC status-code table.
    default_config.retry_policy = {
      initial_delay: 5.0, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
    }
    default_config
  end
  yield @configure if block_given?
  @configure
end
##
# Configure the CustomerFeedService Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
  # Yield this instance's configuration for mutation, then return it.
  yield @config if block_given?
  @config
end
##
# Create a new CustomerFeedService client object.
#
# @example
#
# # Create a client using the default configuration
# client = ::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client.new
#
# # Create a client using a custom configuration
# client = ::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the CustomerFeedService client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/ads/googleads/v7/services/customer_feed_service_services_pb"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
# Use self-signed JWT if the endpoint is unchanged from default,
# but only if the default endpoint does not have a region prefix.
enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
[email protected](".").first.include?("-")
credentials ||= Credentials.default scope: @config.scope,
enable_self_signed_jwt: enable_self_signed_jwt
if credentials.is_a?(::String) || credentials.is_a?(::Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@customer_feed_service_stub = ::Gapic::ServiceStub.new(
::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
# Service calls
##
# Returns the requested customer feed in full detail.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
#
# @overload get_customer_feed(request, options = nil)
# Pass arguments to `get_customer_feed` via a request object, either of type
# {::Google::Ads::GoogleAds::V7::Services::GetCustomerFeedRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V7::Services::GetCustomerFeedRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_customer_feed(resource_name: nil)
# Pass arguments to `get_customer_feed` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource_name [::String]
# Required. The resource name of the customer feed to fetch.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V7::Resources::CustomerFeed]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V7::Resources::CustomerFeed]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
def get_customer_feed request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Accept either a request object or an equivalent Hash.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Ads::GoogleAds::V7::Services::GetCustomerFeedRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.get_customer_feed.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Ads::GoogleAds::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # x-goog-request-params carries the routing key (the resource name).
  header_params = {
    "resource_name" => request.resource_name
  }
  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  # Per-RPC settings take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.get_customer_feed.timeout,
                         metadata: metadata,
                         retry_policy: @config.rpcs.get_customer_feed.retry_policy

  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @customer_feed_service_stub.call_rpc :get_customer_feed, request,
                                       options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
  # rescue GRPC::BadStatus => grpc_error
  #   raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Creates, updates, or removes customer feeds. Operation statuses are
# returned.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [CollectionSizeError]()
# [CustomerFeedError]()
# [DatabaseError]()
# [DistinctError]()
# [FieldError]()
# [FieldMaskError]()
# [FunctionError]()
# [FunctionParsingError]()
# [HeaderError]()
# [IdError]()
# [InternalError]()
# [MutateError]()
# [NotEmptyError]()
# [OperatorError]()
# [QuotaError]()
# [RangeError]()
# [RequestError]()
# [SizeLimitError]()
# [StringFormatError]()
# [StringLengthError]()
#
# @overload mutate_customer_feeds(request, options = nil)
# Pass arguments to `mutate_customer_feeds` via a request object, either of type
# {::Google::Ads::GoogleAds::V7::Services::MutateCustomerFeedsRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V7::Services::MutateCustomerFeedsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload mutate_customer_feeds(customer_id: nil, operations: nil, partial_failure: nil, validate_only: nil, response_content_type: nil)
# Pass arguments to `mutate_customer_feeds` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param customer_id [::String]
# Required. The ID of the customer whose customer feeds are being modified.
# @param operations [::Array<::Google::Ads::GoogleAds::V7::Services::CustomerFeedOperation, ::Hash>]
# Required. The list of operations to perform on individual customer feeds.
# @param partial_failure [::Boolean]
# If true, successful operations will be carried out and invalid
# operations will return errors. If false, all operations will be carried
# out in one transaction if and only if they are all valid.
# Default is false.
# @param validate_only [::Boolean]
# If true, the request is validated but not executed. Only errors are
# returned, not results.
# @param response_content_type [::Google::Ads::GoogleAds::V7::Enums::ResponseContentTypeEnum::ResponseContentType]
# The response content type setting. Determines whether the mutable resource
# or just the resource name should be returned post mutation.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V7::Services::MutateCustomerFeedsResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V7::Services::MutateCustomerFeedsResponse]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
def mutate_customer_feeds request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Accept either a request object or an equivalent Hash.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Ads::GoogleAds::V7::Services::MutateCustomerFeedsRequest

  # Converts hash and nil to an options object
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.mutate_customer_feeds.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Ads::GoogleAds::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # x-goog-request-params carries the routing key (the customer id).
  header_params = {
    "customer_id" => request.customer_id
  }
  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  # Per-RPC settings take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.mutate_customer_feeds.timeout,
                         metadata: metadata,
                         retry_policy: @config.rpcs.mutate_customer_feeds.retry_policy

  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @customer_feed_service_stub.call_rpc :mutate_customer_feeds, request,
                                       options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
  # rescue GRPC::BadStatus => grpc_error
  #   raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Configuration class for the CustomerFeedService API.
#
# This class represents the configuration for CustomerFeedService,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# @example
#
# # Modify the global config, setting the timeout for
# # get_customer_feed to 20 seconds,
# # and all remaining timeouts to 10 seconds.
# ::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.get_customer_feed.timeout = 20.0
# end
#
# # Apply the above configuration only to a new client.
# client = ::Google::Ads::GoogleAds::V7::Services::CustomerFeedService::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.get_customer_feed.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"googleads.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
# * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
#
class Configuration
extend ::Gapic::Config
config_attr :endpoint, "googleads.googleapis.com", ::String
config_attr :credentials, nil do |value|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client,
nil]
allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
config_attr :interceptors, nil, ::Array, nil
config_attr :timeout, nil, ::Numeric, nil
config_attr :metadata, nil, ::Hash, nil
config_attr :retry_policy, nil, ::Hash, ::Proc, nil
config_attr :quota_project, nil, ::String, nil
# @private
def initialize parent_config = nil
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
##
# Configurations for individual RPCs
# @return [Rpcs]
#
def rpcs
@rpcs ||= begin
parent_rpcs = nil
parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
Rpcs.new parent_rpcs
end
end
##
# Configuration RPC class for the CustomerFeedService API.
#
# Includes fields providing the configuration for each RPC in this service.
# Each configuration object is of type `Gapic::Config::Method` and includes
# the following configuration fields:
#
# * `timeout` (*type:* `Numeric`) - The call timeout in seconds
# * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
# * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
# include the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
#
class Rpcs
##
# RPC-specific configuration for `get_customer_feed`
# @return [::Gapic::Config::Method]
#
attr_reader :get_customer_feed
##
# RPC-specific configuration for `mutate_customer_feeds`
# @return [::Gapic::Config::Method]
#
attr_reader :mutate_customer_feeds
# @private
def initialize parent_rpcs = nil
get_customer_feed_config = parent_rpcs.get_customer_feed if parent_rpcs.respond_to? :get_customer_feed
@get_customer_feed = ::Gapic::Config::Method.new get_customer_feed_config
mutate_customer_feeds_config = parent_rpcs.mutate_customer_feeds if parent_rpcs.respond_to? :mutate_customer_feeds
@mutate_customer_feeds = ::Gapic::Config::Method.new mutate_customer_feeds_config
yield self if block_given?
end
end
end
end
end
end
end
end
end
end
| 50.780684 | 152 | 0.536928 |
032e856fb2bb99d471f36da43ee47069fad8cf1a | 240 | # encoding: UTF-8
module Spontaneous::Model::Core
  # Mixin exposing a media id for content records — presumably used
  # when building paths for associated media files (TODO confirm
  # against Spontaneous::Media callers).
  module Media
    extend Spontaneous::Concern

    # Zero-padded form of #media_id via Spontaneous::Media.pad_id.
    # Saves the record first if it has no id yet, since an unsaved
    # record's database id is nil.
    def padded_id
      save if media_id.nil?
      Spontaneous::Media.pad_id(media_id)
    end

    # The media id is simply the record's database id.
    def media_id
      id
    end
  end
end
| 14.117647 | 41 | 0.654167 |
ff1cbd153d556ee19c5ff0eb7a72961dc88bc66f | 4,257 | class Swift < Formula
desc "High-performance system programming language"
homepage "https://github.com/apple/swift"
url "https://github.com/apple/swift/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "026d596dd4a24580a5e442409e8c58259197bd73ddbb77e5aade96da982ea39b"
bottle do
cellar :any
sha256 "f283aa347aa58b57dd835a91f311837fcb4b8cdbea52aaf9dcffdf33b8915e0c" => :high_sierra
sha256 "2d8f4b3bf2a3c1d5ffd811b42378cb43da9f49c0c16fec6e294d93338bfc57ad" => :sierra
end
keg_only :provided_by_osx, "Apple's CLT package contains Swift"
depends_on "cmake" => :build
depends_on "ninja" => :build
# Depends on latest version of Xcode
# https://github.com/apple/swift#system-requirements
depends_on :xcode => ["9.0", :build]
depends_on "icu4c" unless OS.mac?
# This formula is expected to have broken/missing linkage to
# both UIKit.framework and AssetsLibrary.framework. This is
# simply due to the nature of Swift's SDK Overlays.
resource "clang" do
url "https://github.com/apple/swift-clang/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "c940bd48c88f71622fb00167d92a619dd1614093893e1a09982c08da42259404"
end
resource "cmark" do
url "https://github.com/apple/swift-cmark/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "e95d0b54a0e897e768c9437dd67d56ec887909d0294cf6536ba240accd0d294f"
end
resource "compiler-rt" do
url "https://github.com/apple/swift-compiler-rt/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "1c2da685e8f424cb4460ed1daaf0c308f8deff63e7a3716c8a881cef60fbc7d8"
end
resource "llbuild" do
url "https://github.com/apple/swift-llbuild/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "92001e449b54a47516086a4e7d5f575bffa2847ae1f658540b2ec6f6dee6c6e7"
end
resource "llvm" do
url "https://github.com/apple/swift-llvm/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "a611487a82636142bc1ea8ef5b21401a5c75e57fb0dbf041ef8f2e85a472db2e"
end
resource "swiftpm" do
url "https://github.com/apple/swift-package-manager/archive/swift-4.0.3-RELEASE.tar.gz"
sha256 "4c26d333a01c239de8aa96b0536b7ff7218b7a322851a7d3b3b91b59fb4ce244"
end
# According to the official llvm readme, GCC 4.7+ is required
fails_with :gcc_4_0
fails_with :gcc
# The string range enumerates "4.3", "4.4", "4.5" and "4.6".
("4.3".."4.6").each do |n|
  fails_with :gcc => n
end
# Drives Swift's utils/build-script to assemble the compiler, stdlib,
# llbuild and swiftpm into a relocatable Swift-<version>.xctoolchain
# installed under the prefix.
#
# Fix: the extracted source was missing the method's closing `end`
# (only the `mkdir build do` block was closed before `test do`);
# restored here so the class parses.
def install
  workspace = buildpath.parent
  build = workspace/"build"

  toolchain_prefix = "/Swift-#{version}.xctoolchain"
  install_prefix = "#{toolchain_prefix}/usr"

  # build-script expects the swift checkout and every resource staged
  # as sibling directories inside one workspace.
  ln_sf buildpath, "#{workspace}/swift"
  resources.each { |r| r.stage("#{workspace}/#{r.name}") }

  mkdir build do
    system "#{buildpath}/utils/build-script",
           "--release", "--assertions",
           "--no-swift-stdlib-assertions",
           "--build-subdir=#{build}",
           "--llbuild", "--swiftpm",
           "--ios", "--tvos", "--watchos",
           "--",
           "--workspace=#{workspace}", "--build-args=-j#{ENV.make_jobs}",
           "--install-destdir=#{prefix}", "--toolchain-prefix=#{toolchain_prefix}",
           "--install-prefix=#{install_prefix}", "--host-target=macosx-x86_64",
           "--build-swift-static-stdlib", "--build-swift-dynamic-stdlib",
           "--build-swift-dynamic-sdk-overlay", "--build-swift-static-sdk-overlay",
           "--build-swift-stdlib-unittest-extra", "--install-swift",
           "--swift-install-components=compiler;clang-resource-dir-symlink;"\
           "clang-builtin-headers-in-clang-resource-dir;stdlib;sdk-overlay;tools;"\
           "editor-integration;testsuite-tools;toolchain-dev-tools;license;sourcekit-inproc;"\
           "sourcekit-xpc-service;swift-remote-mirror;swift-remote-mirror-headers",
           "--llvm-install-components=clang;libclang;libclang-headers",
           "--install-llbuild", "--install-swiftpm"
  end
end
# Compiles and runs a small Swift program with the freshly built
# toolchain and checks the printed arithmetic result.
test do
  (testpath/"test.swift").write <<~EOS
    let base = 2
    let exponent_inner = 3
    let exponent_outer = 4
    var answer = 1
    for _ in 1...exponent_outer {
    for _ in 1...exponent_inner {
    answer *= base
    }
    }
    print("(\\(base)^\\(exponent_inner))^\\(exponent_outer) == \\(answer)")
  EOS
  output = shell_output("#{prefix}/Swift-#{version}.xctoolchain/usr/bin/swift test.swift")
  assert_match "(2^3)^4 == 4096\n", output
end
end
| 36.698276 | 93 | 0.693211 |
5d1f851990e83ac4aab0b234942f2b1ab497a12c | 1,182 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
# Gem packaging metadata for the aws-sdk-servicediscovery client.
# (The file header marks this as generated; edits belong upstream.)
Gem::Specification.new do |spec|
  spec.name          = 'aws-sdk-servicediscovery'
  # Version lives in a sibling VERSION file so release tooling can bump
  # it without rewriting the gemspec.
  spec.version       = File.read(File.expand_path('../VERSION', __FILE__)).strip
  spec.summary       = 'AWS SDK for Ruby - ServiceDiscovery'
  spec.description   = 'Official AWS Ruby gem for Amazon Route 53 Auto Naming (ServiceDiscovery). This gem is part of the AWS SDK for Ruby.'
  spec.author        = 'Amazon Web Services'
  spec.homepage      = 'http://github.com/aws/aws-sdk-ruby'
  spec.license       = 'Apache-2.0'
  spec.email         = ['[email protected]']
  spec.require_paths = ['lib']
  spec.files         = Dir['lib/**/*.rb']

  spec.metadata = {
    'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-servicediscovery',
    'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-servicediscovery/CHANGELOG.md'
  }

  spec.add_dependency('aws-sdk-core', '~> 3')
  spec.add_dependency('aws-sigv4', '~> 1.0')
end
| 39.4 | 140 | 0.676819 |
61f6bd72681886d04cb392d2b2a08302bb2c578b | 3,204 | #
# Description: When a VM encounters high CPU % Ready, VMotion VM to a more
# suitable host.
#
# Emails an HTML notification that +event_type+ fired on +vmname+.
#
# vmname      - name of the affected VM
# target_host - host the VM would be VMotioned to
# vm_host     - host the VM currently runs on
# vmotion     - truthy when the VM will actually be moved
# event_type  - human-readable event label for the subject/body
#
# Recipient, sender and signature come from the automate model
# ($evm.object) unless hard-coded in the locals below.
#
# Fix (idiom/perf): the body is now built with destructive append
# (String#<<) instead of `+=`, which allocated a brand-new string for
# every fragment. All message text is byte-identical to the original.
def emailresults(vmname, target_host, vm_host, vmotion, event_type)
  # Get to_email_address from model unless specified below
  to = nil
  to ||= $evm.object['to_email_address']

  # Get from_email_address from model unless specified below
  from = nil
  from ||= $evm.object['from_email_address']

  # Get signature from model unless specified below
  signature = nil
  signature ||= $evm.object['signature']

  subject = "Alert! EVM has detected event [#{event_type}] on VM #{vmname}"

  body = "Hello, "
  body << "<br>"
  body << "EVM has detected event: #{event_type} on VM: <b>#{vmname}</b> running on Host: <b>#{vm_host}</b>."
  body << "<br><br>"
  if vmotion
    body << "VM: <b>#{vmname}</b> will be moved to Host: <b>#{target_host}</b>"
  else
    body << "Host: <b>#{vm_host}</b> is already the lowest CPU % Ready Host. <br><br>"
    body << "VM: <b>#{vmname}</b> will NOT be moved."
  end
  body << "<br><br>"
  body << "Thank You,"
  body << "<br><br>"
  body << signature.to_s
  body << "<br>"

  $evm.log("info", "Sending email to <#{to}> from <#{from}> subject: <#{subject}>")
  $evm.execute('send_email', to, from, subject, body)
end
# Initialize variables
vm = $evm.root['vm']
raise "VM object not found" if vm.nil?
vm_host = vm.host
# Average CPU %-ready (delta summation) of the VM's current host over the
# 15-to-5-minutes-ago window.
curr_host_cpu_percent = vm_host.get_realtime_metric(:v_pct_cpu_ready_delta_summation, [15.minutes.ago.utc, 5.minutes.ago.utc], :avg)
process = $evm.object('process')
event_type = process.attributes['event_type']
event_type ||= 'High CPU Percent Ready Time'
# Get the ESX Host scope from VC or Cluster
# Default is to get ESX Hosts from the Cluster source VM resides
host_scope = nil
host_scope ||= $evm.object['host_scope']
if host_scope && host_scope.downcase == "vc"
  ems = vm.ext_management_system
else
  ems = vm.ems_cluster
end
$evm.log("info", "Detected Host Scope: <#{host_scope}>")
$evm.log("info", "VM: <#{vm.name}> currently residing on Host: <#{vm_host.name}> with CPU % Ready: <#{curr_host_cpu_percent}>")
# Candidate targets: every powered-on host in scope other than the VM's current host.
hosts = ems.hosts
host_suspects = hosts.select { |h| h.power_state == 'on' && h.name != vm_host.name }
host_all = []
host_suspects.each do |h|
  host_cpu_percent = h.get_realtime_metric(:v_pct_cpu_ready_delta_summation, [15.minutes.ago.utc, 5.minutes.ago.utc], :avg)
  host_all << {:id => h.id, :percent => host_cpu_percent, :type => :cpu}
  $evm.log("info", "ESX Host: <#{h.name}> CPU Ready Delta Summation: <#{host_cpu_percent}>")
end
if host_all.empty?
  # BUGFIX: previously host_all.first would raise NoMethodError when no other
  # powered-on host exists in scope; now we just log and do nothing.
  $evm.log("info", "No candidate ESX Hosts found. VM: <#{vm.name}> will NOT be moved.")
else
  host_all.sort! { |a, b| a[:percent] <=> b[:percent] }
  target_host = host_suspects.detect { |h| h.id == host_all.first[:id] }
  # Migrate only when some candidate host has strictly lower CPU % Ready than
  # the VM's current host.
  if curr_host_cpu_percent <= host_all.first[:percent]
    # BUGFIX: this branch used to set vmotion = true, migrating the VM even
    # though the log and e-mail said it would NOT be moved.
    $evm.log("info", "ESX Host: <#{vm_host.name}> is already the lowest CPU Ready Host. VM: <#{vm.name}> will NOT be moved.")
    vmotion = false
  else
    $evm.log("info", "VM: <#{vm.name}> will be moved to ESX Host: <#{target_host.name}> with CPU % Ready: <#{host_all.first[:percent]}>")
    vmotion = true
  end
  # Email Results
  emailresults(vm.name, target_host.name, vm_host, vmotion, event_type)
  # VMotion VM to Target_host
  vm.migrate(target_host) if vmotion
end
| 32.363636 | 135 | 0.676966 |
21ee1b97944bf3d0bf42c62f0f1c582b70e61cc5 | 1,217 | #
# Copyright 2015, SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../../../../spec_helper"
describe "Crowbar::Client::Request::Proposal::Show" do
  it_behaves_like "a request class", true do
    # Request under test: fetch the "default" proposal of the "ntp" barclamp.
    subject { ::Crowbar::Client::Request::Proposal::Show.new(attrs) }

    let!(:attrs) { { barclamp: "ntp", proposal: "default" } }
    let!(:params) { {} }
    let!(:method) { :get }
    let!(:url) { "crowbar/ntp/1.0/proposals/default" }
    let!(:headers) do
      {
        "Content-Type" => "application/json",
        "Accept" => "application/json"
      }
    end
  end
end
| 22.537037 | 74 | 0.63599 |
3945537b9f097097f745ed874016d6d505961a9a | 805 | puts
# Prints the getting-started instructions shown after installing the
# shopify_app generator gem. Output only; no logic.
puts "Shopify App Generator"
puts "---------------------"
puts
puts "To get started, first register your app as a Shopify Partner:"
puts
puts " * Go to http://www.shopify.com/partners and create or login to your Partner account."
puts
puts " * Jump over to the Apps tab and hit the 'Create a new app' button"
puts "  (Make sure to set the Application URL to http://localhost:3000/login during development)"
puts
puts " * Install the Shopify API gem:
      $ gem install shopify_api"
puts
puts " * Run
      $ rails generate shopify_app your_app_api_key your_app_secret"
puts
puts " * Set up a test shop to install your app in (do this on the Partner site)"
puts
puts " * Run $ rails server"
puts
puts " * Visit http://localhost:3000 and use the test shop's URL to install this app"
puts | 32.2 | 98 | 0.701863 |
2621c0fecf2c7f30557fbb797f07cc82f4b82c95 | 4,261 | # == Mentionable concern
#
# Contains functionality related to objects that can mention Users, Issues, MergeRequests, or Commits by
# GFM references.
#
# Used by Issue, Note, MergeRequest, and Commit.
#
module Mentionable
  extend ActiveSupport::Concern
  module ClassMethods
    # Indicate which attributes of the Mentionable to search for GFM references.
    def attr_mentionable(attr, options = {})
      attr = attr.to_s
      mentionable_attrs << [attr, options]
    end
    # Accessor for attributes marked mentionable.
    def mentionable_attrs
      @mentionable_attrs ||= []
    end
  end
  included do
    # When the including class is also Participable, users mentioned in the
    # mentionable attributes count as participants.
    if self < Participable
      participant ->(current_user) { mentioned_users(current_user) }
    end
  end
  # Returns the text used as the body of a Note when this object is referenced
  #
  # By default this will be the class name and the result of calling
  # `to_reference` on the object.
  def gfm_reference(from_project = nil)
    # "MergeRequest" > "merge_request" > "Merge request" > "merge request"
    friendly_name = self.class.to_s.underscore.humanize.downcase
    "#{friendly_name} #{to_reference(from_project)}"
  end
  # The GFM reference to this Mentionable, which shouldn't be included in its #references.
  def local_reference
    self
  end
  # Runs the reference extractor over `text` when given, otherwise over every
  # attribute registered via attr_mentionable (optionally cached per attribute
  # when the :cache option was set and the record is persisted).
  # Returns the Gitlab::ReferenceExtractor.
  def all_references(current_user = nil, text = nil)
    ext = Gitlab::ReferenceExtractor.new(self.project, current_user, self.author)
    if text
      ext.analyze(text)
    else
      self.class.mentionable_attrs.each do |attr, options|
        text = send(attr)
        context = options.dup
        context[:cache_key] = [self, attr] if context.delete(:cache) && self.persisted?
        ext.analyze(text, context)
      end
    end
    ext
  end
  # Users mentioned anywhere in the mentionable attributes, as visible to current_user.
  def mentioned_users(current_user = nil)
    all_references(current_user).users
  end
  # Extract GFM references to other Mentionables from this Mentionable. Always excludes its #local_reference.
  def referenced_mentionables(current_user = self.author, text = nil)
    refs = all_references(current_user, text)
    refs = (refs.issues + refs.merge_requests + refs.commits)
    # We're using this method instead of Array diffing because that requires
    # both of the object's `hash` values to be the same, which may not be the
    # case for otherwise identical Commit objects.
    refs.reject { |ref| ref == local_reference }
  end
  # Create a cross-reference Note for each GFM reference to another Mentionable found in the +mentionable_attrs+.
  def create_cross_references!(author = self.author, without = [], text = nil)
    refs = referenced_mentionables(author, text)
    # Skip references the caller asked us to ignore (`without`) and references
    # that already have a cross-reference note, so notes are never duplicated.
    refs.reject! { |ref| without.include?(ref) || cross_reference_exists?(ref) }
    refs.each do |ref|
      SystemNoteService.cross_reference(ref, local_reference, author)
    end
  end
  # When a mentionable field is changed, creates cross-reference notes that
  # don't already exist
  def create_new_cross_references!(author = self.author)
    changes = detect_mentionable_changes
    return if changes.empty?
    # `vals.first` is the attribute's PREVIOUS value; references found there
    # are passed as `without` so only newly added references get notes.
    original_text = changes.collect { |_, vals| vals.first }.join(' ')
    preexisting = referenced_mentionables(author, original_text)
    create_cross_references!(author, preexisting)
  end
  private
  # Returns a Hash of changed mentionable fields
  #
  # Preference is given to the `changes` Hash, but falls back to
  # `previous_changes` if it's empty (i.e., the changes have already been
  # persisted).
  #
  # See ActiveModel::Dirty.
  #
  # Returns a Hash.
  def detect_mentionable_changes
    source = (changes.present? ? changes : previous_changes).dup
    mentionable = self.class.mentionable_attrs.map { |attr, options| attr }
    # Only include changed fields that are mentionable
    source.select { |key, val| mentionable.include?(key) }
  end
  # Determine whether or not a cross-reference Note has already been created between this Mentionable and
  # the specified target.
  def cross_reference_exists?(target)
    SystemNoteService.cross_reference_exists?(target, local_reference)
  end
end
| 32.037594 | 113 | 0.717672 |
28336e785579198c0592f4ab78c20261850170ff | 2,809 | module Constellation
#
# Constellation::Reader observes the given log files for changes and inserts new entries
# into the data store.
#
class Reader
attr_accessor :debug_mode
def initialize(config)
@config = config
@debug_mode = false
@running = true
@threads = []
end
#
# Starts observing the given files
#
def start
Constellation::UserInterface.inform(">> Starting file observation", :prepend_newline => true)
@config.watched_files.each { |file|
@threads << Thread.new { read_log_entries(file) }
}
wait_for_interrupt
end
#
# Read the given log file every TIME_TO_WAIT seconds
#
def read_log_entries(file)
begin
file = File.open(file, "a+")
rescue Errno::EACCES
Constellation::UserInterface.error("Permission denied: Please check the access permissions of #{file}", :prepend_newline => true)
quit_application
end
while(@running)
begin
while(line = file.readline)
begin
log_entry = Constellation::LogEntry.new(line)
@config.data_store.insert(log_entry)
Constellation::UserInterface.inform(Time.now.strftime("%m/%d/%Y %I:%M%p") + ":" + log_entry.inspect) if @debug_mode
rescue Exception => e
new_system_error(e)
end
end
# rescue from several errors that may occur due to an invalid log format
# but should not appear in order to avoid performance issues
rescue EOFError => e
end
sleep(@config.reading_buffer)
end
end
#
# Log errors that get raised while reading the log file
#
def new_system_error(error)
log_entry = Constellation::LogEntry.new
log_entry.machine = "system"
log_entry.application = "Constellation"
log_entry.time = Time.now
log_entry.message = "A new exception got raised: #{error.to_s}"
log_entry.key = "#{log_entry.time.year}/#{log_entry.time.month}/#{log_entry.time.day}/#{log_entry.time.hour}"
@config.data_store.insert(log_entry)
Constellation::UserInterface.error(log_entry.message) if @debug_mode
end
#
# Wait until the user quits Constellation
#
def wait_for_interrupt
while(@running)
sleep(100)
end
end
#
# Quit the application by killing all opened threads and the process itself.
#
def quit_application()
# Kill each thread except the current thread
@threads.each { |t| t.kill unless t.object_id==Thread.current.object_id }
Constellation::UserInterface.confirm("Quitting constellation..", :prepend_newline => true)
Kernel.exit(1)
end
end
end
| 30.204301 | 137 | 0.626201 |
879f437aea01ea84f7049dfa5bfb2fa546cab584 | 6,530 | #
# Be sure to run `pod lib lint MUKit.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Edit the podspec first, then validate with `pod spec lint` / `pod lib lint` before tagging in git, otherwise mistakes are easy to make.
# pod lib lint --allow-warnings --use-libraries
# When third-party libraries are included, validate with: pod repo push MUKit MUKit.podspec --allow-warnings --use-libraries
# Register with: pod trunk register [email protected] 'Jekity' --verbose
# Publish to CocoaPods with: pod trunk push MUKit.podspec --use-libraries --allow-warnings
# Adding inhibit_all_warnings! to the Podfile silences warnings coming from pods.
# Podspec for MUKit, an iOS productivity framework split into many subspecs,
# one per directory under MUKit/Classes.
Pod::Spec.new do |s|
s.name             = 'MUKit'
s.version          = '1.6.2'
s.summary          = 'UITableView、UICollectionView、Signal、UINavigation、AliPay、weChatPay、Shared、Popup、Networking,runtime、Carousel、QRCode,Block,ScrollView、嵌套滚动 、MVVM、delegate、Refresh、route、路由、CheckBox、popupView 一款提高iOS开发效率的工具包MUKit'
s.description      = <<-DESC
一款提高iOS开发效率的组件框架,涉及UITableView、UICollectionView、Signal、UINavigation、AliPay、weChatPay、Shared、Popup、Networking,runtime、Carousel、QRCode,Block,ScrollView、嵌套滚动 、MVVM、delegate、Refresh内容
DESC
s.homepage         = 'https://github.com/Jeykit/MUKit'
# s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license          = { :type => 'MIT', :file => 'LICENSE' }
s.author           = { 'Jeykit' => '[email protected]' }
s.source           = { :git => 'https://github.com/Jeykit/MUKit.git', :tag => s.version }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
#s.ios.deployment_target = '8.0'
#s.source_files = 'MUKit/Classes/**/*'
# Only the umbrella header is shipped by the root spec; code comes from subspecs.
s.source_files = 'MUKit/Classes/MUKit.h'
s.public_header_files = 'MUKit/Classes/MUKit.h'
s.ios.deployment_target = '8.0'
#s.platform = :ios, '8.0' # supported platform
s.subspec 'Normal' do |ss|
ss.source_files = 'MUKit/Classes/MUNormal/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUNormal/UIView+MUNormal.h'
end
s.subspec 'TipsView' do |ss|
ss.source_files = 'MUKit/Classes/MUTipsView/*.{h,m}'
end
s.subspec 'Public' do |ss|
ss.source_files = 'MUKit/Classes/Public/*.{h,m}'
end
s.subspec 'Image' do |ss|
ss.source_files = 'MUKit/Classes/UIImage/*.{h,m}'
end
s.subspec 'Color' do |ss|
ss.source_files = 'MUKit/Classes/UIColor/*.{h,m}'
end
s.subspec 'Refresh' do |ss|
ss.source_files = 'MUKit/Classes/Refresh/*.{h,m}'
ss.dependency 'MUKit/Normal'
end
s.subspec 'Signal' do |ss|
ss.source_files = 'MUKit/Classes/MUSignal/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUSignal/{MUSignal,UIView+MUSignal}.h'
end
s.subspec 'Carousel' do |ss|
ss.source_files = 'MUKit/Classes/Carousel/MUCarouselView.{h,m}'
ss.public_header_files = 'MUKit/Classes/Carousel/MUCarouselView.h'
end
s.subspec 'AdaptiveView' do |ss|
ss.source_files = 'MUKit/Classes/MUAdaptiveView/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUAdaptiveView/MUAdaptiveView.h'
end
s.subspec 'Navigation' do |ss|
ss.source_files = 'MUKit/Classes/MUNavigationController/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUNavigationController/MUNavigation.h'
ss.dependency 'YYModel'
end
s.subspec 'TableViewManager' do |ss|
ss.source_files = 'MUKit/Classes/MUTableViewManager/*.{h,m}'
ss.dependency 'MUKit/TipsView'
ss.dependency 'MUKit/Refresh'
ss.dependency 'MUKit/Public'
ss.dependency 'YYModel'
end
s.subspec 'PaperView' do |ss|
ss.source_files = 'MUKit/Classes/MUPaperView/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUPaperView/MUPaperView.h'
end
s.subspec 'Shared' do |ss|
ss.source_files = 'MUKit/Classes/MUShared/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUShared/MUShared{Manager,Object}.h'
#ss.dependency 'AliPay'
ss.dependency 'WeChat_SDK'
ss.dependency 'WeiboSDK'
ss.dependency 'TencentOpenApiSDK'
ss.dependency 'MUKit/Public'
end
s.subspec 'EPaymentManager' do |ss|
ss.source_files = 'MUKit/Classes/MUEPaymentManager/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUEPaymentManager/MUEPaymentManager.h'
ss.dependency 'MUKit/Public'
ss.dependency 'AliPay'
ss.dependency 'WeChat_SDK'
end
s.subspec 'PopupController' do |ss|
ss.source_files = 'MUKit/Classes/MUPopupController/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUPopupController/{MUPopup,MUPopupController,UIViewController+MUPopup}.h'
ss.dependency 'MUKit/Public'
end
s.subspec 'Encryption' do |ss|
ss.source_files = 'MUKit/Classes/MUEncryption/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUEncryption/MUEncryptionUtil.h'
ss.frameworks = 'Security'
end
s.subspec 'CollectionViewManager' do |ss|
ss.source_files = 'MUKit/Classes/MUCollectionViewManager/*.{h,m}'
ss.dependency 'YYModel'
ss.dependency 'MUKit/TipsView'
ss.dependency 'MUKit/Refresh'
ss.dependency 'MUKit/Public'
end
s.subspec 'QRCodeManager' do |ss|
ss.source_files = 'MUKit/Classes/QRCodeScan/{MUQRCodeManager,MU_Scan_Success}.{h,m,wav}'
end
s.subspec 'Networking' do |ss|
ss.source_files = 'MUKit/Classes/Networking/*.{h,m}'
ss.dependency 'YYModel'
ss.dependency 'AFNetworking'
end
s.subspec 'ScrollManager' do |ss|
ss.source_files = 'MUKit/Classes/MUScrollManager/*.{h,m}'
ss.dependency 'MUKit/Public'
end
s.subspec 'Checkbox' do |ss|
ss.source_files = 'MUKit/Classes/Checkbox/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/Checkbox/MUCheckbox.h'
end
s.subspec 'popupView' do |ss|
ss.source_files = 'MUKit/Classes/MUPopupView/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUPopupView/MUPopupView.h'
end
s.subspec 'ImagePickerManager' do |ss|
ss.source_files = 'MUKit/Classes/MUImagePickerManager/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUImagePickerManager/MUImagePickerManager.h'
ss.dependency 'MUKit/PhotoPreview'
end
s.subspec 'PhotoPreview' do |ss|
ss.source_files = 'MUKit/Classes/MUPhotoPreview/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/MUPhotoPreview/MUPhotoPreviewController.h'
end
s.subspec 'ImageCache' do |ss|
ss.source_files = 'MUKit/Classes/ImageCache/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/ImageCache/*.h'
end
s.subspec 'Tag' do |ss|
ss.source_files = 'MUKit/Classes/Tag/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/Tag/MUTagView.h'
end
s.subspec 'Levitate' do |ss|
ss.source_files = 'MUKit/Classes/Levitate/*.{h,m}'
ss.public_header_files = 'MUKit/Classes/Levitate/MULevitateView.h'
end
end
| 40.308642 | 232 | 0.718224 |
ff449920a2d88dbf8d86ffa293fbde07cc549c8e | 133 | require_relative 'strategy'
# Strategy-pattern concrete strategy: addition.
class AddStrategy < Strategy
  # Returns the sum of the two operands.
  # (The redundant empty `initialize` was removed; Ruby supplies it implicitly.)
  def do_operation(num1, num2)
    num1 + num2
  end
end
d565fa4955966319f81b7bddea9bea1620e83b89 | 204 | require "squeel"
# A customer testimonial, newest first by default.
class Testimonial < ActiveRecord::Base
  # Squeel block DSL: equivalent to order(created_at: :desc).
  # NOTE(review): default_scope affects every query on this model — confirm
  # that is intended before removing.
  default_scope -> { order{created_at.desc} }
  # `content` must be present in every configured locale.
  required_locale_columns :content
  validates_presence_of :author
  def to_s
    author
  end
end
| 15.692308 | 45 | 0.75 |
1c9a1a74a4eb91c1502d0735fd1afc2001340414 | 875 | # rubocop:disable all
# Data migration: collapse the old SlackService "token" + "subdomain"
# properties into the single "webhook" URL property.
# NOTE(review): irreversible data migration (no #down); history is frozen —
# do not edit.
class MoveSlackServiceToWebhook < ActiveRecord::Migration[4.2]
  DOWNTIME = true
  DOWNTIME_REASON = 'Move old fields "token" and "subdomain" to one single field "webhook"'
  def change
    SlackService.all.each do |slack_service|
      # Only migrate records that still carry both legacy properties.
      if ["token", "subdomain"].all? { |property| slack_service.properties.key? property }
        token = slack_service.properties['token']
        subdomain = slack_service.properties['subdomain']
        webhook = "https://#{subdomain}.slack.com/services/hooks/incoming-webhook?token=#{token}"
        slack_service.properties['webhook'] = webhook
        slack_service.properties.delete('token')
        slack_service.properties.delete('subdomain')
        # Room is configured on the Slack side
        slack_service.properties.delete('room')
        # Skip validations: legacy rows may not satisfy current model rules.
        slack_service.save(validate: false)
      end
    end
  end
end
| 38.043478 | 97 | 0.696 |
e91a722337dafc41496eb1901dd940d30cc7121b | 144 | require 'ytsearch'
require 'pp'

# Demo: run a YouTube search for "taylor swift 22" and pretty-print the
# raw snippet response.
search_params = {
  "q" => "taylor swift 22",
  "part" => "snippet"
}
pp YoutubeSearch.search(search_params)
| 12 | 36 | 0.638889 |
b9ed9f281be08c42461c43027e9e8661ba5ade64 | 1,137 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# Auto-generated by Rails (see header above) — regenerate via migrations,
# never edit by hand.
ActiveRecord::Schema.define(version: 20170622055919) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
  # Users with password-digest auth plus an expiring API token.
  create_table "users", force: :cascade do |t|
    t.string   "email"
    t.string   "password_digest"
    t.string   "token"
    t.datetime "token_expires_at"
    t.datetime "created_at",       null: false
    t.datetime "updated_at",       null: false
  end
end
| 40.607143 | 86 | 0.766051 |
f7719447b92007bf986774dc666de7921643f513 | 1,583 | # frozen_string_literal: true
# Serializes an Issue for issue-board JSON payloads.
class IssueBoardEntity < Grape::Entity
  include RequestAwareEntity
  expose :id
  expose :iid
  expose :title
  expose :confidential
  expose :due_date
  expose :project_id
  expose :relative_position
  # Nested representations are trimmed with `only:` to keep board payloads small.
  expose :project do |issue|
    API::Entities::Project.represent issue.project, only: [:id, :path]
  end
  expose :milestone, expose_nil: false do |issue|
    API::Entities::Milestone.represent issue.milestone, only: [:id, :title]
  end
  expose :assignees do |issue|
    API::Entities::UserBasic.represent issue.assignees, only: [:id, :name, :username, :avatar_url]
  end
  expose :labels do |issue|
    LabelEntity.represent issue.labels, project: issue.project, only: [:id, :title, :description, :color, :priority, :text_color]
  end
  # Path/URL helpers below are only exposed when the issue has a project.
  expose :reference_path, if: -> (issue) { issue.project } do |issue, options|
    options[:include_full_project_path] ? issue.to_reference(full: true) : issue.to_reference
  end
  expose :real_path, if: -> (issue) { issue.project } do |issue|
    project_issue_path(issue.project, issue)
  end
  expose :issue_sidebar_endpoint, if: -> (issue) { issue.project } do |issue|
    project_issue_path(issue.project, issue, format: :json, serializer: 'sidebar_extras')
  end
  expose :toggle_subscription_endpoint, if: -> (issue) { issue.project } do |issue|
    toggle_subscription_project_issue_path(issue.project, issue)
  end
  expose :assignable_labels_endpoint, if: -> (issue) { issue.project } do |issue|
    project_labels_path(issue.project, format: :json, include_ancestor_groups: true)
  end
end
| 31.039216 | 129 | 0.72331 |
e23a62e7ff0b0170887a9d94dc7e175f71edb96b | 923 | # == Schema Information
#
# Table name: levels
#
# id :integer not null, primary key
# game_id :integer
# name :string(255) not null
# created_at :datetime
# updated_at :datetime
# level_num :string(255)
# ideal_level_source_id :integer
# solution_level_source_id :integer
# user_id :integer
# properties :text(65535)
# type :string(255)
# md5 :string(255)
# published :boolean default(FALSE), not null
# notes :text(65535)
#
# Indexes
#
# index_levels_on_game_id (game_id)
#
# Text Match type.
class TextMatch < DSLDefined
  # Default DSL template shown when authoring a new Text Match level.
  # The heredoc body is the literal DSL source, so it must stay unindented.
  def dsl_default
    <<ruby
name 'Enter name here'
title 'Enter title here'
content1 'Enter prompt here'
answer 'Enter answer here'
ruby
  end
end
| 25.638889 | 70 | 0.535211 |
d54b9415d43fd17d1cecebcd7a8d11978f879ab9 | 3,621 | class UpdateAccessForWhosOnline < ActiveRecord::Migration
def up
say "adding access restrictions for some customized actions..."
AppParameter.transaction do # -- START TRANSACTION --
ap = AppParameter.find_by_code( AppParameter::PARAM_BLACKLIST_ACCESS_START )
if (ap.nil?)
AppParameter.create :code => AppParameter::PARAM_BLACKLIST_ACCESS_START,
:controller_name => 'week_plan',
:action_name => 'income_analysis',
:a_integer => 4, # (higher than projects)
:description => '(controller_name, action_name): action identifiers; a_integer: required level for access grant (should be greater than base level required for controller access)'
end
# (Let's say 5 is enough as a step in between action restrictions - it won't be read or needed anywhere else)
ap = AppParameter.find_by_code( AppParameter::PARAM_BLACKLIST_ACCESS_START + 5 )
if (ap.nil?)
AppParameter.create :code => AppParameter::PARAM_BLACKLIST_ACCESS_START + 5,
:controller_name => 'appointments',
:action_name => 'issue_receipt',
:a_integer => 4, # (same level as contacts, firms, ...)
:description => '(controller_name, action_name): action identifiers; a_integer: required level for access grant (should be greater than base level required for controller access)'
end
ap = AppParameter.find_by_code( AppParameter::PARAM_BLACKLIST_ACCESS_START + 10 )
if (ap.nil?)
AppParameter.create :code => AppParameter::PARAM_BLACKLIST_ACCESS_START + 10,
:controller_name => 'welcome',
:action_name => 'whos_online',
:a_integer => 8, # (same level as contacts, firms, ...)
:description => '(controller_name, action_name): action identifiers; a_integer: required level for access grant (should be greater than base level required for controller access)'
end
ap = AppParameter.find_by_code( AppParameter::PARAM_BLACKLIST_ACCESS_START + 15 )
if (ap.nil?)
AppParameter.create :code => AppParameter::PARAM_BLACKLIST_ACCESS_START + 15,
:controller_name => 'welcome',
:action_name => 'edit_current_user',
:a_integer => 1,
:description => '(controller_name, action_name): action identifiers; a_integer: required level for access grant (should be greater than base level required for controller access)'
end
end # -- END TRANSACTION --
say 'verifying the existence of the parameters...'
[
AppParameter::PARAM_BLACKLIST_ACCESS_START,
AppParameter::PARAM_BLACKLIST_ACCESS_START + 5,
AppParameter::PARAM_BLACKLIST_ACCESS_START + 10,
AppParameter::PARAM_BLACKLIST_ACCESS_START + 15
].each { |code|
say "seeking param. row w/ code #{code}"
raise "Parameter row not found with code #{code}!" unless AppParameter.find_by_code( code )
}
say 'done.'
end
def down
say "deleting access restrictions for all customized actions..."
AppParameter.delete_all(
"(code >= #{AppParameter::PARAM_BLACKLIST_ACCESS_START}) AND (code <= #{AppParameter::PARAM_BLACKLIST_ACCESS_START + 15})"
)
say 'done.'
end
end
| 55.707692 | 207 | 0.606738 |
91708d244187049503fff3aeffd7bf5bd1f44165 | 6,039 | require 'spec_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to specify the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
#
# Compared to earlier versions of this generator, there is very limited use of
# stubs and message expectations in this spec. Stubs are only used when there
# is no simpler way to get a handle on the object needed for the example.
# Message expectations are only used when there is no simpler way to specify
# that an instance is receiving a specific message.
# NOTE(review): rspec-rails scaffold spec using the legacy `should`/`stub`
# syntax — valid under RSpec 2.x configuration; migrate to `expect` syntax
# only together with an RSpec upgrade.
describe TweetsController do
  # This should return the minimal set of attributes required to create a valid
  # Tweet. As you add validations to Tweet, be sure to
  # adjust the attributes here as well.
  let(:valid_attributes) { { "title" => "MyString" } }
  # This should return the minimal set of values that should be in the session
  # in order to pass any filters (e.g. authentication) defined in
  # TweetsController. Be sure to keep this updated too.
  let(:valid_session) { {} }
  describe "GET index" do
    it "assigns all tweets as @tweets" do
      tweet = Tweet.create! valid_attributes
      get :index, {}, valid_session
      assigns(:tweets).should eq([tweet])
    end
  end
  describe "GET show" do
    it "assigns the requested tweet as @tweet" do
      tweet = Tweet.create! valid_attributes
      get :show, {:id => tweet.to_param}, valid_session
      assigns(:tweet).should eq(tweet)
    end
  end
  describe "GET new" do
    it "assigns a new tweet as @tweet" do
      get :new, {}, valid_session
      assigns(:tweet).should be_a_new(Tweet)
    end
  end
  describe "GET edit" do
    it "assigns the requested tweet as @tweet" do
      tweet = Tweet.create! valid_attributes
      get :edit, {:id => tweet.to_param}, valid_session
      assigns(:tweet).should eq(tweet)
    end
  end
  describe "POST create" do
    describe "with valid params" do
      it "creates a new Tweet" do
        expect {
          post :create, {:tweet => valid_attributes}, valid_session
        }.to change(Tweet, :count).by(1)
      end
      it "assigns a newly created tweet as @tweet" do
        post :create, {:tweet => valid_attributes}, valid_session
        assigns(:tweet).should be_a(Tweet)
        assigns(:tweet).should be_persisted
      end
      it "redirects to the created tweet" do
        post :create, {:tweet => valid_attributes}, valid_session
        response.should redirect_to(Tweet.last)
      end
    end
    describe "with invalid params" do
      it "assigns a newly created but unsaved tweet as @tweet" do
        # Trigger the behavior that occurs when invalid params are submitted
        Tweet.any_instance.stub(:save).and_return(false)
        post :create, {:tweet => { "title" => "invalid value" }}, valid_session
        assigns(:tweet).should be_a_new(Tweet)
      end
      it "re-renders the 'new' template" do
        # Trigger the behavior that occurs when invalid params are submitted
        Tweet.any_instance.stub(:save).and_return(false)
        post :create, {:tweet => { "title" => "invalid value" }}, valid_session
        response.should render_template("new")
      end
    end
  end
  describe "PUT update" do
    describe "with valid params" do
      it "updates the requested tweet" do
        tweet = Tweet.create! valid_attributes
        # Assuming there are no other tweets in the database, this
        # specifies that the Tweet created on the previous line
        # receives the :update_attributes message with whatever params are
        # submitted in the request.
        Tweet.any_instance.should_receive(:update).with({ "title" => "MyString" })
        put :update, {:id => tweet.to_param, :tweet => { "title" => "MyString" }}, valid_session
      end
      it "assigns the requested tweet as @tweet" do
        tweet = Tweet.create! valid_attributes
        put :update, {:id => tweet.to_param, :tweet => valid_attributes}, valid_session
        assigns(:tweet).should eq(tweet)
      end
      it "redirects to the tweet" do
        tweet = Tweet.create! valid_attributes
        put :update, {:id => tweet.to_param, :tweet => valid_attributes}, valid_session
        response.should redirect_to(tweet)
      end
    end
    describe "with invalid params" do
      it "assigns the tweet as @tweet" do
        tweet = Tweet.create! valid_attributes
        # Trigger the behavior that occurs when invalid params are submitted
        Tweet.any_instance.stub(:save).and_return(false)
        put :update, {:id => tweet.to_param, :tweet => { "title" => "invalid value" }}, valid_session
        assigns(:tweet).should eq(tweet)
      end
      it "re-renders the 'edit' template" do
        tweet = Tweet.create! valid_attributes
        # Trigger the behavior that occurs when invalid params are submitted
        Tweet.any_instance.stub(:save).and_return(false)
        put :update, {:id => tweet.to_param, :tweet => { "title" => "invalid value" }}, valid_session
        response.should render_template("edit")
      end
    end
  end
  describe "DELETE destroy" do
    it "destroys the requested tweet" do
      tweet = Tweet.create! valid_attributes
      expect {
        delete :destroy, {:id => tweet.to_param}, valid_session
      }.to change(Tweet, :count).by(-1)
    end
    it "redirects to the tweets list" do
      tweet = Tweet.create! valid_attributes
      delete :destroy, {:id => tweet.to_param}, valid_session
      response.should redirect_to(tweets_url)
    end
  end
end
| 37.509317 | 101 | 0.674615 |
6a11523718afff870d58b6b97f7525f544a5cce2 | 1,910 | class UsersController < ApplicationController
before_action :logged_in_user, only: [:index, :edit, :update, :destroy, :following, :followers]
before_action :correct_user, only: [:edit, :update]
before_action :admin_user, only: :destroy
def index
@users = User.paginate(page: params[:page])
end
def destroy
User.find(params[:id]).destroy
flash[:success] = "User deleted"
redirect_to users_url
end
def show
@user = User.find(params[:id])
@microposts = @user.microposts.paginate(page: params[:page])
end
def new
@user = User.new
end
def create
@user = User.new(user_params)
if @user.save
@user.send_activation_email
flash[:info] = "Please check your email to activate your account."
redirect_to root_url
else
render 'new'
end
end
def edit
@user = User.find(params[:id])
end
def update
@user = User.find(params[:id])
if @user.update(user_params)
flash[:success] = "Profile updated"
redirect_to @user
else
render 'edit'
end
end
def following
@title = "Following"
@user = User.find(params[:id])
@users = @user.following.paginate(page: params[:page])
render 'show_follow'
end
def followers
@title = "Followers"
@user = User.find(params[:id])
@users = @user.followers.paginate(page: params[:page])
render 'show_follow'
end
private

# Strong parameters: only these attributes may be mass-assigned; anything
# else in params[:user] (e.g. an :admin flag) is silently dropped.
def user_params
  params.require(:user).permit(:name, :email, :password, :password_confirmation)
end
# before filters

# Confirms the correct user.
#
# NOTE(review): this method used to be defined twice in a row; Ruby lets
# the second definition silently override the first, so only this
# comparison-based version ever ran. The dead duplicate (which called the
# +current_user?+ session helper instead) has been removed. The two
# variants are presumably equivalent — confirm against SessionsHelper.
def correct_user
  @user = User.find(params[:id])
  redirect_to(root_url) unless @user == current_user
end
# Confirms an admin user. Safe to call current_user without a nil check
# because :destroy (the only guarded action) also runs :logged_in_user.
def admin_user
  redirect_to(root_url) unless current_user.admin?
end
end | 21.460674 | 97 | 0.675916 |
39a922be17c7861b6b3bc366c886a156f43c0d82 | 1,308 | # frozen_string_literal: true
require 'barong/security/access_token'
module UserApi
  module V1
    # Verifies a JWT signed with an API key's RSA key pair and, when valid,
    # exchanges it for a short-lived session JWT signed with Barong's own
    # private key.
    class SessionJWTGenerator
      # Signature algorithm used both for verification and for signing.
      ALGORITHM = 'RS256'

      # jwt_token: raw JWT presented by the client.
      # kid: uid of the active APIKey whose public key should verify the
      #      token; raises ActiveRecord::RecordNotFound if none matches.
      def initialize(jwt_token:, kid:)
        @kid = kid
        @jwt_token = jwt_token
        @api_key = APIKey.active.find_by!(uid: kid)
      end

      # True when the token decodes against the API key's public key.
      # NOTE: JWT.decode raises (rather than returning nil) on a bad
      # signature or malformed token — those errors are not rescued here.
      def verify_payload
        payload, = decode_payload
        payload.present?
      end

      # Builds and signs the session JWT for the API key's account.
      # Claims mirror the account's state plus the originating key uid.
      def generate_session_jwt
        account = @api_key.account
        payload = {
          iat: Time.current.to_i,
          exp: @api_key.expires_in.seconds.from_now.to_i,
          sub: 'session',
          iss: 'barong',
          aud: @api_key.scopes,
          jti: SecureRandom.hex(12).upcase,
          uid: account.uid,
          email: account.email,
          role: account.role,
          level: account.level,
          state: account.state,
          api_kid: @api_key.uid
        }

        JWT.encode(payload, Barong::Security.private_key, ALGORITHM)
      end

      private

      # Decodes @jwt_token with the API key's stored (base64url-encoded)
      # public key. Returns an empty hash if the stored key turns out to
      # be a private key — a defensive guard against key material misuse.
      def decode_payload
        public_key = OpenSSL::PKey.read(Base64.urlsafe_decode64(@api_key.public_key))
        return {} if public_key.private?

        JWT.decode(@jwt_token,
                   public_key,
                   true,
                   APIKey::JWT_OPTIONS)
      end
    end
  end
end
| 23.781818 | 85 | 0.571865 |
873bddffa2520e52e88c2dd61f0a6a42873dfcc6 | 165 | require 'spec_helper'
require 'redis'
require 'rhcf/timeseries/manager'
# Contract test: the Redis HGETALL-backed strategy must satisfy the
# shared examples ('a valid strategy') defined elsewhere in the suite.
describe Rhcf::Timeseries::RedisHgetallStrategy do
  it_behaves_like 'a valid strategy'
end
| 20.625 | 50 | 0.812121 |
017b571f9940dc93f821ff7715f6f624399bf059 | 46 | module ActiveReporter
# Gem version, bumped on each release.
VERSION = "0.6.5"
end
| 11.5 | 21 | 0.717391 |
6100000c76ff088a0b4a6080401678ffda80a5e4 | 196 | first_number = 1
second_number = 2
sum = first_number + second_number          # 1 + 2 => 3
difference = first_number - second_number   # 1 - 2 => -1
product = first_number * second_number      # 1 * 2 => 2
quotient = first_number / second_number | 17.818182 | 41 | 0.790816 |
619387be10bb3b1b33b0ff745ea3f2005fd5b681 | 181 | require 'just_shogi/pieces/kin_base'
module JustShogi
  # = Narikei
  #
  # The piece that can move 1 space orthogonally or forwards diagonally —
  # i.e. it moves exactly like a gold general, so it simply inherits
  # everything from KinBase without overriding anything.
  class Narikei < KinBase; end
end
| 18.1 | 71 | 0.745856 |
1102835d8850da3b2731be56a496fe989c181a1a | 8,258 | # frozen_string_literal: true
Bundler.require
require_all 'features/support/lib'
# Unit tests for the Testrail helper module (features/support/lib),
# exercised against stubbed TestRail endpoints selected via ENV.
# NOTE(review): several example descriptions contain typos ('tetrail',
# 'corect'); left untouched here because changing them renames the tests.
describe 'Testrail' do
  include Testrail

  context 'when the testrail environment
  variable is not set' do
    it 'will not have tetrail enabled' do
      expect(testrail_enabled).to be false
    end
  end

  context 'when the testrail environment
  variable is set to false' do
    it 'will not have tetrail enabled' do
      ENV['TESTRAIL'] = 'false'
      expect(testrail_enabled).to be false
    end
  end

  context 'when the testrail environment
  variable is set to true' do
    it 'will have testrail enabled' do
      ENV['TESTRAIL'] = 'true'
      expect(testrail_enabled).to be true
    end

    context 'but missing any of the other
    required testrail environment variables' do
      it 'will abort the run' do
        ENV['TESTRAIL'] = 'true'
        expect(missing_testrail_environment_variables).to be true
      end
    end

    context 'but the url for testrail is wrong' do
      it 'will abort the run' do
        ENV['TESTRAIL'] = 'true'
        ENV['TESTRAIL_URL'] = 'https://invalid_url.testrail.net'
        expect(testrail_credentials_invalid).to be true
      end
    end

    context 'but the username for testrail is wrong' do
      it 'will abort the run' do
        set_invalid_username_env_variables
        expect(testrail_credentials_invalid).to be true
      end
    end

    context 'but the password for testrail is wrong' do
      it 'will abort the run' do
        set_invalid_password_env_variables
        expect(testrail_credentials_invalid).to be true
      end
    end

    context 'but the project ID is invalid' do
      it 'will not add a new test run in Testrail' do
        set_invalid_testrail_project_env_variables
        expect(valid_testrail_project).to be false
      end

      it 'will print helper text to user that
      project id is incorrect' do
        set_invalid_testrail_project_env_variables
        expect(valid_testrail_project).to be false
      end
    end

    context 'and the credentials and project id is valid' do
      it 'will add a new test run in Testrail' do
        set_valid_testrail_env_variables
        expect(valid_testrail_project).to be true
      end
    end
  end

  context 'when a scenario is not already in the testrail project' do
    it 'the scenario_in_project? method will be false' do
      set_scenario_not_in_project_env_variables
      @scenario = fake_scenario
      expect(scenario_in_project?).to be false
    end
  end

  context 'when a scenario is already in the testrail project' do
    it 'the scenario_in_project? method will be true' do
      set_scenario_in_project_env_variables
      @scenario = fake_scenario
      expect(scenario_in_project?).to be true
    end
  end

  context 'when a scenario is to be added to testrail' do
    it "the scenario's gherkin will be corect" do
      set_valid_testrail_env_variables
      @scenario = fake_scenario
      expect(scenario_gherkin).to eq expected_gherkin
    end
  end

  context 'when the version (milestone) already is available in testrail' do
    it 'will return the milestone' do
      set_valid_testrail_env_variables
      ENV['VERSION'] = 'AlreadyThere1'
      expect(milestone_with_version).not_to be nil
    end

    it 'will return a milestone id' do
      set_valid_testrail_env_variables
      ENV['VERSION'] = 'AlreadyThere1'
      milestone = milestone_with_version || add_milestone
      expect(milestone['id']).to be > 0
    end
  end

  context 'when the version (milestone) is not available in testrail' do
    it 'will not return the milestone' do
      set_valid_testrail_env_variables
      ENV['VERSION'] = 'NotThere1'
      expect(milestone_with_version).to be nil
    end

    it 'will return a milestone id' do
      set_valid_testrail_env_variables
      ENV['VERSION'] = 'NotThere1'
      # Falls back to creating the milestone when the lookup misses.
      milestone = milestone_with_version || add_milestone
      expect(milestone['id']).to be > 0
    end
  end

  context 'when the scenario fails with a error
  message longer than 250 characters' do
    it 'will truncate the error message so it fits 250 characters' do
      # Doubles for Cucumber's scenario/exception objects.
      @scenario = OpenStruct.new(
        exception: OpenStruct.new(
          message: error_message_more_than_250_characters
        )
      )
      expect(error_message.size).to be < 250
    end
  end

  context 'when the scenario fails with an error
  message less than 250 characters' do
    it 'will keep the entire error message' do
      @scenario = OpenStruct.new(
        exception: OpenStruct.new(
          message: error_message_less_than_250_characters
        )
      )
      expect(error_message).to eq error_message_less_than_250_characters
    end
  end

  context 'when the scenario does not fail' do
    it 'it will not have an error message' do
      @scenario = OpenStruct.new(
        exception: nil
      )
      expect(error_message).to be nil
    end
  end

  context 'when a scenario has finished running' do
    it 'will have an elapsed time' do
      @scenario_start_time = Time.now
      expect(scenario_elapsed_time).not_to be nil
    end
  end
end
# Fixed lorem-ipsum sample guaranteed to stay under TestRail's
# 250-character comment limit (used to check no truncation happens).
def error_message_less_than_250_characters
  [
    'Lorem ipsum dolor sit amet, consectetuer adipiscing elit.',
    ' Aenean commodo ligula eget dolor. Aenean massa. Cum sociis',
    ' natoque penatibus et magnis dis parturient montes, nascetur',
    ' ridiculus mus. Donec quam felis, ultricies nec, pellentesque'
  ].join
end
# Fixed lorem-ipsum sample guaranteed to exceed TestRail's
# 250-character comment limit (used to check truncation kicks in).
def error_message_more_than_250_characters
  [
    'Lorem ipsum dolor sit amet, consectetuer adipiscing elit.',
    ' Aenean commodo ligula eget dolor. Aenean massa. Cum sociis',
    ' natoque penatibus et magnis dis parturient montes, nascetur',
    ' ridiculus mus. Donec quam felis, ultricies nec, pellentesque',
    ' eu, pretium quis, sem. Nulla consequat massa quis enim. Donec.'
  ].join
end
# Valid credentials, invalid project id.
# FIX: 'validPassword' was assigned to ENV['TESTRAIL_USERNAME'] a second
# time (overwriting the username and leaving the password unset); it now
# goes to ENV['TESTRAIL_PASSWORD'] — confirm that key name against the
# Testrail helper's expectations.
def set_invalid_testrail_project_env_variables
  ENV['TESTRAIL'] = 'true'
  ENV['TESTRAIL_URL'] = 'https://invalid_project_id.testrail.net'
  ENV['TESTRAIL_USERNAME'] = '[email protected]'
  ENV['TESTRAIL_PASSWORD'] = 'validPassword'
  ENV['TESTRAIL_PROJECT_ID'] = 'InvalidID'
end
# Fully valid TestRail configuration.
# FIX: the second ENV['TESTRAIL_USERNAME'] assignment (a copy/paste slip
# that clobbered the username) is now ENV['TESTRAIL_PASSWORD'].
def set_valid_testrail_env_variables
  ENV['TESTRAIL'] = 'true'
  ENV['TESTRAIL_URL'] = 'https://valid_project_id.testrail.net'
  ENV['TESTRAIL_USERNAME'] = '[email protected]'
  ENV['TESTRAIL_PASSWORD'] = 'validPassword'
  ENV['TESTRAIL_PROJECT_ID'] = '1'
end
# Invalid username, valid password.
# FIX: the password was being written to ENV['TESTRAIL_USERNAME'],
# destroying the very value this helper is meant to set up.
def set_invalid_username_env_variables
  ENV['TESTRAIL'] = 'true'
  ENV['TESTRAIL_URL'] = 'https://invalid_credentials.testrail.net'
  ENV['TESTRAIL_USERNAME'] = '[email protected]'
  ENV['TESTRAIL_PASSWORD'] = 'validPassword'
  ENV['TESTRAIL_PROJECT_ID'] = '1'
end
# Valid username, invalid password.
# FIX: 'invalidPassword' was assigned to ENV['TESTRAIL_USERNAME'],
# which made this helper indistinguishable from an invalid-username
# setup; it now targets ENV['TESTRAIL_PASSWORD'].
def set_invalid_password_env_variables
  ENV['TESTRAIL'] = 'true'
  ENV['TESTRAIL_URL'] = 'https://invalid_credentials.testrail.net'
  ENV['TESTRAIL_USERNAME'] = '[email protected]'
  ENV['TESTRAIL_PASSWORD'] = 'invalidPassword'
  ENV['TESTRAIL_PROJECT_ID'] = '1'
end
# Stubbed endpoint where the scenario is NOT yet in the project.
# FIX: second ENV['TESTRAIL_USERNAME'] assignment redirected to
# ENV['TESTRAIL_PASSWORD'] (same copy/paste bug as the other setters).
def set_scenario_not_in_project_env_variables
  ENV['TESTRAIL'] = 'true'
  ENV['TESTRAIL_URL'] = 'https://scenario-not-in-project.testrail.net'
  ENV['TESTRAIL_USERNAME'] = '[email protected]'
  ENV['TESTRAIL_PASSWORD'] = 'validPassword'
  ENV['TESTRAIL_PROJECT_ID'] = '1'
end
# Stubbed endpoint where the scenario already exists in the project.
# FIX: second ENV['TESTRAIL_USERNAME'] assignment redirected to
# ENV['TESTRAIL_PASSWORD'] (same copy/paste bug as the other setters).
def set_scenario_in_project_env_variables
  ENV['TESTRAIL'] = 'true'
  ENV['TESTRAIL_URL'] = 'https://scenario-already-in-project.testrail.net'
  ENV['TESTRAIL_USERNAME'] = '[email protected]'
  ENV['TESTRAIL_PASSWORD'] = 'validPassword'
  ENV['TESTRAIL_PROJECT_ID'] = '1'
end
# Minimal stand-in for a Cucumber scenario: a name plus the gherkin
# source lines produced by fake_sources.
def fake_scenario
  OpenStruct.new(name: 'this is a test Scenario', all_source: fake_sources)
end
# Gherkin source doubles: one (keyword, text) struct per line of the
# fake scenario, in feature-file order.
def fake_sources
  [
    ['Feature', 'this is a test Feature'],
    ['Scenario', 'this is a test Scenario'],
    ['Given ', 'this is a test Given step'],
    ['When ', 'this is a test When step'],
    ['And ', 'this is a test And step'],
    ['Then ', 'this is a test Then step'],
    ['But ', 'this is a test But step']
  ].map { |keyword, text| struct(keyword, text) }
end
# Tiny open struct with the two fields the Testrail helper reads from a
# gherkin source line.
def struct(keyword, text)
  OpenStruct.new(keyword: keyword, text: text)
end
# Expected gherkin rendering of fake_scenario: only the step lines
# (Given/When/And/Then/But), newline-separated, no trailing newline.
def expected_gherkin
  [
    'Given this is a test Given step',
    'When this is a test When step',
    'And this is a test And step',
    'Then this is a test Then step',
    'But this is a test But step'
  ].join("\n")
end
| 28.1843 | 76 | 0.698474 |
110ee4f5d530a809af9e13714f1cb9ecb36e1cdd | 630 | Pod::Spec.new do |s|
s.name = "CMMapLauncher"
s.version = "0.0.1"
s.summary = "CMMapLauncher is a mini-library for iOS that makes it quick and easy to show directions in various mapping applications."
s.homepage = "https://github.com/citymapper/CMMapLauncher"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = 'Citymapper'
s.platform = :ios
# Pinned to a commit rather than a tag — the repo carries no 0.0.1 tag.
s.source = { :git => "https://github.com/citymapper/CMMapLauncher.git", :commit => "33d369a7f90da7958f3742f4643e75d7402b2bcc" }
s.source_files = 'CMMapLauncher'
# Needed for the MKMapItem-based launching of Apple Maps.
s.framework = 'MapKit'
s.requires_arc = true
end
| 45 | 141 | 0.649206 |
d5acc84316715862581f4c6ff1abb3c382633c3e | 269 | require "bundler/setup"
require "typograf_client"
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  config.expect_with :rspec do |c|
    # Only the modern expect(...) syntax; no monkey-patched `should`.
    c.syntax = :expect
  end
end
| 22.416667 | 63 | 0.750929 |
f70ea4ec61121b90ce2bb942eb77f7c23a1ebecc | 121 | require 'test_helper'
# Model tests for Dataset — still the scaffolded placeholder only.
class DatasetTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 15.125 | 43 | 0.702479 |
113dbcb1b38e2c0512196fb454784f5ddbc785b9 | 1,048 | # frozen_string_literal: true
module RSpec
  module Support
    # Helpers for turning files under +file_fixture_path+ into the various
    # objects specs need: paths, tempfile copies, uploaded files and Assets.
    module FileFixtureHelpers
      # Absolute path (String) of the fixture file at +path+.
      def file_fixture(path)
        file_fixture_pathname(path).to_s
      end

      # Absolute Pathname of the fixture file at +path+.
      def file_fixture_pathname(path)
        ::Rails.root.join(file_fixture_path, path)
      end

      # Copies the fixture into a fresh binary-mode Tempfile and returns
      # the handle (intentionally left open — the caller owns it).
      def file_fixture_tempfile(path)
        tempfile = Tempfile.open(encoding: 'binary')
        FileUtils.cp(file_fixture(path), tempfile.path)
        tempfile
      end

      # Wraps the fixture in an ActionDispatch uploaded-file object, as if
      # it had been posted through a multipart form.
      def file_fixture_uploaded_file(path, filename: nil, content_type: nil)
        filename ||= File.basename(path)
        ActionDispatch::Http::UploadedFile.new(
          tempfile: file_fixture_tempfile(path),
          filename: filename,
          type: content_type || 'application/octet-stream'
        )
      end

      # Creates an Asset whose mp3 attachment is the given fixture.
      # The owner defaults to the :sudara fixture user.
      def file_fixture_asset(path, filename: nil, content_type: nil, user: nil)
        Asset.create(
          user: user || users(:sudara),
          mp3: file_fixture_uploaded_file(path, filename: filename, content_type: content_type)
        )
      end
    end
  end
end
| 27.578947 | 95 | 0.648855 |
910f67a128f68d5c30aff3d684e6692cd60e1e54 | 597 | cask 'soulver' do
version '3.3.2-96'
sha256 '997388cb58b95587e73c89e7c1771f413f5164a9633cb3396f9bb1f8b66e0bac'

url "https://soulver.app/mac/sparkle/soulver-#{version}.zip"
appcast 'https://soulver.app/mac/sparkle/appcast.xml'
name 'Soulver'
homepage 'https://soulver.app/'

# The app self-updates via Sparkle, so brew skips livecheck upgrades.
auto_updates true
depends_on macos: '>= :mojave'

app "Soulver #{version.major}.app"

# Leftovers removed by `brew uninstall --zap`.
zap trash: [
  '~/Library/Application Support/Soulver 3',
  '~/Library/Application Support/app.soulver.mac',
  '~/Library/Preferences/app.soulver.mac.plist',
]
end
| 28.428571 | 75 | 0.663317 |
5de098cdbe5097e8a2a8facf01623e464165dd71 | 884 | # frozen_string_literal: true
module DrawioDsl
  module Schema
    # Ordered container for the shape/layout nodes attached to a parent
    # drawio element. Preserves insertion order.
    class NodeList
      # All nodes, in insertion order.
      attr_reader :nodes

      def initialize
        @nodes = []
      end

      # Wires +node+ to +parent+ and appends it to the list.
      def add(parent, node)
        node.parent = parent
        nodes.push(node)
      end

      # Every node, in insertion order.
      def all
        nodes
      end

      # Only the nodes that are shapes (grep matches via Class#===).
      def shapes
        nodes.grep(DrawioDsl::Schema::Shape)
      end

      # Only the nodes that are layouts.
      def layouts
        nodes.grep(DrawioDsl::Schema::Layout)
      end

      def length
        nodes.size
      end

      def empty?
        nodes.empty?
      end

      def any?
        nodes.any?
      end

      def first
        nodes.first
      end

      # Asks each node able to render itself to serialize into +xml+.
      def as_xml(xml)
        nodes.each do |child|
          child.as_xml(xml) if child.respond_to?(:as_xml)
        end
      end

      def to_h
        nodes.map(&:to_h)
      end
    end
  end
end
| 15.508772 | 70 | 0.512443 |
1d8e61cc258b87fa594fa4611d498915861885a2 | 371 | module LitecoinPayable::Adapters
class Base
  # Instantiates the block-explorer adapter named by
  # LitecoinPayable.config.adapter; raises for unrecognized values so a
  # misconfiguration fails loudly instead of silently skipping payments.
  def self.fetch_adapter
    case LitecoinPayable.config.adapter
    when "blockchain_info"
      LitecoinPayable::Adapters::BlockchainInfoAdapter.new
    when "blockcypher"
      LitecoinPayable::Adapters::BlockcypherAdapter.new
    else
      raise "Please specify an adapter"
    end
  end
end
end
| 21.823529 | 60 | 0.695418 |
6a45f13c2a2c5d7c79f3145e9e5d5fbb70b51433 | 9,765 | # encoding: utf-8
require 'common/format'
=begin
= ImporterEntityPopulator
- Goggles framework vers.: 6.127
- author: Leega
Strategy that populates importer temporary data structures from json parsed data.
Assumes meeting already exist. Also meeting_sessions should exists with meeting_events defined
  Note that the importer has to create new meeting_events; they will be associated with the first meeting_session
Steps to perform for data parsing
0. Collect distinct meeting_programs (and meeting_events)
0.a Collect events list
0.b Collect program list
(Those steps could be performed while collecting team names in step 1)
1. Collect distinct team names
2. Collect distinct swimmer names (with year and sex) into corresponding team
3. Collect results associating them to respective swimmers
The resulting import structure should be like:
events_list [event [programs]]
teams [team_data, team_id, team_affiliation_id [swimmers [swimmer_data, swimmer_id, badge_id, {meting_program, result_data}]]]
Meeting header json example:
"name": "15° Trofeo Citta` di Riccione",
"meetingURL": "https://www.federnuoto.it/home/master/circuito-supermaster/archivio-2012-2019/stagione-2018-2019.html#/risultati/134219:15%C2%B0-trofeo-citta`%C2%A0di-riccione.html",
"manifestURL": "/component/solrconnect/download.html?file=L3Zhci93d3cvZmluX2ZpbGVzL2V2ZW50aS8wMDAwMTM0MjE5LnBkZg==",
"dateDay1": "08",
"dateMonth1": "Dicembre",
"dateYear1": "2018",
"dateDay2": "09",
"dateMonth2": "Dicembre",
"dateYear2": "2018",
"organization": "A.S.D. POLISPORTIVA COMUNALE RICCIONE",
"venue1": "RICCIONE STADIO DEL NUOTO",
"address1": "VIA MONTEROSA, SNC - Riccione (RN)",
"venue2": "",
"address2": "",
"poolLength": "50",
"timeLimit": "SI",
"registration": "12/11 - 03/12 23:45",
"sections": []
Individual result group json example (inside the sections element):
{
"title": "50 Stile Libero - M25",
"fin_id_evento": "134219",
"fin_codice_gara": "00",
"fin_sigla_categoria": "M25",
"fin_sesso": "M",
"rows": [
{
"pos": "1",
"name": "PETRINI ANDREA",
"year": "1992",
"sex": "M",
"team": "Virtus Buonconvento ssd",
"timing": "24.32",
"score": "949,42"
},...
With that regexp we can extract swimmer data for test from json (swimmer name, year, sex and team name)
/(?<="name": )(".*",)(?=\s*"year": ("[0-9]{4}",)\s*"sex": ("[MF]",)\s*"team": (".*",))/
=end
# Strategy that populates the importer's temporary data structures from
# JSON-parsed meeting results. Assumes the Meeting (and its sessions /
# events) already exist.
class ImporterEntityPopulator
  # These must be initialized on creation:
  attr_reader :full_pathname, :meeting
  # These can be edited later on:
  attr_accessor :data_hash, :importer_hash, :individual_events_def

  # Creates a new instance.
  #
  # - full_pathname: results file name, with full path
  # - meeting: the Meeting the results belong to
  def initialize( full_pathname, meeting )
    @full_pathname = full_pathname
    @meeting = meeting
    @data_hash = Hash.new()
    @importer_hash = JsonImporterDAO.new( meeting )
    @individual_events_def = nil
  end

  # Reads the source file and returns its whole content as a String.
  def read_json_file()
    data = ""
    File.open( @full_pathname, 'r' ) do |f|
      f.each_line do |curr_line|
        data << curr_line
      end
    end
    data
  end

  # Parses the JSON file; stores and returns the resulting (string-keyed)
  # Hash in #data_hash.
  def parse()
    @data_hash = JSON.parse( read_json_file )
  end

  # Reads the parsed hash and collects the distinct primary entities
  # (events, programs, teams, swimmers, results) into #importer_hash,
  # tracing duplicate programs / swimmer keys / results as errors.
  def get_distinct_elements()
    # TODO - Find pool type. Verify this is a good data
    # What if meeting has multiple pools of different types (such Regionali Emilia)
    pool = @data_hash['poolLength']

    # Each program element carries its distinct results in 'rows'.
    @data_hash['sections'].each do |program|
      # Separate the event title from the program title and compute the
      # program key (event, category, sex).
      program_title = program['title'].upcase
      event_title = find_event_title( program_title )
      event_code = find_event_code( event_title )
      program_key = create_program_key(event_code, program['fin_sigla_categoria'], program['fin_sesso'])

      # If the event isn't already defined, create it.
      @importer_hash.events[event_code] = JsonImporterDAO::EventImporterDAO.new( event_title ) if !@importer_hash.events.has_key?(event_code)
      event = @importer_hash.events[event_code]

      # Fall back to deriving the pool type from the event title when the
      # meeting header carried none.
      # FIX: the original read `if pool = nil` — an assignment, not a
      # comparison — which reset pool to nil on every program and never
      # ran the fallback.
      pool = find_pool_type( event_title ) if pool.nil?

      # Assumes program elements are already unique; traces an error otherwise.
      @importer_hash.add_duplicate_program_error(program_key) if event.programs.has_key?(program_key)
      event.programs[program_key] = JsonImporterDAO::EventProgramImporterDAO.new( program_title, pool, program['fin_sesso'], program['fin_sigla_categoria'] )

      # Cycle program results.
      program['rows'].each do |result|
        # For teams only the name is considered.
        team_name = result['team'].upcase
        @importer_hash.teams[team_name] = JsonImporterDAO::TeamImporterDAO.new( team_name ) if !@importer_hash.teams.has_key?( team_name )
        team = @importer_hash.teams[team_name]

        # Swimmers are identified by name, year and sex.
        swimmer_name = result['name'].upcase
        swimmer_year = result['year']
        swimmer_sex = result['sex'].upcase
        swimmer_key = create_swimmer_key( swimmer_name, swimmer_year, swimmer_sex )
        if !team.swimmers.has_key?( swimmer_key )
          # The same key appearing under another team may indicate an error.
          if @importer_hash.swimmer_keys.has_key?( swimmer_key )
            @importer_hash.add_duplicate_swimmer_error( swimmer_key )
          else
            # Store swimmer key for checking purposes.
            @importer_hash.swimmer_keys[swimmer_key] = []
          end
          team.swimmers[swimmer_key] = JsonImporterDAO::SwimmerImporterDAO.new( swimmer_name, swimmer_year, swimmer_sex )
        end
        swimmer = team.swimmers[swimmer_key]
        @importer_hash.swimmer_keys[swimmer_key] << "#{team_name} - #{event_code}"

        # Add the result to the swimmer; a second result under the same
        # event code is traced as an error.
        @importer_hash.add_duplicate_result_error("#{swimmer_key} #{event_code} #{program_title} #{result.to_s}") if swimmer.results.has_key?(event_code)
        swimmer.results[event_code] = JsonImporterDAO::SwimmerResultImporterDAO.new( result['pos'], result['timing'], result['score'] )
      end
    end
  end

  # Removes separator characters (spaces, dashes, underscores, dots).
  def remove_invalid_char( name )
    name.gsub(/[\s\-_\.]/, '')
  end

  # Builds the 'unique' key identifying an event program.
  def create_program_key( event, category, sex, separator = ';' )
    event + separator + category + separator + sex
  end

  # Builds the 'unique' key identifying a swimmer.
  def create_swimmer_key( swimmer_name, swimmer_year, swimmer_sex, separator = ';' )
    remove_invalid_char(swimmer_name) + separator + swimmer_year + separator + swimmer_sex
  end

  # Returns an hash of event_code => [short_name, compact_name, description]
  # for each individual (non-relay) event type.
  def get_individual_event_list
    possible_events = Hash.new()
    EventType.are_not_relays.joins(:stroke_type).includes(:stroke_type).each do |event_type|
      possible_events[event_type.code] = [event_type.i18n_short, event_type.i18n_compact, event_type.i18n_description]
    end
    possible_events
  end

  # Extracts the event title (distance + stroke) out of a program title.
  # NOTE: raises NoMethodError if the title contains no recognizable
  # distance/stroke pair (the original behaved the same way).
  def find_event_title( program_title )
    regexp = Regexp::new(/(50|100|200|400|800|1500)\s*(STILE LIBERO|STILE|DORSO|MISTI|RANA|FARFALLA|DELFINO|MI|MX|SL|DO|FA|RA|ST|DE)/i)
    regexp.match( program_title )[0]
  end

  # Maps an event title to the internal event code (e.g. '100 DORSO' =>
  # '100DO'); returns nil when distance or stroke can't be recognized.
  # Anything not matching a specific stroke falls back to freestyle 'SL'.
  def find_event_code( event_title )
    distace_match = /(50|100|200|400|800|1500)/.match( event_title )
    if distace_match
      distance = distace_match[0]
      stroke_match = /(STILE LIBERO|STILE|DORSO|MISTI|RANA|FARFALLA|DELFINO|MI|MX|SL|DO|FA|RA|ST|DE)/i.match( event_title )
      if stroke_match
        stroke = stroke_match[0]
        if /(DORSO|DO|DS)/i.match?( stroke )
          stroke_code = 'DO'
        elsif /(RANA|RA)/i.match?( stroke )
          stroke_code = 'RA'
        elsif /(FARFALLA|DELFINO|FA|DE)/i.match?( stroke )
          stroke_code = 'FA'
        elsif /(MISTI|MI|MX)/i.match?( stroke )
          stroke_code = 'MI'
        else
          stroke_code = 'SL'
        end
        distance + stroke_code
      else
        nil
      end
    else
      nil
    end
  end

  # Finds the pool type for the given event in the meeting schedule.
  # TODO: still hard-coded; assumes at least one session is configured.
  def find_pool_type( event_title )
    '25'
  end
  #-- --------------------------------------------------------------------------
  #++
end
| 36.301115 | 184 | 0.664823 |
1d4c2880129bc9f7a03962204c15e1258c32d422 | 90 | # typed: strict
module Test::Foo::MyPackage
  # References a constant from another package — presumably so Sorbet's
  # package checker validates cross-package visibility of TestUtil
  # (confirm against the __package.rb files).
  Test::Foo::Bar::OtherPackage::TestUtil
end
| 15 | 40 | 0.733333 |
1d2286c469c19abb04f6a853d766502f7204c690 | 159 | require File.expand_path('../../../../spec_helper', __FILE__)
describe "Net::HTTPHeader#type_params" do
  # Pending example (no block): keeps the method listed as unreviewed.
  it "needs to be reviewed for spec completeness"
end
| 26.5 | 61 | 0.72956 |
e84415a8687edaecfd30ae44cdb697063c00f856 | 3,213 | # frozen_string_literal: true
require "cases/helper"
require "models/topic"
require "models/person"
# Tests for ActiveModel's validates_confirmation_of: pairing an attribute
# with its *_confirmation twin, i18n of the error message, custom
# reader/writer precedence, and the case_sensitive option.
class ConfirmationValidationTest < ActiveModel::TestCase
  def teardown
    # Validators are registered on the class, so each test must clean up.
    Topic.clear_validators!
  end

  def test_no_title_confirmation
    Topic.validates_confirmation_of(:title)

    t = Topic.new(author_name: "Plutarch")
    assert t.valid?

    # A confirmation value without a title must fail...
    t.title_confirmation = "Parallel Lives"
    assert t.invalid?

    # ...a title with a nil confirmation passes (nil skips the check)...
    t.title_confirmation = nil
    t.title = "Parallel Lives"
    assert t.valid?

    # ...and a matching pair passes too.
    t.title_confirmation = "Parallel Lives"
    assert t.valid?
  end

  def test_title_confirmation
    Topic.validates_confirmation_of(:title)

    t = Topic.new("title" => "We should be confirmed", "title_confirmation" => "")
    assert t.invalid?

    t.title_confirmation = "We should be confirmed"
    assert t.valid?
  end

  def test_validates_confirmation_of_for_ruby_class
    # Works on a plain ActiveModel class, not just AR-backed ones.
    Person.validates_confirmation_of :karma

    p = Person.new
    p.karma_confirmation = "None"
    assert p.invalid?

    assert_equal ["doesn't match Karma"], p.errors[:karma_confirmation]

    p.karma = "None"
    assert p.valid?
  ensure
    Person.clear_validators!
  end

  def test_title_confirmation_with_i18n_attribute
    begin
      # Swap in an isolated i18n backend so translations don't leak.
      @old_load_path, @old_backend = I18n.load_path.dup, I18n.backend
      I18n.load_path.clear
      I18n.backend = I18n::Backend::Simple.new
      I18n.backend.store_translations("en",
        errors: { messages: { confirmation: "doesn't match %{attribute}" } },
        activemodel: { attributes: { topic: { title: "Test Title" } } })

      Topic.validates_confirmation_of(:title)

      t = Topic.new("title" => "We should be confirmed", "title_confirmation" => "")
      assert t.invalid?
      # %{attribute} must interpolate the translated attribute name.
      assert_equal ["doesn't match Test Title"], t.errors[:title_confirmation]
    ensure
      I18n.load_path.replace @old_load_path
      I18n.backend = @old_backend
      I18n.backend.reload!
    end
  end

  test "does not override confirmation reader if present" do
    klass = Class.new do
      include ActiveModel::Validations

      def title_confirmation
        "expected title"
      end

      validates_confirmation_of :title
    end

    assert_equal "expected title", klass.new.title_confirmation,
      "confirmation validation should not override the reader"
  end

  test "does not override confirmation writer if present" do
    klass = Class.new do
      include ActiveModel::Validations

      def title_confirmation=(value)
        @title_confirmation = "expected title"
      end

      validates_confirmation_of :title
    end

    model = klass.new
    model.title_confirmation = "new title"
    assert_equal "expected title", model.title_confirmation,
      "confirmation validation should not override the writer"
  end

  def test_title_confirmation_with_case_sensitive_option_true
    Topic.validates_confirmation_of(:title, case_sensitive: true)

    t = Topic.new(title: "title", title_confirmation: "Title")
    assert t.invalid?
  end

  def test_title_confirmation_with_case_sensitive_option_false
    Topic.validates_confirmation_of(:title, case_sensitive: false)

    t = Topic.new(title: "title", title_confirmation: "Title")
    assert t.valid?
  end
end
| 26.336066 | 84 | 0.708061 |
abe40a516e988080032978859c6c5b73bcc173ec | 294 | class OrganizationObserver < ActiveRecord::Observer
# Runs after every committed transaction touching an Organization and
# registers a webhook event, flagging whether the record was just created.
def after_commit(organization)
  Webhook::EventRegister.new(organization, created: just_created?(organization))
end
private

# True when the committed transaction created the record. Relies on
# ActiveRecord's private transaction_record_state (hence the send);
# !! coerces the possibly-nil state into a strict boolean.
def just_created?(organization)
  !!organization.send(:transaction_record_state, :new_record)
end
end
| 24.5 | 82 | 0.785714 |
7a6f9ff93f6964d0f48e320d1b19e083a1f8890e | 1,858 | #
# Be sure to run `pod lib lint LPMusicKitiOS.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name = 'LPMusicKitiOS'
  s.version = '1.0.10'
  s.summary = 'iOS Device SDK.'

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage = 'https://github.com/linkplayapp/LPMusicKitiOS'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'LinkPlay' => '[email protected]' }
  s.source = { :git => 'https://github.com/linkplayapp/LPMusicKitiOS.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '9.0'

  # Bundled socket layer, with logging via CocoaLumberjack.
  s.subspec 'LPAsyncSocket' do |ss|
    ss.source_files = 'LPMusicKitiOS/Third/LPAsyncSocket/**/*'
    ss.dependency 'CocoaLumberjack'
  end

  # Ships a prebuilt binary framework, so the pod must be static.
  s.static_framework = true
  s.libraries = 'c++'
  s.ios.vendored_framework = '*.framework'
  s.dependency 'AFNetworking', '~> 4.0'
  s.dependency 'KissXML'

  # s.resource_bundles = {
  #   'LPMusicKitiOS' => ['LPMusicKitiOS/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
| 35.056604 | 109 | 0.646932 |
abffa5d206026b9eb6b67d227678771f575b66aa | 1,731 | class Wv < Formula
desc "Programs for accessing Microsoft Word documents"
homepage "https://wvware.sourceforge.io/"
url "https://abisource.com/downloads/wv/1.2.9/wv-1.2.9.tar.gz"
sha256 "4c730d3b325c0785450dd3a043eeb53e1518598c4f41f155558385dd2635c19d"
revision 1

bottle do
  sha256 arm64_big_sur: "36bac1865cab3a50dafdf0477bb914d6c9df08c386b5586951f6681e5d5f73ad"
  sha256 big_sur: "6e6499eca2f6ab68a58a4a0548ac4954eec052d20558dc1bd834cc4bb030e0cc"
  sha256 catalina: "c617efb5a72bf2dbca4a3c85fdb59460ce6aaaf21b1f1db1e89f53ac3fc07224"
  sha256 mojave: "e3b62df7fad6fefbd233abc45ede4f9705b447df51433e0129a82d98dc321811"
  sha256 high_sierra: "470ecfe6b84e931d4c4363b8274a04d42b2e2c3b6c5f50bc12b55a7fda6f5acb"
  sha256 sierra: "7df867080d9b2edb57780c5f971a4a22d01c301aff70c1af7a6ce13385828908"
  sha256 x86_64_linux: "3cdd75690a26f3dc49b27296044a3c19afb7cacf812ef32f4eb23209b84afb06" # linuxbrew-core
end

depends_on "pkg-config" => :build
depends_on "glib"
depends_on "libgsf"
depends_on "libpng"
depends_on "libwmf"

def install
  system "./configure", "--disable-debug", "--disable-dependency-tracking",
                        "--prefix=#{prefix}",
                        "--mandir=#{man}"
  system "make"
  ENV.deparallelize
  # the makefile generated does not create the file structure when installing
  # till it is fixed upstream, create the target directories here.
  # https://www.abisource.com/mailinglists/abiword-dev/2011/Jun/0108.html
  bin.mkpath
  (lib/"pkgconfig").mkpath
  (include/"wv").mkpath
  man1.mkpath
  (pkgshare/"wingdingfont").mkpath
  (pkgshare/"patterns").mkpath
  system "make", "install"
end
end
| 39.340909 | 109 | 0.740035 |
61c7b078ae9871d8522a86cc0bc1e150c88571cb | 254 | class CollectibleGem
# World-space position of the gem (read by collision/camera code).
attr_reader :x, :y

# image: a drawable responding to draw_rot (e.g. a Gosu::Image, shared
# across gems); x/y: fixed spawn position.
def initialize(image, x, y)
  @image = image
  @x, @y = x, y
end

def draw
  # Draw, slowly rotating
  @image.draw_rot(@x, @y, 0, 25 * Math.sin(Gosu.milliseconds / 133.7))
end
end
| 16.933333 | 73 | 0.570866 |
ffaf6b08cbc909839d4911bcbe1c3c77d63f266e | 1,116 | # providers/vhost.rb
#
# Author: Simple Finance <[email protected]>
# License: Apache License, Version 2.0
#
# Copyright 2013 Simple Finance Technology Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Create and delete virtualhosts
include RabbitMQ::Management

# Caches the management-API client and the vhost name off the resource.
def initialize(new_resource, run_context)
  super
  @client = rabbitmq_client
  @vhost = new_resource.vhost
end

# Creates the vhost and grants the admin user configure-only permissions
# on it (no read/write — administration only).
action :add do
  @client.create_vhost(@vhost)
  @client.update_permissions_of(
    @vhost,
    rabbitmq_admin_user,
    read: '',
    write: '',
    configure: '.*'
  )
end

# Deletes the vhost via the management API.
action :delete do
  @client.delete_vhost(@vhost)
end
| 25.363636 | 74 | 0.738351 |
33c3bff70588fa49d6480ebd8d57c3880728fb44 | 2,773 | require "active_support/core_ext/integer/time"
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded any time
  # it changes. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  # Run rails dev:cache to toggle caching.
  if Rails.root.join("tmp", "caching-dev.txt").exist?
    config.action_controller.perform_caching = true
    config.action_controller.enable_fragment_cache_logging = true

    config.cache_store = :memory_store
    config.public_file_server.headers = {
      "Cache-Control" => "public, max-age=#{2.days.to_i}"
    }
  else
    config.action_controller.perform_caching = false

    config.cache_store = :null_store
  end

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise exceptions for disallowed deprecations.
  config.active_support.disallowed_deprecation = :raise

  # Tell Active Support which deprecation messages to disallow.
  config.active_support.disallowed_deprecation_warnings = []

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Highlight code that triggered database queries in logs.
  config.active_record.verbose_query_logs = true

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  # NOTE: deliberately false here to keep dev page renders fast.
  config.assets.debug = false

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations.
  # config.i18n.raise_on_missing_translations = true

  # Annotate rendered view with file names.
  # config.action_view.annotate_rendered_view_with_filenames = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # Uncomment if you wish to allow Action Cable access from any origin.
  # config.action_cable.disable_request_forgery_protection = true
end
| 36.012987 | 87 | 0.771727 |
1119ad46cee10686ca22fa68bb2eb8907e4b77c3 | 377 | require 'base_kde_formula'
# Formula for KMag, the KDE screen magnifier.
# Purely declarative: BaseKdeFormula supplies the build/install logic.
class Kmag < BaseKdeFormula
  homepage 'http://www.kde.org/'
  # Stable release tarball and its SHA-1 checksum.
  url 'http://download.kde.org/stable/4.11.4/src/kmag-4.11.4.tar.xz'
  sha1 'dac6e1d44d034d9b96013b256ec8e333396abf1d'
  # Development (pre-release) source block with its own checksum.
  devel do
    url 'http://download.kde.org/stable/4.12.0/src/kmag-4.12.0.tar.xz'
    sha1 'b75992bfe53ab7e22e33d5746ed28ab9583be816'
  end
  depends_on 'kdelibs'
end
| 25.133333 | 70 | 0.740053 |
ffa29ffac682616b8026a4e1cfbeabe74542b743 | 1,559 | class Bogofilter < Formula
desc "Mail filter via statistical analysis"
homepage "https://bogofilter.sourceforge.io"
url "https://downloads.sourceforge.net/project/bogofilter/bogofilter-stable/bogofilter-1.2.5.tar.xz"
sha256 "3248a1373bff552c500834adbea4b6caee04224516ae581fb25a4c6a6dee89ea"
bottle do
sha256 cellar: :any, arm64_monterey: "25e3974a7aa8d9dcc2c3e95b85e7a4e9abba388adf54470dcfd705d29ba3c6d1"
sha256 cellar: :any, arm64_big_sur: "2206ad532a38d489deb48bb9cafec00c9b98a09f621f7f208f95cc36387dafb4"
sha256 cellar: :any, monterey: "89d4f31cd57d801d99a68950682b746c490b481891bfb904f173270f13fc751f"
sha256 cellar: :any, big_sur: "d6ad409edcabed2d32cc945c36151b3a0ae17258d9430f3192b912f1dd1050e8"
sha256 cellar: :any, catalina: "2f2d4c414683f922e687d054e71619a0455560aac2522484132099fbddcc6a77"
sha256 cellar: :any, mojave: "d7df5e0d29f4fcbc9eafc129ddfd993dc785ee3a4bf79b70b0dce9b5f31f7be4"
sha256 cellar: :any, high_sierra: "c7998fa1651590e6aaf27f8fe014a7b0e305a48a02de4cdcb9ba53f1c84bd1e7"
sha256 cellar: :any_skip_relocation, x86_64_linux: "0a74a36fca55ff920b663466e33ed22a127726da33b90f26b45abcc084074f33"
end
depends_on "berkeley-db"
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
system "#{bin}/bogofilter", "--version"
end
end
| 51.966667 | 123 | 0.720975 |
# Rake tasks that manage the Neo4j instance backing the test suite.
namespace :test do
  desc "Setup Test Database"
  # Installs and configures a local Neo4j for the test environment,
  # skipping the work when an instance already exists on disk.
  task setup_test_db: :environment do
    # Dir.exist? replaces Dir.exists?, which was deprecated and then
    # removed entirely in Ruby 3.2.
    unless Dir.exist?(Rails.root.join("db", "neo4j", "test"))
      Rake::Task["neo4j:install"].invoke("community-2.1.5", "test")
      Rake::Task["neo4j:config"].invoke("test", "7475")
    end
  end

  desc "clean test db"
  # Wipes all data from the Neo4j test database.
  task clean_test_db: :environment do
    puts "Cleaning Neo4j test database"
    Rake::Task["neo4j:reset_yes_i_am_sure"].invoke("test")
  end
end
ed852716e20ebb30435b8d45967f90c53914a0ec | 3,556 | class ErlangAT22 < Formula
desc "Programming language for highly scalable real-time systems"
homepage "https://www.erlang.org/"
# Download tarball from GitHub; it is served faster than the official tarball.
url "https://github.com/erlang/otp/releases/download/OTP-22.3.4.20/otp_src_22.3.4.20.tar.gz"
sha256 "43289f20a7038b6835615a1f68a6e32b9aeec6db38cdb7c97adf78d048d74079"
license "Apache-2.0"
revision 1
livecheck do
url :stable
regex(/^OTP[._-]v?(22(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "bf9e22eb6c20285d33d208ef2097efa88bacf01da8447e4cc0a59ef0de70e8ed"
sha256 cellar: :any, big_sur: "cfba73ed8488d94e9f8ca6f9b36fd9f0bbd2d2399b382a33202ffcd70743b9c4"
sha256 cellar: :any, catalina: "89beffd9e5522bc326988a12ac254a9255f767f47df92a8dd5f952a3a0843d53"
sha256 cellar: :any, mojave: "62dbe9073c0354474c8e1a52ca24a84c96cac266066ce0d1b386b40863342e6c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "2486f5eaa04c8bd863847f77c9999b0feaea7e995dca8ff82da5616807aadb89" # linuxbrew-core
end
keg_only :versioned_formula
depends_on "[email protected]"
depends_on "wxmac" # for GUI apps like observer
resource "man" do
url "https://www.erlang.org/download/otp_doc_man_22.3.tar.gz"
sha256 "43b6d62d9595e1dc51946d55c9528c706c5ae753876b9bf29303b7d11a7ccb16"
end
resource "html" do
url "https://www.erlang.org/download/otp_doc_html_22.3.tar.gz"
sha256 "9b01c61f2898235e7f6643c66215d6419f8706c8fdd7c3e0123e68960a388c34"
end
def install
# Unset these so that building wx, kernel, compiler and
# other modules doesn't fail with an unintelligible error.
%w[LIBS FLAGS AFLAGS ZFLAGS].each { |k| ENV.delete("ERL_#{k}") }
args = %W[
--disable-debug
--disable-silent-rules
--prefix=#{prefix}
--enable-dynamic-ssl-lib
--enable-hipe
--enable-shared-zlib
--enable-smp-support
--enable-threads
--enable-wx
--with-ssl=#{Formula["[email protected]"].opt_prefix}
--without-javac
]
on_macos do
args << "--enable-darwin-64bit"
args << "--enable-kernel-poll" if MacOS.version > :el_capitan
args << "--with-dynamic-trace=dtrace" if MacOS::CLT.installed?
end
system "./configure", *args
system "make"
system "make", "install"
(lib/"erlang").install resource("man").files("man")
doc.install resource("html")
end
def caveats
<<~EOS
Man pages can be found in:
#{opt_lib}/erlang/man
Access them with `erl -man`, or add this directory to MANPATH.
EOS
end
test do
system "#{bin}/erl", "-noshell", "-eval", "crypto:start().", "-s", "init", "stop"
(testpath/"factorial").write <<~EOS
#!#{bin}/escript
%% -*- erlang -*-
%%! -smp enable -sname factorial -mnesia debug verbose
main([String]) ->
try
N = list_to_integer(String),
F = fac(N),
io:format("factorial ~w = ~w\n", [N,F])
catch
_:_ ->
usage()
end;
main(_) ->
usage().
usage() ->
io:format("usage: factorial integer\n").
fac(0) -> 1;
fac(N) -> N * fac(N-1).
EOS
chmod 0755, "factorial"
assert_match "usage: factorial integer", shell_output("./factorial")
assert_match "factorial 42 = 1405006117752879898543142606244511569936384000000000", shell_output("./factorial 42")
end
end
| 32.623853 | 139 | 0.644263 |
bf38ac3d966fb05e61500ff64ab3c5d90282e205 | 1,303 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application definition for the Dns Rails app (API-only).
module Dns
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.0

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.

    # Only loads a smaller set of middleware suitable for API only apps.
    # Middleware like session, flash, cookies can be added back manually.
    # Skip views, helpers and assets when generating a new resource.
    config.api_only = true
  end
end
| 34.289474 | 82 | 0.776669 |
39274c04444c00ee3e92dea350329251c6c65be4 | 9,482 | require 'puppet/util/logging'
require 'semver'
require 'puppet/module_tool/applications'
# Support for modules
class Puppet::Module
  # Error hierarchy for module handling; all descend from Puppet::Error so
  # callers can rescue the generic type.
  class Error < Puppet::Error; end
  class MissingModule < Error; end
  class IncompatibleModule < Error; end
  class UnsupportedPlatform < Error; end
  class IncompatiblePlatform < Error; end
  class MissingMetadata < Error; end
  class InvalidName < Error; end
  class InvalidFilePattern < Error; end

  include Puppet::Util::Logging

  # Maps a module content type to the subdirectory of the module that holds it.
  FILETYPES = {
    "manifests" => "manifests",
    "files" => "files",
    "templates" => "templates",
    "plugins" => "lib",
    "pluginfacts" => "facts.d",
  }

  # Find and return the +module+ that +path+ belongs to. If +path+ is
  # absolute, or if there is no module whose name is the first component
  # of +path+, return +nil+
  def self.find(modname, environment = nil)
    return nil unless modname
    Puppet::Node::Environment.new(environment).module(modname)
  end

  attr_reader :name, :environment, :path
  attr_writer :environment

  attr_accessor :dependencies, :forge_name
  # Metadata attributes populated by #load_metadata from metadata.json.
  attr_accessor :source, :author, :version, :license, :puppetversion, :summary, :description, :project_page

  # @param name [String] module name; must match /^[-\w]+$/.
  # @param path [String] filesystem path to the module root.
  # @param environment owning environment (name or environment object).
  # @raise [InvalidName] when the name fails validation.
  # @raise [IncompatibleModule] when metadata pins a different Puppet version.
  def initialize(name, path, environment)
    @name = name
    @path = path
    @environment = environment
    assert_validity
    load_metadata if has_metadata?
    validate_puppet_version
    @absolute_path_to_manifests = Puppet::FileSystem::PathPattern.absolute(manifests)
  end

  # True when the module ships a parseable, non-empty metadata.json.
  def has_metadata?
    return false unless metadata_file
    return false unless Puppet::FileSystem::File.exist?(metadata_file)
    begin
      metadata = PSON.parse(File.read(metadata_file))
    rescue PSON::PSONError => e
      Puppet.debug("#{name} has an invalid and unparsable metadata.json file. The parse error: #{e.message}")
      return false
    end
    return metadata.is_a?(Hash) && !metadata.keys.empty?
  end

  # For each content type, define three accessors, e.g. for "manifests":
  # #manifests? (presence check), #manifest(file) (lookup a single file),
  # and #manifests (the base directory).
  FILETYPES.each do |type, location|
    # A boolean method to let external callers determine if
    # we have files of a given type.
    define_method(type +'?') do
      type_subpath = subpath(location)
      unless Puppet::FileSystem::File.exist?(type_subpath)
        Puppet.debug("No #{type} found in subpath '#{type_subpath}' " +
          "(file / directory does not exist)")
        return false
      end
      return true
    end

    # A method for returning a given file of a given type.
    # e.g., file = mod.manifest("my/manifest.pp")
    #
    # If the file name is nil, then the base directory for the
    # file type is passed; this is used for fileserving.
    define_method(type.sub(/s$/, '')) do |file|
      # If 'file' is nil then they're asking for the base path.
      # This is used for things like fileserving.
      if file
        full_path = File.join(subpath(location), file)
      else
        full_path = subpath(location)
      end
      return nil unless Puppet::FileSystem::File.exist?(full_path)
      return full_path
    end

    # Return the base directory for the given type
    define_method(type) do
      subpath(location)
    end
  end

  # Path to the module's "License" file; memoized (nil when pathless).
  def license_file
    return @license_file if defined?(@license_file)
    return @license_file = nil unless path
    @license_file = File.join(path, "License")
  end

  # Populates the metadata accessors from metadata.json.
  # @raise [MissingMetadata] when a required key is absent
  #   (puppetversion is the only optional key).
  def load_metadata
    data = PSON.parse File.read(metadata_file)
    # "author/modname" form; note gsub replaces every dash in the raw name.
    @forge_name = data['name'].gsub('-', '/') if data['name']
    [:source, :author, :version, :license, :puppetversion, :dependencies].each do |attr|
      unless value = data[attr.to_s]
        unless attr == :puppetversion
          raise MissingMetadata, "No #{attr} module metadata provided for #{self.name}"
        end
      end
      # NOTICE: The fallback to `versionRequirement` is something we'd like to
      # not have to support, but we have a reasonable number of releases that
      # don't use `version_requirement`. When we can deprecate this, we should.
      if attr == :dependencies
        value.tap do |dependencies|
          dependencies.each do |dep|
            dep['version_requirement'] ||= dep['versionRequirement'] || '>= 0.0.0'
          end
        end
      end
      send(attr.to_s + "=", value)
    end
  end

  # Return the list of manifests matching the given glob pattern,
  # defaulting to 'init.{pp,rb}' for empty modules.
  def match_manifests(rest)
    if rest
      wanted_manifests = wanted_manifests_from(rest)
      searched_manifests = wanted_manifests.glob.reject { |f| FileTest.directory?(f) }
    else
      searched_manifests = []
    end
    # (#4220) Always ensure init.pp in case class is defined there.
    init_manifests = [manifest("init.pp"), manifest("init.rb")].compact
    init_manifests + searched_manifests
  end

  # Every .pp/.rb manifest in the module, or [] when there is no manifests dir.
  def all_manifests
    return [] unless Puppet::FileSystem::File.exist?(manifests)
    Dir.glob(File.join(manifests, '**', '*.{rb,pp}'))
  end

  # Path to metadata.json; memoized (nil when pathless).
  def metadata_file
    return @metadata_file if defined?(@metadata_file)
    return @metadata_file = nil unless path
    @metadata_file = File.join(path, "metadata.json")
  end

  # The directory that contains this module (one level above the module root).
  def modulepath
    File.dirname(path) if path
  end

  # Find all plugin directories. This is used by the Plugins fileserving mount.
  def plugin_directory
    subpath("lib")
  end

  def plugin_fact_directory
    subpath("facts.d")
  end

  def has_external_facts?
    File.directory?(plugin_fact_directory)
  end

  # Records a supported (name, version) pair in an in-memory list.
  def supports(name, version = nil)
    @supports ||= []
    @supports << [name, version]
  end

  def to_s
    result = "Module #{name}"
    result += "(#{path})" if path
    result
  end

  # Resolves declared dependencies to module objects in this environment,
  # silently skipping any that cannot be found.
  def dependencies_as_modules
    dependent_modules = []
    dependencies and dependencies.each do |dep|
      author, dep_name = dep["name"].split('/')
      found_module = environment.module(dep_name)
      dependent_modules << found_module if found_module
    end
    dependent_modules
  end

  # Modules in the environment that declare a requirement on this module.
  def required_by
    environment.module_requirements[self.forge_name] || {}
  end

  # True when on-disk content differs from the released checksums.
  def has_local_changes?
    changes = Puppet::ModuleTool::Applications::Checksummer.run(path)
    !changes.empty?
  end

  def local_changes
    Puppet::ModuleTool::Applications::Checksummer.run(path)
  end

  # Identify and mark unmet dependencies. A dependency will be marked unmet
  # for the following reasons:
  #
  #   * not installed and is thus considered missing
  #   * installed and does not meet the version requirements for this module
  #   * installed and doesn't use semantic versioning
  #
  # Returns a list of hashes representing the details of an unmet dependency.
  #
  # Example:
  #
  #   [
  #     {
  #       :reason => :missing,
  #       :name   => 'puppetlabs-mysql',
  #       :version_constraint => 'v0.0.1',
  #       :mod_details => {
  #         :installed_version => '0.0.1'
  #       }
  #       :parent => {
  #         :name    => 'puppetlabs-bacula',
  #         :version => 'v1.0.0'
  #       }
  #     }
  #   ]
  #
  def unmet_dependencies
    unmet_dependencies = []
    return unmet_dependencies unless dependencies

    dependencies.each do |dependency|
      forge_name = dependency['name']
      version_string = dependency['version_requirement'] || '>= 0.0.0'

      # A lookup failure is treated the same as "not installed".
      dep_mod = begin
        environment.module_by_forge_name(forge_name)
      rescue
        nil
      end

      error_details = {
        :name => forge_name,
        :version_constraint => version_string.gsub(/^(?=\d)/, "v"),
        :parent => {
          :name => self.forge_name,
          :version => self.version.gsub(/^(?=\d)/, "v")
        },
        :mod_details => {
          :installed_version => dep_mod.nil? ? nil : dep_mod.version
        }
      }

      unless dep_mod
        error_details[:reason] = :missing
        unmet_dependencies << error_details
        next
      end

      if version_string
        begin
          required_version_semver_range = SemVer[version_string]
          actual_version_semver = SemVer.new(dep_mod.version)
        rescue ArgumentError
          error_details[:reason] = :non_semantic_version
          unmet_dependencies << error_details
          next
        end

        unless required_version_semver_range.include? actual_version_semver
          error_details[:reason] = :version_mismatch
          unmet_dependencies << error_details
          next
        end
      end
    end
    unmet_dependencies
  end

  # Raises when metadata pins an exact Puppet version different from the
  # running one (no-op when puppetversion is unset).
  def validate_puppet_version
    return unless puppetversion and puppetversion != Puppet.version
    raise IncompatibleModule, "Module #{self.name} is only compatible with Puppet version #{puppetversion}, not #{Puppet.version}"
  end

  private

  # Turns a caller-supplied glob into a safe absolute pattern rooted at the
  # manifests directory, appending ".{pp,rb}" when no extension was given.
  def wanted_manifests_from(pattern)
    begin
      extended = File.extname(pattern).empty? ? "#{pattern}.{pp,rb}" : pattern
      relative_pattern = Puppet::FileSystem::PathPattern.relative(extended)
    rescue Puppet::FileSystem::PathPattern::InvalidPattern => error
      raise Puppet::Module::InvalidFilePattern.new(
        "The pattern \"#{pattern}\" to find manifests in the module \"#{name}\" " +
        "is invalid and potentially unsafe.", error)
    end
    relative_pattern.prefix_with(@absolute_path_to_manifests)
  end

  def subpath(type)
    File.join(path, type)
  end

  def assert_validity
    raise InvalidName, "Invalid module name #{name}; module names must be alphanumeric (plus '-'), not '#{name}'" unless name =~ /^[-\w]+$/
  end

  # NOTE(review): this equality operator is defined below `private`, which
  # makes it a private method — `mod_a == mod_b` called from outside the
  # class would raise NoMethodError. Confirm whether that is intended.
  def ==(other)
    self.name == other.name &&
    self.version == other.version &&
    self.path == other.path &&
    self.environment == other.environment
  end
end
| 28.389222 | 139 | 0.653449 |
f7957cea5b5cfbe1632c8f343f85d785866da3f0 | 844 | # encoding: utf-8
#
# Redmine - project management software
# Copyright (C) 2006-2017 Jean-Philippe Lang
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# View helper module for mail handler views; intentionally empty for now.
module MailHandlerHelper
end
| 38.363636 | 81 | 0.766588 |
edf350e059bb8ff5929175bd8ddbda32f311bcfd | 1,426 | class Enumerator
include Enumerable
class Chain < Enumerator
def initialize(*enumerables)
@enum_block = Proc.new do |yielder|
enumerables.each do |enumerable|
enumerable.each do |item|
yielder << item
end
end
end
end
end
class Yielder
def initialize(block = nil)
@block = block
end
def yield(item)
Fiber.yield(item)
end
alias << yield
def to_proc
@block
end
end
def initialize(size = nil, &enum_block)
@size = size
@enum_block = enum_block
end
def each(&block)
@yielder = Yielder.new(block)
@fiber = Fiber.new do
@enum_block.call @yielder
end
loop do
begin
yield self.next
rescue StopIteration
return @final_result
end
end
end
def next
rewind unless @fiber
if @peeked
@peeked = false
return @peeked_value
end
@last_result = @fiber.resume(@last_result)
if @fiber.status == :terminated
@final_result = @last_result
raise StopIteration, 'iteration reached an end'
end
@last_result
end
def peek
if @peeked
return @peeked_value
end
@peeked_value = self.next
@peeked = true
@peeked_value
end
def rewind
@yielder = Yielder.new
@fiber = Fiber.new do
@enum_block.call @yielder
end
end
def size
@size
end
end
| 16.776471 | 53 | 0.596073 |
794b34367f1e5dce4abf5c088b4c14754bc05076 | 157 | require "rails/generators"
module Ukstyle
  # Rails engine entry point for the Ukstyle gem.
  class Engine < ::Rails::Engine
    # Runs at load time: merges a default into the global generator options,
    # presumably making :active_record the default ORM for generators under
    # the :ukstyle namespace — verify against the gem's generators.
    Rails::Generators.options.merge!(ukstyle: { orm: :active_record })
  end
end
| 19.625 | 70 | 0.719745 |
339b4f2e99abe0e6cf86186b679d188d05b64631 | 1,146 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_03_02_183841) do
  # Recipes table. author_id and save_id look like references to users —
  # NOTE(review): no indexes or foreign keys are declared; confirm intent.
  create_table "recipes", force: :cascade do |t|
    t.string "name"
    t.string "ingredients"
    t.string "steps"
    t.string "level"
    t.string "cooking_time"
    t.integer "author_id"
    t.integer "save_id"
    t.integer "save_times"
  end

  # Users table; password_digest suggests has_secure_password is in use —
  # confirm in the model.
  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "email"
    t.string "password_digest"
  end
end
| 34.727273 | 86 | 0.743455 |
b97b7c54a1258a4882c888cf7cbad228eb68d1e3 | 797 | require File.dirname(__FILE__) + '/../../../spec_helper'
require File.dirname(__FILE__) + '/../fixtures/classes'
# Spec for BasicSocket#getpeername using a local loopback TCP pair.
describe "Socket::BasicSocket#getpeername" do
  # Fresh server/client pair per example on the shared spec port.
  before :each do
    @server = TCPServer.new("127.0.0.1", SocketSpecs.port)
    @client = TCPSocket.new("127.0.0.1", SocketSpecs.port)
  end

  # Close whichever sockets are still open so the port is freed.
  after :each do
    @server.close unless @server.closed?
    @client.close unless @client.closed?
  end

  it "returns the sockaddr of the other end of the connection" do
    server_sockaddr = Socket.pack_sockaddr_in(SocketSpecs.port, "127.0.0.1")
    @client.getpeername.should == server_sockaddr
  end

  # Catch general exceptions to prevent NotImplementedError
  it "raises an error if socket's not connected" do
    lambda { @server.getpeername }.should raise_error(Exception)
  end
end
| 30.653846 | 76 | 0.718946 |
e2e6b543cda73cc4b76052efed5e91c1eb1e845b | 423 | # frozen_string_literal: true
module GraphQL
  module PersistedQueries
    module Analyzers
      # AST analyzer that verifies mutations are not executed via GET requests.
      class HttpMethodAstAnalyzer < GraphQL::Analysis::AST::Analyzer
        # Keeps a reference to the analyzed query so #result can hand it to
        # the validator once traversal is done.
        def initialize(query)
          super
          @query = query
        end

        # Called after the AST walk; delegates the actual HTTP-method check
        # to HttpMethodValidator and returns its outcome.
        def result
          validator = HttpMethodValidator.new(@query)
          validator.perform
        end
      end
    end
  end
end
| 21.15 | 68 | 0.643026 |
8748f2bd75e5e60c65ecfe578f89dcec9c996604 | 261 | # frozen_string_literal: true
require "rails_helper"
# Placeholder view spec for the masthead nav menu icon partial.
RSpec.describe "application/_masthead_nav_menu_icon.html.erb", type: :view do
  # before do
  #   render partial: "standard_view/breadcrumbs", locals: { material: material }
  # end

  # Body-less example: RSpec reports it as pending until specs are written.
  it "needs specs"
end
| 21.75 | 81 | 0.739464 |
d5afd4a0edf7785c94f65b66789d8278ca1597ef | 2,074 | # frozen_string_literal: true
require "rails_helper"
module WasteCarriersEngine
  # Request specs for the copy-cards order completion endpoint.
  RSpec.describe "CopyCardsOrderCompletedForm", type: :request do
    describe "GET new_copy_cards_order_completed_form_path" do
      context "when a valid user is signed in" do
        let(:user) { create(:user) }
        before(:each) do
          sign_in(user)
        end

        context "when no transient registration exists" do
          it "redirects to the invalid page" do
            # Token that matches no transient registration.
            get new_copy_cards_order_completed_form_path("wibblewobblejellyonaplate")
            expect(response).to redirect_to(page_path("invalid"))
          end
        end

        context "when a valid transient registration exists" do
          let(:transient_registration) do
            create(
              :order_copy_cards_registration,
              :has_finance_details,
              workflow_state: "copy_cards_order_completed_form"
            )
          end

          context "when the workflow_state is correct" do
            it "deletes the transient object, copy all finance details to the registration, load the confirmation page and sends an email" do
              registration = transient_registration.registration

              get new_copy_cards_order_completed_form_path(transient_registration.token)

              # Re-read persisted state after the request completed the order.
              finance_details = registration.reload.finance_details
              order = finance_details.orders.last
              order_item = order.order_items.first

              expect(WasteCarriersEngine::TransientRegistration.count).to eq(0)
              expect(finance_details.orders.count).to eq(2)
              expect(finance_details.balance).to eq(500)
              expect(order.order_items.count).to eq(1)
              expect(order_item.type).to eq("COPY_CARDS")
              expect(order_item.amount).to eq(500)
              expect(response).to have_http_status(200)
              expect(response).to render_template("waste_carriers_engine/copy_cards_order_completed_forms/new")
            end
          end
        end
      end
    end
  end
end
| 36.385965 | 141 | 0.650434 |
617b4e71f2e18da944209460145723c5df6ea77b | 44 | cmd = "bundler install"
# "rails serve" is not a valid Rails CLI command; the server is started
# with "rails server" (alias "rails s").
cmd = "rails server"
| 14.666667 | 23 | 0.681818 |
bb0f5f2cd72a262d7faf4b1ce4c337404676b9af | 11,412 | require 'action_controller/model_naming'
module ActionDispatch
  module Routing
    # Polymorphic URL helpers are methods for smart resolution to a named route call when
    # given an Active Record model instance. They are to be used in combination with
    # ActionController::Resources.
    #
    # These methods are useful when you want to generate correct URL or path to a RESTful
    # resource without having to know the exact type of the record in question.
    #
    # Nested resources and/or namespaces are also supported, as illustrated in the example:
    #
    #   polymorphic_url([:admin, @article, @comment])
    #
    # results in:
    #
    #   admin_article_comment_url(@article, @comment)
    #
    # == Usage within the framework
    #
    # Polymorphic URL helpers are used in a number of places throughout the \Rails framework:
    #
    # * <tt>url_for</tt>, so you can use it with a record as the argument, e.g.
    #   <tt>url_for(@article)</tt>;
    # * ActionView::Helpers::FormHelper uses <tt>polymorphic_path</tt>, so you can write
    #   <tt>form_for(@article)</tt> without having to specify <tt>:url</tt> parameter for the form
    #   action;
    # * <tt>redirect_to</tt> (which, in fact, uses <tt>url_for</tt>) so you can write
    #   <tt>redirect_to(post)</tt> in your controllers;
    # * ActionView::Helpers::AtomFeedHelper, so you don't have to explicitly specify URLs
    #   for feed entries.
    #
    # == Prefixed polymorphic helpers
    #
    # In addition to <tt>polymorphic_url</tt> and <tt>polymorphic_path</tt> methods, a
    # number of prefixed helpers are available as a shorthand to <tt>action: "..."</tt>
    # in options. Those are:
    #
    # * <tt>edit_polymorphic_url</tt>, <tt>edit_polymorphic_path</tt>
    # * <tt>new_polymorphic_url</tt>, <tt>new_polymorphic_path</tt>
    #
    # Example usage:
    #
    #   edit_polymorphic_path(@post)           # => "/posts/1/edit"
    #   polymorphic_path(@post, format: :pdf)  # => "/posts/1.pdf"
    #
    # == Usage with mounted engines
    #
    # If you are using a mounted engine and you need to use a polymorphic_url
    # pointing at the engine's routes, pass in the engine's route proxy as the first
    # argument to the method. For example:
    #
    #   polymorphic_url([blog, @post])  # calls blog.post_path(@post)
    #   form_for([blog, @post])         # => "/blog/posts/1"
    #
    module PolymorphicRoutes
      include ActionController::ModelNaming

      # Constructs a call to a named RESTful route for the given record and returns the
      # resulting URL string. For example:
      #
      #   # calls post_url(post)
      #   polymorphic_url(post) # => "http://example.com/posts/1"
      #   polymorphic_url([blog, post]) # => "http://example.com/blogs/1/posts/1"
      #   polymorphic_url([:admin, blog, post]) # => "http://example.com/admin/blogs/1/posts/1"
      #   polymorphic_url([user, :blog, post]) # => "http://example.com/users/1/blog/posts/1"
      #   polymorphic_url(Comment) # => "http://example.com/comments"
      #
      # ==== Options
      #
      # * <tt>:action</tt> - Specifies the action prefix for the named route:
      #   <tt>:new</tt> or <tt>:edit</tt>. Default is no prefix.
      # * <tt>:routing_type</tt> - Allowed values are <tt>:path</tt> or <tt>:url</tt>.
      #   Default is <tt>:url</tt>.
      #
      # Also includes all the options from <tt>url_for</tt>. These include such
      # things as <tt>:anchor</tt> or <tt>:trailing_slash</tt>. Example usage
      # is given below:
      #
      #   polymorphic_url([blog, post], anchor: 'my_anchor')
      #   # => "http://example.com/blogs/1/posts/1#my_anchor"
      #   polymorphic_url([blog, post], anchor: 'my_anchor', script_name: "/my_app")
      #   # => "http://example.com/my_app/blogs/1/posts/1#my_anchor"
      #
      # For all of these options, see the documentation for <tt>url_for</tt>.
      #
      # ==== Functionality
      #
      #   # an Article record
      #   polymorphic_url(record) # same as article_url(record)
      #
      #   # a Comment record
      #   polymorphic_url(record) # same as comment_url(record)
      #
      #   # it recognizes new records and maps to the collection
      #   record = Comment.new
      #   polymorphic_url(record) # same as comments_url()
      #
      #   # the class of a record will also map to the collection
      #   polymorphic_url(Comment) # same as comments_url()
      #
      def polymorphic_url(record_or_hash_or_array, options = {})
        # Hash form: treat :id as the record and re-dispatch.
        if Hash === record_or_hash_or_array
          options = record_or_hash_or_array.merge(options)
          record = options.delete :id
          return polymorphic_url record, options
        end

        opts = options.dup
        action = opts.delete :action
        type = opts.delete(:routing_type) || :url

        HelperMethodBuilder.polymorphic_method self,
                                               record_or_hash_or_array,
                                               action,
                                               type,
                                               opts
      end

      # Returns the path component of a URL for the given record. It uses
      # <tt>polymorphic_url</tt> with <tt>routing_type: :path</tt>.
      def polymorphic_path(record_or_hash_or_array, options = {})
        if Hash === record_or_hash_or_array
          options = record_or_hash_or_array.merge(options)
          record = options.delete :id
          return polymorphic_path record, options
        end

        opts = options.dup
        action = opts.delete :action
        type = :path

        HelperMethodBuilder.polymorphic_method self,
                                               record_or_hash_or_array,
                                               action,
                                               type,
                                               opts
      end

      # Define edit_polymorphic_url/path and new_polymorphic_url/path as
      # thin wrappers that set the :action option.
      %w(edit new).each do |action|
        module_eval <<-EOT, __FILE__, __LINE__ + 1
          def #{action}_polymorphic_url(record_or_hash, options = {})
            polymorphic_url_for_action("#{action}", record_or_hash, options)
          end

          def #{action}_polymorphic_path(record_or_hash, options = {})
            polymorphic_path_for_action("#{action}", record_or_hash, options)
          end
        EOT
      end

      private

      def polymorphic_url_for_action(action, record_or_hash, options)
        polymorphic_url(record_or_hash, options.merge(:action => action))
      end

      def polymorphic_path_for_action(action, record_or_hash, options)
        polymorphic_path(record_or_hash, options.merge(:action => action))
      end

      # Resolves a record / class / symbol / list into the named route helper
      # to call and the arguments to pass it. Builders are precomputed and
      # cached per (action, url-vs-path) combination.
      class HelperMethodBuilder # :nodoc:
        CACHE = { 'path' => {}, 'url' => {} }

        # Fetches (or lazily builds) the builder for the given action/type.
        def self.get(action, type)
          type = type.to_s
          CACHE[type].fetch(action) { build action, type }
        end

        def self.url;  CACHE['url'.freeze][nil];  end
        def self.path; CACHE['path'.freeze][nil]; end

        # Builds a builder; "new" targets the collection route (singular
        # strategy keys off the model's plural route key), others the member.
        def self.build(action, type)
          prefix = action ? "#{action}_" : ""
          suffix = type
          if action.to_s == 'new'
            HelperMethodBuilder.singular prefix, suffix
          else
            HelperMethodBuilder.plural prefix, suffix
          end
        end

        def self.singular(prefix, suffix)
          new(->(name) { name.singular_route_key }, prefix, suffix)
        end

        def self.plural(prefix, suffix)
          new(->(name) { name.route_key }, prefix, suffix)
        end

        # Entry point used by polymorphic_url/_path: picks a handler based on
        # the argument's shape and invokes the resolved helper on +recipient+.
        def self.polymorphic_method(recipient, record_or_hash_or_array, action, type, options)
          builder = get action, type

          case record_or_hash_or_array
          when Array
            record_or_hash_or_array = record_or_hash_or_array.compact
            if record_or_hash_or_array.empty?
              raise ArgumentError, "Nil location provided. Can't build URI."
            end
            # A leading route proxy (mounted engine) becomes the receiver.
            if record_or_hash_or_array.first.is_a?(ActionDispatch::Routing::RoutesProxy)
              recipient = record_or_hash_or_array.shift
            end

            method, args = builder.handle_list record_or_hash_or_array
          when String, Symbol
            method, args = builder.handle_string record_or_hash_or_array
          when Class
            method, args = builder.handle_class record_or_hash_or_array

          when nil
            raise ArgumentError, "Nil location provided. Can't build URI."
          else
            method, args = builder.handle_model record_or_hash_or_array
          end

          if options.empty?
            recipient.send(method, *args)
          else
            recipient.send(method, *args, options)
          end
        end

        attr_reader :suffix, :prefix

        # @param key_strategy [Proc] maps a model_name to a route key segment.
        def initialize(key_strategy, prefix, suffix)
          @key_strategy = key_strategy
          @prefix       = prefix
          @suffix       = suffix
        end

        def handle_string(record)
          [get_method_for_string(record), []]
        end

        def handle_string_call(target, str)
          target.send get_method_for_string str
        end

        def handle_class(klass)
          [get_method_for_class(klass), []]
        end

        def handle_class_call(target, klass)
          target.send get_method_for_class klass
        end

        # Persisted records map to the member route (and become an argument);
        # new records map to the collection route via the key strategy.
        def handle_model(record)
          args = []

          model = record.to_model
          name = if model.persisted?
                   args << model
                   model.model_name.singular_route_key
                 else
                   @key_strategy.call model.model_name
                 end

          named_route = prefix + "#{name}_#{suffix}"
          [named_route, args]
        end

        def handle_model_call(target, model)
          method, args = handle_model model
          target.send(method, *args)
        end

        # Builds the route name from a nesting list: every parent contributes
        # a segment (and, for records/classes, a positional argument), then
        # the final record is handled like handle_model.
        def handle_list(list)
          record_list = list.dup
          record = record_list.pop

          args = []

          route = record_list.map { |parent|
            case parent
            when Symbol, String
              parent.to_s
            when Class
              args << parent
              parent.model_name.singular_route_key
            else
              args << parent.to_model
              parent.to_model.model_name.singular_route_key
            end
          }

          route <<
          case record
          when Symbol, String
            record.to_s
          when Class
            @key_strategy.call record.model_name
          else
            model = record.to_model
            if model.persisted?
              args << model
              model.model_name.singular_route_key
            else
              @key_strategy.call model.model_name
            end
          end

          route << suffix

          named_route = prefix + route.join("_")
          [named_route, args]
        end

        private

        def get_method_for_class(klass)
          name = @key_strategy.call klass.model_name
          prefix + "#{name}_#{suffix}"
        end

        def get_method_for_string(str)
          prefix + "#{str}_#{suffix}"
        end

        # Prime the cache for the default and prefixed actions at load time.
        [nil, 'new', 'edit'].each do |action|
          CACHE['url'][action] = build action, 'url'
          CACHE['path'][action] = build action, 'path'
        end
      end
    end
  end
end
| 34.477341 | 98 | 0.574746 |
d5f7100b67cce99e79db0287cc620880876e2566 | 3,031 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# AutoRest-generated model; edit the generator config, not this file.
module Azure::ContainerService::Mgmt::V2019_04_01
  module Models
    #
    # The profile of an orchestrator and its available versions.
    #
    class OrchestratorVersionProfile

      include MsRestAzure

      # @return [String] Orchestrator type.
      attr_accessor :orchestrator_type

      # @return [String] Orchestrator version (major, minor, patch).
      attr_accessor :orchestrator_version

      # @return [Boolean] Installed by default if version is not specified.
      attr_accessor :default

      # @return [Boolean] Whether Kubernetes version is currently in preview.
      attr_accessor :is_preview

      # @return [Array<OrchestratorProfile>] The list of available upgrade
      # versions.
      attr_accessor :upgrades


      #
      # Mapper for OrchestratorVersionProfile class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'OrchestratorVersionProfile',
          type: {
            name: 'Composite',
            class_name: 'OrchestratorVersionProfile',
            model_properties: {
              orchestrator_type: {
                client_side_validation: true,
                required: true,
                serialized_name: 'orchestratorType',
                type: {
                  name: 'String'
                }
              },
              orchestrator_version: {
                client_side_validation: true,
                required: true,
                serialized_name: 'orchestratorVersion',
                type: {
                  name: 'String'
                }
              },
              default: {
                client_side_validation: true,
                required: false,
                serialized_name: 'default',
                type: {
                  name: 'Boolean'
                }
              },
              is_preview: {
                client_side_validation: true,
                required: false,
                serialized_name: 'isPreview',
                type: {
                  name: 'Boolean'
                }
              },
              upgrades: {
                client_side_validation: true,
                required: false,
                serialized_name: 'upgrades',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'OrchestratorProfileElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'OrchestratorProfile'
                    }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 30.009901 | 77 | 0.501815 |
e9283cfcb976a69feb0956e6080fd03e9fa1c009 | 386 | # frozen_string_literal: true
class FeedController < ApplicationController
  # Serves a team's 25 most recent posts as an Atom feed.
  #
  # The team is looked up by slug together with its per-team rss_token,
  # so the token acts as the feed's access credential; a miss on either
  # renders the HTML 404 page.
  def index
    request.format = :atom

    @team = Team.where(slug: params[:team], rss_token: params[:rss_token]).first
    if @team.nil?
      render("layouts/404", status: 404, formats: :html)
      return
    end

    @posts = Post.where(team: @team).last(25)

    respond_to do |format|
      format.atom
    end
  end
end
| 24.125 | 80 | 0.678756 |
6a9ae19e23d75ba25eb851bb74bb75c703cfddfc | 720 | require 'rails/generators'
module DateWrapper
  module Generators
    # Installs the date_wrapper JavaScript assets into the host application.
    class JavascriptsGenerator < ::Rails::Generators::Base
      desc <<-DOC
date_wrapper/date_wrapper.coffee.erb
date_wrapper.js
      DOC

      # Source assets live in the engine's app/assets/javascripts directory.
      def self.source_root
        File.expand_path('../../../app/assets/javascripts', __dir__)
      end

      # Copy the assets as soon as the generator is instantiated, so no
      # explicit generator action needs to be invoked.
      def initialize(*args, &block)
        super
        copy_javascript_assets
      end

      private

      # Writes the static JS file (through ERB) and the CoffeeScript
      # template into the application's asset tree.
      def copy_javascript_assets
        template "date_wrapper.js", "app/assets/javascripts/date_wrapper.js"
        copy_file "date_wrapper/date_wrapper.coffee.erb", "app/assets/javascripts/date_wrapper/date_wrapper.coffee.erb"
      end
    end
  end
end
| 23.225806 | 119 | 0.669444 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.