hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
4aca5779acdc2e6b91ce8f3015fd876bb0d1c109
| 9,123 |
require 'spec_helper'

# Controller specs for UsersController (legacy RSpec "should" syntax).
# Covers index, show, new, create, edit, update, destroy and the
# following/followers pages, including authentication/authorisation rules.
describe UsersController do
  render_views

  describe "GET 'index'" do
    describe "for non-signed-in users" do
      it "should deny access" do
        get :index
        response.should redirect_to(signin_path)
        flash[:notice].should =~ /sign in/i
      end
    end

    describe "for signed-in users" do
      before(:each) do
        @user = test_sign_in(Factory(:user))
        second = Factory(:user, :name => "Bob", :email => "[email protected]")
        third = Factory(:user, :name => "Ben", :email => "[email protected]")
        @users = [@user, second, third]
        # Pad the list past one page so pagination markup is rendered.
        30.times do
          @users << Factory(:user, :email => Factory.next(:email))
        end
      end

      it "should be successful" do
        get :index
        response.should be_success
      end

      it "should have the right title" do
        get :index
        response.should have_selector("title", :content => "All users")
      end

      it "should have an element for each user" do
        get :index
        # Only the first three (named) users are asserted; the padding
        # users exist solely to trigger pagination.
        @users[0..2].each do |user|
          response.should have_selector("li", :content => user.name)
        end
      end

      it "should paginate users" do
        get :index
        response.should have_selector("div.pagination")
        response.should have_selector("span.disabled", :content => "Previous")
        response.should have_selector("a", :href => "/users?page=2",
                                           :content => "2")
        response.should have_selector("a", :href => "/users?page=2",
                                           :content => "Next")
      end
    end
  end

  describe "GET 'show'" do
    before(:each) do
      @user = Factory(:user)
    end

    it "should be successful" do
      get :show, :id => @user
      response.should be_success
    end

    it "should find the right user" do
      get :show, :id => @user
      assigns(:user).should == @user
    end

    it "should have the right title" do
      get :show, :id => @user
      response.should have_selector("title", :content => @user.name)
    end

    it "should include the user's name" do
      get :show, :id => @user
      response.should have_selector("h1", :content => @user.name)
    end

    it "should have a profile image" do
      get :show, :id => @user
      response.should have_selector("h1>img", :class => "gravatar")
    end

    it "should show the user's microposts" do
      mp1 = Factory(:micropost, :user => @user, :content => "Foo bar")
      mp2 = Factory(:micropost, :user => @user, :content => "Baz quux")
      get :show, :id => @user
      response.should have_selector("span.content", :content => mp1.content)
      response.should have_selector("span.content", :content => mp2.content)
    end
  end

  describe "GET 'new'" do
    it "should be successful" do
      get 'new'
      response.should be_success
    end

    it "should have the right title" do
      get 'new'
      response.should have_selector("title", :content => "Sign up")
    end
  end

  describe "POST 'create'" do
    describe "failure" do
      before(:each) do
        # All-blank attributes make the User validations fail.
        @attr = { :name => "", :email => "", :password => "",
                  :password_confirmation => "" }
      end

      it "should have the right title" do
        post :create, :user => @attr
        response.should have_selector('title', :content => "Sign up")
      end

      it "should render the 'new' page" do
        post :create, :user => @attr
        response.should render_template('new')
      end

      it "should not create a user" do
        lambda do
          post :create, :user => @attr
        end.should_not change(User, :count)
      end
    end

    describe "success" do
      before(:each) do
        @attr = { :name => "New User", :email => "[email protected]",
                  :password => "foobar", :password_confirmation => "foobar" }
      end

      it "should create a user" do
        lambda do
          post :create, :user => @attr
        end.should change(User, :count).by(1)
      end

      it "should redirect to the user show page" do
        post :create, :user => @attr
        response.should redirect_to(user_path(assigns(:user)))
      end

      it "should have a welcome message" do
        post :create, :user => @attr
        flash[:success].should =~ /welcome to the sample app/i
      end

      it "should sign the user in" do
        post :create, :user => @attr
        controller.should be_signed_in
      end
    end
  end

  describe "GET 'edit'" do
    before(:each) do
      @user = Factory(:user)
      test_sign_in(@user)
    end

    it "should be successful" do
      get :edit, :id => @user
      response.should be_success
    end

    it "should have the right title" do
      get :edit, :id => @user
      response.should have_selector("title", :content => "Edit user")
    end

    it "should have a link to change the Gravatar" do
      get :edit, :id => @user
      gravatar_url = "http://gravatar.com/emails"
      response.should have_selector("a", :href => gravatar_url,
                                         :content => "change")
    end
  end

  describe "PUT 'update'" do
    before(:each) do
      @user = Factory(:user)
      test_sign_in(@user)
    end

    describe "failure" do
      before(:each) do
        @attr = { :email => "", :name => "", :password => "",
                  :password_confirmation => "" }
      end

      it "should render the 'edit' page" do
        put :update, :id => @user, :user => @attr
        response.should render_template('edit')
      end

      it "should have the right title" do
        put :update, :id => @user, :user => @attr
        response.should have_selector("title", :content => "Edit user")
      end
    end

    describe "success" do
      before(:each) do
        @attr = { :name => "New Name", :email => "[email protected]",
                  :password => "barbaz", :password_confirmation => "barbaz" }
      end

      it "should change the user's attributes" do
        put :update, :id => @user, :user => @attr
        @user.reload
        @user.name.should == @attr[:name]
        @user.email.should == @attr[:email]
      end

      it "should redirect to the user show page" do
        put :update, :id => @user, :user => @attr
        response.should redirect_to(user_path(@user))
      end

      it "should have a flash message" do
        put :update, :id => @user, :user => @attr
        flash[:success].should =~ /updated/
      end
    end
  end

  describe "authentication of edit/update pages" do
    before(:each) do
      @user = Factory(:user)
    end

    describe "for non-signed-in users" do
      it "should deny access to 'edit'" do
        get :edit, :id => @user
        response.should redirect_to(signin_path)
      end

      it "should deny access to 'update'" do
        put :update, :id => @user, :user => {}
        response.should redirect_to(signin_path)
      end
    end
  end

  describe "DELETE 'destroy'" do
    before(:each) do
      @user = Factory(:user)
    end

    describe "as a non-signed-in user" do
      it "should deny access" do
        delete :destroy, :id => @user
        response.should redirect_to(signin_path)
      end
    end

    describe "as a non-admin user" do
      it "should protect the page" do
        test_sign_in(@user)
        delete :destroy, :id => @user
        response.should redirect_to(root_path)
      end

      it "should not have destroy link" do
        test_sign_in(@user)
        # NOTE(review): :id is ignored by #index — presumably a leftover; confirm.
        get :index, :id => @user
        response.should_not have_selector("a",
                                          :href => user_path(@user),
                                          :content => "delete")
      end
    end

    describe "as an admin user" do
      before(:each) do
        admin = Factory(:user, :email => "[email protected]", :admin => true)
        test_sign_in(admin)
      end

      it "should destroy the user" do
        lambda do
          delete :destroy, :id => @user
        end.should change(User, :count).by(-1)
      end

      it "should redirect to the users page" do
        delete :destroy, :id => @user
        response.should redirect_to(users_path)
      end
    end
  end

  describe "follow pages" do
    describe "when not signed in" do
      it "should protect 'following'" do
        get :following, :id => 1
        response.should redirect_to(signin_path)
      end

      it "should protect 'followers'" do
        get :followers, :id => 1
        response.should redirect_to(signin_path)
      end
    end

    describe "when signed in" do
      before(:each) do
        @user = test_sign_in(Factory(:user))
        @other_user = Factory(:user, :email => Factory.next(:email))
        @user.follow!(@other_user)
      end

      it "should show user following" do
        get :following, :id => @user
        response.should have_selector("a", :href => user_path(@other_user),
                                           :content => @other_user.name)
      end

      it "should show user followers" do
        get :followers, :id => @other_user
        response.should have_selector("a", :href => user_path(@user),
                                           :content => @user.name)
      end
    end
  end
end
| 26.140401 | 80 | 0.567796 |
e99035ee8a105889e77191973656291429583964
| 3,654 |
require 'spec_helper'

# Specs for CoinGate::Merchant::Order: creating and finding orders, both
# with globally configured credentials and with credentials passed
# explicitly per call. Non-bang methods return false on failure; bang
# variants raise the corresponding CoinGate error.
describe CoinGate::Merchant::Order do
  include_context 'shared'

  # A full, valid set of order parameters used by the happy-path examples.
  let(:valid_order_params) do
    {
      order_id: 'ORDER-1412759367',
      price_amount: 1050.99,
      price_currency: 'USD',
      receive_currency: 'EUR',
      callback_url: 'https://example.com/payments/callback?token=6tCENGUYI62ojkuzDPX7Jg',
      cancel_url: 'https://example.com/cart',
      success_url: 'https://example.com/account/orders',
      description: 'Apple Iphone 6s'
    }
  end

  before do
    # Credentials come from the shared context (@authentication).
    CoinGate.config do |config|
      config.auth_token = @authentication[:auth_token]
      config.environment = @authentication[:environment]
    end
  end

  describe 'create order' do
    context 'invalid' do
      it { expect(CoinGate::Merchant::Order.create({})).to be false }
      it { expect { CoinGate::Merchant::Order.create!({}) }.to raise_error CoinGate::OrderIsNotValid }
    end

    context 'valid' do
      it { expect(CoinGate::Merchant::Order.create(valid_order_params).new?).to be true }
      it { expect(CoinGate::Merchant::Order.create!(valid_order_params).new?).to be true }
    end
  end

  describe 'find order' do
    context 'order exists' do
      it do
        order = CoinGate::Merchant::Order.create(valid_order_params)
        expect(CoinGate::Merchant::Order.find(order.id).new?).to be true
        expect(CoinGate::Merchant::Order.find!(order.id).new?).to be true
      end
    end

    context 'order does not exists' do
      it { expect(CoinGate::Merchant::Order.find(0)).to be false }
      it { expect { CoinGate::Merchant::Order.find!(0) }.to raise_error CoinGate::OrderNotFound }
    end
  end

  describe 'passing auth params through arguments' do
    before do
      # Clear the global token so only explicitly passed credentials count.
      CoinGate.config do |config|
        config.auth_token = nil
      end
    end

    describe 'create order' do
      context 'params not passed' do
        it { expect { CoinGate::Merchant::Order.create({}) }.to raise_error CoinGate::AuthTokenMissing }
        it { expect { CoinGate::Merchant::Order.create!({}) }.to raise_error CoinGate::AuthTokenMissing }
      end

      context 'invalid params passed' do
        authentication = {
          auth_token: 'a'
        }
        it { expect { CoinGate::Merchant::Order.create({}, authentication) }.to raise_error CoinGate::BadAuthToken }
        it { expect { CoinGate::Merchant::Order.create!({}, authentication) }.to raise_error CoinGate::BadAuthToken }
      end

      context 'valid params passed' do
        # Auth succeeds, but the empty order params are still invalid.
        it { expect(CoinGate::Merchant::Order.create({}, @authentication)).to be false }
        it { expect { CoinGate::Merchant::Order.create!({}, @authentication) }.to raise_error CoinGate::OrderIsNotValid }
      end
    end

    describe 'find order' do
      context 'params not passed' do
        it { expect { CoinGate::Merchant::Order.find(0) }.to raise_error CoinGate::AuthTokenMissing }
        it { expect { CoinGate::Merchant::Order.find!(0) }.to raise_error CoinGate::AuthTokenMissing }
      end

      context 'invalid params passed' do
        authentication = {
          auth_token: 'a',
        }
        it { expect { CoinGate::Merchant::Order.find(0, authentication) }.to raise_error CoinGate::BadAuthToken }
        it { expect { CoinGate::Merchant::Order.find!(0, authentication) }.to raise_error CoinGate::BadAuthToken }
      end

      context 'valid params passed' do
        it { expect(CoinGate::Merchant::Order.find(0, @authentication)).to be false }
        it { expect { CoinGate::Merchant::Order.find!(0, @authentication) }.to raise_error CoinGate::OrderNotFound }
      end
    end
  end
end
| 35.475728 | 121 | 0.647236 |
916e88f0b13bb08f88c4aab30c674bab7a6432db
| 1,759 |
# CRUD for projects. Only authenticated users may create or delete.
class ProjectsController < ApplicationController
  before_action :authenticate_user!, only: [:new, :create, :destroy]

  # GET /projects/new (optionally nested under a category).
  # Sets @category to the preselected category id, or [] when none.
  def new
    @project = Project.new
    # Single lookup instead of the original duplicated find_by calls.
    category = Category.find_by(id: params[:category_id])
    @category = category ? category.id : []
  end

  # DELETE /projects/:id
  def destroy
    @project = Project.find(params[:id])
    @project.destroy
    # This is a success confirmation, not an error (was flash[:error]).
    flash[:notice] = 'You have successfully deleted this project.'
    redirect_to project_manager_path
  end

  # POST /projects
  def create
    # Build rather than create: the original `Project.create` persisted the
    # record *before* user_id was assigned and then saved a second time.
    @project = Project.new(project_params)
    @project.user_id = current_user.id
    if @project.save
      flash[:notice] = 'Project successfully created'
      # Redirect to the project under its first associated category.
      redirect_to category_project_path(
        ProjectCategory.where(project_id: @project.id).limit(1).pluck(:category_id),
        @project
      )
    else
      flash[:alert] = 'Project could not be created'
      render 'new'
    end
  end

  # GET /projects/:id or /categories/:category_id/projects/:id
  def show
    if params[:category_id]
      @category = Category.find_by(id: params[:category_id])
      @project = @category.projects.find_by(id: params[:id])
      if @project.nil?
        # The original called the non-existent helper `category_projects`
        # with a nil @project; use the index path helper and stop here so
        # the code below never dereferences a nil project.
        redirect_to category_projects_path(@category), alert: 'Project not found'
        return
      end
    else
      @project = Project.find(params[:id])
    end
    @user = User.find(@project.user_id).full_name
    @comment = Comment.new
    @comments = Comment.filter_comments(@project.id)
    @task = Task.new
    @tasks = Task.filter_tasks(@project.id)
  end

  # GET /projects
  def index
    @projects = Project.all
  end

  private

  # Strong parameters for project create/update.
  def project_params
    params.require(:project).permit(:title, :description, :task_id, :category_id,
                                    category_ids: [], categories_attributes: [:title])
  end
end
| 27.920635 | 132 | 0.675384 |
1c9d2b5595c2eb149b5a182f7e380f53a583eb30
| 1,017 |
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)

# Homebrew formula for the PHP 5.6 "tidy" extension, built from the PHP
# source tarball constants provided by the abstract-php-extension helper.
class Php56Tidy < AbstractPhp56Extension
  init
  homepage "http://php.net/manual/en/book.tidy.php"
  url PHP_SRC_TARBALL
  sha256 PHP_CHECKSUM[:sha256]
  version PHP_VERSION

  bottle do
    root_url "https://homebrew.bintray.com/bottles-php"
    sha256 "eceaf9706530034cab44574be55ad1cd00d57f5321bbabfdc01a5e4f0480b4a5" => :yosemite
    sha256 "ae5b48dfa422f79baeb31ff9052a985d92c698155554842afb6a4c14c7b3c40b" => :mavericks
    sha256 "a03c52c64336fe2ab661611c7f48029911e232a05c76c73f4bf03cf27871d5d3" => :mountain_lion
  end

  def install
    # Build only the bundled tidy extension, not all of PHP.
    Dir.chdir "ext/tidy"
    ENV.universal_binary if build.universal?
    safe_phpize
    system "./configure", "--prefix=#{prefix}",
           phpconfig,
           "--disable-dependency-tracking",
           "--with-tidy"
    system "make"
    prefix.install "modules/tidy.so"
    # Emit the ini snippet unless the user opted out of config files.
    write_config_file if build.with? "config-file"
  end
end
| 31.78125 | 95 | 0.695182 |
33de9ef4c4dfbf883372dbc2b2cd3edbf52fe207
| 284 |
# Homebrew cask for "Digital: A Love Story" by Scouts Honour.
cask 'digital' do
  version '1.1'
  sha256 '94fb4a2d4829cd4b717d5c250167cbe79cccf7870335c1fd4694462211c98802'

  # The version is interpolated into the download URL.
  url "http://scoutshonour.com/lilyofthevalley/digital-#{version}.dmg"
  name 'Digital: A Love Story'
  homepage 'http://scoutshonour.com/digital/'

  app 'Digital.app'
end
| 25.818182 | 75 | 0.764085 |
f7775ea6f127071c37ac2096c3a079a098207c78
| 996 |
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'epi_deploy/version'

# Gem specification for epi_deploy.
Gem::Specification.new do |gem|
  gem.name          = "epi_deploy"
  gem.version       = EpiDeploy::VERSION
  gem.authors       = ["Anthony Nettleship", "Shuo Chen", "Chris Hunt", "James Gregory"]
  gem.email         = ["[email protected]", "[email protected]", "[email protected]", "[email protected]"]
  # Fixed typo in the published description: "evironments" -> "environments".
  gem.description   = "A gem to facilitate deployment across multiple git branches and environments"
  gem.summary       = "eD"
  gem.homepage      = "https://www.epigenesys.org.uk"

  # File lists are derived from the git index.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]

  gem.add_dependency('slop', '~> 3.6')
  gem.add_dependency('git', '~> 1.2')
end
| 43.304348 | 160 | 0.65261 |
bb3f1832a76f93e1300ce5e5ddb36d7fe2e6b955
| 1,553 |
=begin
#Selling Partner API for Listings Items
#The Selling Partner API for Listings Items (Listings Items API) provides programmatic access to selling partner listings on Amazon. Use this API in collaboration with the Selling Partner API for Product Type Definitions, which you use to retrieve the information about Amazon product types needed to use the Listings Items API. For more information, see the [Listings Items API Use Case Guide](https://github.com/amzn/selling-partner-api-docs/blob/main/guides/en-US/use-case-guides/listings-items-api-use-case-guide/listings-items-api-use-case-guide_2021-08-01.md).
OpenAPI spec version: 2021-08-01
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
require 'spec_helper'
require 'json'
require 'date'

# Unit tests for AmzSpApi::ListingsItemsApiModel::ErrorList
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'ErrorList' do
  before do
    # run before each test
    @instance = AmzSpApi::ListingsItemsApiModel::ErrorList.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of ErrorList' do
    it 'should create an instance of ErrorList' do
      expect(@instance).to be_instance_of(AmzSpApi::ListingsItemsApiModel::ErrorList)
    end
  end

  describe 'test attribute "errors"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
      # NOTE(review): this example currently asserts nothing and always
      # passes — add a real expectation for #errors.
    end
  end
end
| 36.97619 | 567 | 0.768835 |
bb76659b5ad999b4e3348f3abca92201baa987f6
| 2,786 |
require 'spec_helper'

# Controller specs for the admin hybrid-relationship CRUD actions, all of
# which (except destroy) are exercised via XHR.
describe Admin::HybridRelationshipsController do
  login_admin
  before(:each) { hybrid_relationship_type }

  let(:taxon_concept) { create(:taxon_concept) }
  # Name status 'H' marks the concept as a hybrid.
  let(:hybrid) { create(:taxon_concept, :name_status => 'H') }
  let(:hybrid_relationship) {
    create(:taxon_relationship,
      :taxon_relationship_type_id => hybrid_relationship_type.id,
      :taxon_concept => taxon_concept,
      :other_taxon_concept => hybrid
    )
  }

  describe "XHR GET new" do
    it "renders the new template" do
      xhr :get, :new, :taxon_concept_id => taxon_concept.id
      response.should render_template('new')
    end
    it "assigns the hybrid_relationship variable" do
      xhr :get, :new, :taxon_concept_id => taxon_concept.id
      assigns(:hybrid_relationship).should_not be_nil
    end
  end

  describe "XHR POST create" do
    it "renders create when successful" do
      xhr :post, :create,
        :taxon_concept_id => taxon_concept.id,
        :taxon_relationship => {
          other_taxon_concept_id: hybrid.id
        }
      response.should render_template("create")
    end
    it "renders new when not successful" do
      xhr :post, :create,
        :taxon_concept_id => taxon_concept.id,
        :taxon_relationship => {
          other_taxon_concept_id: nil
        }
      response.should render_template("new")
    end
  end

  describe "XHR GET edit" do
    # The edit action re-uses the 'new' template.
    it "renders the edit template" do
      xhr :get, :edit, :taxon_concept_id => taxon_concept.id,
        :id => hybrid_relationship.id
      response.should render_template('new')
    end
    it "assigns the hybrid_relationship variable" do
      xhr :get, :edit, :taxon_concept_id => taxon_concept.id,
        :id => hybrid_relationship.id
      assigns(:hybrid_relationship).should_not be_nil
    end
  end

  describe "XHR PUT update" do
    # On success, update re-uses the 'create' template.
    it "responds with 200 when successful" do
      xhr :put, :update, :format => 'js',
        :taxon_concept_id => taxon_concept.id,
        :id => hybrid_relationship.id,
        :taxon_relationship => {
          other_taxon_concept_id: hybrid.id
        }
      response.should render_template("create")
    end
    it "responds with json when not successful" do
      xhr :put, :update, :format => 'js',
        :taxon_concept_id => taxon_concept.id,
        :id => hybrid_relationship.id,
        :taxon_relationship => {
          other_taxon_concept_id: nil
        }
      response.should render_template('new')
    end
  end

  describe "DELETE destroy" do
    it "redirects after delete" do
      delete :destroy,
        :taxon_concept_id => taxon_concept.id,
        :id => hybrid_relationship.id
      response.should redirect_to(
        admin_taxon_concept_names_url(hybrid_relationship.taxon_concept)
      )
    end
  end
end
| 30.282609 | 72 | 0.657933 |
87f5b6d6c6e1a2be03b59f1b5699891be4745763
| 6,950 |
require 'swagger_helper'

# rswag request specs for PATCH /api/v1/users/{id}. These both exercise the
# endpoint and generate the swagger (v1/swagger.json) documentation.
describe Api::V1::UserResource, type: :request, swagger_doc: 'v1/swagger.json' do
  let!(:updated_user) { create :user }
  # Valid attributes are derived from the swagger definition's examples.
  let(:valid_attributes) do
    Swagger::V1::Users.definitions.dig(:user_updatable_attributes, :properties).transform_values do |v|
      v[:example]
    end
  end

  path '/api/v1/users/{id}' do
    patch 'update user' do
      tags 'Users'
      security [{ JWT: {} }]
      consumes 'application/vnd.api+json'
      produces 'application/vnd.api+json'
      parameter name: :id, in: :path, type: :string, required: true
      parameter name: :user, in: :body, schema: { '$ref' => '#/definitions/user_patch_params' }

      let(:id) { updated_user.id }
      let(:attributes) { valid_attributes }
      # JSON:API-shaped request body.
      let(:user) do
        {
          data: {
            type: 'users',
            id: updated_user.id,
            attributes: attributes
          }
        }
      end
      let(:Authorization) { 'Bearer dummy_json_web_token' }

      response '200', 'OK: User updated' do
        schema '$ref' => '#/definitions/user_response'
        run_test! do
          parsed_json_data_matches_db_record(updated_user)
        end
      end

      it_behaves_like 'has response unauthorized'

      context 'user attributes contain password' do
        let(:password) { Faker::Internet.password }
        let(:user) do
          {
            data: {
              type: 'users',
              id: updated_user.id,
              attributes: attributes.merge(password: password)
            }
          }
        end

        context 'valid password and user is not admin' do
          response '200', 'OK: User updated' do
            schema '$ref' => '#/definitions/user_response'
            run_test! do
              expect(updated_user.reload.valid_password?(password)).to eql true
            end
          end
        end

        # Admin passwords may not be changed through this endpoint.
        context 'valid password but user is admin' do
          let!(:updated_user) { create :user, admin: true, password: Faker::Internet.password }
          it_behaves_like 'has response bad request' do
            let(:error_detail) { 'Admin password change via API is not allowed.' }
          end
        end
      end

      context 'user attributes contain ldap bind pair' do
        description 'Any LDAP related params should be lowercase and use <i>ldap_</i> prefix, e.g. <i>ldap_samaccountname<i>'
        let(:ldap_auth_bind_key) { 'samaccountname' }
        let(:ldap_auth_bind_key_field) { "ldap_#{ldap_auth_bind_key}".to_sym }
        let(:ldap_auth_bind_value) { Faker::Internet.username }
        let(:attributes) { valid_attributes.merge({ ldap_auth_bind_key_field => ldap_auth_bind_value }) }

        around do |example|
          ClimateControl.modify AUTH_METHOD: 'ldap', LDAP_AUTH_BIND_KEY: ldap_auth_bind_key do
            # we need to reload modules which use ENV variables we just had changed
            Object.send(:remove_const, :UsesLdap)
            Object.send(:remove_const, :ResourceUsesLdap)
            load 'app/models/concerns/uses_ldap.rb'
            load 'app/resources/concerns/resource_uses_ldap.rb'
            User.include(UsesLdap)
            Api::V1::UserResource.include(ResourceUsesLdap)
            example.run
          end
        end

        response '200', 'OK: User updated' do
          schema '$ref' => '#/definitions/user_response'
          run_test! do
            expect(updated_user.reload.send(ldap_auth_bind_key_field)).to eql ldap_auth_bind_value
          end
        end
      end

      # An empty string clears the epr_uuid.
      context 'epr_uuid is null' do
        let(:attributes) { valid_attributes.merge(epr_uuid: '') }
        response '200', 'OK: User updated' do
          schema '$ref' => '#/definitions/user_response'
          run_test! do
            expect(updated_user.reload.epr_uuid).to eql nil
          end
        end
      end

      context 'user group relationships passed' do
        let!(:user_group1) { create :user_group }
        let!(:user_group2) { create :user_group }
        let!(:user_group3) { create :user_group }
        let!(:user_group4) { create :user_group }
        let(:relationships) do
          {
            user_groups: {
              data: [
                { type: 'user_groups', id: user_group1.id },
                { type: 'user_groups', id: user_group3.id }
              ]
            }
          }
        end
        let(:user) do
          {
            data: {
              type: 'users',
              id: updated_user.id,
              attributes: attributes,
              relationships: relationships
            }
          }
        end

        response '200', 'OK: User updated' do
          schema '$ref' => '#/definitions/user_response_with_relationships'
          run_test! do
            expect(updated_user.user_groups.reload.pluck(:id)).to match_array [user_group1.id, user_group3.id]
          end
        end

        # Replacing an existing membership set wholesale is forbidden.
        context 'user already has user group assigned' do
          before do
            updated_user.user_groups << user_group2
          end
          it_behaves_like 'has response forbidden' do
            let(:error_title) { 'Complete replacement forbidden' }
            let(:error_detail) { 'User already has user group(s) assigned. Use memberships POST endpoint.' }
          end
        end

        context 'user group ids are invalid' do
          let(:invalid_group_id1) { 543 }
          let(:invalid_group_id2) { 210 }
          let(:relationships) do
            {
              user_groups: {
                data: [
                  { type: 'user_groups', id: invalid_group_id1 },
                  { type: 'user_groups', id: invalid_group_id2 }
                ]
              }
            }
          end
          let(:error_title) { 'Record not found' }
          let(:error_detail) { "User groups with ids #{invalid_group_id1}, #{invalid_group_id2} not found." }
          let(:error_status) { '404' }
          let(:error_code) { '404' }

          response '404', 'Record not found' do
            schema '$ref' => '#/definitions/error_404'
            run_test! do
              expect(parsed_json_error[:title]).to eql error_title
              expect(parsed_json_error[:detail]).to eql error_detail
              expect(parsed_json_error[:status]).to eql error_status
              expect(parsed_json_error[:code]).to eql error_code
            end
          end
        end
      end

      # The admin flag is not mass-assignable.
      context 'user params include not permitted admin flag' do
        response '400', 'Error: Bad Request' do
          let(:attributes_with_admin) { valid_attributes.merge(admin: true) }
          let(:user) do
            {
              data: {
                type: 'users',
                id: updated_user.id,
                attributes: attributes_with_admin
              }
            }
          end
          schema '$ref' => '#/definitions/error_400'
          run_test!
        end
      end
    end
  end
end
| 31.306306 | 125 | 0.561727 |
288b5f545636b3f0f9c21f1b12e98da22284b1b3
| 6,896 |
#
# Author:: Mathieu Sauve-Frankel <[email protected]>
# Copyright:: Copyright (c) 2009 Mathieu Sauve-Frankel
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/mixin/shell_out'
require 'chef/provider/service'
require 'chef/mixin/command'
class Chef
  class Provider
    class Service
      # Generic "simple" service provider: it drives a service purely through
      # user-supplied start/stop/restart/reload commands, and determines the
      # current status via (in order of preference) a status_command, the
      # init script's `status` subcommand, or a process-table grep.
      class Simple < Chef::Provider::Service
        include Chef::Mixin::ShellOut

        # Probes the system for the service's current state and returns the
        # populated current_resource.
        def load_current_resource
          @current_resource = Chef::Resource::Service.new(@new_resource.name)
          @current_resource.service_name(@new_resource.service_name)

          # Flags consumed by the why-run assertions declared below.
          @status_load_success = true
          @ps_command_failed = false

          determine_current_status!

          @current_resource
        end

        def whyrun_supported?
          true
        end

        def shared_resource_requirements
          super
          requirements.assert(:all_actions) do |a|
            a.assertion { @status_load_success }
            a.whyrun ["Service status not available. Assuming a prior action would have installed the service.", "Assuming status of not running."]
          end
        end

        # Declares the preconditions for each action; a missing command makes
        # the corresponding action fail with Chef::Exceptions::Service
        # (UnsupportedAction for :reload).
        def define_resource_requirements
          # FIXME? need reload from service.rb
          shared_resource_requirements
          requirements.assert(:start) do |a|
            a.assertion { @new_resource.start_command }
            a.failure_message Chef::Exceptions::Service, "#{self.to_s} requires that start_command be set"
          end
          requirements.assert(:stop) do |a|
            a.assertion { @new_resource.stop_command }
            a.failure_message Chef::Exceptions::Service, "#{self.to_s} requires that stop_command be set"
          end
          requirements.assert(:restart) do |a|
            a.assertion { @new_resource.restart_command || ( @new_resource.start_command && @new_resource.stop_command ) }
            a.failure_message Chef::Exceptions::Service, "#{self.to_s} requires a restart_command or both start_command and stop_command be set in order to perform a restart"
          end
          requirements.assert(:reload) do |a|
            a.assertion { @new_resource.reload_command }
            a.failure_message Chef::Exceptions::UnsupportedAction, "#{self.to_s} requires a reload_command be set in order to perform a reload"
          end
          # Some way of determining status must exist: a status command,
          # `supports[:status]`, or a usable `ps` command from node attributes.
          requirements.assert(:all_actions) do |a|
            a.assertion { @new_resource.status_command or @new_resource.supports[:status] or
              (!ps_cmd.nil? and !ps_cmd.empty?) }
            a.failure_message Chef::Exceptions::Service, "#{@new_resource} could not determine how to inspect the process table, please set this node's 'command.ps' attribute"
          end
          requirements.assert(:all_actions) do |a|
            a.assertion { !@ps_command_failed }
            a.failure_message Chef::Exceptions::Service, "Command #{ps_cmd} failed to execute, cannot determine service current status"
          end
        end

        def start_service
          shell_out!(@new_resource.start_command)
        end

        def stop_service
          shell_out!(@new_resource.stop_command)
        end

        # Uses restart_command when given; otherwise stop, pause briefly,
        # then start.
        def restart_service
          if @new_resource.restart_command
            shell_out!(@new_resource.restart_command)
          else
            stop_service
            sleep 1
            start_service
          end
        end

        def reload_service
          shell_out!(@new_resource.reload_command)
        end

        protected

        # Sets @current_resource.running by trying, in order: the explicit
        # status command, the init script's status subcommand, then a regex
        # match against the process table. Exit status 0 means "running".
        def determine_current_status!
          if @new_resource.status_command
            Chef::Log.debug("#{@new_resource} you have specified a status command, running..")

            begin
              if shell_out(@new_resource.status_command).exitstatus == 0
                @current_resource.running true
                Chef::Log.debug("#{@new_resource} is running")
              end
            rescue Mixlib::ShellOut::ShellCommandFailed, SystemCallError
              # ShellOut sometimes throws different types of Exceptions than ShellCommandFailed.
              # Temporarily catching different types of exceptions here until we get Shellout fixed.
              # TODO: Remove the line before one we get the ShellOut fix.
              @status_load_success = false
              @current_resource.running false
              nil
            end

          elsif @new_resource.supports[:status]
            Chef::Log.debug("#{@new_resource} supports status, running")

            begin
              if shell_out("#{default_init_command} status").exitstatus == 0
                @current_resource.running true
                Chef::Log.debug("#{@new_resource} is running")
              end
            # ShellOut sometimes throws different types of Exceptions than ShellCommandFailed.
            # Temporarily catching different types of exceptions here until we get Shellout fixed.
            # TODO: Remove the line before one we get the ShellOut fix.
            rescue Mixlib::ShellOut::ShellCommandFailed, SystemCallError
              @status_load_success = false
              @current_resource.running false
              nil
            end

          else
            Chef::Log.debug "#{@new_resource} falling back to process table inspection"
            r = Regexp.new(@new_resource.pattern)
            Chef::Log.debug "#{@new_resource} attempting to match '#{@new_resource.pattern}' (#{r.inspect}) against process list"
            begin
              shell_out!(ps_cmd).stdout.each_line do |line|
                if r.match(line)
                  @current_resource.running true
                  break
                end
              end

              @current_resource.running false unless @current_resource.running
              Chef::Log.debug "#{@new_resource} running: #{@current_resource.running}"
            # ShellOut sometimes throws different types of Exceptions than ShellCommandFailed.
            # Temporarily catching different types of exceptions here until we get Shellout fixed.
            # TODO: Remove the line before one we get the ShellOut fix.
            rescue Mixlib::ShellOut::ShellCommandFailed, SystemCallError
              @ps_command_failed = true
            end
          end
        end

        # The node-configured `ps` command (node[:command][:ps]), if any.
        def ps_cmd
          @run_context.node[:command] && @run_context.node[:command][:ps]
        end
      end
    end
  end
end
| 39.861272 | 175 | 0.634426 |
792a239fb5a3616afa38375766e9aa56fc354719
| 1,890 |
=begin
# XXX: This code is unfinished
module Memorandom
module Plugins
class RSA < PluginTemplate
require 'openssl'
@@description = "This plugin looks for RSA keys by finding Bignum-encoded p-values"
@@confidence = 0.90
# Scan takes a buffer and an offset of where this buffer starts in the source
def scan(buffer, source_offset)
key_lengths = [1024, 2048]
key_lengths.each do |bits|
p_size = bits / 16
n_size = bits / 8
found = []
0.upto(buffer.length - (p_size + n_size)) do |p_offset|
# Look for a prime of the right size (p or q)
found_p = OpenSSL::BN.new( buffer[p_offset, p_size].unpack("H*").first.to_i(16).to_s )
next unless found_p.prime?
next unless found_p > 0x1000000000000000000000
# Look for a modulus that matches the found p/q value
0.upto(buffer.length - (p_size + n_size) ) do |n_offset|
next if (n_offset < p_offset and (n_offset + n_size) > p_offset)
next if (p_offset < n_offset and (p_offset + p_size) > n_offset)
found_n = OpenSSL::BN.new(buffer[n_offset, n_size].unpack("H*").first.to_i(16).to_s )
next if found_n == 0
next if found_n == found_p
next unless found_n > 0x1000000000000000000000
next unless (found_n % found_p == 0)
found << [found_p, found_n, p_offset]
end
end
found = found.uniq
next unless found.length > 0
mods = {}
# Track the last unique p/q value for a potential modulus
found.each do |info|
mods[ info[1] ] ||= {}
mods[ info[1] ][ info[0] ] = info[2]
end
p mods
next
mods.keys.each do |n|
uniq_pees = mods[n].keys.select do |k|
mods[n].keys.reject {|x| x == k}.select{|x| n == (x * k) }
end
end
end
end
end
end
end
=end
| 24.545455 | 95 | 0.593651 |
91b6b2d8669e2e388c9ce74d82935f614945a18c
| 21,392 |
require 'spec_helper'
require_relative './tool_members_create_mail_concern'
describe V1::ToolMembersController do
render_views
before(:each) do
request.env['HTTP_ACCEPT'] = 'application/json'
end
# Authorization matrix for creating a tool membership:
# unauthenticated -> 401, participant -> 403, facilitator -> shared examples.
describe 'POST #create' do
  # No signed-in user: the endpoint must reject the request outright.
  context 'when the user is not authenticated' do
    before(:each) do
      sign_out :user
      post :create, params: {
        tool_member: {
          tool_type: 'Assessment',
          tool_id: 1,
          role: 0,
          user_id: 1
        }
      }
    end
    it {
      is_expected.to respond_with :unauthorized
    }
  end
  context 'when the user is authenticated' do
    # Participants may not add members to a tool, so the server answers 403.
    context 'when the user is a participant on the tool' do
      let(:user) {
        create(:user)
      }
      # The account we attempt (and fail) to add as a member.
      let(:candidate) {
        create(:user)
      }
      let(:tool) {
        create(:assessment)
      }
      let!(:tool_member) {
        create(:tool_member, :as_participant, tool: tool, user: user)
      }
      before(:each) do
        sign_in user
        post :create, params: {
          tool_member: {
            tool_type: 'Assessment',
            tool_id: tool.id,
            role: ToolMember.member_roles[:participant],
            user_id: candidate.id
          }
        }
      end
      it {
        is_expected.to respond_with :forbidden
      }
    end
    # Facilitators can create members; the shared examples (loaded from
    # tool_members_create_mail_concern) cover each supported tool type.
    context 'when the user is a facilitator on the tool' do
      context 'when the tool is assessment' do
        it_behaves_like 'a created tool member with a specific tool', :assessment
      end
      context 'when the tool is inventory' do
        it_behaves_like 'a created tool member with a specific tool', :inventory
      end
      context 'when the tool is analysis' do
        it_behaves_like 'a created tool member with a specific tool', :analysis
      end
    end
  end
end
# GET #show lists memberships for a tool. Based on the expectations below,
# the endpoint returns only participant-role memberships: a pure facilitator
# sees an empty list while any participant membership yields one entry.
describe 'GET #show' do
  context 'when the user is not authenticated' do
    before(:each) do
      sign_out :user
      get :show, params: { tool_type: 'assessment', tool_id: 1 }
    end
    it {
      is_expected.to respond_with :unauthorized
    }
  end
  context 'when the user is authenticated' do
    let(:user) {
      create(:user)
    }
    let(:tool) {
      create(:assessment)
    }
    # An unknown tool type is not an error: the endpoint responds 200
    # with an empty collection rather than 404/422.
    context 'when the passed tool name is invalid' do
      let!(:participant_user) {
        create(:tool_member, :as_participant, tool: tool, user: user)
      }
      before(:each) do
        sign_in user
        get :show, params: { tool_type: 'i_do_not_exist', tool_id: tool.id }
      end
      it {
        is_expected.to respond_with :ok
      }
      it {
        expect(json.length).to eq 0
      }
    end
    context 'when the passed tool name is valid' do
      context 'when the user is a participant on the tool' do
        let!(:participant_user) {
          create(:tool_member, :as_participant, tool: tool, user: user)
        }
        before(:each) do
          sign_in user
          get :show, params: { tool_type: 'assessment', tool_id: tool.id }
        end
        it {
          is_expected.to respond_with :ok
        }
        it {
          expect(json.length).to eq 1
        }
      end
      context 'when the user is a facilitator on the tool' do
        # Facilitator-only membership is not included in the listing.
        context 'when the user is not also a participant' do
          let!(:facilitator_user) {
            create(:tool_member, :as_facilitator, tool: tool, user: user)
          }
          before(:each) do
            sign_in user
            get :show, params: { tool_type: 'assessment', tool_id: tool.id }
          end
          it {
            is_expected.to respond_with :ok
          }
          it {
            expect(json.length).to eq 0
          }
        end
        # The combined role still surfaces the participant side of the record.
        context 'when the user is also a participant' do
          let!(:facilitator_user) {
            create(:tool_member, :as_facilitator_and_participant, tool: tool, user: user)
          }
          before(:each) do
            sign_in user
            get :show, params: { tool_type: 'assessment', tool_id: tool.id }
          end
          it {
            is_expected.to respond_with :ok
          }
          it {
            expect(json.length).to eq 1
          }
        end
      end
    end
  end
end
# DELETE #destroy removes a membership record. Only facilitators may delete,
# and two records are protected: the tool owner's membership and the
# requesting facilitator's own membership.
describe 'DELETE #destroy' do
  context 'when the user is not authenticated' do
    before(:each) do
      sign_out :user
      delete :destroy, params: { id: 0 }
    end
    it {
      is_expected.to respond_with :unauthorized
    }
  end
  context 'when the user is authenticated' do
    let(:user) {
      create(:user)
    }
    let(:tool) {
      create(:assessment, :with_owner)
    }
    # Participants cannot delete anyone, even another participant.
    context 'when the user is a participant on the tool' do
      let(:deleted_tool_member) {
        create(:tool_member, :as_participant, tool: tool)
      }
      let!(:tool_member) {
        create(:tool_member, :as_participant, tool: tool, user: user)
      }
      before(:each) do
        sign_in user
        delete :destroy, params: { id: deleted_tool_member.id }
      end
      it {
        is_expected.to respond_with :forbidden
      }
    end
    context 'when the user is a facilitator on the tool' do
      let!(:tool_member) {
        create(:tool_member, :as_facilitator, tool: tool, user: user)
      }
      context 'when the user being deleted is a participant' do
        let(:deleted_tool_member) {
          create(:tool_member, :as_participant, tool: tool)
        }
        # Capture the id eagerly (let!) so we can check the row is gone
        # after the request has destroyed the record.
        let!(:deleted_tool_member_id) {
          deleted_tool_member.id
        }
        before(:each) do
          sign_in user
          delete :destroy, params: { id: deleted_tool_member.id }
        end
        it {
          is_expected.to respond_with :no_content
        }
        it {
          expect(ToolMember.where(id: deleted_tool_member_id).size).to eq 0
        }
      end
      context 'when the user being deleted does not exist' do
        before(:each) do
          sign_in user
          delete :destroy, params: { id: -1 }
        end
        it {
          is_expected.to respond_with :not_found
        }
      end
      context 'when the user being deleted is a facilitator' do
        # Deleting another (non-owner) facilitator is allowed.
        context 'when the facilitator is not the current facilitator' do
          let(:deleted_tool_member) {
            create(:tool_member, :as_facilitator, tool: tool)
          }
          let!(:deleted_tool_member_id) {
            deleted_tool_member.id
          }
          before(:each) do
            sign_in user
            delete :destroy, params: { id: deleted_tool_member.id }
          end
          it {
            is_expected.to respond_with :no_content
          }
          it {
            expect(ToolMember.where(id: deleted_tool_member_id).size).to eq 0
          }
        end
        # The tool owner's membership is protected with an explanatory error.
        context 'when the facilitator is the tool owner' do
          let(:deleted_tool_member) {
            create(:tool_member, :as_facilitator, tool: tool, user: tool.owner)
          }
          before(:each) do
            sign_in user
            delete :destroy, params: { id: deleted_tool_member.id }
          end
          it {
            is_expected.to respond_with :bad_request
          }
          it {
            expect(json['errors']['base'][0]).to eq 'The owner may not be removed from this assessment.'
          }
        end
        # Facilitators may not remove themselves from the facilitator role.
        context 'when the facilitator is the current facilitator' do
          let(:deleted_tool_member) {
            tool_member
          }
          before(:each) do
            sign_in user
            delete :destroy, params: { id: deleted_tool_member.id }
          end
          it {
            is_expected.to respond_with :bad_request
          }
          it {
            expect(json['errors']['base'][0]).to eq 'You may not remove yourself from the facilitator role. Please ask another facilitator to handle this request.'
          }
        end
      end
    end
  end
end
# POST #request_access creates an AccessRequest for roles the user does not
# yet hold (role 0 = facilitator, role 1 = participant, per the expectations
# below) and enqueues a notification worker. Requesting a role the user
# already holds is rejected with 400 and an explanatory message.
describe 'POST #request_access' do
  context 'when the user is not authenticated' do
    before(:each) do
      sign_out :user
      post :request_access, params: {
        tool_type: 'Foo',
        tool_id: -1,
        access_request: {roles: [-2, -1]}
      }
    end
    it {
      is_expected.to respond_with :unauthorized
    }
  end
  context 'when the user is authenticated' do
    let(:user) {
      create(:user)
    }
    let(:tool) {
      create(:assessment)
    }
    # Role ids outside the known set are rejected before anything is created.
    context 'when the requested roles are invalid' do
      let!(:tool_member) {
        create(:tool_member, :as_participant, :as_assessment_member, user: user)
      }
      before(:each) do
        sign_in user
        post :request_access, params: {
          tool_type: tool.class.to_s,
          tool_id: tool.id,
          access_request: {roles: [-100, -101]}
        }
      end
      it {
        is_expected.to respond_with :bad_request
      }
      it {
        expect(json['errors']['base'][0]).to eq 'Invalid role(s) specified.'
      }
    end
    # NOTE(review): the membership here is on a *different* assessment
    # (:as_assessment_member factory trait), so the user has no membership
    # on `tool` itself — hence the request is accepted.
    context 'when the user is not a member of the tool' do
      let!(:tool_member) {
        create(:tool_member, :as_participant, :as_assessment_member, user: user)
      }
      before(:each) do
        sign_in user
        post :request_access, params: {
          tool_type: tool.class.to_s,
          tool_id: tool.id,
          access_request: {roles: [0]}
        }
      end
      it {
        is_expected.to respond_with :created
      }
      # A single notification job is enqueued with the new request's id.
      it {
        expect(ToolMemberAccessRequestNotificationWorker.jobs.size).to eq 1
      }
      it {
        expect(ToolMemberAccessRequestNotificationWorker.jobs.first['args'][0]).to eq assigns(:request).id
      }
      it {
        expect(assigns[:request].roles).to eq ['facilitator']
      }
    end
    context 'when the user is a member of the tool' do
      # Requesting a role already held -> 400 with the current levels listed.
      context 'when the user is already a participant' do
        let!(:tool_member) {
          create(:tool_member, :as_participant, tool: tool, user: user)
        }
        before(:each) do
          sign_in user
          post :request_access, params: {
            tool_type: tool.class.to_s,
            tool_id: tool.id,
            access_request: {roles: [1]}
          }
        end
        it {
          is_expected.to respond_with :bad_request
        }
        it {
          expect(json['errors']['base'][0]).to eq "Access for #{user.email} for #{tool.name} already exists at these levels: participant"
        }
      end
      # Requesting a role not yet held succeeds even for existing members.
      context 'when the user is already a participant but not a facilitator' do
        let!(:tool_member) {
          create(:tool_member, :as_participant, tool: tool, user: user)
        }
        before(:each) do
          sign_in user
          post :request_access, params: {
            tool_type: tool.class.to_s,
            tool_id: tool.id,
            access_request: {roles: [0]}
          }
        end
        it {
          is_expected.to respond_with :created
        }
        it {
          expect(ToolMemberAccessRequestNotificationWorker.jobs.size).to eq 1
        }
        it {
          expect(ToolMemberAccessRequestNotificationWorker.jobs.first['args'][0]).to eq assigns(:request).id
        }
        it {
          expect(assigns[:request].roles).to eq ['facilitator']
        }
      end
      context 'when the user is already a facilitator' do
        let!(:tool_member) {
          create(:tool_member, :as_facilitator, tool: tool, user: user)
        }
        before(:each) do
          sign_in user
          post :request_access, params: {
            tool_type: tool.class.to_s,
            tool_id: tool.id,
            access_request: {roles: [0]}
          }
        end
        it {
          is_expected.to respond_with :bad_request
        }
        it {
          expect(json['errors']['base'][0]).to eq "Access for #{user.email} for #{tool.name} already exists at these levels: facilitator"
        }
      end
      context 'when the user is already a facilitator but not a participant' do
        let!(:tool_member) {
          create(:tool_member, :as_facilitator, tool: tool, user: user)
        }
        before(:each) do
          sign_in user
          post :request_access, params: {
            tool_type: tool.class.to_s,
            tool_id: tool.id,
            access_request: {roles: [1]}
          }
        end
        it {
          is_expected.to respond_with :created
        }
        it {
          expect(ToolMemberAccessRequestNotificationWorker.jobs.size).to eq 1
        }
        it {
          expect(ToolMemberAccessRequestNotificationWorker.jobs.first['args'][0]).to eq assigns(:request).id
        }
        it {
          expect(assigns[:request].roles).to eq ['participant']
        }
      end
      # Holding both roles means there is nothing left to request.
      context 'when the user is already a facilitator and participant' do
        let!(:tool_member) {
          create(:tool_member, :as_facilitator_and_participant, tool: tool, user: user)
        }
        let(:roles) {
          MembershipHelper.humanize_roles(ToolMember.where(tool: tool, user: user).first.roles)
        }
        before(:each) do
          sign_in user
          post :request_access, params: {
            tool_type: tool.class.to_s,
            tool_id: tool.id,
            access_request: {roles: [0, 1]}
          }
        end
        it {
          is_expected.to respond_with :bad_request
        }
        it {
          expect(json['errors']['base'][0]).to eq(
            "Access for #{user.email} for #{tool.name} already exists at these levels: #{roles.join(', ')}"
          )
        }
      end
    end
  end
end
# POST #grant converts a pending AccessRequest into a membership. Only a
# facilitator may grant; on success the response echoes the new membership
# under the 'user' key and the AccessRequest row is deleted.
describe 'POST #grant' do
  context 'when the user is not authenticated' do
    before(:each) do
      sign_out :user
      post :grant, params: { tool_type: 'Foo', tool_id: -1, id: -1 }
    end
    it {
      is_expected.to respond_with :unauthorized
    }
  end
  context 'when the user is authenticated' do
    let(:user) {
      create(:user)
    }
    let(:tool) {
      create(:assessment, :with_owner)
    }
    # Participants cannot grant access at all.
    context 'when the user is a participant on the tool' do
      let!(:tool_member) {
        create(:tool_member, :as_participant, tool: tool, user: user)
      }
      before(:each) do
        sign_in user
        post :grant, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: -1 }
      end
      it {
        is_expected.to respond_with :forbidden
      }
    end
    context 'when the user is a facilitator on the tool' do
      let!(:tool_member) {
        create(:tool_member, :as_facilitator, tool: tool, user: user)
      }
      context 'when no access request exists' do
        before(:each) do
          sign_in user
          post :grant, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: 1 }
        end
        it {
          is_expected.to respond_with :not_found
        }
      end
      context 'when an access request exists' do
        context 'when the access request contains only a facilitator role' do
          let(:access_request) {
            create(:access_request, :with_facilitator_role, tool: tool)
          }
          # Capture the id eagerly so deletion can be verified afterwards.
          let!(:access_request_id) {
            access_request.id
          }
          before(:each) do
            sign_in user
            post :grant, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: access_request.id }
          end
          it {
            is_expected.to respond_with :ok
          }
          it {
            expect(json.size).to eq 1
          }
          it {
            expect(json['user']['tool_id']).to eq tool.id
          }
          it {
            expect(json['user']['tool_type']).to eq tool.class.to_s
          }
          it {
            expect(json['user']['roles'].include?(ToolMember.member_roles[:facilitator])).to be true
          }
          it {
            expect(json['user']['id']).to eq access_request.user.id
          }
          # The request record is consumed by the grant.
          it {
            expect(AccessRequest.find_by(id: access_request_id)).to be nil
          }
        end
        context 'when the access request contains only a participant role' do
          let(:access_request) {
            create(:access_request, :with_participant_role, tool: tool)
          }
          let!(:access_request_id) {
            access_request.id
          }
          before(:each) do
            sign_in user
            post :grant, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: access_request.id }
          end
          it {
            is_expected.to respond_with :ok
          }
          it {
            expect(json.size).to eq 1
          }
          it {
            expect(json['user']['tool_id']).to eq tool.id
          }
          it {
            expect(json['user']['tool_type']).to eq tool.class.to_s
          }
          it {
            expect(json['user']['roles'].include?(ToolMember.member_roles[:participant])).to be true
          }
          it {
            expect(json['user']['id']).to eq access_request.user.id
          }
          it {
            expect(AccessRequest.find_by(id: access_request_id)).to be nil
          }
        end
        # Both roles are granted at once from a combined request.
        context 'when the access request contains both a facilitator and a participant role' do
          let(:access_request) {
            create(:access_request, :with_both_roles, tool: tool)
          }
          let!(:access_request_id) {
            access_request.id
          }
          before(:each) do
            sign_in user
            post :grant, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: access_request.id }
          end
          it {
            is_expected.to respond_with :ok
          }
          it {
            expect(json['user']['tool_id']).to eq tool.id
          }
          it {
            expect(json['user']['tool_type']).to eq tool.class.to_s
          }
          it {
            expect(json['user']['roles'].include?(ToolMember.member_roles[:participant])).to be true
          }
          it {
            expect(json['user']['roles'].include?(ToolMember.member_roles[:facilitator])).to be true
          }
          it {
            expect(json['user']['id']).to eq access_request.user.id
          }
          it {
            expect(AccessRequest.find_by(id: access_request_id)).to be nil
          }
        end
      end
    end
  end
end
# POST #deny discards a pending AccessRequest without creating a membership.
# Facilitator-only, mirroring #grant's authorization rules.
describe 'POST #deny' do
  context 'when the user is not authenticated' do
    before(:each) do
      sign_out :user
      post :deny, params: { tool_type: 'Foo', tool_id: -1, id: -1 }
    end
    it {
      is_expected.to respond_with :unauthorized
    }
  end
  context 'when the user is authenticated' do
    let(:user) {
      create(:user)
    }
    let(:tool) {
      create(:assessment, :with_owner)
    }
    context 'when the user is a participant on the tool' do
      let!(:tool_member) {
        create(:tool_member, :as_participant, tool: tool, user: user)
      }
      before(:each) do
        sign_in user
        post :deny, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: -1 }
      end
      it {
        is_expected.to respond_with :forbidden
      }
    end
    context 'when the user is a facilitator on the tool' do
      let!(:tool_member) {
        create(:tool_member, :as_facilitator, tool: tool, user: user)
      }
      context 'when no access request exists' do
        before(:each) do
          sign_in user
          post :deny, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: 1 }
        end
        it {
          is_expected.to respond_with :not_found
        }
      end
      # A denied request is deleted and nothing is returned (204).
      context 'when an access request exists' do
        let(:access_request) {
          create(:access_request, :with_both_roles, tool: tool)
        }
        let!(:access_request_id) {
          access_request.id
        }
        before(:each) do
          sign_in user
          post :deny, params: { tool_type: tool.class.to_s, tool_id: tool.id, id: access_request.id }
        end
        it {
          is_expected.to respond_with :no_content
        }
        it {
          expect(AccessRequest.find_by(id: access_request_id)).to be nil
        }
      end
    end
  end
end
end
| 26.087805 | 166 | 0.515052 |
28247a8ec03396a9a7c369f19f0cbd37779bdb15
| 13,000 |
require 'test/unit'
require 'bigdecimal'
# Test::Unit suite exercising BigDecimal behavior, with several JRuby-specific
# regression tests (JRUBY-153, JRUBY-2272, JRUBY-5190, GH-644/648/2650/3527).
# NOTE(review): uses the legacy `BigDecimal.new` constructor throughout, which
# was removed in newer MRI releases — this suite targets older rubies; confirm
# the supported Ruby range before modernizing.
class TestBigDecimal < Test::Unit::TestCase
  # to_s format strings: default engineering notation, sign flag, and 'F'.
  def test_bad_to_s_format_strings
    bd = BigDecimal.new("1")
    assert_equal("0.1E1", bd.to_s)
    assert_equal("+0.1E1", bd.to_s("+-2"))
    assert_equal("0.23", BigDecimal.new("0.23").to_s("F"))
  end
  # BigDecimal instances do not allow singleton method definition.
  def test_no_singleton_methods_on_bigdecimal
    num = BigDecimal.new("0.001")
    assert_raise(TypeError) { class << num ; def amethod ; end ; end }
    assert_raise(TypeError) { def num.amethod ; end }
  end
  def test_can_instantiate_big_decimal
    assert_nothing_raised {BigDecimal.new("4")}
    assert_nothing_raised {BigDecimal.new("3.14159")}
  end
  def test_can_implicitly_instantiate_big_decimal
    # JRUBY-153 issues
    assert_nothing_raised {BigDecimal("4")}
    assert_nothing_raised {BigDecimal("3.14159")}
  end
  # Non-numeric leading text parses to zero rather than raising (old behavior).
  def test_alphabetic_args_return_zero
    assert_equal( BigDecimal("0.0"), BigDecimal("XXX"),
      'Big Decimal objects instanitiated with a value that starts
with a letter should have a value of 0.0' )
  end
  # Minimal object exposing #to_str, used to test implicit string conversion.
  class X
    def to_str; "3.14159" end
  end
  def test_can_accept_arbitrary_objects_as_arguments
    # as long as the object has a #to_str method...
    x = X.new
    assert_nothing_raised { BigDecimal.new(x) }
    assert_nothing_raised { BigDecimal(x) }
  end
  # Comparing a BigDecimal to an incomparable object raises ArgumentError
  # with a message naming both operand types.
  def test_cmp
    begin
      BigDecimal.new('10') < "foo"
    rescue ArgumentError => e
      assert_equal 'comparison of BigDecimal with String failed', e.message
    else
      fail 'expected cmp to fail'
    end
    begin
      BigDecimal.new('10') >= nil
    rescue ArgumentError => e
      assert_equal 'comparison of BigDecimal with nil failed', e.message
    else
      fail 'expected cmp to fail'
    end
  end
  # Coercible stand-in whose * and / return sentinel values (33 / 99) so the
  # tests can verify that BigDecimal/Float defer to #coerce.
  class MyNum
    def *(other)
      33
    end
    def /(other)
      99
    end
    def coerce(other)
      [MyNum.new, self]
    end
  end
  def test_coerce_div_mul
    require 'bigdecimal/util'
    assert_equal 33, BigDecimal.new('10') * MyNum.new
    assert_equal 99, 10.0 / MyNum.new
    assert_equal 99, 10.0.to_d / MyNum.new
  end
  require "bigdecimal/newton"
  include Newton
  # Function object for the Newton solver: f1 = x^2 + y^2 - 2, f2 = x - y,
  # whose root is (1, 1).
  class Function
    def initialize()
      @zero = BigDecimal::new("0.0")
      @one  = BigDecimal::new("1.0")
      @two  = BigDecimal::new("2.0")
      @ten  = BigDecimal::new("10.0")
      @eps  = BigDecimal::new("1.0e-16")
    end
    def zero;@zero;end
    def one ;@one ;end
    def two ;@two ;end
    def ten ;@ten ;end
    def eps ;@eps ;end
    def values(x) # <= defines functions solved
      f = []
      f1 = x[0]*x[0] + x[1]*x[1] - @two # f1 = x**2 + y**2 - 2 => 0
      f2 = x[0] - x[1]                  # f2 = x - y => 0
      f <<= f1
      f <<= f2
      f
    end
  end
  def test_newton_extension
    # NOTE(review): the result of BigDecimal::limit(100) is immediately
    # overwritten; the call is kept only for its precision side effect.
    f = BigDecimal::limit(100)
    f = Function.new
    x = [f.zero,f.zero]      # Initial values
    n = nlsolve(f,x)
    expected = [BigDecimal('0.1000000000262923315461642086010446338567975310185638386446002778855192224707966221794469725479649528E1'),
                BigDecimal('0.1000000000262923315461642086010446338567975310185638386446002778855192224707966221794469725479649528E1')]
    assert_equal expected, x
  end
  require "bigdecimal/math.rb"
  include BigMath
  # Arithmetic and rounding behavior, including mixed BigDecimal/Integer/Float
  # operands and every rounding mode constant.
  def test_math_extension
    expected = BigDecimal('0.31415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679821480865132823066453462141417033006060218E1')
    # this test fails under C Ruby
    # ruby 1.8.6 (2007-03-13 patchlevel 0) [i686-darwin8.9.1]
    assert_equal expected, PI(100)
    zero= BigDecimal("0")
    one = BigDecimal("1")
    two = BigDecimal("2")
    three = BigDecimal("3")
    assert_equal one * 1, one
    assert_equal one / 1, one
    assert_equal one + 1, two
    assert_equal one - 1, zero
    assert_equal zero, one % 1
    assert_equal one, three % two
    assert_equal BigDecimal("0.2"), BigDecimal("2.2") % two
    assert_equal BigDecimal("0.003"), BigDecimal("15.993") % BigDecimal("15.99")
    assert_equal 1*one, one
    assert_equal 1/one, one
    assert_equal 1+one, BigDecimal("2")
    assert_equal 1-one, BigDecimal("0")
    assert_equal one * 1.0, 1.0
    assert_equal one / 1.0, 1.0
    assert_equal one + 1.0, 2.0
    assert_equal one - 1.0, 0.0
    assert_equal 1.0*one, 1.0
    assert_equal 1.0/one, 1.0
    assert_equal 1.0+one, 2.0
    assert_equal 1.0-one, 0.0
    assert_equal("1.0", BigDecimal.new('1.0').to_s('F'))
    assert_equal("0.0", BigDecimal.new('0.0').to_s)
    assert_equal(BigDecimal("2"), BigDecimal("1.5").round)
    assert_equal(BigDecimal("15"), BigDecimal("15").round)
    assert_equal(BigDecimal("20"), BigDecimal("15").round(-1))
    assert_equal(BigDecimal("0"), BigDecimal("15").round(-2))
    assert_equal(BigDecimal("-10"), BigDecimal("-15").round(-1, BigDecimal::ROUND_CEILING))
    assert_equal(BigDecimal("10"), BigDecimal("15").round(-1, BigDecimal::ROUND_HALF_DOWN))
    assert_equal(BigDecimal("20"), BigDecimal("25").round(-1, BigDecimal::ROUND_HALF_EVEN))
    assert_equal(BigDecimal("15.99"), BigDecimal("15.993").round(2))
    assert_equal(BigDecimal("1"), BigDecimal("1.8").round(0, BigDecimal::ROUND_DOWN))
    assert_equal(BigDecimal("2"), BigDecimal("1.2").round(0, BigDecimal::ROUND_UP))
    assert_equal(BigDecimal("-1"), BigDecimal("-1.5").round(0, BigDecimal::ROUND_CEILING))
    assert_equal(BigDecimal("-2"), BigDecimal("-1.5").round(0, BigDecimal::ROUND_FLOOR))
    assert_equal(BigDecimal("-2"), BigDecimal("-1.5").round(0, BigDecimal::ROUND_FLOOR))
    assert_equal(BigDecimal("1"), BigDecimal("1.5").round(0, BigDecimal::ROUND_HALF_DOWN))
    assert_equal(BigDecimal("2"), BigDecimal("1.5").round(0, BigDecimal::ROUND_HALF_EVEN))
    assert_equal(BigDecimal("2"), BigDecimal("2.5").round(0, BigDecimal::ROUND_HALF_EVEN))
  end
  # Rounding NaN must not raise; it stays NaN for every precision.
  def test_round_nan
    nan = BigDecimal.new('NaN')
    assert nan.round.nan?  # nothing raised
    assert nan.round(0).nan?
    assert nan.round(2).nan?
  end
  def test_big_decimal_power
    require 'bigdecimal/math'
    n = BigDecimal("10")
    assert_equal(n.power(0), BigDecimal("1"))
    assert_equal(n.power(1), n)
    assert_equal(n.power(2), BigDecimal("100"))
    assert_equal(n.power(-1), BigDecimal("0.1"))
    n.power(1.1)
    begin
      n.power('1.1')
    rescue TypeError => e
      assert_equal 'wrong argument type String (expected scalar Numeric)', e.message
    else
      fail 'expected to raise TypeError'
    end
    assert_equal BigDecimal('0.1E2'), n.power(1.0)
    res = n.power(1.1)
    #assert_equal BigDecimal('0.125892541E2'), res
    # NOTE: we're not handling precision the same as MRI with pow
    assert_equal '0.125892541', res.to_s[0..10]
    assert_equal 'E2', res.to_s[-2..-1]
    res = 2 ** BigDecimal(1.2, 2)
    #assert_equal BigDecimal('0.229739671E1'), res
    # NOTE: we're not handling precision the same as MRI with pow
    assert_equal '0.22973967', res.to_s[0..9]
    assert_equal 'E1', res.to_s[-2..-1]
    res = BigDecimal(1.2, 2) ** 2.0
    assert_equal BigDecimal('0.144E1'), res
  end
  # Reset all exception modes so mode-twiddling tests don't leak state into
  # other tests in the suite.
  def teardown
    BigDecimal.mode(BigDecimal::EXCEPTION_OVERFLOW, false) rescue nil
    BigDecimal.mode(BigDecimal::EXCEPTION_NaN, false) rescue nil
    BigDecimal.mode(BigDecimal::EXCEPTION_INFINITY, false) rescue nil
  end
  def test_big_decimal_mode
    # Accept valid arguments to #mode
    assert BigDecimal.mode(BigDecimal::EXCEPTION_OVERFLOW)
    assert BigDecimal.mode(BigDecimal::EXCEPTION_OVERFLOW,true)
    assert BigDecimal.mode(BigDecimal::EXCEPTION_OVERFLOW,false)
    # Reject invalid arguments to #mode
    assert_raises(TypeError) { BigDecimal.mode(true) } # first argument must be a Fixnum
    assert_raises(ArgumentError) { BigDecimal.mode(BigDecimal::EXCEPTION_OVERFLOW, 1) } # second argument must be [true|false]
    assert_raises(TypeError) { BigDecimal.mode(512) } # first argument must be == 256, or return non-zero when AND-ed with 255
    # exception mode defaults to 0
    assert_equal 0, BigDecimal.mode(1) # value of first argument doesn't matter when retrieving the current exception mode, as long as it's a Fixnum <= 255
    # set and clear a single exception mode
    assert_equal BigDecimal::EXCEPTION_INFINITY, BigDecimal.mode(BigDecimal::EXCEPTION_INFINITY, true)
    assert_equal 0, BigDecimal.mode(BigDecimal::EXCEPTION_INFINITY, false)
    assert_equal BigDecimal::EXCEPTION_NaN, BigDecimal.mode(BigDecimal::EXCEPTION_NaN, true)
    assert_equal 0, BigDecimal.mode(BigDecimal::EXCEPTION_NaN, false)
    # set a composition of exception modes separately, make sure the final result is the composited value
    BigDecimal.mode(BigDecimal::EXCEPTION_INFINITY, true)
    BigDecimal.mode(BigDecimal::EXCEPTION_NaN, true)
    assert_equal BigDecimal::EXCEPTION_INFINITY | BigDecimal::EXCEPTION_NaN, BigDecimal.mode(1)
    # reset the exception mode to 0 for the following tests
    BigDecimal.mode(BigDecimal::EXCEPTION_INFINITY, false)
    BigDecimal.mode(BigDecimal::EXCEPTION_NaN, false)
    # set a composition of exception modes with one call and retrieve it using the retrieval idiom
    # note: this is to check compatibility with MRI, which currently sets only the last mode
    # it checks for
    BigDecimal.mode(BigDecimal::EXCEPTION_INFINITY | BigDecimal::EXCEPTION_NaN, true)
    assert_equal BigDecimal::EXCEPTION_NaN, BigDecimal.mode(1)
    # rounding mode defaults to BigDecimal::ROUND_HALF_UP
    assert_equal BigDecimal::ROUND_HALF_UP, BigDecimal.mode(BigDecimal::ROUND_MODE)
    # make sure each setting complete replaces any previous setting
    [BigDecimal::ROUND_UP, BigDecimal::ROUND_DOWN, BigDecimal::ROUND_CEILING, BigDecimal::ROUND_FLOOR,
     BigDecimal::ROUND_HALF_UP, BigDecimal::ROUND_HALF_DOWN, BigDecimal::ROUND_HALF_EVEN].each do |mode|
      assert_equal mode, BigDecimal.mode(BigDecimal::ROUND_MODE, mode)
    end
    # reset rounding mode to 0 for following tests
    BigDecimal.mode(BigDecimal::ROUND_MODE, BigDecimal::ROUND_HALF_UP)
    assert_raises(TypeError) { BigDecimal.mode(BigDecimal::ROUND_MODE, true) } # second argument must be a Fixnum
    assert_raises(ArgumentError) { BigDecimal.mode(BigDecimal::ROUND_MODE, 8) } # any Fixnum >= 8 should trigger this error, as the valid rounding modes are currently [0..6]
  end
  # Round-trip through Marshal preserves the numeric value.
  def test_marshaling
    f = 123.456
    bd = BigDecimal.new(f.to_s)
    bd_serialized = Marshal.dump(bd)
    assert_equal f, Marshal.restore(bd_serialized).to_f
  end
  #JRUBY-2272
  def test_marshal_regression
    assert_equal BigDecimal('0.0'), Marshal.load(Marshal.dump(BigDecimal.new('0.0')))
  end
  # Exponents far beyond Float range convert to +/-Infinity; tiny ones to ~0.
  def test_large_bigdecimal_to_f
    pos_inf = BigDecimal.new("5E69999999").to_f
    assert pos_inf.infinite?
    assert pos_inf > 0
    assert BigDecimal.new("0E69999999").to_f < Float::EPSILON
    neg_inf = BigDecimal.new("-5E69999999").to_f
    assert neg_inf.infinite?
    assert neg_inf < 0
    assert BigDecimal.new("5E-69999999").to_f < Float::EPSILON
  end
  def test_infinity
    assert_equal true, BigDecimal.new("0.0000000001").finite?
    #if RUBY_VERSION > '1.9'
    #  assert_raises(FloatDomainError) { BigDecimal("Infinity") }
    #  assert_raises(FloatDomainError) { BigDecimal("+Infinity") }
    #  assert_raises(FloatDomainError) { BigDecimal("-Infinity") }
    #else
    assert_equal 1, BigDecimal("Infinity").infinite?
    assert_equal false, BigDecimal("-Infinity").finite?
    assert_equal false, BigDecimal("+Infinity").finite?
    #end
    assert_raises(TypeError) { BigDecimal(:"+Infinity") }
    # Misspelled/lowercase infinity strings parse as zero rather than raising.
    assert_equal BigDecimal('0'), BigDecimal("infinity")
    assert_equal BigDecimal('0'), BigDecimal("+Infinit")
  end
  #JRUBY-5190
  def test_large_precisions
    a = BigDecimal("1").div(BigDecimal("3"), 307)
    b = BigDecimal("1").div(BigDecimal("3"), 308)
    assert_equal a.to_f, b.to_f
  end
  # GH-644, GH-648
  def test_div_by_float_precision_gh644
    a = BigDecimal.new(11023) / 2.2046
    assert_equal 5_000, a.to_f
  end
  def test_div_by_float_precision_gh648
    b = BigDecimal.new(1.05, 10) / 1.48
    assert (b.to_f - 0.7094594594594595) < Float::EPSILON
  end
  def test_GH_2650
    assert_equal(BigDecimal.new("10.91231", 1).to_f, 10.91231)
    assert_equal(BigDecimal.new("10.9", 2).to_f, 10.9)
  end
  # GH-3527: trailing junk after a parsable prefix is ignored.
  def test_tail_junk
    b = BigDecimal.new("5-6")
    assert_equal BigDecimal('5'), b
    b = BigDecimal.new("100+42")
    assert_equal 100, b.to_i
  end
  # Subclass overriding core behavior, used to verify subclass dispatch.
  # NOTE(review): `raise super(...)` in initialize looks intentional only for
  # rubies that skip initialize here (see comment below) — confirm before use.
  class BigDeci < BigDecimal
    # MRI does not invoke initialize on 1.8/1.9
    def initialize(arg); raise super(arg.to_s) end
    def abs; -super end
    def infinite?; false end
  end
  def test_subclass
    a = BigDeci.new 1.to_s
    assert_equal(-1, a.abs)
    assert_equal false, a.infinite?
    a = BigDeci.new '-100'
    assert_equal(-5, a.div(20))
    assert_equal(-100, a.abs)
    assert a.inspect.index('#<BigDecimal:')
    assert_equal '-0.1E3', a.to_s
    assert_equal BigDeci, a.class
    assert a.is_a?(BigDeci)
    assert a.kind_of?(BigDeci)
  end
end
| 33.591731 | 176 | 0.690538 |
0392da6699c925d31da60702638dd2072c4597e2
| 236 |
#!/usr/bin/env ruby
# Advent of Code 2021, day 1 (part 2): count how many sliding windows of
# three depth measurements sum to more than the previous window.

file_path = File.expand_path("../day-01-input.txt", __FILE__)
depths = File.read(file_path).split("\n").map(&:to_i)

# BUG FIX: the original used a `do...end` block, which binds to `puts`
# rather than `count` (do/end attaches to the outermost call), so `count`
# received no block and the script printed the total number of windows.
# A brace block binds to `count` as intended.
# Comparing sums (b + c + d) > (a + b + c) reduces to d > a.
puts depths.each_cons(4).count { |a, _b, _c, d| d > a }
| 21.454545 | 61 | 0.610169 |
f8140f4300e6dfea52dea8160638a2beb1b07a6d
| 24,594 |
require 'spec_helper'
require 'deep_merge'
describe 'zabbix::server' do
let :node do
'rspec.puppet.com'
end
on_supported_os(baseline_os_hash).each do |os, facts|
next if facts[:osfamily] == 'Archlinux' # zabbix server is currently not supported on archlinux
next if facts[:os]['name'] == 'windows'
context "on #{os} " do
# These OS families ship systemd-only packaging in this module's support
# matrix; everything else is treated as SysV-init here.
systemd_fact = case facts[:osfamily]
               when 'Archlinux', 'Fedora', 'Gentoo'
                 { systemd: true }
               else
                 { systemd: false }
               end
let :facts do
  facts.merge(systemd_fact)
end
# Version pinned for the with_zabbix_version expectations below.
zabbix_version = '5.0'
describe 'with default settings' do
  it { is_expected.to contain_class('zabbix::repo') }
  it { is_expected.to contain_class('zabbix::params') }
  it { is_expected.to contain_service('zabbix-server').with_ensure('running') }
  it { is_expected.to contain_zabbix__startup('zabbix-server') }
end
# SELinux handling and yum repos only apply to the RedHat family.
if facts[:osfamily] == 'RedHat'
  describe 'with enabled selinux' do
    let :params do
      {
        manage_selinux: true
      }
    end
    let :facts do
      facts.deep_merge(os: { selinux: { enabled: true } })
    end
    it { is_expected.to contain_selboolean('zabbix_can_network').with('value' => 'on', 'persistent' => true) }
  end
  describe 'with defaults' do
    it { is_expected.to contain_yumrepo('zabbix-nonsupported') }
    it { is_expected.to contain_yumrepo('zabbix') }
  end
end
describe 'with disabled selinux' do
  let :params do
    {
      manage_selinux: false
    }
  end
  it { is_expected.not_to contain_selboolean('zabbix_can_network').with('value' => 'on', 'persistent' => true) }
end
# The server package name and the config file's dependency both follow the
# chosen database backend.
describe 'with database_type as postgresql' do
  let :params do
    {
      database_type: 'postgresql',
      server_configfile_path: '/etc/zabbix/zabbix_server.conf',
      include_dir: '/etc/zabbix/zabbix_server.conf.d'
    }
  end
  it { is_expected.to contain_package('zabbix-server-pgsql').with_ensure('present') }
  it { is_expected.to contain_package('zabbix-server-pgsql').with_name('zabbix-server-pgsql') }
  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_require('Package[zabbix-server-pgsql]') }
end
describe 'with database_type as mysql' do
  let :params do
    {
      database_type: 'mysql'
    }
  end
  it { is_expected.to contain_package('zabbix-server-mysql').with_ensure('present') }
  it { is_expected.to contain_package('zabbix-server-mysql').with_name('zabbix-server-mysql') }
  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_require('Package[zabbix-server-mysql]') }
end
# Include directory should be available.
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf.d').with_ensure('directory') }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf.d').with_require('File[/etc/zabbix/zabbix_server.conf]') }
# Database-management classes receive the connection settings verbatim.
context 'with zabbix::database::postgresql class' do
  let :params do
    {
      database_type: 'postgresql',
      database_user: 'zabbix-server',
      database_password: 'zabbix-server',
      database_host: 'localhost',
      database_name: 'zabbix-server'
    }
  end
  it { is_expected.to contain_class('zabbix::database::postgresql').with_zabbix_type('server') }
  it { is_expected.to contain_class('zabbix::database::postgresql').with_zabbix_version(zabbix_version) }
  it { is_expected.to contain_class('zabbix::database::postgresql').with_database_name('zabbix-server') }
  it { is_expected.to contain_class('zabbix::database::postgresql').with_database_user('zabbix-server') }
  it { is_expected.to contain_class('zabbix::database::postgresql').with_database_password('zabbix-server') }
  it { is_expected.to contain_class('zabbix::database::postgresql').with_database_host('localhost') }
end
context 'with zabbix::database::mysql class' do
  let :params do
    {
      database_type: 'mysql',
      database_user: 'zabbix-server',
      database_password: 'zabbix-server',
      database_host: 'localhost',
      database_name: 'zabbix-server'
    }
  end
  it { is_expected.to contain_class('zabbix::database::mysql').with_zabbix_type('server') }
  it { is_expected.to contain_class('zabbix::database::mysql').with_zabbix_version(zabbix_version) }
  it { is_expected.to contain_class('zabbix::database::mysql').with_database_name('zabbix-server') }
  it { is_expected.to contain_class('zabbix::database::mysql').with_database_user('zabbix-server') }
  it { is_expected.to contain_class('zabbix::database::mysql').with_database_password('zabbix-server') }
  it { is_expected.to contain_class('zabbix::database::mysql').with_database_host('localhost') }
end
# So if manage_firewall is set to true, it should install
# the firewall rule.
context 'when declaring manage_firewall is true' do
  let :params do
    {
      manage_firewall: true
    }
  end
  it { is_expected.to contain_firewall('151 zabbix-server') }
end
context 'when declaring manage_firewall is false' do
  let :params do
    {
      manage_firewall: false
    }
  end
  it { is_expected.not_to contain_firewall('151 zabbix-server') }
end
# systemd platforms get a unit file (and the SysV script removed);
# SysV platforms get the init script only.
context 'it creates a startup script' do
  case facts[:osfamily]
  when 'Archlinux', 'Fedora', 'Gentoo'
    it { is_expected.to contain_file('/etc/init.d/zabbix-server').with_ensure('absent') }
    it { is_expected.to contain_file('/etc/systemd/system/zabbix-server.service').with_ensure('file') }
  else
    it { is_expected.to contain_file('/etc/init.d/zabbix-server').with_ensure('file') }
    it { is_expected.not_to contain_file('/etc/systemd/system/zabbix-server.service') }
  end
end
context 'when declaring manage_startup_script is false' do
  let :params do
    {
      manage_startup_script: false
    }
  end
  it { is_expected.not_to contain_zabbix__startup('zabbix-server') }
end
# If manage_service is true (default), it should create a service
# and ensure that it is running.
context 'when declaring manage_service is true' do
  let :params do
    {
      manage_service: true
    }
  end
  it { is_expected.to contain_service('zabbix-server').with_ensure('running') }
end
# When manage_service is false, the module must not manage the service.
context 'when declaring manage_service is false' do
  let :params do
    {
      manage_service: false
    }
  end
  it { is_expected.not_to contain_service('zabbix-server') }
end
context 'with all zabbix_server.conf-related parameters' do
let :params do
{
alertscriptspath: '${datadir}/zabbix/alertscripts',
allowroot: '1',
cachesize: '8M',
cacheupdatefrequency: '30',
database_host: 'localhost',
database_name: 'zabbix-server',
database_password: 'zabbix-server',
database_port: 3306,
database_schema: 'zabbix-server',
database_socket: '/tmp/socket.db',
database_user: 'zabbix-server',
database_tlsconnect: 'verify_ca',
database_tlscafile: '/etc/zabbix/ssl/ca.cert',
database_tlscertfile: '/etc/zabbix/ssl/cert.cert',
database_tlskeyfile: '/etc/zabbix/ssl/key.key',
database_tlscipher: 'TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256',
database_tlscipher13: 'TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256',
debuglevel: '3',
externalscripts: '/usr/lib/zabbix/externalscripts0',
fping6location: '/usr/sbin/fping6',
fpinglocation: '/usr/sbin/fping',
historycachesize: '4M',
housekeepingfrequency: '1',
include_dir: '/etc/zabbix/zabbix_server.conf.d',
javagateway: '192.168.2.2',
javagatewayport: '10052',
listenip: '192.168.1.1',
listenport: '10051',
loadmodulepath: '${libdir}/modules',
loadmodule: 'pizza',
logfilesize: '10',
logfile: '/var/log/zabbix/zabbix_server.log',
logtype: 'file',
logslowqueries: '0',
maxhousekeeperdelete: '500',
pidfile: '/var/run/zabbix/zabbix_server.pid',
proxyconfigfrequency: '3600',
proxydatafrequency: '1',
snmptrapperfile: '/tmp/zabbix_traps.tmp',
sourceip: '192.168.1.1',
sshkeylocation: '/home/zabbix',
startdbsyncers: '4',
startalerters: 10,
startdiscoverers: '1',
startescalators: 10,
starthttppollers: '1',
startipmipollers: '12',
startpingers: '1',
startpollers: '12',
startpollersunreachable: '1',
startpreprocessors: 10,
startproxypollers: '1',
startsnmptrapper: '1',
starttimers: '1',
starttrappers: '5',
startlldprocessors: 5,
startvmwarecollectors: '5',
timeout: '3',
tmpdir: '/tmp',
trappertimeout: '30',
trendcachesize: '4M',
unavailabledelay: '30',
unreachabledelay: '30',
unreachableperiod: '30',
valuecachesize: '4M',
vmwarecachesize: '8M',
vmwarefrequency: '60',
zabbix_version: '5.0',
tlsciphercert: 'EECDH+aRSA+AES128:RSA+aRSA+AES128',
tlsciphercert13: 'EECDH+aRSA+AES128:RSA+aRSA+AES128',
tlscipherpsk: 'kECDHEPSK+AES128:kPSK+AES128',
tlscipherpsk13: 'TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256',
tlscipherall: 'TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256',
tlscipherall13: 'EECDH+aRSA+AES128:RSA+aRSA+AES128:kECDHEPSK+AES128:kPSK+AES128'
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^AlertScriptsPath=\$\{datadir\}/zabbix/alertscripts} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^AllowRoot=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^CacheSize=8M} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^CacheUpdateFrequency=30} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBHost=localhost} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBName=zabbix-server} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBPassword=zabbix-server} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBPort=3306} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBSchema=zabbix-server} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBSocket=/tmp/socket.db} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBTLSConnect=verify_ca} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBTLSCAFile=/etc/zabbix/ssl/ca.cert} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBTLSCertFile=/etc/zabbix/ssl/cert.cert} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBTLSKeyFile=/etc/zabbix/ssl/key.key} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBTLSCipher=TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBTLSCipher13=TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DBUser=zabbix-server} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^DebugLevel=3} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ExternalScripts=/usr/lib/zabbix/externalscripts} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^Fping6Location=/usr/sbin/fping6} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^FpingLocation=/usr/sbin/fping} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^HistoryCacheSize=4M} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^HousekeepingFrequency=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^Include=/etc/zabbix/zabbix_server.conf.d} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^JavaGateway=192.168.2.2} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^JavaGatewayPort=10052} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ListenIP=192.168.1.1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ListenPort=10051$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LoadModulePath=\$\{libdir\}/modules} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LoadModule = pizza} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogFileSize=10} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogFile=/var/log/zabbix/zabbix_server.log} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogType=file} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogSlowQueries=0} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^MaxHousekeeperDelete=500} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^PidFile=/var/run/zabbix/zabbix_server.pid} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ProxyConfigFrequency=3600} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ProxyDataFrequency=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartLLDProcessors=5} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^SNMPTrapperFile=/tmp/zabbix_traps.tmp} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^SourceIP=192.168.1.1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^SSHKeyLocation=/home/zabbix} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartDBSyncers=4} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartAlerters=10} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartDiscoverers=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartEscalators=10} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartHTTPPollers=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartIPMIPollers=12} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartPingers=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartPollers=12} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartPollersUnreachable=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartPreprocessors=10} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartProxyPollers=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartSNMPTrapper=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartTimers=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartTrappers=5} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartVMwareCollectors=5} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^Timeout=3} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TmpDir=/tmp} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TrapperTimeout=30} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TrendCacheSize=4M} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^UnavailableDelay=30} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^UnreachableDelay=30} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^UnreachablePeriod=30} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ValueCacheSize=4M} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^VMwareCacheSize=8M} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^VMwareFrequency=60} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TLSCipherCert=EECDH\+aRSA\+AES128:RSA\+aRSA\+AES128$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TLSCipherCert13=EECDH\+aRSA\+AES128:RSA\+aRSA\+AES128$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TLSCipherPSK=kECDHEPSK\+AES128:kPSK\+AES128$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TLSCipherPSK13=TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TLSCipherAll=TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^TLSCipherAll13=EECDH\+aRSA\+AES128:RSA\+aRSA\+AES128:kECDHEPSK\+AES128:kPSK\+AES128$} }
end
context 'with zabbix_server.conf and version 5.0' do
let :params do
{
socketdir: '/var/run/zabbix',
zabbix_version: '5.0'
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^SocketDir=/var/run/zabbix} }
end
context 'with zabbix_server.conf and version 6.0' do
let :params do
{
servicemanagersyncfrequency: 2,
problemhousekeepingfrequency: 2,
startodbcpollers: 2,
zabbix_version: '6.0'
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ServiceManagerSyncFrequency=2} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ProblemHousekeepingFrequency=2} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartODBCPollers=2} }
end
# Fixed: this context previously declared `let :param` (singular), so the
# parameters were never passed to the catalog under test, and the hash
# literal was missing commas after three entries — a Ruby syntax error that
# prevented this spec file from loading at all.
context 'with zabbix_server.conf and version 6.0 ha' do
  let :params do
    {
      servicemanagersyncfrequency: 2,
      problemhousekeepingfrequency: 2,
      startodbcpollers: 2,
      zabbix_version: '6.0',
      hanodename: 'node1',
      nodeaddress: 'localhost:10051'
    }
  end

  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ServiceManagerSyncFrequency=2} }
  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^ProblemHousekeepingFrequency=2} }
  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartODBCPollers=2} }
  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^HANodeName=node1} }
  it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^NodeAddress=localhost:10051} }
end
context 'with zabbix_server.conf and logtype declared' do
describe 'as system' do
let :params do
{
logtype: 'system'
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogType=system$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').without_content %r{^LogFile=} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').without_content %r{^LogFileSize=} }
end
describe 'as console' do
let :params do
{
logtype: 'console'
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogType=console$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').without_content %r{^LogFile=} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').without_content %r{^LogFileSize=} }
end
describe 'as file' do
let :params do
{
logtype: 'file'
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogType=file$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogFile=/var/log/zabbix/zabbix_server.log$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^LogFileSize=10$} }
end
end
# Zabbix Server 5.2 is not supported on RedHat family and Debian 11
if facts[:osfamily] != 'RedHat' && facts[:os]['release']['major'] != '11'
describe 'with zabbix_version 5.2 and Vault parameters defined' do
let :params do
{
zabbix_version: '5.2',
vaultdbpath: 'secret/zabbix/database',
vaulttoken: 'FKTYPEGL156DK',
vaulturl: 'https://127.0.0.1:8200',
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^VaultDBPath=secret/zabbix/database$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^VaultToken=FKTYPEGL156DK$} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^VaultURL=https://127.0.0.1:8200$} }
end
describe 'with zabbix_version 5.4 and report parameters defined' do
let :params do
{
zabbix_version: '5.4',
startreportwriters: 1,
webserviceurl: 'http://localhost:10053/report',
}
end
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^StartReportWriters=1} }
it { is_expected.to contain_file('/etc/zabbix/zabbix_server.conf').with_content %r{^WebServiceURL=http://localhost:10053/report} }
end
end
end
end
end
| 52.551282 | 183 | 0.65146 |
33b7e89b14f1f28258f7161f4b9efa7c4f098696
| 1,343 |
#
# EDOS/src/REI/mixin.rb
# by IceDragon
# dc 19/05/2013
# dm 19/05/2013
# vr 1.0.0
module REI
  module Mixin
    ##
    # Class-level registry mixin: a class extends this module and calls
    # +rei_register+ to claim a unique component name.
    module REIComponent
      # NOTE(review): class variable, so the registry is shared across
      # every class that extends this module — a single global registry.
      @@component = {}

      # Registers the extending class under +name+.
      # Raises ArgumentError if +name+ was already claimed by another class.
      def rei_register(name)
        if klass = @@component[name]
          raise ArgumentError, "#{self} cannot be registered. #{name} was registered as #{klass}"
        end
        @@component[name] = self
        type name # Ygg4::Component
      end

      # Looks up a registered component class by +name+.
      # Uses Hash#fetch, so an unregistered name raises (KeyError on
      # modern Rubies) rather than returning nil.
      def self.[](name)
        @@component.fetch(name)
      end
    end

    ##
    # Allows the Component to register for event sends
    module EventClient
      # Subscribes +self+ as a listener for events of +type+.
      # Assumes the including class provides #comp to resolve the
      # :event_server component — TODO confirm against includers.
      def listen(type)
        evs = comp(:event_server)
        evs.add_listener(self, type)
      end

      # Abstract event handler; subclasses must override.
      # NOTE(review): the name is misspelled ("recieve"), but it is public
      # interface — callers presumably use the same spelling, so it is
      # deliberately left unchanged here.
      def recieve(event)
        raise RuntimeError, "abstract method #recieve called for #{self}"
      end
    end

    ##
    # REI::Mixin::UnitHost
    # Used for windows, shells, and any other class which can have a unit
    # as an attribute
    module UnitHost
      # The hosted unit (may be nil if never assigned).
      def unit
        @unit
      end

      # Convenience accessor; raises NoMethodError when no unit is set.
      def entity
        @unit.entity
      end

      # Assigns +new_unit+ and fires #on_unit_change only when the value
      # actually changed. Returns self to allow chaining.
      def set_unit(new_unit)
        if @unit != new_unit
          @unit = new_unit
          on_unit_change
        end
        self
      end

      # Plain writer. NOTE(review): unlike #set_unit, this does NOT
      # trigger #on_unit_change — callers wanting the hook must use
      # #set_unit.
      def unit=(new_unit)
        @unit = new_unit
      end

      # Hook invoked by #set_unit when the unit changes.
      def on_unit_change
        # overwrite in subclass
      end
    end
  end
end
| 18.39726 | 97 | 0.557707 |
b9ed01128568ae42f89cb450057dd0cb023b18ee
| 5,354 |
require "active_support/core_ext/integer/time"

# Production-environment configuration (standard Rails-generated file).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  # NOTE(review): :local storage does not survive ephemeral hosts (e.g. Heroku
  # dynos); switch to a cloud service if deploying there.
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Include generic and useful information about system operation, but avoid logging too much
  # information to avoid inadvertent exposure of personally identifiable information (PII).
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "rails_with_active_admin_app_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Log disallowed deprecations.
  config.active_support.disallowed_deprecation = :log

  # Tell Active Support which deprecation messages to disallow.
  config.active_support.disallowed_deprecation_warnings = []

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require "syslog/logger"
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT when requested (typical for containerized deployments).
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Inserts middleware to perform automatic connection switching.
  # The `database_selector` hash is used to pass options to the DatabaseSelector
  # middleware. The `delay` is used to determine how long to wait after a write
  # to send a subsequent read to the primary.
  #
  # The `database_resolver` class is used by the middleware to determine which
  # database is appropriate to use based on the time delay.
  #
  # The `database_resolver_context` class is used by the middleware to set
  # timestamps for the last write to the primary. The resolver uses the context
  # class timestamps to determine how long to wait before reading from the
  # replica.
  #
  # By default Rails will store a last write timestamp in the session. The
  # DatabaseSelector middleware is designed as such you can define your own
  # strategy for connection switching and pass that into the middleware through
  # these configuration options.
  # config.active_record.database_selector = { delay: 2.seconds }
  # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
  # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.247934 | 114 | 0.766156 |
9151abaf5668c6ca7852ed11d3c5da663a6d2c43
| 409 |
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
# Chef cookbook metadata for the fb_helpers cookbook.
name 'fb_helpers'
maintainer 'Facebook'
maintainer_email '[email protected]'
license 'BSD'
description 'Helper methods for Facebook open-source cookbooks'
source_url 'https://github.com/facebook/chef-cookbooks/'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
# never EVER change this number, ever.
version '0.1.0'
| 37.181818 | 72 | 0.792176 |
ac9068d330a441238fcf30e69dbb5f5b9e351516
| 814 |
# Homebrew formula for D. J. Bernstein's daemontools service-management suite.
class Daemontools < Formula
  desc "Collection of tools for managing UNIX services"
  homepage "http://cr.yp.to/daemontools.html"
  url "http://cr.yp.to/daemontools/daemontools-0.76.tar.gz"
  sha256 "a55535012b2be7a52dcd9eccabb9a198b13be50d0384143bd3b32b8710df4c1f"

  bottle do
    cellar :any
    sha256 "76b0d6f7594ee7283cd9f03d081d2b05091d9e955c8a5742b3d4f182b560ec63" => :yosemite
    sha256 "36deeab31e18b85393d47feb9f426c352a348796985c3a1e84ad2ceb658999e1" => :mavericks
    sha256 "a4b87a1af9856c1287a822b630f045b64466e074135144eb4aaf3039a5af92c1" => :mountain_lion
  end

  def install
    # The tarball unpacks into a versioned subdirectory; build there and
    # install all of the resulting commands.
    cd "daemontools-#{version}" do
      system "package/compile"
      bin.install Dir["command/*"]
    end
  end

  test do
    # Parenthesized regexp: a bare `assert_match /re/, ...` triggers Ruby's
    # "ambiguous first argument" warning (RuboCop Lint/AmbiguousRegexpLiteral).
    assert_match(/Homebrew/, shell_output("#{bin}/softlimit -t 1 echo 'Homebrew'"))
  end
end
017aeed43dd97a93fd1ab3582330720552732204
| 703 |
=begin
Copyright (c) 2015, Koichiro Tamura/Toshinari Naminatsu/Yuki Ishikawa/Tatsuya Onishi/Mamoru Endo/Kenji Suzuki
This software is released under the BSD 2-Clause License.
http://opensource.org/licenses/BSD-2-Clause
=end
require File.dirname(__FILE__) + '/../test_helper'
require 'zemi_controller'
# Re-raise errors caught by the controller so failures surface in the test
# run instead of being swallowed and rendered as error responses.
class ZemiController; def rescue_action(e) raise e end; end
# Functional tests for ZemiController (legacy Rails Test::Unit style).
class ZemiControllerTest < Test::Unit::TestCase
  # Build a fresh controller plus mock request/response before each test.
  def setup
    @controller = ZemiController.new
    @request = ActionController::TestRequest.new
    @response = ActionController::TestResponse.new
  end

  # Replace this with your real tests.
  # Smoke test that keeps the suite green until real tests exist.
  def test_truth
    assert true
  end
end
| 24.241379 | 109 | 0.758179 |
abd9e8916fb1a330fbc6ee94789cd6e84428fe77
| 56,527 |
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Spree::Order, type: :model do
let(:store) { create(:store) }
let(:user) { create(:user, email: "[email protected]") }
let(:order) { create(:order, user: user, store: store) }
let(:promotion) do
FactoryBot.create(
:promotion,
:with_order_adjustment,
code: "discount"
)
end
let(:code) { promotion.codes.first }
describe '#finalize!' do
context 'with event notifications' do
it 'sends an email' do
expect(Spree::Config.order_mailer_class).to receive(:confirm_email).and_call_original
order.finalize!
end
it 'marks the order as confirmation_delivered' do
expect do
order.finalize!
end.to change(order, :confirmation_delivered).to true
end
# These specs show how notifications can be removed, one at a time or
# all the ones set by MailerSubscriber module
context 'when removing the default email notification subscription' do
before do
Spree::Event.unsubscribe Spree::MailerSubscriber.order_finalized_handler
end
after do
Spree::MailerSubscriber.subscribe!
end
it 'does not send the email' do
expect(Spree::Config.order_mailer_class).not_to receive(:confirm_email)
order.finalize!
end
end
context 'when removing all the email notification subscriptions' do
before do
Spree::MailerSubscriber.unsubscribe!
end
after do
Spree::MailerSubscriber.subscribe!
end
it 'does not send the email' do
expect(Spree::Config.order_mailer_class).not_to receive(:confirm_email)
order.finalize!
end
end
end
end
context '#store' do
it { is_expected.to respond_to(:store) }
context 'when there is no store assigned' do
subject { Spree::Order.new }
context 'when there is no default store' do
it "will not be valid" do
expect(subject).not_to be_valid
end
end
context "when there is a default store" do
let!(:store) { create(:store) }
it { is_expected.to be_valid }
end
end
context 'when a store is assigned' do
subject { Spree::Order.new(store: create(:store)) }
it { is_expected.to be_valid }
end
end
describe "#cancel!" do
subject { order.cancel! }
context "with captured store credit" do
let!(:store_credit_payment_method) { create(:store_credit_payment_method) }
let(:order_total) { 500.00 }
let(:store_credit) { create(:store_credit, amount: order_total) }
let(:order) { create(:order_with_line_items, user: store_credit.user, line_items_price: order_total) }
before do
order.add_store_credit_payments
order.finalize!
order.capture_payments!
end
it "cancels the order" do
expect{ subject }.to change{ order.can_cancel? }.from(true).to(false)
expect(order).to be_canceled
end
it 'should save canceled_at' do
subject
expect(order.reload.canceled_at).to_not be_nil
end
it "places the order into the canceled scope" do
expect{ subject }.to change{ Spree::Order.canceled.include?(order) }.from(false).to(true)
end
it "removes the order from the not_canceled scope" do
expect{ subject }.to change{ Spree::Order.not_canceled.include?(order) }.from(true).to(false)
end
end
context "with fully refunded payment" do
let(:order) { create(:completed_order_with_totals) }
let(:payment_amount) { 50 }
let(:payment) { create(:payment, order: order, amount: payment_amount, state: 'completed') }
before do
create(:refund, payment: payment, amount: payment_amount)
end
it "cancels the order" do
expect{ subject }.to change{ order.can_cancel? }.from(true).to(false)
expect(order).to be_canceled
end
end
end
context "#canceled_by" do
let(:admin_user) { create :admin_user }
let(:order) { create :order }
before do
allow(order).to receive(:cancel!)
end
subject { order.canceled_by(admin_user) }
it 'should cancel the order' do
expect(order).to receive(:cancel!)
subject
end
it 'should save canceler_id' do
subject
expect(order.reload.canceler_id).to eq(admin_user.id)
end
it 'should have canceler' do
subject
expect(order.reload.canceler).to eq(admin_user)
end
end
context "#create" do
let!(:store) { create :store }
let(:order) { Spree::Order.create }
it "should assign an order number" do
expect(order.number).not_to be_nil
end
it 'should create a randomized 22 character token' do
expect(order.guest_token.size).to eq(22)
end
end
context "creates shipments cost" do
  let(:shipment) { double }

  before { allow(order).to receive_messages shipments: [shipment] }

  it "update and persist totals" do
    expect(order.updater).to receive :update
    # set_shipments_cost is deprecated upstream; silence the warning while
    # still asserting that it delegates to the order updater.
    Spree::Deprecation.silence do
      order.set_shipments_cost
    end
  end
end
context "insufficient_stock_lines" do
let(:line_item) { mock_model Spree::LineItem, insufficient_stock?: true }
before { allow(order).to receive_messages(line_items: [line_item]) }
it "should return line_item that has insufficient stock on hand" do
expect(order.insufficient_stock_lines.size).to eq(1)
expect(order.insufficient_stock_lines.include?(line_item)).to be true
end
end
describe '#ensure_line_item_variants_are_not_deleted' do
subject { order.ensure_line_item_variants_are_not_deleted }
let(:order) { create :order_with_line_items }
context 'when variant is destroyed' do
before do
allow(order).to receive(:restart_checkout_flow)
order.line_items.first.variant.discard
end
it 'should restart checkout flow' do
expect(order).to receive(:restart_checkout_flow).once
subject
end
it 'should have error message' do
subject
expect(order.errors[:base]).to include(I18n.t('spree.deleted_variants_present'))
end
it 'should be false' do
expect(subject).to be_falsey
end
end
context 'when no variants are destroyed' do
it 'should not restart checkout' do
expect(order).to receive(:restart_checkout_flow).never
subject
end
it 'should be true' do
expect(subject).to be_truthy
end
end
end
context "empty!" do
let!(:order) { create(:order) }
before do
create(:line_item, order: order)
create(:shipment, order: order)
create(:adjustment, adjustable: order, order: order)
promotion.activate(order: order, promotion_code: code)
order.recalculate
# Make sure we are asserting changes
expect(order.line_items).not_to be_empty
expect(order.shipments).not_to be_empty
expect(order.adjustments).not_to be_empty
expect(order.promotions).not_to be_empty
expect(order.item_total).not_to eq 0
expect(order.item_count).not_to eq 0
expect(order.shipment_total).not_to eq 0
expect(order.adjustment_total).not_to eq 0
end
it "clears out line items, adjustments and update totals" do
order.empty!
expect(order.line_items).to be_empty
expect(order.shipments).to be_empty
expect(order.adjustments).to be_empty
expect(order.promotions).to be_empty
expect(order.item_total).to eq 0
expect(order.item_count).to eq 0
expect(order.shipment_total).to eq 0
expect(order.adjustment_total).to eq 0
end
end
context '#outstanding_balance' do
let(:order) { create(:order_ready_to_ship, line_items_count: 3) }
let(:payment) { order.payments.first }
it "should handle refunds properly" do
order.cancellations.short_ship([order.inventory_units.first])
expect(order.outstanding_balance).to be_negative
expect(order.payment_state).to eq('credit_owed')
create(:refund, amount: order.outstanding_balance.abs, payment: payment, transaction_id: nil)
order.reload
expect(order.outstanding_balance).to eq(0)
expect(order.payment_state).to eq('paid')
end
end
context "#display_outstanding_balance" do
it "returns the value as a spree money" do
allow(order).to receive(:outstanding_balance) { 10.55 }
expect(order.display_outstanding_balance).to eq(Spree::Money.new(10.55))
end
end
context "#display_item_total" do
it "returns the value as a spree money" do
allow(order).to receive(:item_total) { 10.55 }
expect(order.display_item_total).to eq(Spree::Money.new(10.55))
end
end
context "#display_adjustment_total" do
it "returns the value as a spree money" do
order.adjustment_total = 10.55
expect(order.display_adjustment_total).to eq(Spree::Money.new(10.55))
end
end
context "#display_total" do
it "returns the value as a spree money" do
order.total = 10.55
expect(order.display_total).to eq(Spree::Money.new(10.55))
end
end
context "#currency" do
context "when object currency is ABC" do
before { order.currency = "ABC" }
it "returns the currency from the object" do
expect(order.currency).to eq("ABC")
end
end
context "when object currency is nil" do
before { order.currency = nil }
it "returns the globally configured currency" do
expect(order.currency).to eq("USD")
end
end
end
# #merge! should fold order2's line items into order1 and destroy order2.
# The merger implementation is swappable via Spree::Config.order_merger_class.
describe '#merge!' do
let(:order1) { create(:order_with_line_items) }
let(:order2) { create(:order_with_line_items) }
it 'merges the orders' do
order1.merge!(order2)
expect(order1.line_items.count).to eq(2)
expect(order2.destroyed?).to be_truthy
end
describe 'order_merger_class customization' do
before do
# Stub merger that just echoes its arguments so we can assert delegation.
class TestOrderMerger
def initialize(order)
@order = order
end
def merge!(other_order, user = nil)
[@order, other_order, user]
end
end
Spree::Config.order_merger_class = TestOrderMerger
end
let(:user) { build(:user) }
it 'uses the configured order merger' do
expect(order1.merge!(order2, user)).to eq([order1, order2, user])
end
end
end
# Legacy (deprecated) update hooks registered on the class should still fire
# during #recalculate and #finalize!. partial_double_verification is disabled
# because :foo is not a real Order method.
context ".register_update_hook", partial_double_verification: false do
let(:order) { create(:order) }
before do
allow(Spree::Deprecation).to receive(:warn)
Spree::Order.register_update_hook :foo
end
# Clear the class-level registry to avoid leaking into other examples.
after { Spree::Order.update_hooks.clear }
it "calls hooks during #recalculate" do
expect(order).to receive :foo
order.recalculate
end
it "calls hook during #finalize!" do
expect(order).to receive :foo
order.finalize!
end
end
# #ensure_updated_shipments should rebuild shipments (and reset the order to
# the cart state) while the order is still in a pre-complete checkout state,
# but must leave everything untouched once any shipment is ready/shipped or
# the order is complete.
context "ensure shipments will be updated" do
subject(:order) { create :order }
before do
Spree::Shipment.create!(order: order)
end
['payment', 'confirm'].each do |order_state|
context "when the order is in the #{order_state} state" do
before do
order.state = order_state
order.shipments.create!
end
it "destroys current shipments" do
order.ensure_updated_shipments
expect(order.shipments).to be_empty
end
it "puts the order back in the cart state" do
order.ensure_updated_shipments
expect(order.state).to eql "cart"
end
it "resets shipment_total" do
order.update_column(:shipment_total, 5)
order.ensure_updated_shipments
expect(order.shipment_total).to eq(0)
end
it "does nothing if any shipments are ready" do
shipment = create(:shipment, order: subject, state: "ready")
expect { subject.ensure_updated_shipments }.not_to change { subject.reload.shipments.pluck(:id) }
expect { shipment.reload }.not_to raise_error
end
it "does nothing if any shipments are shipped" do
shipment = create(:shipment, order: subject, state: "shipped")
expect { subject.ensure_updated_shipments }.not_to change { subject.reload.shipments.pluck(:id) }
expect { shipment.reload }.not_to raise_error
end
end
end
context 'when the order is in address state' do
before do
order.state = 'address'
order.shipments.create!
end
it "destroys current shipments" do
order.ensure_updated_shipments
expect(order.shipments).to be_empty
end
it "resets shipment_total" do
order.update_column(:shipment_total, 5)
order.ensure_updated_shipments
expect(order.shipment_total).to eq(0)
end
it "puts the order in the cart state" do
order.ensure_updated_shipments
expect(order.state).to eq "cart"
end
end
context 'when the order is completed' do
before do
order.state = 'complete'
order.completed_at = Time.current
order.update_column(:shipment_total, 5)
order.shipments.create!
end
it "does not destroy the current shipments" do
expect {
order.ensure_updated_shipments
}.not_to change { order.shipments }
end
it "does not reset the shipment total" do
expect {
order.ensure_updated_shipments
}.not_to change { order.shipment_total }
end
it "does not put the order back in the address state" do
expect {
order.ensure_updated_shipments
}.not_to change { order.state }
end
end
context "except when order is completed, that's OrderInventory job" do
it "doesn't touch anything" do
allow(order).to receive_messages completed?: true
order.update_column(:shipment_total, 5)
order.shipments.create!
expect {
order.ensure_updated_shipments
}.not_to change { order.shipment_total }
expect {
order.ensure_updated_shipments
}.not_to change { order.shipments }
expect {
order.ensure_updated_shipments
}.not_to change { order.state }
end
end
end
# #tax_address picks the shipping or billing address based on the
# tax_using_ship_address preference; with no addresses present it falls back
# to the store's default cart tax location regardless of the preference.
describe "#tax_address" do
let(:order) { build(:order, ship_address: ship_address, bill_address: bill_address, store: store) }
let(:store) { build(:store) }
before { stub_spree_preferences(tax_using_ship_address: tax_using_ship_address) }
subject { order.tax_address }
context "when the order has no addresses" do
let(:ship_address) { nil }
let(:bill_address) { nil }
context "when tax_using_ship_address is true" do
let(:tax_using_ship_address) { true }
it 'returns the stores default cart tax location' do
expect(subject).to eq(store.default_cart_tax_location)
end
end
context "when tax_using_ship_address is not true" do
let(:tax_using_ship_address) { false }
it 'returns the stores default cart tax location' do
expect(subject).to eq(store.default_cart_tax_location)
end
end
end
context "when the order has addresses" do
let(:ship_address) { build(:address) }
let(:bill_address) { build(:address) }
context "when tax_using_ship_address is true" do
let(:tax_using_ship_address) { true }
it 'returns ship_address' do
expect(subject).to eq(order.ship_address)
end
end
context "when tax_using_ship_address is not true" do
let(:tax_using_ship_address) { false }
it "returns bill_address" do
expect(subject).to eq(order.bill_address)
end
end
end
end
# #restart_checkout_flow should rewind the order to the first checkout step
# (address), except that an order already in the cart — or one without line
# items — ends up in (or stays in) the cart state.
describe "#restart_checkout_flow" do
context "when in cart state" do
let(:order) { create(:order_with_totals, state: "cart") }
it "remains in cart state" do
expect { order.restart_checkout_flow }.not_to change { order.state }
end
end
it "updates the state column to the first checkout_steps value" do
order = create(:order_with_totals, state: "delivery")
expect(order.checkout_steps).to eql %w(address delivery payment confirm complete)
expect{ order.restart_checkout_flow }.to change{ order.state }.from("delivery").to("address")
end
context "without line items" do
it "updates the state column to cart" do
order = create(:order, state: "delivery")
expect{ order.restart_checkout_flow }.to change{ order.state }.from("delivery").to("cart")
end
end
end
# Regression tests for https://github.com/spree/spree/issues/4072
# The (deprecated) #state_changed should record a StateChange row only when
# the named state attribute actually changed.
context "#state_changed" do
let(:order) { FactoryBot.create(:order) }
it "logs state changes" do
# Persist 'balance_due', then change the in-memory value so a dirty
# change is visible to #state_changed.
order.update_column(:payment_state, 'balance_due')
order.payment_state = 'paid'
expect(order.state_changes).to be_empty
Spree::Deprecation.silence do
order.state_changed('payment')
end
state_change = order.state_changes.find_by(name: 'payment')
expect(state_change.previous_state).to eq('balance_due')
expect(state_change.next_state).to eq('paid')
end
it "does not do anything if state does not change" do
order.update_column(:payment_state, 'balance_due')
expect(order.state_changes).to be_empty
Spree::Deprecation.silence do
order.state_changed('payment')
end
expect(order.state_changes).to be_empty
end
end
# Regression test for https://github.com/spree/spree/issues/4199
# #available_payment_methods should return active, user-facing payment
# methods (deduplicated, ordered by position) and, when the order's store has
# its own payment methods, restrict the result to that store's methods.
context "#available_payment_methods" do
it "includes frontend payment methods" do
payment_method = Spree::PaymentMethod::Check.create!({
name: "Fake",
active: true,
available_to_users: true,
available_to_admin: false
})
expect(order.available_payment_methods).to include(payment_method)
end
it "includes 'both' payment methods" do
payment_method = Spree::PaymentMethod::Check.create!({
name: "Fake",
active: true,
available_to_users: true,
available_to_admin: true
})
expect(order.available_payment_methods).to include(payment_method)
end
it "does not include a payment method twice" do
payment_method = Spree::PaymentMethod::Check.create!({
name: "Fake",
active: true,
available_to_users: true,
available_to_admin: true
})
expect(order.available_payment_methods.count).to eq(1)
expect(order.available_payment_methods).to include(payment_method)
end
it "does not include inactive payment methods" do
Spree::PaymentMethod::Check.create!({
name: "Fake",
active: false,
available_to_users: true,
available_to_admin: true
})
expect(order.available_payment_methods.count).to eq(0)
end
context "with more than one payment method" do
subject { order.available_payment_methods }
let!(:first_method) {
FactoryBot.create(:payment_method, available_to_users: true,
available_to_admin: true)
}
let!(:second_method) {
FactoryBot.create(:payment_method, available_to_users: true,
available_to_admin: true)
}
before do
second_method.move_to_top
end
it "respects the order of methods based on position" do
expect(subject).to eq([second_method, first_method])
end
end
context 'when the order has a store' do
let(:order) { create(:order) }
let!(:store_with_payment_methods) do
create(:store,
payment_methods: [payment_method_with_store])
end
let!(:payment_method_with_store) { create(:payment_method) }
let!(:store_without_payment_methods) { create(:store) }
let!(:payment_method_without_store) { create(:payment_method) }
context 'when the store has payment methods' do
before { order.update!(store: store_with_payment_methods) }
it 'returns only the matching payment methods for that store' do
expect(order.available_payment_methods).to match_array(
[payment_method_with_store]
)
end
context 'and the store has an extra payment method unavailable to users' do
let!(:admin_only_payment_method) do
create(:payment_method,
available_to_users: false,
available_to_admin: true)
end
before do
store_with_payment_methods.payment_methods << admin_only_payment_method
end
it 'returns only the payment methods available to users for that store' do
expect(order.available_payment_methods).to match_array(
[payment_method_with_store]
)
end
end
end
context 'when the store does not have payment methods' do
before { order.update!(store: store_without_payment_methods) }
it 'returns all matching payment methods regardless of store' do
expect(order.available_payment_methods).to match_array(
[payment_method_with_store, payment_method_without_store]
)
end
end
end
end
# #apply_shipping_promotions should delegate to the shipping promotion
# handler and then trigger an order update.
context "#apply_shipping_promotions" do
it "calls out to the Shipping promotion handler" do
expect_any_instance_of(Spree::PromotionHandler::Shipping).to(
receive(:activate)
).and_call_original
expect(order.updater).to receive(:update).and_call_original
order.apply_shipping_promotions
end
end
# Variant lookup helpers (#contains?, #quantity_of,
# #find_line_item_by_variant) exercised against mocked line items, plus the
# line-item comparison hook mechanism used to match options.
context "#products" do
before :each do
@variant1 = mock_model(Spree::Variant, product: "product1")
@variant2 = mock_model(Spree::Variant, product: "product2")
@line_items = [mock_model(Spree::LineItem, product: "product1", variant: @variant1, variant_id: @variant1.id, quantity: 1),
mock_model(Spree::LineItem, product: "product2", variant: @variant2, variant_id: @variant2.id, quantity: 2)]
allow(order).to receive_messages(line_items: @line_items)
end
it "contains?" do
expect(order.contains?(@variant1)).to be true
end
it "gets the quantity of a given variant" do
expect(order.quantity_of(@variant1)).to eq(1)
# A variant not on the order yields quantity 0, not nil.
@variant3 = mock_model(Spree::Variant, product: "product3")
expect(order.quantity_of(@variant3)).to eq(0)
end
it "can find a line item matching a given variant" do
expect(order.find_line_item_by_variant(@variant1)).not_to be_nil
expect(order.find_line_item_by_variant(mock_model(Spree::Variant))).to be_nil
end
context "match line item with options", partial_double_verification: false do
before do
Spree::Order.register_line_item_comparison_hook(:foos_match)
end
after do
# reset to avoid test pollution
Spree::Order.line_item_comparison_hooks = Set.new
end
it "matches line item when options match" do
allow(order).to receive(:foos_match).and_return(true)
expect(order.line_item_options_match(@line_items.first, { foos: { bar: :zoo } })).to be true
end
it "does not match line item without options" do
allow(order).to receive(:foos_match).and_return(false)
expect(order.line_item_options_match(@line_items.first, {})).to be false
end
end
end
# #generate_order_number should delegate to the configured number generator,
# never overwrite an existing number, and deprecate passing options.
describe "#generate_order_number" do
let(:order) { build(:order) }
context "with default app configuration" do
it 'calls the default order number generator' do
expect_any_instance_of(Spree::Order::NumberGenerator).to receive(:generate)
order.generate_order_number
end
end
context "with order number generator configured" do
# Minimal stand-in generator returning a fixed number.
class TruthNumberGenerator
def initialize(options = {}); end
def generate
'42'
end
end
before do
expect(Spree::Config).to receive(:order_number_generator) do
TruthNumberGenerator.new
end
end
it 'calls the configured order number generator' do
order.generate_order_number
expect(order.number).to eq '42'
end
end
context "with number already present" do
before do
order.number = '123'
end
it 'does not generate new number' do
order.generate_order_number
expect(order.number).to eq '123'
end
end
context "passing options" do
it 'is deprecated' do
expect(Spree::Deprecation).to receive(:warn)
order.generate_order_number(length: 2)
end
end
end
# #associate_user! should copy user/email/created_by onto the order
# (persisting when the order is persisted), without clobbering an existing
# created_by and without persisting an invalid ship address as a side effect.
context "#associate_user!" do
let!(:user) { FactoryBot.create(:user) }
it "should associate a user with a persisted order" do
order = FactoryBot.create(:order_with_line_items, created_by: nil)
order.user = nil
order.email = nil
order.associate_user!(user)
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(user)
# verify that the changes we made were persisted
order.reload
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(user)
end
it "should not overwrite the created_by if it already is set" do
creator = create(:user)
order = FactoryBot.create(:order_with_line_items, created_by: creator)
order.user = nil
order.email = nil
order.associate_user!(user)
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(creator)
# verify that the changes we made were persisted
order.reload
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(creator)
end
it "should associate a user with a non-persisted order" do
order = Spree::Order.new
expect do
order.associate_user!(user)
end.to change { [order.user, order.email] }.from([nil, nil]).to([user, user.email])
end
it "should not persist an invalid address" do
address = Spree::Address.new
order.user = nil
order.email = nil
order.ship_address = address
expect do
order.associate_user!(user)
end.not_to change { address.persisted? }.from(false)
end
end
# #assign_default_user_addresses should copy the user's default bill/ship
# addresses onto the order, but only when a user is present, the address is
# valid, and (for ship addresses) the checkout flow includes delivery.
context "#assign_default_user_addresses" do
let(:order) { Spree::Order.new }
subject { order.assign_default_user_addresses }
context "when no user is associated to the order" do
it "does not associate any bill address" do
expect { subject }.not_to change { order.bill_address }.from(nil)
end
it "does not associate any ship address" do
expect { subject }.not_to change { order.ship_address }.from(nil)
end
end
context "when user is associated to the order" do
let(:user) { build_stubbed(:user) }
let(:bill_address) { nil }
let(:ship_address) { nil }
before do
order.associate_user!(user)
user.bill_address = bill_address
user.ship_address = ship_address
end
context "but has no bill address associated" do
it "does not associate any bill address" do
expect { subject }.not_to change { order.bill_address }.from(nil)
end
end
context "and has an invalid bill address associated " do
let(:bill_address) { build(:address, city: nil) } # invalid address
it "does not associate any bill address" do
expect { subject }.not_to change { order.bill_address }.from(nil)
end
end
context "and has a valid address associated " do
let(:bill_address) { build(:address) }
it "does associate user bill address" do
expect { subject }.to change { order.bill_address }.from(nil).to(bill_address)
end
end
context "but has no ship address associated" do
it "does not associate any ship address" do
expect { subject }.not_to change { order.ship_address }.from(nil)
end
end
context "and has an invalid ship address associated " do
let(:ship_address) { build(:address, city: nil) } # invalid address
it "does not associate any ship address" do
expect { subject }.not_to change { order.ship_address }.from(nil)
end
end
context "and has a valid ship address associated" do
let(:ship_address) { build(:address) }
it "does associate user ship address" do
expect { subject }.to change { order.ship_address }.from(nil).to(ship_address)
end
context 'when checkout step does not include delivery' do
before do
expect(order).to receive(:checkout_steps) { %w[some step] }
end
it "does not associate any ship address" do
expect { subject }.not_to change { order.ship_address }.from(nil)
end
end
end
end
end
# #can_ship? should be true in the complete/resumed/awaiting_return/returned
# states and false otherwise.
context "#can_ship?" do
let(:order) { Spree::Order.create }
it "should be true for order in the 'complete' state" do
allow(order).to receive_messages(complete?: true)
expect(order.can_ship?).to be true
end
it "should be true for order in the 'resumed' state" do
allow(order).to receive_messages(resumed?: true)
expect(order.can_ship?).to be true
end
it "should be true for an order in the 'awaiting return' state" do
allow(order).to receive_messages(awaiting_return?: true)
expect(order.can_ship?).to be true
end
it "should be true for an order in the 'returned' state" do
allow(order).to receive_messages(returned?: true)
expect(order.can_ship?).to be true
end
it "should be false if the order is neither in the 'complete' nor 'resumed' state" do
allow(order).to receive_messages(resumed?: false, complete?: false)
expect(order.can_ship?).to be false
end
end
# #completed? is driven purely by the presence of completed_at;
# #checkout_allowed? requires at least one line item.
context "#completed?" do
it "should indicate if order is completed" do
order.completed_at = nil
expect(order.completed?).to be false
order.completed_at = Time.current
expect(order.completed?).to be true
end
end
context "#allow_checkout?" do
it "should be true if there are line_items in the order" do
allow(order).to receive_message_chain(:line_items, count: 1)
expect(order.checkout_allowed?).to be true
end
it "should be false if there are no line_items in the order" do
allow(order).to receive_message_chain(:line_items, count: 0)
expect(order.checkout_allowed?).to be false
end
end
# #amount should be the sum of line item price * quantity:
# (1.0 * 2) + (1.0 * 1) = 3.0.
context "#amount" do
before do
@order = create(:order, user: user)
@order.line_items = [create(:line_item, price: 1.0, quantity: 2),
create(:line_item, price: 1.0, quantity: 1)]
end
it "should return the correct sum of items" do
expect(@order.amount).to eq(3.0)
end
end
# #backordered? is true when any shipment is backordered; #can_cancel?
# depends on order state (not cancelable once already canceled, cancelable
# when complete with no shipment state); #tax_total is additional + included.
context "#backordered?" do
it 'is backordered if one of the shipments is backordered' do
allow(order).to receive_messages(shipments: [mock_model(Spree::Shipment, backordered?: false),
mock_model(Spree::Shipment, backordered?: true)])
expect(order).to be_backordered
end
end
context "#can_cancel?" do
it "should be false for completed order in the canceled state" do
order.state = 'canceled'
order.shipment_state = 'ready'
order.completed_at = Time.current
expect(order.can_cancel?).to be false
end
it "should be true for completed order with no shipment" do
order.state = 'complete'
order.shipment_state = nil
order.completed_at = Time.current
expect(order.can_cancel?).to be true
end
end
context "#tax_total" do
it "adds included tax and additional tax" do
allow(order).to receive_messages(additional_tax_total: 10, included_tax_total: 20)
expect(order.tax_total).to eq 30
end
end
# Regression test for https://github.com/spree/spree/issues/4923
context "locking" do
let(:order) { Spree::Order.create } # need a persisted in order to test locking
it 'can lock' do
order.with_lock {}
end
end
# #item_total_excluding_vat subtracts each line item's included tax from its
# (price * quantity) amount.
describe "#item_total_excluding_vat" do
it "sums all of the line items' pre tax amounts" do
subject.line_items = [
Spree::LineItem.new(price: 10, quantity: 2, included_tax_total: 15.0),
Spree::LineItem.new(price: 30, quantity: 1, included_tax_total: 16.0)
]
# (2*10)-15 + 30-16 = 5 + 14 = 19
expect(subject.item_total_excluding_vat).to eq 19.0
end
end
# #refund_total should sum the refund amounts across all payments
# (3 + 2.5 = 5.5).
context "#refund_total" do
let(:order) { create(:order_with_line_items) }
let!(:payment) { create(:payment_with_refund, order: order, amount: 5, refund_amount: 3) }
let!(:payment2) { create(:payment_with_refund, order: order, amount: 5, refund_amount: 2.5) }
it "sums the reimbursement refunds on the order" do
expect(order.refund_total).to eq(5.5)
end
end
# #quantity should total the quantities across line items.
describe '#quantity' do
# Uses a persisted record, as the quantity is retrieved via a DB count
let(:order) { create :order_with_line_items, line_items_count: 3 }
it 'sums the quantity of all line items' do
expect(order.quantity).to eq 3
end
end
# #has_non_reimbursement_related_refunds? should detect refunds without a
# reimbursement (including legacy negative-amount "refund payments") while
# ignoring reimbursement-backed refunds.
describe '#has_non_reimbursement_related_refunds?' do
subject do
order.has_non_reimbursement_related_refunds?
end
context 'no refunds exist' do
it { is_expected.to eq false }
end
context 'a non-reimbursement related refund exists' do
let(:order) { refund.payment.order }
let(:refund) { create(:refund, reimbursement_id: nil, amount: 5) }
it { is_expected.to eq true }
end
context 'an old-style refund exists' do
let(:order) { create(:order_ready_to_ship) }
let(:payment) { order.payments.first.tap { |p| allow(p).to receive_messages(profiles_supported?: false) } }
let!(:refund_payment) {
# Legacy refunds were modeled as a negative completed payment sourced
# from the original payment.
build(:payment, amount: -1, order: order, state: 'completed', source: payment).tap do |p|
allow(p).to receive_messages(profiles_supported?: false)
p.save!
end
}
it { is_expected.to eq true }
end
context 'a reimbursement related refund exists' do
let(:order) { refund.payment.order }
let(:refund) { create(:refund, reimbursement_id: 123, amount: 5, payment_amount: 14) }
it { is_expected.to eq false }
end
end
# #create_proposed_shipments should adopt the stock coordinator's shipments,
# but refuse to rebuild (raising CannotRebuildShipments, leaving existing
# shipments intact) when any shipment is already ready or shipped.
describe "#create_proposed_shipments" do
subject(:order) { create(:order) }
it "assigns the coordinator returned shipments to its shipments" do
shipment = build(:shipment)
allow_any_instance_of(Spree::Stock::SimpleCoordinator).to receive(:shipments).and_return([shipment])
subject.create_proposed_shipments
expect(subject.shipments).to eq [shipment]
end
it "raises an error if any shipments are ready" do
shipment = create(:shipment, order: subject, state: "ready")
expect {
expect {
subject.create_proposed_shipments
}.to raise_error(Spree::Order::CannotRebuildShipments)
}.not_to change { subject.reload.shipments.pluck(:id) }
expect { shipment.reload }.not_to raise_error
end
it "raises an error if any shipments are shipped" do
shipment = create(:shipment, order: subject, state: "shipped")
expect {
expect {
subject.create_proposed_shipments
}.to raise_error(Spree::Order::CannotRebuildShipments)
}.not_to change { subject.reload.shipments.pluck(:id) }
expect { shipment.reload }.not_to raise_error
end
end
# #all_inventory_units_returned? should be true only when every inventory
# unit is in the 'returned' state — including when the state was changed
# directly in the database (i.e. not via the in-memory association).
describe "#all_inventory_units_returned?" do
let(:order) { create(:order_with_line_items, line_items_count: 3) }
subject { order.all_inventory_units_returned? }
context "all inventory units are returned" do
before { order.inventory_units.update_all(state: 'returned') }
it "is true" do
expect(subject).to eq true
end
end
context "some inventory units are returned" do
before do
order.inventory_units.first.update_attribute(:state, 'returned')
end
it "is false" do
expect(subject).to eq false
end
end
context "no inventory units are returned" do
it "is false" do
expect(subject).to eq false
end
end
context "all inventory units are returned on the database (e.g. through another association)" do
it "is true" do
expect {
Spree::InventoryUnit
.where(id: order.inventory_unit_ids)
.update_all(state: 'returned')
}.to change {
order.all_inventory_units_returned?
}.from(false).to(true)
end
end
end
context "store credit" do
# Shared examples: #total_applicable_store_credit should equal the sum of
# store-credit payment amounts, or 0 when no such payments exist. Included by
# the #total_applicable_store_credit examples for confirm/complete states.
shared_examples "check total store credit from payments" do
context "with valid payments" do
let(:order) { payment.order }
let!(:payment) { create(:store_credit_payment) }
let!(:second_payment) { create(:store_credit_payment, order: order) }
subject { order }
it "returns the sum of the payment amounts" do
expect(subject.total_applicable_store_credit).to eq(payment.amount + second_payment.amount)
end
end
context "without valid payments" do
let(:order) { create(:order) }
subject { order }
it "returns 0" do
expect(subject.total_applicable_store_credit).to be_zero
end
end
end
# #add_store_credit_payments should apply the user's store credit to the
# order: matching currency only, primary credit type first, covering as much
# of the total as possible, invalidating or supplementing credit card
# payments as appropriate, and erroring when funds are insufficient with no
# other payment available.
describe "#add_store_credit_payments" do
let(:order_total) { 500.00 }
before { create(:store_credit_payment_method) }
subject { order.add_store_credit_payments }
context "there is no store credit" do
let(:order) { create(:order, total: order_total) }
context "there is a credit card payment" do
let!(:cc_payment) { create(:payment, order: order, amount: order_total) }
before do
# callbacks recalculate total based on line items
# this ensures the total is what we expect
order.update_column(:total, order_total)
subject
order.reload
end
it "charges the outstanding balance to the credit card" do
expect(order.errors.messages).to be_empty
expect(order.payments.count).to eq 1
expect(order.payments.first.source).to be_a(Spree::CreditCard)
expect(order.payments.first.amount).to eq order_total
end
end
end
context 'there is store credit in another currency' do
let(:order) { create(:order_with_totals, user: user, line_items_price: order_total).tap(&:recalculate) }
let!(:store_credit_usd) { create(:store_credit, user: user, amount: 1, currency: 'USD') }
let!(:store_credit_gbp) { create(:store_credit, user: user, amount: 1, currency: 'GBP') }
let(:user) { create(:user) }
it 'only adds the credit in the matching currency' do
expect {
order.add_store_credit_payments
}.to change {
order.payments.count
}.by(1)
applied_store_credits = order.payments.store_credits.map(&:source)
expect(applied_store_credits).to match_array([store_credit_usd])
end
end
context "there is enough store credit to pay for the entire order" do
let(:store_credit) { create(:store_credit, amount: order_total) }
let(:order) { create(:order_with_totals, user: store_credit.user, line_items_price: order_total).tap(&:recalculate) }
context "there are no other payments" do
before do
subject
order.reload
end
it "creates a store credit payment for the full amount" do
expect(order.errors.messages).to be_empty
expect(order.payments.count).to eq 1
expect(order.payments.first).to be_store_credit
expect(order.payments.first.amount).to eq order_total
end
end
context "there is a credit card payment" do
it "invalidates the credit card payment" do
cc_payment = create(:payment, order: order)
expect { subject }.to change { cc_payment.reload.state }.to 'invalid'
end
end
end
context "the available store credit is not enough to pay for the entire order" do
let(:order_total) { 500 }
let(:store_credit_total) { order_total - 100 }
let(:store_credit) { create(:store_credit, amount: store_credit_total) }
let(:order) { create(:order_with_totals, user: store_credit.user, line_items_price: order_total).tap(&:recalculate) }
context "there are no other payments" do
it "adds an error to the model" do
expect(subject).to be false
expect(order.errors.full_messages).to include(I18n.t('spree.store_credit.errors.unable_to_fund'))
end
end
context "there is a completed credit card payment" do
let!(:cc_payment) { create(:payment, order: order, state: "completed", amount: 100) }
it "successfully creates the store credit payments" do
expect { subject }.to change { order.payments.count }.from(1).to(2)
expect(order.errors).to be_empty
end
end
context "there is a credit card payment" do
let!(:cc_payment) { create(:payment, order: order, state: "checkout") }
before do
subject
end
it "charges the outstanding balance to the credit card" do
expect(order.errors.messages).to be_empty
expect(order.payments.count).to eq 2
expect(order.payments.first.source).to be_a(Spree::CreditCard)
expect(order.payments.first.amount).to eq 100
end
# see associated comment in order_decorator#add_store_credit_payments
context "the store credit is already in the pending state" do
before do
order.payments.store_credits.last.authorize!
order.add_store_credit_payments
end
it "charges the outstanding balance to the credit card" do
expect(order.errors.messages).to be_empty
expect(order.payments.count).to eq 2
expect(order.payments.first.source).to be_a(Spree::CreditCard)
expect(order.payments.first.amount).to eq 100
end
end
end
end
context "there are multiple store credits" do
context "they have different credit type priorities" do
let(:amount_difference) { 100 }
let!(:primary_store_credit) { create(:store_credit, amount: (order_total - amount_difference)) }
let!(:secondary_store_credit) { create(:store_credit, amount: order_total, user: primary_store_credit.user, credit_type: create(:secondary_credit_type)) }
let(:order) { create(:order_with_totals, user: primary_store_credit.user, line_items_price: order_total).tap(&:recalculate) }
before do
subject
order.reload
end
it "uses the primary store credit type over the secondary" do
# The primary credit should be exhausted first; the secondary covers
# only the remainder.
primary_payment = order.payments.detect{ |x| x.source == primary_store_credit }
secondary_payment = order.payments.detect{ |x| x.source == secondary_store_credit }
expect(order.payments.size).to eq 2
expect(primary_payment.source).to eq primary_store_credit
expect(secondary_payment.source).to eq secondary_store_credit
expect(primary_payment.amount).to eq(order_total - amount_difference)
expect(secondary_payment.amount).to eq(amount_difference)
end
end
end
end
# #covered_by_store_credit should be true only when the order has a user
# whose store credit covers the full order total.
describe "#covered_by_store_credit" do
subject do
order.covered_by_store_credit
end
let(:order) { create(:order_with_line_items, user: user, store: store) }
context "order doesn't have an associated user" do
let(:user) { nil }
it { is_expected.to eq(false) }
end
context "order has an associated user" do
context "user has enough store credit to pay for the order" do
let!(:credit) { create(:store_credit, user: user, amount: 1000) }
it { is_expected.to eq(true) }
end
context "user does not have enough store credit to pay for the order" do
let!(:credit) { create(:store_credit, user: user, amount: 1) }
it { is_expected.to eq(false) }
end
end
end
# #total_available_store_credit reflects the user's credit balance (0 without
# a user); #order_total_after_store_credit deducts the applicable credit from
# the order total.
describe "#total_available_store_credit" do
subject do
order.total_available_store_credit
end
context "order does not have an associated user" do
let(:user) { nil }
it { is_expected.to eq(0) }
end
context "order has an associated user" do
let!(:credit) { create(:store_credit, user: user, amount: 25) }
it { is_expected.to eq(25) }
end
end
describe "#order_total_after_store_credit" do
let(:order_total) { 100.0 }
subject { create(:order, total: order_total) }
before do
allow(subject).to receive_messages(total_applicable_store_credit: applicable_store_credit)
end
context "order's user has store credits" do
let(:applicable_store_credit) { 10.0 }
it "deducts the applicable store credit" do
expect(subject.order_total_after_store_credit).to eq(order_total - applicable_store_credit)
end
end
context "order's user does not have any store credits" do
let(:applicable_store_credit) { 0.0 }
it "returns the order total" do
expect(subject.order_total_after_store_credit).to eq order_total
end
end
end
# In confirm/complete states, #total_applicable_store_credit is derived from
# actual store-credit payments (shared examples); in earlier states it is
# min(order total, user's store credit), and 0 without a user or credit.
describe "#total_applicable_store_credit" do
context "order is in the confirm state" do
before { order.update(state: 'confirm') }
include_examples "check total store credit from payments"
end
context "order is completed" do
before { order.update(state: 'complete') }
include_examples "check total store credit from payments"
end
context "order is in any state other than confirm or complete" do
context "the associated user has store credits" do
let(:store_credit) { create(:store_credit) }
let(:order) { create(:order, user: store_credit.user) }
subject { order }
context "the store credit is more than the order total" do
let(:order_total) { store_credit.amount - 1 }
before { order.update(total: order_total) }
it "returns the order total" do
expect(subject.total_applicable_store_credit).to eq order_total
end
end
context "the store credit is less than the order total" do
let(:order_total) { store_credit.amount * 10 }
before { order.update(total: order_total) }
it "returns the store credit amount" do
expect(subject.total_applicable_store_credit).to eq store_credit.amount
end
end
end
context "the associated user does not have store credits" do
let(:order) { create(:order) }
subject { order }
it "returns 0" do
expect(subject.total_applicable_store_credit).to be_zero
end
end
context "the order does not have an associated user" do
subject { create(:order, user: nil) }
it "returns 0" do
expect(subject.total_applicable_store_credit).to be_zero
end
end
end
end
# #display_total_applicable_store_credit wraps the applicable credit in a
# Spree::Money, negated (it is shown as a deduction).
describe "#display_total_applicable_store_credit" do
let(:total_applicable_store_credit) { 10.00 }
subject { create(:order) }
before { allow(subject).to receive_messages(total_applicable_store_credit: total_applicable_store_credit) }
it "returns a money instance" do
expect(subject.display_total_applicable_store_credit).to be_a(Spree::Money)
end
it "returns a negative amount" do
expect(subject.display_total_applicable_store_credit.money.cents).to eq(total_applicable_store_credit * -100.0)
end
end
# #record_ip_address should set last_ip_address even when the order is
# invalid or not yet persisted (tracking must not be blocked by validations).
describe "#record_ip_address" do
let(:ip_address) { "127.0.0.1" }
subject { -> { order.record_ip_address(ip_address) } }
it "updates the last used IP address" do
expect(subject).to change(order, :last_ip_address).to(ip_address)
end
# IP address tracking should not raise validation exceptions
context "with an invalid order" do
before { allow(order).to receive(:valid?).and_return(false) }
it "updates the IP address" do
expect(subject).to change(order, :last_ip_address).to(ip_address)
end
end
context "with a new order" do
let(:order) { build(:order) }
it "updates the IP address" do
expect(subject).to change(order, :last_ip_address).to(ip_address)
end
end
end
describe "#display_order_total_after_store_credit" do
let(:order_total_after_store_credit) { 10.00 }
subject { create(:order) }
before { allow(subject).to receive_messages(order_total_after_store_credit: order_total_after_store_credit) }
it "returns a money instance" do
expect(subject.display_order_total_after_store_credit).to be_a(Spree::Money)
end
it "returns the order_total_after_store_credit amount" do
expect(subject.display_order_total_after_store_credit.money.cents).to eq(order_total_after_store_credit * 100.0)
end
end
describe "#display_total_available_store_credit" do
let(:total_available_store_credit) { 10.00 }
subject { create(:order) }
before { allow(subject).to receive_messages(total_available_store_credit: total_available_store_credit) }
it "returns a money instance" do
expect(subject.display_total_available_store_credit).to be_a(Spree::Money)
end
it "returns the total_available_store_credit amount" do
expect(subject.display_total_available_store_credit.money.cents).to eq(total_available_store_credit * 100.0)
end
end
describe "#display_store_credit_remaining_after_capture" do
let(:total_available_store_credit) { 10.00 }
let(:total_applicable_store_credit) { 5.00 }
subject { create(:order) }
before do
allow(subject).to receive_messages(total_available_store_credit: total_available_store_credit,
total_applicable_store_credit: total_applicable_store_credit)
end
it "returns a money instance" do
expect(subject.display_store_credit_remaining_after_capture).to be_a(Spree::Money)
end
it "returns all of the user's available store credit minus what's applied to the order amount" do
amount_remaining = total_available_store_credit - total_applicable_store_credit
expect(subject.display_store_credit_remaining_after_capture.money.cents).to eq(amount_remaining * 100.0)
end
end
# With auto_capture disabled, completing an order only *authorizes* the
# store credit; cancelling the order must release that authorization so
# the full balance becomes available again.
context 'when not capturing at order completion' do
  let!(:store_credit_payment_method) do
    create(
      :store_credit_payment_method,
      auto_capture: false, # not capturing at completion time
    )
  end

  describe '#after_cancel' do
    let(:user) { create(:user) }
    let!(:store_credit) do
      create(:store_credit, amount: 100, user: user)
    end
    let(:order) do
      create(
        :order_with_line_items,
        user: user,
        line_items_count: 1,
        # order will be $20 total:
        line_items_price: 10,
        shipment_cost: 10
      )
    end

    before do
      order.contents.advance
      order.complete!
    end

    it 'releases the pending store credit authorization' do
      expect {
        order.cancel!
      }.to change {
        store_credit.reload.amount_authorized
      }.from(20).to(0)

      expect(store_credit.amount_remaining).to eq 100
    end
  end
end
end
# Deprecated private API: calling it should warn rather than raise.
context 'update_params_payment_source' do
  subject { described_class.new }

  it 'is deprecated' do
    subject.instance_variable_set('@updating_params', {})
    expect(Spree::Deprecation).to receive(:warn)
    subject.send(:update_params_payment_source)
  end
end

# User-supplied payment_method_ids must reference an active, user-available,
# non-deleted payment method; anything else is treated as not found.
describe "#validate_payments_attributes" do
  let(:attributes) { [ActionController::Parameters.new(payment_method_id: payment_method.id)] }

  subject do
    order.validate_payments_attributes(attributes)
  end

  context "with empty array" do
    let(:attributes) { [] }

    it "doesn't error" do
      subject
    end
  end

  context "with no payment method specified" do
    let(:attributes) { [ActionController::Parameters.new({})] }

    it "doesn't error" do
      subject
    end
  end

  context "with valid payment method" do
    let(:payment_method) { create(:check_payment_method) }

    it "doesn't error" do
      subject
    end
  end

  context "with inactive payment method" do
    let(:payment_method) { create(:check_payment_method, active: false) }

    it "raises RecordNotFound" do
      expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
    end
  end

  context "with unavailable payment method" do
    let(:payment_method) { create(:check_payment_method, available_to_users: false) }

    it "raises RecordNotFound" do
      expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
    end
  end

  context "with soft-deleted payment method" do
    let(:payment_method) { create(:check_payment_method, deleted_at: Time.current) }

    it "raises RecordNotFound" do
      expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
    end
  end
end

describe '#create_shipments_for_line_item' do
  subject { create :order_with_line_items }

  let(:line_item) { build(:line_item) }

  it 'creates at least one new shipment for the order' do
    expect do
      subject.create_shipments_for_line_item(line_item)
    end.to change { subject.shipments.count }.by 1
  end
end
end
| 32.788283 | 165 | 0.626745 |
21818bb81db0356405d9a725ced4ed4de9709e2a
| 75 |
# frozen_string_literal: true
# Top-level namespace for the es_experiment gem.
module EsExperiment
  # Current gem release version.
  VERSION = "0.1.0"
end
| 12.5 | 29 | 0.746667 |
ac6b45f05a51b54bee771affb02ecf2602475bce
| 376 |
# Creates the animal_states lookup table: a named state per animal
# category and gender, bounded by a weight range.
class CreateAnimalStates < ActiveRecord::Migration[5.1]
  def change
    create_table :animal_states do |t|
      t.references :animal_category, foreign_key: true
      t.references :gender, foreign_key: true
      t.string :name
      # NOTE(review): "weigh_min"/"weigh_max" look like typos for
      # "weight_min"/"weight_max" — confirm before renaming; the column
      # names are part of the schema and any model code reading them.
      t.decimal :weigh_min
      t.decimal :weigh_max
      t.string :description
      # Soft "retired" flag; no default is set, so existing rows get NULL.
      t.boolean :obsolete

      t.timestamps
    end
  end
end
| 23.5 | 55 | 0.678191 |
015f647b5dbae315ca18ad83ee55efe7051c87d2
| 332 |
# Homebrew Cask (legacy class-based DSL) for 4K Video Downloader.
class FourkVideoDownloader < Cask
  version '3.1'
  sha256 '5f2243e9d2352b411414e3819d2dbb36cbd2c4f4d9a72a9579e4fbc1f820a324'

  # The version stanza above is interpolated into the download URL.
  url "http://downloads.4kdownload.com/app/4kvideodownloader_#{version}.dmg"
  homepage 'http://www.4kdownload.com/products/product-videodownloader'
  # Upstream license not yet identified.
  license :unknown

  app '4K Video Downloader.app'
end
| 30.181818 | 76 | 0.801205 |
5d7b87be2e2f91cef5a70be0fc5e46340acbe02f
| 1,039 |
# Homebrew formula for Bisqwit's regex-opt, a PCRE pattern optimizer.
class RegexOpt < Formula
  desc "Perl-compatible regular expression optimizer"
  homepage "https://bisqwit.iki.fi/source/regexopt.html"
  url "https://bisqwit.iki.fi/src/arch/regex-opt-1.2.4.tar.gz"
  sha256 "128c8ba9570b1fd8a6a660233de2f5a4022740bc5ee300300709c3894413883f"
  license "GPL-2.0"

  bottle do
    cellar :any_skip_relocation
    rebuild 1
    sha256 "8a561d7a4dfadf25fd39bd5b19d6a8161a2f0d1be2c459cbe691be17aef85bc0" => :catalina
    sha256 "76b26dc9e766e7a8b0806660e966e3a49c593591b94d90439f89b7cbc797d019" => :mojave
    sha256 "0e46dec5d46b145e32ca597c00c75fea2e7097e57c5d3131be141e5bea2b96db" => :high_sierra
    sha256 "68b5f75c9fdb645334ae8a48a5b7e01620e19d5f103811579cb8bf96101c6ac7" => :sierra
    sha256 "fd31e2648a4c0bb509b4f2424700dfba3386d91083bd37796adc009864f040b0" => :x86_64_linux
  end

  # Upstream ships a bare Makefile; pass Homebrew's compiler selection in.
  def install
    system "make", "CC=#{ENV.cc}", "CXX=#{ENV.cxx}"
    bin.install "regex-opt"
  end

  # Smoke test: the tool prints the optimized pattern on stdout.
  test do
    output = shell_output("#{bin}/regex-opt foo...*..*bar")
    assert_equal "foo.{3,}bar", output
  end
end
| 37.107143 | 94 | 0.768046 |
18833f2081d502a4ae0c2be4ccda44f80242766e
| 7,442 |
# -------------------------------------------------------------------------- #
# Copyright 2002-2019, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
module VNMMAD

    ############################################################################
    # Mixin for implementing VLAN drivers based on special link devices
    # through the Linux kernel features and bridges. It provides common
    # functionality to handle bridges and to attach VLAN-tagged devices.
    #
    # Concrete drivers must implement create_vlan_dev and delete_vlan_dev.
    ############################################################################
    class VLANDriver < VNMMAD::VNMDriver

        def initialize(vm_tpl, xpath_filter, deploy_id = nil)
            @locking = true

            super(vm_tpl, xpath_filter, deploy_id)
        end

        # Activate the driver and creates bridges and tags devices as needed.
        # @return [Integer] 0 on success
        def activate
            lock

            @bridges = get_bridges

            process do |nic|
                @nic = nic

                # NICs without a physical device are not handled by this driver
                next if @nic[:phydev].nil?

                # Get the name of the vlan device.
                get_vlan_dev_name

                # Create the bridge.
                create_bridge

                # Check that no other vlans are connected to this bridge
                validate_vlan_id if @nic[:conf][:validate_vlan_id]

                # Return if vlan device is already in the bridge.
                next if @bridges[@nic[:bridge]].include? @nic[:vlan_dev]

                # Create vlan device.
                create_vlan_dev

                # Add vlan device to the bridge.
                OpenNebula.exec_and_log("#{command(:brctl)} addif"\
                    " #{@nic[:bridge]} #{@nic[:vlan_dev]}")

                @bridges[@nic[:bridge]] << @nic[:vlan_dev]
            end

            unlock

            0
        end

        # This function needs to be implemented by any VLAN driver to
        # create the VLAN device. The device MUST be set up by this function
        def create_vlan_dev
            OpenNebula.log_error("create_vlan_dev function not implemented.")

            exit(-1)
        end

        # This function needs to be implemented by any VLAN driver to
        # delete the VLAN device. The device MUST be deleted by this function
        def delete_vlan_dev
            OpenNebula.log_error("delete_vlan_dev function not implemented.")

            exit(-1)
        end

        # Deactivate the driver and delete bridges and tags devices as needed.
        # @return [Integer] 0 on success
        def deactivate
            lock

            @bridges = get_bridges

            attach_nic_id = @vm['TEMPLATE/NIC[ATTACH="YES"]/NIC_ID']

            process do |nic|
                # When detaching a single NIC, only process that NIC
                next if attach_nic_id && attach_nic_id != nic[:nic_id]

                @nic = nic

                next if @nic[:phydev].nil?
                next if @bridges[@nic[:bridge]].nil?

                # Get the name of the vlan device.
                get_vlan_dev_name

                # Skip if the bridge doesn't exist because it was already
                # deleted (handles last vm with multiple nics on the same vlan)
                next unless @bridges.include? @nic[:bridge]

                # Skip if we want to keep the empty bridge
                next if @nic[:conf][:keep_empty_bridge]

                # Skip if the vlan device is not the only device left in the
                # bridge (|| replaces the low-precedence `or`; same behavior)
                next if @bridges[@nic[:bridge]].length > 1 ||
                        !@bridges[@nic[:bridge]].include?(@nic[:vlan_dev])

                # Delete the vlan device.
                delete_vlan_dev

                @bridges[@nic[:bridge]].delete(@nic[:vlan_dev])

                # Delete the bridge.
                OpenNebula.exec_and_log("#{command(:ip)} link delete"\
                    " #{@nic[:bridge]}")

                @bridges.delete(@nic[:bridge])
            end if @bridges

            unlock

            0
        end

        private

        # Generate the name of the vlan device which will be added to the
        # bridge, e.g. "eth0.100".
        def get_vlan_dev_name
            @nic[:vlan_dev] = "#{@nic[:phydev]}.#{@nic[:vlan_id]}"
        end

        # Creates a bridge if it does not exists, and brings it up.
        # This function IS FINAL, exits if action cannot be completed
        def create_bridge
            return if @bridges.keys.include? @nic[:bridge]

            OpenNebula.exec_and_log("#{command(:brctl)} addbr #{@nic[:bridge]}")

            set_bridge_options

            @bridges[@nic[:bridge]] = []

            OpenNebula.exec_and_log("#{command(:ip)} link set #{@nic[:bridge]} up")
        end

        # Calls brctl to set options stored in bridge_conf; booleans are
        # mapped to brctl's "on"/"off" strings.
        def set_bridge_options
            @nic[:bridge_conf].each do |option, value|
                case value
                when true
                    value = "on"
                when false
                    value = "off"
                end

                cmd = "#{command(:brctl)} #{option} " <<
                      "#{@nic[:bridge]} #{value}"

                OpenNebula.exec_and_log(cmd)
            end
        end

        # Get hypervisor bridges by parsing `brctl show` output. The first
        # output line is a header; a bridge row may carry its first attached
        # interface in the 4th column, continuation rows hold one interface.
        # @return [Hash<String, Array<String>>] bridge name => device names
        def get_bridges
            bridges = {}

            brctl_exit = `#{VNMNetwork::COMMANDS[:brctl]} show`

            cur_bridge = ""

            brctl_exit.split("\n")[1..-1].each do |l|
                l = l.split

                if l.length > 1
                    cur_bridge = l[0]

                    bridges[cur_bridge] = []

                    bridges[cur_bridge] << l[3] if l[3]
                else
                    bridges[cur_bridge] << l[0]
                end
            end

            bridges
        end

        # Hook for subclasses that can map an interface name to its VLAN id.
        # @return [nil] by default (no VLAN information available)
        def get_interface_vlan(name)
            nil
        end

        # Exits with an error if any interface already attached to the
        # bridge carries a different vlan_id than this NIC's network.
        def validate_vlan_id
            @bridges[@nic[:bridge]].each do |interface|
                vlan = get_interface_vlan(interface)

                if vlan && vlan.to_s != @nic[:vlan_id]
                    OpenNebula.log_error("The interface #{interface} has "\
                        "vlan_id = #{vlan} but the network is configured "\
                        "with vlan_id = #{@nic[:vlan_id]}")

                    msg = "Interface with an incorrect vlan_id is already in "\
                        "the bridge"
                    OpenNebula.error_message(msg)

                    exit(-1)
                end
            end
        end
    end
end
| 34.775701 | 137 | 0.484413 |
03f841bab742d57bc6e302480f1d970de05abf53
| 237 |
%
def foo.foo(bar)
baz
end
%
def foo.foo bar
baz
end
-
def foo.foo(bar)
baz
end
%
def foo.foo(bar) # comment
end
%
def foo.foo()
end
%
def foo.foo() # comment
end
%
def foo.foo( # comment
)
end
%
def foo::foo
end
-
def foo.foo
end
| 7.40625 | 26 | 0.632911 |
ac61c4cc97cd653793e518536fe17347645d530b
| 282 |
# frozen_string_literal: true

module Jekyll
  module Drops
    # Liquid drop for a document's excerpt. Date and layout are delegated
    # to the parent document; an excerpt never has an excerpt of its own.
    class ExcerptDrop < DocumentDrop
      def date
        @obj.doc.date
      end

      def layout
        parent_data = @obj.doc.data
        parent_data["layout"]
      end

      def excerpt
        nil
      end
    end
  end
end
| 14.1 | 37 | 0.528369 |
ab51547b99da962013fc07d160d77159a4a51daa
| 675 |
require 'msf/core/exploit/sqli/utils/boolean_based_blind'
#
# Boolean-Based Blind SQL injection support for SQLite
#
class Msf::Exploit::SQLi::SQLitei::BooleanBasedBlind < Msf::Exploit::SQLi::SQLitei::Common
  include Msf::Exploit::SQLi::BooleanBasedBlindMixin

  #
  # Confirms boolean-based blind injection by sending a tautology and a
  # contradiction and verifying they yield truthy/falsy results respectively.
  # @return [Boolean] Whether the check confirmed that boolean-based blind SQL injection works
  #
  def test_vulnerable
    tautology_result = blind_request('1=1')
    contradiction_result = blind_request('1=2')
    tautology_result && !contradiction_result
  end
end
| 33.75 | 100 | 0.752593 |
ff296feb7c0ea4df2b265f451c108d37096add09
| 172 |
# Pundit-style policy for activist imports: every user is allowed to
# create an import, and the scope is not filtered.
class ActivistImportPolicy < ApplicationPolicy
  class Scope < Scope
    # No restriction: all records in the base scope are visible.
    def resolve
      scope
    end
  end

  def create?
    true
  end

  # Viewing the form is permitted exactly when creating is.
  def new?
    create?
  end
end
| 10.75 | 46 | 0.639535 |
2626eba6a5c7fa501a041d4da8debdb1b15f2b38
| 1,077 |
# Homebrew Cask for Microsoft's Bot Framework Emulator (Electron app).
cask 'bot-framework-emulator' do
  version '4.2.1'
  sha256 '6f9a525969ccd748d486d1b3a4d4f58dc7efbe0f4cc6802024bc9be542e8f8b3'

  url "https://github.com/Microsoft/BotFramework-Emulator/releases/download/v#{version}/botframework-emulator-#{version}-mac.zip"
  appcast 'https://github.com/Microsoft/BotFramework-Emulator/releases.atom'
  name 'Microsoft Bot Framework Emulator'
  homepage 'https://github.com/Microsoft/BotFramework-Emulator'

  # The app updates itself, so Homebrew should not force reinstalls.
  auto_updates true

  app 'mac/Bot Framework Emulator.app'

  uninstall quit: 'com.electron.botframework-emulator'

  # Leftover support/cache/preference files removed by `brew cask zap`.
  zap trash: [
               '~/Library/Application Support/botframework-emulator',
               '~/Library/Caches/com.electron.botframework-emulator',
               '~/Library/Caches/com.electron.botframework-emulator.ShipIt',
               '~/Library/Preferences/com.electron.botframework-emulator.helper.plist',
               '~/Library/Preferences/com.electron.botframework-emulator.plist',
               '~/Library/Saved Application State/com.electron.botframework-emulator.savedState',
             ]
end
| 43.08 | 129 | 0.721448 |
ff8023acd79f01368be29b58af9c09189f002809
| 390 |
# frozen_string_literal: true
require "metanorma/document/standard_document/sections/annex_section"
module Metanorma; module Document; module IsoDocument
  # Annex appearing in ISO/IEC document.
  class IsoAnnexSection < StandardDocument::AnnexSection
    # Extends the base annex element model with ISO-specific children.
    register_element do
      # Appendixes to annex.
      nodes :appendix, StandardDocument::ClauseSection
    end
  end
end; end; end
| 27.857143 | 69 | 0.776923 |
61984d3dd6a33e20ba815b1abd0f920b0b152151
| 265 |
# View helpers shared across the application's layouts.
module ApplicationHelper
  # CSS classes for a navigation link: highlight classes when the link
  # targets the currently selected item, an empty string otherwise.
  def menu_link_class(this_link, current)
    this_link == current ? "is-active font-bold" : ""
  end

  # Sidebar label for a project: an open-folder icon when the project is
  # the currently selected one, a closed-folder icon otherwise.
  # (Interpolation replaces the original String#+ concatenation; output
  # is byte-identical.)
  def menu_project_name(this_link, current, project_name)
    icon = this_link == current ? "📂 " : "📁 "
    "#{icon}#{project_name}"
  end
end
| 26.5 | 68 | 0.701887 |
1a56d308a3339c5208b7142a099320834b6c5870
| 58 |
# Route table: full RESTful routes (index/show/new/create/edit/update/
# destroy) for projects.
Rails.application.routes.draw do
  resources :projects
end
| 19.333333 | 32 | 0.827586 |
b96dd981e0ff32402ea0f74917e6d95cef79e877
| 7,960 |
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Issues::BuildService do
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
let(:user) { developer }
before_all do
project.add_developer(developer)
project.add_guest(guest)
end
# Builds (without persisting) an issue through the service under test.
def build_issue(issue_params = {})
  described_class.new(project: project, current_user: user, params: issue_params).execute
end

# Resolving one specific discussion: the built issue names the noteable
# and quotes the note being resolved.
context 'for a single discussion' do
  describe '#execute' do
    let(:merge_request) { create(:merge_request, title: "Hello world", source_project: project) }
    let(:discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "Almost done").to_discussion }

    subject { build_issue(merge_request_to_resolve_discussions_of: merge_request.iid, discussion_to_resolve: discussion.id) }

    it 'references the noteable title in the issue title' do
      expect(subject.title).to include('Hello world')
    end

    it 'adds the note content to the description' do
      expect(subject.description).to include('Almost done')
    end
  end
end
context 'for discussions in a merge request' do
let(:merge_request) { create(:merge_request_with_diff_notes, source_project: project) }
describe '#items_for_discussions' do
  # The :merge_request_with_diff_notes factory already carries one
  # resolvable discussion; the extra note below makes two.
  it 'has an item for each discussion' do
    create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.source_project, line_number: 13)
    service = described_class.new(project: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid })
    service.execute

    expect(service.items_for_discussions.size).to eq(2)
  end
end

describe '#item_for_discussion' do
  let(:service) { described_class.new(project: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid }) }

  it 'mentions the author of the note' do
    discussion = create(:diff_note_on_merge_request, author: create(:user, username: 'author')).to_discussion

    expect(service.item_for_discussion(discussion)).to include('@author')
  end

  # Quoting must survive notes that themselves contain blockquote markup:
  # each line is prefixed with "> " and nested quotes gain a deeper level.
  it 'wraps the note in a blockquote' do
    note_text = "This is a string\n"\
      ">>>\n"\
      "with a blockquote\n"\
      "> That has a quote\n"\
      ">>>\n"
    note_result = "    > This is a string\n"\
      "    > \n"\
      "    > > with a blockquote\n"\
      "    > > > That has a quote\n"\
      "    > \n"
    discussion = create(:diff_note_on_merge_request, note: note_text).to_discussion

    expect(service.item_for_discussion(discussion)).to include(note_result)
  end
end

describe '#execute' do
  let(:base_params) { { merge_request_to_resolve_discussions_of: merge_request.iid } }

  context 'without additional params' do
    subject { build_issue(base_params) }

    it 'has the merge request reference in the title' do
      expect(subject.title).to include(merge_request.title)
    end

    it 'has the reference of the merge request in the description' do
      expect(subject.description).to include(merge_request.to_reference)
    end
  end

  # Caller-supplied title/description win over the generated ones.
  it 'uses provided title if title param given' do
    issue = build_issue(base_params.merge(title: 'What an issue'))

    expect(issue.title).to eq('What an issue')
  end

  it 'uses provided description if description param given' do
    issue = build_issue(base_params.merge(description: 'Fix at your earliest convenience'))

    expect(issue.description).to eq('Fix at your earliest convenience')
  end

  describe 'with multiple discussions' do
    let!(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.target_project, line_number: 15) }

    it 'mentions all the authors in the description' do
      authors = merge_request.resolvable_discussions.map(&:author)

      expect(build_issue(base_params).description).to include(*authors.map(&:to_reference))
    end

    it 'has a link for each unresolved discussion in the description' do
      notes = merge_request.resolvable_discussions.map(&:first_note)
      links = notes.map { |note| Gitlab::UrlBuilder.build(note) }

      expect(build_issue(base_params).description).to include(*links)
    end

    # Replies beyond the first note of a discussion show as a count.
    it 'mentions additional notes' do
      create_list(:diff_note_on_merge_request, 2, noteable: merge_request, project: merge_request.target_project, in_reply_to: diff_note)

      expect(build_issue(base_params).description).to include('(+2 comments)')
    end
  end
end
end
# With nothing to quote, the issue still points back at the MR.
context 'For a merge request without discussions' do
  let(:merge_request) { create(:merge_request, source_project: project) }

  describe '#execute' do
    it 'mentions the merge request in the description' do
      issue = build_issue(merge_request_to_resolve_discussions_of: merge_request.iid)

      expect(issue.description).to include("Review the conversation in #{merge_request.to_reference}")
    end
  end
end
describe '#execute' do
# Developers may assign milestones; milestones belonging to another
# project are silently dropped.
context 'as developer' do
  it 'builds a new issues with given params' do
    milestone = create(:milestone, project: project)
    issue = build_issue(milestone_id: milestone.id)

    expect(issue.milestone).to eq(milestone)
    expect(issue.issue_type).to eq('issue')
    expect(issue.work_item_type.base_type).to eq('issue')
  end

  it 'sets milestone to nil if it is not available for the project' do
    milestone = create(:milestone, project: create(:project))
    issue = build_issue(milestone_id: milestone.id)

    expect(issue.milestone).to be_nil
  end

  context 'when issue_type is incident' do
    it 'sets the correct issue type' do
      issue = build_issue(issue_type: 'incident')

      expect(issue.issue_type).to eq('incident')
      expect(issue.work_item_type.base_type).to eq('incident')
    end
  end
end
# Guests cannot assign milestones, and unsupported/invalid issue types
# fall back to the plain 'issue' type with the matching work item type.
context 'as guest' do
  let(:user) { guest }

  it 'cannot set milestone' do
    milestone = create(:milestone, project: project)
    issue = build_issue(milestone_id: milestone.id)

    expect(issue.milestone).to be_nil
  end

  context 'setting issue type' do
    shared_examples 'builds an issue' do
      specify do
        issue = build_issue(issue_type: issue_type)

        expect(issue.issue_type).to eq(resulting_issue_type)
        expect(issue.work_item_type_id).to eq(work_item_type_id)
      end
    end

    it 'cannot set invalid issue type' do
      issue = build_issue(issue_type: 'project')

      expect(issue).to be_issue
    end

    context 'with a corresponding WorkItem::Type' do
      let_it_be(:type_issue_id) { WorkItem::Type.default_issue_type.id }
      let_it_be(:type_incident_id) { WorkItem::Type.default_by_type(:incident).id }

      # Table-driven cases: input issue_type | expected work item type |
      # resulting issue_type.
      where(:issue_type, :work_item_type_id, :resulting_issue_type) do
        nil           | ref(:type_issue_id)    | 'issue'
        'issue'       | ref(:type_issue_id)    | 'issue'
        'incident'    | ref(:type_incident_id) | 'incident'
        'test_case'   | ref(:type_issue_id)    | 'issue' # update once support for test_case is enabled
        'requirement' | ref(:type_issue_id)    | 'issue' # update once support for requirement is enabled
        'invalid'     | ref(:type_issue_id)    | 'issue'
      end

      with_them do
        it_behaves_like 'builds an issue'
      end
    end
  end
end
end
end
| 36.682028 | 153 | 0.653894 |
eda6c7734f16055fc56375dbef2e812bf9f76fae
| 8,809 |
# vFabric Administration Server Ruby API
# Copyright (c) 2012 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Sqlfire

  # Used to enumerate, create, and delete server instances.
  class ServerInstances < Shared::MutableCollection

    private

    # Option keys copied verbatim from the caller's options hash into the
    # creation payload; anything else is ignored. Frozen: never mutated.
    CREATE_PAYLOAD_KEYS = ['bind-address',
                           'client-bind-address',
                           'client-port',
                           'critical-heap-percentage',
                           'initial-heap',
                           'jvm-options',
                           'max-heap',
                           'run-netserver'].freeze

    public

    # @private
    def initialize(location, client)
      super(location, client, "server-group-instances", ServerInstance)
    end

    # Creates a new server instance
    #
    # @param installation [Installation] the installation that the instance will use
    # @param name [String] the name of the instance
    # @param options [Hash] optional configuration for the instance
    #
    # @option options 'bind-address' [String] The property in a node's metadata to use to determine the address that the
    #   server binds to for peer-to-peer communication. If omitted, or if the property does not exist, the server will
    #   use the node's hostname
    # @option options 'client-bind-address' [String] The property in a node's metadata to use to determine the address
    #   that the server binds to for client communication. If omitted, or if the property does not exist, the server
    #   will use the node's hostname. Only takes effect if +run-netserver+ is +true+
    # @option options 'client-port' [Integer] The port that the server listens on for client connections. Only take
    #   effect if +run-netserver+ is +true+
    # @option options 'critical-heap-percentage' [Integer] Critical heap threshold as a percentage of the old generation
    #   heap
    # @option options 'initial-heap' [String] The intial heap size to be used by the server's JVM. If not specified,
    #   the JVM's default is used
    # @option options 'jvm-options' [String[]] The JVM options that are passed to the server's JVM when it is started
    # @option options 'max-heap' [String] The maximum heap size to be used by the server's JVM. If not specified, the
    #   JVM's default is used
    # @option options 'run-netserver' [Boolean] Whether the locator should run a netserver that can service thin
    #   clients. Default is +true+.
    #
    # @return [ServerInstance] the new server instance
    def create(installation, name, options = {})
      payload = { :installation => installation.location, :name => name }

      # Only whitelisted keys are forwarded to the server.
      options.each do |key, value|
        payload[key] = value if CREATE_PAYLOAD_KEYS.include?(key)
      end

      super(payload, 'server-group-instance')
    end

  end

  # A server instance
  class ServerInstance < Shared::Instance

    private

    # Option keys copied verbatim into an update payload. Frozen: never mutated.
    UPDATE_PAYLOAD_KEYS = ['bind-address',
                           'client-bind-address',
                           'client-port',
                           'critical-heap-percentage',
                           'initial-heap',
                           'jvm-options',
                           'max-heap',
                           'run-netserver'].freeze

    public

    # @return [String] the property in a node's metadata used to determine the address that the server binds to for
    #   peer-to-peer communication. If +nil+, the server uses the node's hostname
    attr_reader :bind_address

    # @return [String] the property in a node's metadata used to determine the address that the server binds to for
    #   client communication. If +nil+, the server uses localhost. Only takes effect if +run_netserver+ is +true+
    attr_reader :client_bind_address

    # @return [Integer] the port that the server listens on for client connections. Only takes effect if +run_netserver+
    #   is +true+
    attr_reader :client_port

    # @return [Integer] critical heap percentage as a percentage of the old generation heap. +nil+ if the server uses
    #   the default
    attr_reader :critical_heap_percentage

    # @return [String] The initial heap size of the server's JVM. +nil+ if the default is used
    attr_reader :initial_heap

    # @return [String[]] The JVM options that are passed to the server's JVM when it is started
    attr_reader :jvm_options

    # @return [String] The max heap size of the server's JVM. +nil+ if the default is used
    attr_reader :max_heap

    # @return [Boolean] +true+ if the server runs a netserver that can service thin clients, otherwise +false+
    attr_reader :run_netserver

    # @private
    def initialize(location, client)
      super(location, client, Group, Installation, ServerLiveConfigurations, ServerPendingConfigurations, ServerNodeInstance, 'server-node-instance')
    end

    # Updates the instance using the supplied +options+. Only whitelisted
    # keys (plus +:installation+) are sent; the instance is reloaded after
    # the server accepts the update.
    #
    # @param options [Hash] optional configuration for the instance; see the
    #   class documentation for the supported keys (+'bind-address'+,
    #   +'client-bind-address'+, +'client-port'+, +'critical-heap-percentage'+,
    #   +'initial-heap'+, +:installation+, +'jvm-options'+, +'max-heap'+,
    #   +'run-netserver'+). Omitted keys leave the corresponding
    #   configuration unchanged.
    #
    # @return [void]
    def update(options = {})
      payload = {}

      options.each do |key, value|
        payload[key] = value if UPDATE_PAYLOAD_KEYS.include?(key)
      end

      # :installation carries an object; send only its location URI.
      payload[:installation] = options[:installation].location if options.key?(:installation)

      client.post(location, payload)
      reload
    end

    # Reloads the instance's details from the server
    # @return [void]
    def reload
      super

      @bind_address = details['bind-address']
      @client_bind_address = details['client-bind-address']
      @client_port = details['client-port']
      @critical_heap_percentage = details['critical-heap-percentage']
      @initial_heap = details['initial-heap']
      @jvm_options = details['jvm-options']
      @max_heap = details['max-heap']
      @run_netserver = details['run-netserver']
    end

    # @return [String] a string representation of the instance
    def to_s
      "#<#{self.class} name='#{name}' bind_address='#@bind_address' client_bind_address='#@client_bind_address' client_port='#@client_port' critical_heap_percentage='#@critical_heap_percentage' initial_heap='#@initial_heap' jvm_options='#@jvm_options' max_heap='#@max_heap' run_netserver='#@run_netserver'>"
    end

  end

end
| 45.880208 | 307 | 0.675446 |
621f3f4822d41702772dc47b36db4e79b9ff4b9d
| 2,084 |
#
# Author:: Vasundhara Jagdale (<[email protected]>)
# Copyright:: Copyright 2015-2018 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/knife/openstack_helpers"
require "chef/knife/cloud/openstack_service_options"
require "chef/knife/cloud/command"
class Chef
  class Knife
    class Cloud
      # Knife plugin command: allocates an OpenStack floating IP,
      # optionally from a named pool, and prints a summary table.
      class OpenstackFloatingIpAllocate < Command
        include OpenstackHelpers
        include OpenstackServiceOptions

        banner "knife openstack floating_ip allocate (options)"

        option :pool,
          short: "-p POOL",
          long: "--pool POOL",
          description: "Floating IP pool to allocate from.",
          proc: proc { |key| Chef::Config[:knife][:pool] = key }

        # Performs the allocation; the raw API response is retained for
        # after_exec_command to format.
        def execute_command
          @resource = @service.allocate_address(locate_config_value(:pool))
        end

        # Renders the allocated address details as label/value columns.
        def after_exec_command
          @columns_with_info = [{ label: "ID", value: @resource["floating_ip"]["id"].to_s },
                                { label: "Instance ID", value: @resource["floating_ip"]["instance_id"].to_s },
                                { label: "Floating IP", value: @resource["floating_ip"]["ip"].to_s },
                                { label: "Fixed IP", value: @resource["floating_ip"]["fixed_ip"].to_s },
                                { label: "Pool", value: @resource["floating_ip"]["pool"].to_s },
          ]
          @service.server_summary(nil, @columns_with_info)
        end
      end
    end
  end
end
| 37.890909 | 110 | 0.627639 |
ed1357672696f680287b24b608470bbb5968d28d
| 1,275 |
require 'test_helper'

# Scaffold-style CRUD coverage for EmailLinksController, driven by the
# :one fixture.
class EmailLinksControllerTest < ActionDispatch::IntegrationTest
  setup do
    @email_link = email_links(:one)
  end

  test "should get index" do
    get email_links_url
    assert_response :success
  end

  test "should get new" do
    get new_email_link_url
    assert_response :success
  end

  test "should create email_link" do
    assert_difference('EmailLink.count') do
      post email_links_url, params: { email_link: { expires_at: @email_link.expires_at, token: @email_link.token, user_id: @email_link.user_id } }
    end

    assert_redirected_to email_link_url(EmailLink.last)
  end

  test "should show email_link" do
    get email_link_url(@email_link)
    assert_response :success
  end

  test "should get edit" do
    get edit_email_link_url(@email_link)
    assert_response :success
  end

  test "should update email_link" do
    patch email_link_url(@email_link), params: { email_link: { expires_at: @email_link.expires_at, token: @email_link.token, user_id: @email_link.user_id } }
    assert_redirected_to email_link_url(@email_link)
  end

  test "should destroy email_link" do
    assert_difference('EmailLink.count', -1) do
      delete email_link_url(@email_link)
    end

    assert_redirected_to email_links_url
  end
end
| 26.020408 | 157 | 0.741176 |
ac2669d8b6fd614223ec8cda4020ce93db26260d
| 4,284 |
module Authlogic
  module ORMAdapters
    module ActiveRecordAdapter
      module ActsAsAuthentic
        # = Persistence
        #
        # This is responsible for all record persistence. Basically what your Authlogic session needs to persist the record's session.
        #
        # === Class Methods
        #
        # * <tt>forget_all!</tt> - resets ALL records persistence_token to a unique value, requiring all users to re-login
        # * <tt>unique_token</tt> - returns a pretty hardcore random token that is finally encrypted with a hash algorithm
        #
        # === Instance Methods
        #
        # * <tt>forget!</tt> - resets the record's persistence_token which requires them to re-login
        #
        # === Alias Method Chains
        #
        # * <tt>#{options[:password_field]}</tt> - adds in functionality to reset the persistence token when the password is changed
        module Persistence
          # Wraps acts_as_authentic to install persistence-token validations,
          # callbacks, and token-management methods on the calling model.
          # Method names are generated from options[:persistence_token_field]
          # and options[:password_field], hence the string eval below.
          def acts_as_authentic_with_persistence(options = {})
            acts_as_authentic_without_persistence(options)

            validates_presence_of options[:persistence_token_field]
            validates_uniqueness_of options[:persistence_token_field], :if => "#{options[:persistence_token_field]}_changed?".to_sym
            before_validation "reset_#{options[:persistence_token_field]}".to_sym, :if => "reset_#{options[:persistence_token_field]}?".to_sym

            # Resets every record's persistence token, logging all users out.
            def forget_all!
              # Paginate these to save on memory
              records = nil
              i = 0
              begin
                records = find(:all, :limit => 50, :offset => i)
                records.each { |record| record.forget! }
                i += 50
              end while !records.blank?
            end

            # The generated method names depend on the configured field names,
            # so the bodies are built with a string class_eval.
            # save_without_session_maintenance(false) saves while skipping
            # validation and Authlogic's session callbacks.
            class_eval <<-"end_eval", __FILE__, __LINE__
              def self.unique_token
                Authlogic::Random.hex_token
              end

              def forget!
                self.#{options[:persistence_token_field]} = self.class.unique_token
                save_without_session_maintenance(false)
              end

              def #{options[:password_field]}_with_persistence=(value)
                reset_#{options[:persistence_token_field]} unless value.blank?
                self.#{options[:password_field]}_without_persistence = value
              end
              alias_method_chain :#{options[:password_field]}=, :persistence

              def reset_#{options[:persistence_token_field]}
                self.#{options[:persistence_token_field]} = self.class.unique_token
              end

              def reset_#{options[:persistence_token_field]}!
                reset_#{options[:persistence_token_field]}
                save_without_session_maintenance(false)
              end

              def reset_#{options[:persistence_token_field]}?
                #{options[:persistence_token_field]}.blank?
              end

              # When a user logs in we need to ensure they have a persistence token. Think about apps that are transitioning and
              # never have a persistence token to begin with. When their users log in their persistence token needs to be set.
              # The only other time persistence tokens are reset is in a before_validation on the user, and when a user is saved
              # from the session we skip validation for performance reasons. We do save_without_session_maintenance(false), the false
              # indicates to skip validation.
              def valid_#{options[:password_field]}_with_persistence?(attempted_password)
                result = valid_password_without_persistence?(attempted_password)
                reset_#{options[:persistence_token_field]}! if result && #{options[:persistence_token_field]}.blank?
                result
              end
              alias_method_chain :valid_#{options[:password_field]}?, :persistence
            end_eval
          end
        end
      end
    end
  end
end

# Install the persistence extension: acts_as_authentic is chained so the
# wrapper above runs whenever a model declares acts_as_authentic.
ActiveRecord::Base.class_eval do
  class << self
    include Authlogic::ORMAdapters::ActiveRecordAdapter::ActsAsAuthentic::Persistence
    alias_method_chain :acts_as_authentic, :persistence
  end
end
| 45.574468 | 142 | 0.606443 |
ac58a1c7f29f43906ef54e55126c707e7423c988
| 419 |
# Seed reference data. find_or_create_by! keeps every run idempotent:
# existing rows are reused, missing rows are created (raising on failure).
[
  { name: "Harper", price: 35.00 },
  { name: "Alexa",  price: 35.00 },
  { name: "Adrian", price: 40.00 },
].each { |attrs| Bouquet.find_or_create_by!(attrs) }

[
  { name: "Free shipping",    price: 0 },
  { name: "Premium shipping", price: 2.50 },
].each { |attrs| ShippingOption.find_or_create_by!(attrs) }

["Single delivery", "3 month bundle"].each do |name|
  OrderType.find_or_create_by!(name: name)
end
| 41.9 | 72 | 0.775656 |
3800e9205cd18332bfaaa2b18c53fd2edbd18eb8
| 1,531 |
#
# Be sure to run `pod lib lint HelloWord.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the HelloWord pod.
Pod::Spec.new do |s|
  s.name             = 'HelloWord'
  s.version          = '0.1.0'
  s.summary          = 'A short description of HelloWord.'

# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description      = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage         = 'https://github.com/Anselz/HelloWorld'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'Janselz' => '[email protected]' }
  # NOTE(review): no :tag (or :commit) is pinned here, so the pod resolves
  # the repository's default branch HEAD rather than version 0.1.0 —
  # confirm this is intended before publishing.
  s.source           = { :git => 'https://github.com/Anselz/HelloWorld.git'}
  # s.social_media_url = 'https://twitter.com/Janselz'

  s.ios.deployment_target = '8.0'

  s.source_files = 'HelloWord/Classes/**/*'

  # s.resource_bundles = {
  #   'HelloWord' => ['HelloWord/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'AFNetworking'
  s.dependency 'AFNetworking'
end
| 35.604651 | 88 | 0.639451 |
915f8b679d130c5d6c9de3fa4e525a77b04d7b8b
| 2,206 |
# Homebrew formula for yq, a jq wrapper for YAML/XML, vendoring its Python
# dependencies into the formula's libexec.
class PythonYq < Formula
  desc "Command-line YAML and XML processor that wraps jq"
  homepage "https://yq.readthedocs.io/"
  url "https://files.pythonhosted.org/packages/89/67/e36d2ea4c0e273db3adabbc200ebb76dee4cfdfd9e1fea6e6fab73441098/yq-2.8.1.tar.gz"
  sha256 "24d36c7e9e670209562a161b8506ff7e86959be49ba7aee4ca659810801e5710"

  bottle do
    cellar :any_skip_relocation
    sha256 "cc8f999e7cd9aecdd2a853f89df3809fb337366652ca7b6f5ffc85977f8f7e96" => :catalina
    sha256 "cc8f999e7cd9aecdd2a853f89df3809fb337366652ca7b6f5ffc85977f8f7e96" => :mojave
    sha256 "cc8f999e7cd9aecdd2a853f89df3809fb337366652ca7b6f5ffc85977f8f7e96" => :high_sierra
  end

  depends_on "jq"
  depends_on "python"

  # Both projects install a `yq` binary.
  conflicts_with "yq", :because => "both install `yq` executables"

  # Vendored Python dependencies, installed under libexec/vendor.
  resource "PyYAML" do
    url "https://files.pythonhosted.org/packages/e3/e8/b3212641ee2718d556df0f23f78de8303f068fe29cdaa7a91018849582fe/PyYAML-5.1.2.tar.gz"
    sha256 "01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4"
  end

  resource "xmltodict" do
    url "https://files.pythonhosted.org/packages/58/40/0d783e14112e064127063fbf5d1fe1351723e5dfe9d6daad346a305f6c49/xmltodict-0.12.0.tar.gz"
    sha256 "50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"
  end

  def install
    xy = Language::Python.major_minor_version "python3"
    # Dependencies go into libexec/vendor, yq itself into libexec; both
    # site-packages paths must be on PYTHONPATH for the installs below.
    ENV["PYTHONPATH"] = libexec/"lib/python#{xy}/site-packages"
    ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python#{xy}/site-packages"
    resources.each do |r|
      r.stage do
        system "python3", *Language::Python.setup_install_args(libexec/"vendor")
      end
    end

    ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python#{xy}/site-packages"
    system "python3", *Language::Python.setup_install_args(libexec)

    bin.install Dir[libexec/"bin/*"]
    # Wrap the installed scripts so that, at runtime, jq is on PATH and the
    # vendored libraries are on PYTHONPATH.
    env = {
      :PATH       => "#{Formula["jq"].opt_bin}:$PATH",
      :PYTHONPATH => ENV["PYTHONPATH"],
    }
    bin.env_script_all_files(libexec/"bin", env)
  end

  test do
    # yq -y extracts .foo.baz.bat from YAML input and re-emits YAML ("3\n...").
    input = <<~EOS
      foo:
        bar: 1
        baz: {bat: 3}
    EOS
    expected = <<~EOS
      3
      ...
    EOS
    assert_equal expected, pipe_output("#{bin}/yq -y .foo.baz.bat", input, 0)
  end
end
| 34.46875 | 140 | 0.731188 |
1da43962db149a8c2cdded6afff3725ffc00be6c
| 2,251 |
# Proxy for credential CRUD operations: every call is delegated to the
# active data service via data_service_operation, and failures are logged
# (via log_error) rather than raised, preserving the original best-effort
# error-handling contract.
module CredentialDataProxy
  # Creates a credential core from +opts+. Returns the created core, or nil
  # on error (after logging).
  def create_credential(opts)
    self.data_service_operation do |data_service|
      data_service.create_credential(opts)
    end
  rescue => e
    self.log_error(e, "Problem creating credential")
  end

  # Creates a credential core for a password cracked from an existing core
  # (identified by opts[:core_id]). The cracked password is stored as the
  # private, the originating core is linked, and any logins on the original
  # core are mirrored onto the new core as UNTRIED. Returns the new core.
  def create_cracked_credential(opts)
    self.data_service_operation do |data_service|
      opts[:workspace_id] = workspace.id
      opts[:private_data] = opts.delete(:password)
      opts[:private_type] = :password
      old_core = data_service.creds(id: opts.delete(:core_id)).first
      if old_core
        opts[:originating_core_id] = old_core.id
        opts[:origin_type] = :cracked_password
      end
      new_core = data_service.create_credential(opts)
      # Guard: the originating core may no longer exist; previously this
      # dereferenced old_core unconditionally and raised NoMethodError.
      if old_core
        old_core.logins.each do |login|
          service = data_service.services(id: login.service_id).first
          data_service.create_credential_login(core: new_core, service_id: service.id, status: Metasploit::Model::Login::Status::UNTRIED)
        end
      end
      new_core
    end
  rescue => e
    self.log_error(e, "Problem creating cracked credential")
  end

  # Creates a credential core and an associated login in one step.
  # Returns the created core (not the login).
  def create_credential_and_login(opts)
    self.data_service_operation do |data_service|
      core = data_service.create_credential(opts)
      opts[:core] = core
      data_service.create_credential_login(opts)
      core
    end
  rescue => e
    self.log_error(e, "Problem creating credential and login")
  end

  # Retrieves credentials matching +opts+, scoped to the current workspace.
  def creds(opts = {})
    self.data_service_operation do |data_service|
      add_opts_workspace(opts)
      data_service.creds(opts)
    end
  rescue => e
    self.log_error(e, "Problem retrieving credentials")
  end

  # Updates a credential, scoped to the current workspace.
  def update_credential(opts)
    self.data_service_operation do |data_service|
      add_opts_workspace(opts)
      data_service.update_credential(opts)
    end
  rescue => e
    self.log_error(e, "Problem updating credential")
  end

  # Deletes credentials matching +opts+.
  def delete_credentials(opts)
    self.data_service_operation do |data_service|
      data_service.delete_credentials(opts)
    end
  rescue => e
    self.log_error(e, "Problem deleting credentials")
  end
end
| 27.790123 | 137 | 0.667259 |
e973f7cdabece26e57fdbdb61108db5b47a2fd1b
| 333 |
module VertexClient
  module Response
    # Response wrapper exposing the tax area id of a Vertex tax-area lookup.
    class TaxArea < Base
      # Memoized identifier pulled from the primary result payload. The
      # :@tax_area_id key is how the SOAP layer represents the XML attribute.
      def tax_area_id
        @tax_area_id ||= primary_result[:@tax_area_id]
      end

      private

      # The service returns either a single result hash or an array of
      # them; with an array, the first entry is treated as primary.
      def primary_result
        result = @body[:tax_area_result]
        result.is_a?(Array) ? result[0] : result
      end
    end
  end
end
| 19.588235 | 81 | 0.636637 |
e885bfa14d6a2d199d88e8083d1928454fb35ac2
| 4,347 |
require "action_view"
require "htmlentities"
module Govspeak
  # Presents an attachment hash (symbol keys: :id, :url, :title,
  # :file_extension, :file_size, :number_of_pages, :external?) as HTML
  # fragments for display alongside a document.
  class AttachmentPresenter
    attr_reader :attachment

    include ActionView::Helpers::TagHelper
    include ActionView::Helpers::NumberHelper
    include ActionView::Helpers::TextHelper

    # attachment - Hash-like with the symbol keys listed above.
    def initialize(attachment)
      @attachment = attachment
    end

    def id
      attachment[:id]
    end

    def url
      attachment[:url]
    end

    def title
      attachment[:title]
    end

    def file_extension
      # NOTE: this is a separate parameter rather than being calculated from the
      # filename because at the time of writing not all apps were using the effects
      # of this field.
      attachment[:file_extension]
    end

    # Builds the comma-separated metadata spans shown next to an attachment:
    # HTML attachments get a bare type label, external links show the URL,
    # and everything else shows type / human file size / page count (each
    # only when present).
    def attachment_attributes
      attributes = []

      if file_extension == "html"
        attributes << content_tag(:span, "HTML", class: "type")
      elsif attachment[:external?]
        attributes << content_tag(:span, url, class: "url")
      else
        attributes << content_tag(:span, humanized_content_type(file_extension), class: "type") if file_extension
        attributes << content_tag(:span, number_to_human_size(attachment[:file_size]), class: "file-size") if attachment[:file_size]
        attributes << content_tag(:span, pluralize(attachment[:number_of_pages], "page"), class: "page-length") if attachment[:number_of_pages]
      end

      attributes.join(", ").html_safe
    end

    MS_WORD_DOCUMENT_HUMANIZED_CONTENT_TYPE = "MS Word Document".freeze
    MS_EXCEL_SPREADSHEET_HUMANIZED_CONTENT_TYPE = "MS Excel Spreadsheet".freeze
    MS_POWERPOINT_PRESENTATION_HUMANIZED_CONTENT_TYPE = "MS Powerpoint Presentation".freeze

    # <abbr> tag so screen readers / tooltips expand the short type label.
    def file_abbr_tag(abbr, title)
      content_tag(:abbr, abbr, title: title)
    end

    # Maps a file extension (case-insensitive) to a human-readable content
    # type label; unknown extensions return "".
    def humanized_content_type(file_extension)
      file_extension_vs_humanized_content_type = {
        "chm" => file_abbr_tag("CHM", "Microsoft Compiled HTML Help"),
        "csv" => file_abbr_tag("CSV", "Comma-separated Values"),
        "diff" => file_abbr_tag("DIFF", "Plain text differences"),
        "doc" => MS_WORD_DOCUMENT_HUMANIZED_CONTENT_TYPE,
        "docx" => MS_WORD_DOCUMENT_HUMANIZED_CONTENT_TYPE,
        "dot" => file_abbr_tag("DOT", "MS Word Document Template"),
        "dxf" => file_abbr_tag("DXF", "AutoCAD Drawing Exchange Format"),
        "eps" => file_abbr_tag("EPS", "Encapsulated PostScript"),
        "gif" => file_abbr_tag("GIF", "Graphics Interchange Format"),
        "gml" => file_abbr_tag("GML", "Geography Markup Language"),
        "html" => file_abbr_tag("HTML", "Hypertext Markup Language"),
        "ics" => file_abbr_tag("ICS", "iCalendar file"),
        "jpg" => "JPEG",
        "odp" => file_abbr_tag("ODP", "OpenDocument Presentation"),
        "ods" => file_abbr_tag("ODS", "OpenDocument Spreadsheet"),
        "odt" => file_abbr_tag("ODT", "OpenDocument Text document"),
        "pdf" => file_abbr_tag("PDF", "Portable Document Format"),
        "png" => file_abbr_tag("PNG", "Portable Network Graphic"),
        "ppt" => MS_POWERPOINT_PRESENTATION_HUMANIZED_CONTENT_TYPE,
        "pptx" => MS_POWERPOINT_PRESENTATION_HUMANIZED_CONTENT_TYPE,
        "ps" => file_abbr_tag("PS", "PostScript"),
        "rdf" => file_abbr_tag("RDF", "Resource Description Framework"),
        "rtf" => file_abbr_tag("RTF", "Rich Text Format"),
        "sch" => file_abbr_tag("SCH", "XML based Schematic"),
        "txt" => "Plain text",
        "wsdl" => file_abbr_tag("WSDL", "Web Services Description Language"),
        "xls" => MS_EXCEL_SPREADSHEET_HUMANIZED_CONTENT_TYPE,
        "xlsm" => file_abbr_tag("XLSM", "MS Excel Macro-Enabled Workbook"),
        "xlsx" => MS_EXCEL_SPREADSHEET_HUMANIZED_CONTENT_TYPE,
        "xlt" => file_abbr_tag("XLT", "MS Excel Spreadsheet Template"),
        "xsd" => file_abbr_tag("XSD", "XML Schema"),
        "xslt" => file_abbr_tag("XSLT", "Extensible Stylesheet Language Transformation"),
        "zip" => file_abbr_tag("ZIP", "Zip archive"),
      }
      file_extension_vs_humanized_content_type.fetch(file_extension.to_s.downcase, "")
    end

    # Builds an <a> tag by hand, HTML-entity-encoding the URL and each
    # attribute value (the body is inserted as-is).
    def link(body, url, options = {})
      options_str = options.map { |k, v| %(#{encode(k)}="#{encode(v)}") }.join(" ")
      %(<a href="#{encode(url)}" #{options_str}>#{body}</a>)
    end

    private

    def encode(text)
      HTMLEntities.new.encode(text)
    end
  end
end
| 40.25 | 143 | 0.658155 |
38e9c6f6be8e0b6d5a4769fa29f69f43b4efc004
| 1,326 |
require "paperclip"
require "paperclip/smart_data_uri_adapter/version"
require "base64" # this should have been required by Paperclip
module Paperclip
  # DataUriAdapter variant that derives a sensible filename extension from
  # the data URI's declared content type instead of using the default name.
  class SmartDataUriAdapter < DataUriAdapter
    class << self
      # Replaces the stock DataUriAdapter registration with this adapter,
      # reusing the original adapter's handler test (the proc that
      # recognizes data: URIs). Registers the original first if it was
      # never registered, so there is always a test proc to steal.
      def replace_original_adapter
        found = registered_handlers.find &original_adapter?
        Paperclip::DataUriAdapter.register unless found

        data_uri_test = find_and_delete_original_adapter

        Paperclip.io_adapters.register Paperclip::SmartDataUriAdapter, &data_uri_test
      end

      private

      # Removes the original adapter's registration and returns its test proc.
      def find_and_delete_original_adapter
        data_uri_test, * = registered_handlers.find &original_adapter?
        registered_handlers.delete_if &original_adapter?
        data_uri_test
      end

      def registered_handlers
        Paperclip.io_adapters.registered_handlers
      end

      # Predicate over (test, adapter) registration pairs, matching the
      # stock DataUriAdapter entry. Memoized so delete_if and find share
      # the same proc.
      def original_adapter?
        @original_adapter_evaluator ||= ->(args) { test, adapter = args
          adapter == Paperclip::DataUriAdapter
        }
      end
    end

    def initialize(*)
      super
      self.original_filename = "file.#{guessed_file_ext}"
    end

    private

    # Best-effort extension for the payload's content type. Falls back to
    # "jpeg" when the type is unknown or has no registered extensions —
    # previously an unknown content type made MIME::Types[...] return an
    # empty list and `.first.extensions` raised NoMethodError on nil.
    def guessed_file_ext
      mime_type = MIME::Types[content_type].first
      (mime_type && mime_type.extensions.first) || "jpeg"
    end
  end
end

Paperclip::SmartDataUriAdapter.replace_original_adapter # replace Paperclip::DataUriAdapter
| 25.018868 | 91 | 0.71267 |
18ae00ecc48fad9825c031dd4b2db5e5672abbc6
| 8,698 |
# Namespace that compiled template methods are defined into by default.
module Merb::InlineTemplates
end

module Merb::Template
  # Maps file extensions ("erb", ...) to template engine classes.
  EXTENSIONS            = {} unless defined?(EXTENSIONS)
  # Maps engine-neutral template paths to compiled method names (or nil).
  METHOD_LIST           = {} unless defined?(METHOD_LIST)
  # Tracks which local variable names each compiled template supports;
  # defaults to a frozen empty list for unknown paths.
  SUPPORTED_LOCALS_LIST = Hash.new([].freeze) unless defined?(SUPPORTED_LOCALS_LIST)
  # Template modification times, keyed by path.
  MTIMES                = {} unless defined?(MTIMES)

  class << self
    # Get the template's method name from a full path. This replaces
    # non-alphanumeric characters with __ and "." with "_"
    #
    # Collisions are potentially possible with something like:
    # ~foo.bar and __foo.bar or !foo.bar.
    #
    # ==== Parameters
    # path<String>:: A full path to convert to a valid Ruby method name
    #
    # ==== Returns
    # String:: The template name.
    #
    #---
    # We might want to replace this with something that varies the
    # character replaced based on the non-alphanumeric character
    # to avoid edge-case collisions.
    def template_name(path)
      path = File.expand_path(path)
      path.gsub(/[^\.a-zA-Z0-9]/, "__").gsub(/\./, "_")
    end

    # For a given path, get an IO object that responds to #path.
    #
    # This is so that plugins can override this if they provide
    # mechanisms for specifying templates that are not just simple
    # files. The plugin is responsible for ensuring that the fake
    # path provided will work with #template_for, and thus the
    # RenderMixin in general.
    #
    # ==== Parameters
    # path<String>:: A full path to find a template for. This is the
    #   path that the RenderMixin assumes it should find the template
    #   in.
    #
    # ==== Returns
    # IO#path:: An IO object that responds to path (File or VirtualFile).
    #---
    # @semipublic
    def load_template_io(path)
      File.open(path, "r")
    end

    # Get the name of the template method for a particular path.
    #
    # ==== Parameters
    # path<String>:: A full path to find a template method for.
    # template_stack<Array>:: The template stack. Not used.
    # locals<Array[Symbol]>:: The names of local variables
    #
    # ==== Returns
    # <String>:: name of the method that inlines the template.
    #---
    # @semipublic
    def template_for(path, template_stack = [], locals=[])
      path = File.expand_path(path)

      if needs_compilation?(path, locals)
        # The extension glob picks whichever registered engine's file exists.
        file = Dir["#{path}.{#{template_extensions.join(',')}}"].first

        METHOD_LIST[path] = file ? inline_template(load_template_io(file), locals) : nil
      end

      METHOD_LIST[path]
    end

    # Decide if a template needs to be re/compiled.
    #
    # ==== Parameters
    # path<String>:: The full path of the template to check support for.
    # locals<Array[Symbol]>:: The list of locals that need to be supported
    #
    # ==== Returns
    # Boolean:: Whether or not the template for the provided path needs to be recompiled
    #---
    def needs_compilation?(path, locals)
      # Always recompile when template reloading is on or nothing is cached.
      return true if Merb::Config[:reload_templates] || !METHOD_LIST[path]

      # Recompile only when the requested locals differ AND include at least
      # one local the cached compilation does not already support.
      current_locals = SUPPORTED_LOCALS_LIST[path]
      current_locals != locals &&
        !(locals - current_locals).empty?
    end

    # Get all known template extensions
    #
    # ==== Returns
    #   Array:: Extension strings.
    #---
    # @semipublic
    def template_extensions
      EXTENSIONS.keys
    end

    # Takes a template at a particular path and inlines it into a module and
    # adds it to the METHOD_LIST table to speed lookup later.
    #
    # ==== Parameters
    # io<#path>::
    #   An IO that responds to #path (File or VirtualFile)
    # locals<Array[Symbol]>::
    #   A list of local names that should be assigned in the template method
    #   from the arguments hash. Defaults to [].
    # mod<Module>::
    #   The module to put the compiled method into. Defaults to
    #   Merb::InlineTemplates
    #
    # ==== Notes
    # Even though this method supports inlining into any module, the method
    # must be available to instances of AbstractController that will use it.
    #---
    # @public
    def inline_template(io, locals=[], mod = Merb::InlineTemplates)
      full_file_path = File.expand_path(io.path)
      # Cache key strips the engine extension so lookups are engine-agnostic.
      engine_neutral_path = full_file_path.gsub(/\.[^\.]*$/, "")

      SUPPORTED_LOCALS_LIST[engine_neutral_path] |= locals unless locals.empty?
      ret = METHOD_LIST[engine_neutral_path] =
        engine_for(full_file_path).compile_template(io, template_name(full_file_path), locals, mod)

      io.close
      ret
    end

    # Finds the engine for a particular path.
    #
    # ==== Parameters
    # path<String>:: The path of the file to find an engine for.
    #
    # ==== Returns
    # Class:: The engine.
    #---
    # @semipublic
    def engine_for(path)
      path = File.expand_path(path)
      EXTENSIONS[path.match(/\.([^\.]*)$/)[1]]
    end

    # Registers the extensions that will trigger a particular templating
    # engine.
    #
    # ==== Parameters
    # engine<Class>:: The class of the engine that is being registered
    # extensions<Array[String]>::
    #   The list of extensions that will be registered with this templating
    #   language
    #
    # ==== Raises
    # ArgumentError:: engine does not have a compile_template method.
    #
    # ==== Example
    #   Merb::Template.register_extensions(Merb::Template::Erubis, ["erb"])
    #---
    # @public
    def register_extensions(engine, extensions)
      raise ArgumentError, "The class you are registering does not have a compile_template method" unless
        engine.respond_to?(:compile_template)

      extensions.each{|ext| EXTENSIONS[ext] = engine }
      # Mix the engine's helper module into all controllers.
      Merb::AbstractController.class_eval <<-HERE
        include #{engine}::Mixin
      HERE
    end
  end

  require 'erubis'

  class Erubis
    # ==== Parameters
    # io<#path>:: An IO containing the full path of the template.
    # name<String>:: The name of the method that will be created.
    # locals<Array[Symbol]>:: A list of locals to assign from the args passed into the compiled template.
    # mod<Module>:: The module that the compiled method will be placed into.
    def self.compile_template(io, name, locals, mod)
      template = ::Erubis::BlockAwareEruby.new(io.read)

      # Silence warnings while eval-defining the method (the generated
      # source can trip "assigned but unused" style warnings).
      _old_verbose, $VERBOSE = $VERBOSE, nil
      # Each supported local is assigned out of the single _locals hash arg.
      assigns = locals.inject([]) do |assigns, local|
        assigns << "#{local} = _locals[#{local.inspect}]"
      end.join(";")

      code = "def #{name}(_locals={}); #{assigns}; #{template.src}; end"
      mod.module_eval code, File.expand_path(io.path)
      $VERBOSE = _old_verbose

      name
    end

    module Mixin

      # ==== Parameters
      # *args:: Arguments to pass to the block.
      # &block:: The template block to call.
      #
      # ==== Returns
      # String:: The output of the block.
      #
      # ==== Examples
      # Capture being used in a .html.erb page:
      #
      #   <% @foo = capture do %>
      #     <p>Some Foo content!</p>
      #   <% end %>
      def capture_erb(*args, &block)
        # Swap in a fresh buffer so only the block's output is captured.
        _old_buf, @_erb_buf = @_erb_buf, ""
        block.call(*args)
        ret = @_erb_buf
        @_erb_buf = _old_buf
        ret
      end

      # Appends +string+ to the ERB output buffer. The +binding+ argument is
      # accepted for interface compatibility but not used here.
      def concat_erb(string, binding)
        @_erb_buf << string
      end

    end

    Merb::Template.register_extensions(self, %w[erb])
  end
end
module Erubis
  # Eruby enhancer that writes output through the @_erb_buf instance
  # variable (instead of a local), so helpers like capture_erb can swap
  # the buffer in and out, and <%= ... do %> blocks render correctly.
  module BlockAwareEnhancer
    # Save the caller's buffer and start a fresh one for this template.
    def add_preamble(src)
      src << "_old_buf, @_erb_buf = @_erb_buf, ''; "
      src << "@_engine = 'erb'; "
    end

    # Restore the caller's buffer and evaluate to this template's output.
    def add_postamble(src)
      src << "\n" unless src[-1] == ?\n
      src << "_ret = @_erb_buf; @_erb_buf = _old_buf; _ret.to_s;\n"
    end

    def add_text(src, text)
      src << " @_erb_buf.concat('" << escape_text(text) << "'); "
    end

    def add_expr_escaped(src, code)
      src << ' @_erb_buf.concat(' << escaped_expr(code) << ');'
    end

    # Closes the open concat( call emitted by add_expr_literal for
    # block-style expressions (tailch == "=" marks <% ... =%> style).
    def add_stmt2(src, code, tailch)
      src << code
      src << " ).to_s; " if tailch == "="
      src << ';' unless code[-1] == ?\n
    end

    # Expressions ending in `do`/`{` (optionally with block params) are left
    # open so the block's output is concatenated when the block closes;
    # plain expressions are concatenated immediately.
    def add_expr_literal(src, code)
      if code =~ /(do|\{)(\s*\|[^|]*\|)?\s*\Z/
        src << ' @_erb_buf.concat( ' << code << "; "
      else
        src << ' @_erb_buf.concat((' << code << ').to_s);'
      end
    end
  end

  class BlockAwareEruby < Eruby
    include BlockAwareEnhancer
  end

  # module RubyEvaluator
  #
  #   # DOC
  #   def def_method(object, method_name, filename=nil)
  #     m = object.is_a?(Module) ? :module_eval : :instance_eval
  #     setup = "@_engine = 'erb'"
  #     object.__send__(m, "def #{method_name}(locals={}); #{setup}; #{@src}; end", filename || @filename || '(erubis)')
  #   end
  #
  # end
end
| 31.064286 | 120 | 0.600943 |
030e9dcf9a5fc2a704080cb284cfc9480bef7193
| 375 |
require 'spec_helper'
# Request specs for the Payme Sinatra app's /form endpoint.
describe 'The Payme App' do
  include Rack::Test::Methods

  # Rack::Test entry point: the Sinatra application under test.
  def app
    Sinatra::Application
  end

  it "get /form" do
    get "/form"
    expect(last_response).to be_ok
  end

  it "post /form" do
    # Pass the form params hash directly; the previous `params={...}` was a
    # misleading local-variable assignment, not a keyword argument.
    post '/form', message: "UnitTest"
    expect(last_response).to be_ok
    expect(last_response.body).to eq("You said 'UnitTest'")
  end
end
| 17.857143 | 59 | 0.666667 |
5d0862da757a9d2ba0e63d4714dc7791a6adc376
| 1,465 |
# -*- encoding: utf-8 -*-
require File.expand_path("../lib/google/cloud/logging/version", __FILE__)
# Gem specification for the google-cloud-logging client library.
Gem::Specification.new do |gem|
  gem.name          = "google-cloud-logging"
  gem.version       = Google::Cloud::Logging::VERSION

  gem.authors       = ["Mike Moore", "Chris Smith"]
  gem.email         = ["[email protected]", "[email protected]"]
  gem.description   = "google-cloud-logging is the official library for Stackdriver Logging."
  gem.summary       = "API Client library for Stackdriver Logging"
  gem.homepage      = "http://googlecloudplatform.github.io/google-cloud-ruby/"
  gem.license       = "Apache-2.0"

  # Package only the library files tracked by git.
  gem.files         = `git ls-files -- lib/*`.split("\n")
  gem.require_paths = ["lib"]

  gem.required_ruby_version = ">= 2.0.0"

  # Runtime dependencies (gRPC transport + generated API stubs).
  gem.add_dependency "google-cloud-core", "~> 0.20.0"
  gem.add_dependency "grpc", "~> 1.0"
  gem.add_dependency "google-protobuf", "~> 3.0"
  gem.add_dependency "googleapis-common-protos", "~> 1.2"
  gem.add_dependency "google-gax", "~> 0.4.4"

  # Development/test-only dependencies.
  gem.add_development_dependency "minitest", "~> 5.9"
  gem.add_development_dependency "minitest-autotest", "~> 1.0"
  gem.add_development_dependency "minitest-focus", "~> 1.1"
  gem.add_development_dependency "minitest-rg", "~> 5.2"
  gem.add_development_dependency "autotest-suffix", "~> 1.1"
  gem.add_development_dependency "rubocop", "<= 0.35.1"
  gem.add_development_dependency "simplecov", "~> 0.9"
  gem.add_development_dependency "yard", "~> 0.9"
end
| 41.857143 | 93 | 0.673038 |
d5c73864a304836c1589c522463d1171913986aa
| 233 |
module CivicDuty
  # Job that walks the node tree at +path+ and returns the nodes accepted
  # by the +matcher+ predicate.
  class FindNodes < Job::Runner
    param :matcher

    # Job step: select every node the matcher responds truthy to.
    step def find_nodes
      each_node(path).select(&matcher)
    end

    # Convenience constructor: FindNodes[pattern] wraps +pattern+ in a
    # Matcher::Node and passes it as the :matcher param.
    # NOTE(review): relies on Job::Runner.[] accepting keyword params —
    # confirm against the Job::Runner API.
    def self.[](pattern)
      super(matcher: Matcher::Node.new(pattern))
    end
  end
end
| 16.642857 | 48 | 0.652361 |
035cb59de3ee51cfad3f2f3b206c2dfea465fec1
| 237 |
require 'test_helper'
# Renders the cart show view and checks the total is formatted with a
# thousands delimiter and two decimals.
describe 'Number formatting helper' do
  before do
    @rendered = FullStack::Views::Cart::Show.render(format: :html, total: 1234.56)
  end

  it 'formats number' do
    @rendered.must_include '1,234.56'
  end
end
| 19.75 | 82 | 0.704641 |
91a550e70256e38758fbdce077903eecf1a4711d
| 167 |
require "test_helper"
# Smoke test: the engine defines a VERSION constant.
class EffectiveBoilerplateTest < ActiveSupport::TestCase
  test "it has a version number" do
    assert EffectiveBoilerplate::VERSION
  end
end
| 20.875 | 56 | 0.790419 |
187a77585d83c802ca808387d682f5709fdcc5e5
| 4,394 |
# Capistrano (v2) database tasks: MySQL setup, remote backup, and
# replicating a remote dump into the local development database.
Capistrano::Configuration.instance(:must_exist).load do
  namespace :soprano do
    namespace :db do
      desc <<-DESC
        Create MySQL user and database using data from config/database.yml.
      DESC
      task :setup, :roles => :db, :only => { :primary => true } do
        config = YAML::load(File.open("config/database.yml"))[rails_env]
        # NOTE(review): `prompt=` is a local assignment, not a keyword
        # argument — it works but is misleading.
        root_password = Capistrano::CLI.password_prompt(prompt="Enter a root password for MySQL: ")
        run "mysql --user='root' --password='#{root_password}' -e \"CREATE DATABASE IF NOT EXISTS #{config["database"]} DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci; GRANT ALL PRIVILEGES ON #{config["database"]}.* TO '#{config["username"]}'@'localhost' IDENTIFIED BY '#{config["password"]}' WITH GRANT OPTION;\""
      end

      desc <<-DESC
        Dumps the database for the current environment into db/env-data.sql.bz2.
        Any existing backup will be overwritten.
      DESC
      task :backup, :roles => :db, :only => { :primary => true } do
        config = YAML::load(File.open("config/database.yml"))[rails_env]

        case config["adapter"]
        when "mysql", "mysql2"
          # Build mysqldump args only for the options present in database.yml,
          # then compress the dump in place on the server.
          cmd = ["mysqldump"]
          cmd << "--host='#{config['host']}'" unless config["host"].nil?
          cmd << "--user='#{config['username'].nil? ? 'root' : config['username']}'"
          cmd << "--password='#{config['password']}'" unless config["password"].nil?
          cmd << config["database"]
          cmd << "| bzip2 > #{current_path}/db/#{rails_env}-data.sql.bz2"
          run cmd.join(" ")
        else
          puts "Task not supported by '#{config['adapter']}'."
        end
      end

      desc <<-DESC
        Dump the database for the current environment and take a local copy.
      DESC
      task :download_backup, :roles => :db, :only => { :primary => true } do
        backup
        get "#{current_path}/db/#{rails_env}-data.sql.bz2", "db/#{rails_env}-data.sql.bz2"
      end

      desc <<-DESC
        Load an existing database dump into the development environment's database.
      DESC
      task :load_backup do
        run_locally "rake db:drop"
        run_locally "rake db:create"

        # Intentionally reads the *development* connection settings while
        # loading the remote environment's dump file.
        config = YAML::load(File.open("config/database.yml"))["development"]

        case config["adapter"]
        when "mysql", "mysql2"
          cmd = ["bzcat db/#{rails_env}-data.sql.bz2 | mysql"]
          cmd << "--host='#{config['host']}'" unless config["host"].nil?
          cmd << "--user='#{config['username'].nil? ? 'root' : config['username']}'"
          cmd << "--password='#{config['password']}'" unless config["password"].nil?
          cmd << config["database"]
          run_locally cmd.join(" ")
        else
          puts "Task not supported by '#{config['adapter']}'."
        end
      end

      # download_backup + load_backup in one step.
      task :replicate do
        download_backup
        load_backup
      end

      desc <<-DESC
        Crete database.yml into shared path to symlink it each deploy to the config folder
      DESC
      task :create_yml do
        # NOTE(review): generated config uses root with an empty password —
        # confirm this default is acceptable for the target environments.
        db_config = ERB.new <<-EOF
#{rails_env}:
  adapter: mysql2
  encoding: utf8
  reconnect: false
  database: #{application}
  pool: 5
  username: root
  password:
  host: localhost
        EOF

        run "mkdir -p #{File.join [shared_path, "config"]}"
        put db_config.result, File.join([shared_path, "config", "database.yml"])
      end

      desc <<-DESC
        Symlink shared database.yml to the config folder
      DESC
      task :symlink do
        run "ln -nfs #{File.join [shared_path, "config", "database.yml"]} #{File.join [release_path, "config", "database.yml"]}"
      end
    end
  end

  # Hook the tasks into the deploy lifecycle based on opt-in config flags.
  on :load do
    if fetch(:create_shared_database_file_before_deploy_setup, false)
      before "deploy:setup", "soprano:db:create_yml"

      if fetch(:autosymlink_shared_database_file, true)
        after "deploy:update_code", "soprano:db:symlink"
      end
    end

    if fetch(:setup_database_after_deploy_setup, false)
      after "deploy:setup", "soprano:db:setup"
    end

    if fetch(:backup_database_before_migrations, false)
      before "deploy:migrate", "soprano:db:backup"
    end

    if fetch(:disable_web_during_migrations, false)
      before "deploy:migrations", "deploy:web:disable"
      after "deploy:migrations", "deploy:web:enable"
    end
  end

  # Remote binaries these tasks shell out to.
  depend :remote, :command, "mysql"
  depend :remote, :command, "mysqldump"
  depend :remote, :command, "bzip2"
end
| 35.152 | 318 | 0.613109 |
33684681cbbafb0e3bda717338df3a4f554a2ee6
| 1,066 |
# Homebrew formula for mk-configure, a bmake-based autotools replacement.
class MkConfigure < Formula
  desc "Lightweight replacement for GNU autotools"
  homepage "https://github.com/cheusov/mk-configure"
  url "https://downloads.sourceforge.net/project/mk-configure/mk-configure/mk-configure-0.32.1/mk-configure-0.32.1.tar.gz"
  sha256 "0b9d9b409e6eb7d3820c64a972078f4380697c68abafee7ec16a7eb74cf2eb9e"

  bottle do
    cellar :any_skip_relocation
    sha256 "23d95312221d156245f2812acbd886642e403af8b7f754d3230583d6dd1d3ee2" => :catalina
    sha256 "23d95312221d156245f2812acbd886642e403af8b7f754d3230583d6dd1d3ee2" => :mojave
    sha256 "23d95312221d156245f2812acbd886642e403af8b7f754d3230583d6dd1d3ee2" => :high_sierra
    sha256 "561badf7674fbf4d619135a09de818a55dd2d88c838efdf023bed7b9b09d3a9f" => :x86_64_linux
  end

  depends_on "bmake"
  depends_on "makedepend"

  def install
    # The project's BSD makefiles read install locations from the environment.
    ENV["PREFIX"] = prefix
    ENV["MANDIR"] = man
    system "bmake", "all"
    system "bmake", "install"
    doc.install "presentation/presentation.pdf"
  end

  test do
    # mkcmake printing its MAKE_VERSION proves the wrapper and bmake work.
    system "#{bin}/mkcmake", "-V", "MAKE_VERSION", "-f", "/dev/null"
  end
end
| 34.387097 | 122 | 0.76454 |
1c2f6b98f8ef2a6e5ed9ab954d00e9dc639d976c
| 1,286 |
# Core String extensions. NOTE(review): Type.coerce_to and StringValue are
# Rubinius kernel helpers (not stock MRI), and several methods below assume
# Ruby 1.8 semantics where String#[] returns an Integer codepoint — this
# looks like Rubinius core-library code; confirm the target runtime.
class String
  # Returns +obj+ coerced via #to_str, or nil when it does not respond to
  # #to_str.
  def self.try_convert(obj)
    return nil unless obj.respond_to?(:to_str)
    Type.coerce_to(obj, String, :to_str)
  end

  # Splits the string around the first occurrence of +pattern+ (String or
  # Regexp), returning [pre_match, match, post_match]. When there is no
  # match, returns [self, "", ""].
  def partition(pattern)
    pattern = Type.coerce_to(pattern, String, :to_str) unless pattern.is_a? Regexp
    i = index(pattern)
    return [self, "", ""] unless i

    if pattern.is_a? Regexp
      match = Regexp.last_match
      [match.pre_match, match[0], match.post_match]
    else
      last = i+pattern.length
      [self[0...i], self[i...last], self[last...length]]
    end
  end

  # Yields each string from self up to +stop+ in String#succ order
  # (exclusive of +stop+ when +exclusive+ is true). Returns an Enumerator
  # when no block is given; returns self otherwise. Returns immediately
  # when self already sorts after +stop+.
  def upto(stop, exclusive=false)
    return to_enum :upto, stop, exclusive unless block_given?
    stop = StringValue(stop)
    return self if self > stop

    if stop.size == 1 && size == 1
      # Fast path for single characters: integer arithmetic on the
      # codepoints (assumes self[0] is an Integer — 1.8 semantics).
      after_stop = stop[0] + (exclusive ? 0 : 1)
      current = self[0]
      until current == after_stop
        yield current.chr
        current += 1
      end
    else
      # General path: walk with String#succ, stopping when the successor
      # overshoots stop's length (succ never shrinks a string).
      after_stop = exclusive ? stop : stop.succ
      current = self

      until current == after_stop
        yield current
        current = StringValue(current.succ)
        break if current.size > stop.size || current.size == 0
      end
    end
    self
  end

  # Codepoint of the first character. NOTE(review): assumes String#[]
  # returns an Integer (1.8 semantics); on 1.9+ this returns a String.
  def ord
    self[0]
  end

  # Lazily-built Encoding shim for this string.
  def encoding
    @encoding ||= Encoding.new
  end

  # No-op stub: the encoding cannot actually be changed here.
  def force_encoding(name)
    self
  end
end
| 22.172414 | 82 | 0.615086 |
9133f6dd82d230188caa503faf00d5cc64106494
| 573 |
require 'minitest/autorun'
require './lib/bank'
require './lib/team'
# Verifies that DeepFreezable deep-freezes the collection constants exposed
# by Team and Bank: the containers themselves and everything inside them.
class DeepFreezableTest < Minitest::Test
  # Array constant: contents, the array itself, and each string are frozen.
  def test_deep_freeze_to_array
    countries = Team::COUNTRIES
    assert_equal %w[Japan US India], countries
    assert countries.frozen?
    assert(countries.all?(&:frozen?))
  end

  # Hash constant: contents, the hash itself, and every key/value are frozen.
  def test_deep_freeze_to_hash
    currencies = Bank::CURRENCIES
    expected = { 'Japan' => 'yen', 'US' => 'dollar', 'India' => 'rupee' }
    assert_equal(expected, currencies)
    assert currencies.frozen?
    assert(currencies.all? { |key, value| key.frozen? && value.frozen? })
  end
end
| 27.285714 | 78 | 0.664921 |
bfa35778eacc0cd869567e46b0eefc6391bf3ea5
| 855 |
# frozen_string_literal: true
# Maps a file extension to a coarse category name.
#
# Resolution order: the hand-maintained MAP first, then the extension's MIME
# type as known to Rack, and finally the literal 'misc'.
class FileCategory
  # Curated extension -> category overrides, consulted before any MIME lookup.
  MAP = {
    '3d': %w[stl],
    image: %w[eps],
    compressed: %w[zip 7z rar gz tar],
    book: %w[mobi epub pdf],
    video: %w[mkv]
  }.freeze

  # Convenience entry point: FileCategory.from('pdf') #=> 'book'
  def self.from(extension)
    new(extension).category
  end

  # The extension is stored lower-cased so lookups are case-insensitive.
  def initialize(extension)
    @extension = extension.downcase
  end

  # Returns the category string for this extension.
  def category
    mapped = from_map
    return mapped if mapped

    from_mime || 'misc'
  end

  private

  attr_reader :extension, :mime

  # Looks the extension up in MAP; returns the category name or nil.
  def from_map
    category_name, _extensions = MAP.detect do |_type, extensions|
      extensions.include?(extension)
    end
    category_name&.to_s
  end

  # Falls back to Rack's MIME table. 'application/*' types are special-cased
  # to use their subtype. Returns nil when the extension is unknown to Rack.
  def from_mime
    @mime = Rack::Mime::MIME_TYPES[".#{extension}"]
    return unless mime
    return from_application if application?

    # Keep only the major type, e.g. 'image/png' -> 'image'.
    mime.gsub(%r{/.*}, '')
  end

  # For application/* MIME types the subtype is the more useful label.
  def from_application
    mime.gsub(%r{.*/}, '')
  end

  def application?
    mime.match(/^application/)
  end
end
| 16.132075 | 51 | 0.62924 |
083d12e4d8332fa2e806edd66607bf32d40e59c7
| 1,297 |
# frozen_string_literal: true
module Heytmux
  # Base class for pane actions. A subclass implements #process (and
  # optionally #validate) and calls the inherited `register` macro to bind
  # itself to one or more labels. One shared instance per class is kept in
  # the registry and handed out by PaneAction.for.
  class PaneAction
    # Validation hook for subclasses: raise ArgumentError when body is
    # malformed. The default implementation accepts anything.
    def validate(_body)
      nil
    end

    # Performs the action against the pane at (window_index, pane_index).
    # Callers pass a block for replacing {{ item }} inside body; an
    # implementation may apply it to body or ignore it. Must be overridden.
    def process(_window_index, _pane_index, _body)
      raise NotImplementedError
    end

    class << self
      # Maps each given label to one freshly built, shared instance of klass.
      def register(*labels, klass)
        shared = klass.new
        @actions ||= {}
        labels.each { |label| @actions[label] = shared }
      end

      # Looks up the registered action instance for label (string or symbol).
      def for(label)
        @actions[label.to_sym]
      end

      # Gives every subclass a `register(*labels)` macro that registers the
      # subclass itself under those labels.
      def inherited(klass)
        def klass.register(*labels)
          PaneAction.register(*labels, self)
        end
      end
    end
  end
end
d5b327511946362c4a8c67cae45b54eba30b798d
| 105 |
# -*- encoding : utf-8 -*-

# ActiveRecord model backing the backer_reports table. No custom behavior
# yet; attributes come entirely from the database schema.
class BackerReport < ActiveRecord::Base
  # attr_accessible :title, :body
end
| 21 | 39 | 0.685714 |
d5c38fbe2a06aa4345c3c32911ac393a6d69a6b9
| 1,094 |
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EventGrid::Mgmt::V2020_06_01
  module Models
    #
    # PrivateEndpoint information.
    #
    # NOTE: AutoRest-generated model — manual edits are lost on regeneration.
    #
    class PrivateEndpoint

      include MsRestAzure

      # @return [String] The ARM identifier for Private Endpoint.
      attr_accessor :id


      #
      # Mapper for PrivateEndpoint class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'PrivateEndpoint',
          type: {
            name: 'Composite',
            class_name: 'PrivateEndpoint',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 23.782609 | 70 | 0.542048 |
ab6321193a6c611867275a86fd5ab642764f60dc
| 2,489 |
# Bubble Sort With Sonic Pi
# Link to tutorial: https://www.earthtoabigail.com/blog/bubble-sort-ruby-sonicpi
# Pitches (MIDI note numbers) that will be sorted and sonified.
unsorted_arr = [81, 79, 69, 59, 55, 71, 83, 52, 64, 74, 76, 62, 57, 67, 86, 88]

use_bpm 90  # Sonic Pi tempo; every `sleep` below is measured in beats at 90 BPM
# Celebration loop played once the array is sorted: replays the sorted
# pitches four times over a breakbeat, with a kick marking each 16-beat cycle.
def sorted arr
  4.times do
    in_thread do
      arr.each { |n|
        play n, release: 0.1
        sleep 0.25
      }
    end
    in_thread do # Keeps track of the One
      sample :bd_tek
      sleep 16
    end
    # Gives a nice and steady rhythm that marks we have successfully sorted the list
    sample :loop_breakbeat, beat_stretch: 4, amp: 2
    sleep 4
  end
end
# Bubble-sorts a copy of `array` while sonifying every comparison and swap.
# The input array is not mutated (dup below). Returns
# [sorted_array, total_swaps, swaps_per_iter, num_iters, time_of_exec,
#  array_states].
def bubble_sort array
  arr = array.dup
  swaped = false          # whether the current pass performed any swap
  r = arr.length - 2      # last index compared against its right neighbour

  # DATA - Tracking variables
  array_states = []       # snapshot of arr after each swapping pass
  total_swaps = 0
  swaps_per_iter = []
  num_iters = 0
  time_of_exec = 0

  # Play the unsorted input once before sorting begins.
  arr.each { |n| play n; sleep 0.25 }

  start_time = Time.now # Start calculating time of execution

  while true do
    swaps = 0
    num_iters += 1 # Keep track on the number of iterations we did so far

    in_thread do
      use_synth :dsaw # Gives a base frequency (take lowest value of array)
      play 52, amp: 0.5, attack: 2, sustain: 6, decay: 2, release: 4, cutoff: 60
      sample :bd_tek # Tracking when we are entering the loop
    end

    in_thread do # Gives a sense of how many iterations we've done so far
      num_iters.times do |i|
        sample :drum_cymbal_closed, amp: 1.0 + (i.to_f / 2.0), rate: 2
        sleep (2.0 / num_iters).round(2)
      end
    end

    for i in 0..r # inclusive range
      play arr[i], release: 0.1
      sleep 0.25
      if arr[i] > arr[i+1]
        arr[i], arr[i+1] = arr[i+1], arr[i]
        swaped = true if !swaped
        sample :elec_blip2, amp: 1.5
        sleep 0.25
        play arr[i] # hear the value which the current value is being compared to
        sleep 0.25
        swaps += 1
      end
    end

    total_swaps += swaps
    swaps_per_iter.push(swaps) # remember how many swaps occurred in this iteration
    # Exit once a full pass made no swaps. Note the final (sorted) state is
    # therefore never pushed onto array_states — TODO confirm that's intended.
    swaped ? swaped = false : break
    array_states.push(arr.dup) # save a copy of the current state of the array
  end

  time_of_exec = Time.now - start_time

  # Calling sorted function with sorted array
  sorted arr

  # return the sorted array and all the tracking data
  [arr, total_swaps, swaps_per_iter, num_iters, time_of_exec, array_states]
end
# Run the sonified sort forever, wrapped in a large reverb.
with_fx :reverb, room: 1 do
  live_loop :sort do
    bubble_sort unsorted_arr
  end
end
| 27.054348 | 84 | 0.620731 |
7a57281b2573c07da35de80d95ea3a3739d39513
| 79 |
module BBLib
  # Raised by the opts parser when a supplied argument value is invalid.
  # Behavior is inherited unchanged from OptsParserException.
  class InvalidArgumentException < OptsParserException
  end
end
| 13.166667 | 54 | 0.822785 |
e9f6d9f9a718dd54a78cd9eda408086b969dbef8
| 1,674 |
require 'test_helper'
# Model-level validation tests for User: presence/length constraints, email
# format and uniqueness, password rules, and the authenticated? nil-digest
# guard.
class UserTest < ActiveSupport::TestCase

  def setup
    # Baseline valid user; each test below invalidates one attribute at a time.
    @user = User.new(name: "Example User", email: "[email protected]",
                     password: "foobar", password_confirmation: "foobar")
  end

  test "should be valid" do
    assert @user.valid?
  end

  test "name should be present" do
    @user.name = "     "
    assert_not @user.valid?
  end

  test "email should be present" do
    @user.email = "     "
    assert_not @user.valid?
  end

  test "name should not be too long" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end

  test "email should not be too long" do
    # 244 + "@example.com" (12 chars) = 256, one past the 255 limit.
    @user.email = "a" * 244 + "@example.com"
    assert_not @user.valid?
  end

  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
                           foo@bar_baz.com foo@bar+baz.com]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end

  test "email addresses should be unique" do
    # Case-insensitive uniqueness: the duplicate differs only in case.
    duplicate_user = @user.dup
    duplicate_user.email = @user.email.upcase
    @user.save
    assert_not duplicate_user.valid?
  end

  test "password should be present (nonblank)" do
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end

  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end

  test "authenticated? should return false for a user with nil digest" do
    assert_not @user.authenticated?('')
  end
end
| 24.617647 | 78 | 0.66129 |
ab5dcf45b03a9fcf3dea465df7dbbf85e86567b3
| 1,037 |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jones_view_tool/version'

# Gem specification for jones_view_tool, a Rails view-helper gem.
Gem::Specification.new do |spec|
  spec.name          = "jones_view_tool"
  spec.version       = JonesViewTool::VERSION
  spec.authors       = ["Brody Jones"]
  # Fixed: the address previously ended with a stray "\n", which corrupted
  # the gem's author-email metadata.
  spec.email         = ["[email protected]"]

  spec.summary       = %q{Various view specific methods for applications I use.}
  spec.description   = %q{Provides generated HTML data for Rails applications.}
  # TODO: replace the placeholder before release — RubyGems warns on/rejects
  # non-URL homepage values at build time.
  spec.homepage      = "TODO: Put your gem's website or public repo URL here."
  spec.license       = "MIT"

  # Ship everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.11"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
end
| 39.884615 | 104 | 0.656702 |
79c39e2723d1edd7b5847bdf8447b01a1e2670e2
| 9,074 |
require 'daru/view'
# Builds three Nyaplot charts (bar, scatter, line) from Daru data and returns
# the line chart's HTML div. The bar/scatter plots are kept in instance
# variables for reuse by the rendering context.
def nyaplot_example_line
  dv = Daru::Vector.new [:a, :a, :a, :b, :b, :c], type: :category
  # default adapter is nyaplot only
  @bar_graph = Daru::View::Plot.new(dv, type: :bar, adapter: :nyaplot)

  df = Daru::DataFrame.new({b: [11,12,13,14,15], a: [1,2,3,4,5],
                            c: [11,22,33,44,55]},
                           order: [:a, :b, :c],
                           index: [:one, :two, :three, :four, :five])
  @scatter_graph = Daru::View::Plot.new df, type: :scatter, x: :a, y: :b, adapter: :nyaplot

  df = Daru::DataFrame.new({
    a: [1, 3, 5, 7, 5, 0],
    b: [1, 5, 2, 5, 1, 0],
    c: [1, 6, 7, 2, 6, 0]
  }, index: 'a'..'f')
  @df_line = Daru::View::Plot.new(
    df, type: :line, x: :a, y: :b, adapter: :nyaplot
  )
  @df_line.div
end
# Returns the <script>/CSS dependencies required to render Nyaplot charts.
# NOTE(review): name is misspelled ("nayplot"); kept as-is since external
# callers may reference it.
def nayplot_dependent_js
  Daru::View.dependent_script(:nyaplot)
end
# Builds a Nyaplot scatter chart (column :a vs :b) and returns its HTML div.
def nyaplot_example_scatter
  df = Daru::DataFrame.new({b: [11,12,13,14,15], a: [1,2,3,4,5],
                            c: [11,22,33,44,55]},
                           order: [:a, :b, :c],
                           index: [:one, :two, :three, :four, :five])
  @scatter_graph = Daru::View::Plot.new df, type: :scatter, x: :a, y: :b, adapter: :nyaplot
  @scatter_graph.div
end
# Builds a Nyaplot bar chart from a categorical vector and returns its div.
def nyaplot_example_bar
  dv = Daru::Vector.new [:a, :a, :a, :b, :b, :c], type: :category
  # default adapter is nyaplot only
  @bar_graph = Daru::View::Plot.new(dv, type: :bar, adapter: :nyaplot)
  @bar_graph.div
end
# Returns the <script> dependencies required to render Highcharts charts.
def highcharts_dependent_js
  Daru::View.dependent_script(:highcharts)
end
# Multi-series Highcharts line chart ("Solar Employment Growth by Sector").
# Returns [series_data, chart_div].
def highcharts_example_line
  opts = {
    chart: {
      defaultSeriesType: 'line',
      height: 600,
      width: 900,
    },
    title: {
      text: 'Solar Employment Growth by Sector, 2010-2016'
    },
    subtitle: {
      text: 'Source: thesolarfoundation.com'
    },
    yAxis: {
      title: {
        text: 'Number of Employees'
      }
    },
    legend: {
      layout: 'vertical',
      align: 'right',
      verticalAlign: 'middle'
    },
    # adapter: :highcharts # set the adapter
  }
  @line_1 = Daru::View::Plot.new([], adapter: :highcharts)
  @line_1.chart.options = opts
  # One entry per sector; nil values render as gaps in the series.
  @line_1.chart.series_data = ([{
    name: 'Installation',
    data: [43934, 52503, 57177, 69658, 97031, 119931, 137133, 154175]
  }, {
    name: 'Manufacturing',
    data: [24916, 24064, 29742, 29851, 32490, 30282, 38121, 40434]
  }, {
    name: 'Sales & Distribution',
    data: [11744, 17722, 16005, 19771, 20185, 24377, 32147, 39387]
  }, {
    name: 'Project Development',
    data: [nil, nil, 7988, 12169, 15112, 22452, 34400, 34227]
  }, {
    name: 'Other',
    data: [12908, 5948, 8105, 11248, 8989, 11816, 18274, 18111]
  }])
  [@line_1.chart.series_data, @line_1.div]
end
# Zoomable (click-and-drag) Highcharts area chart of USD/EUR exchange rates.
# Strings tagged with .js_code are emitted as raw JavaScript, not quoted.
# Returns [series_data, chart_div].
def highcharts_example_drag
  # [period-label, rate] pairs used as the single area series.
  contents = [
    ["201352",0.7695],
    ["201353",0.7648],
    ["201354",0.7645],
    ["201355",0.7638],
    ["201356",0.7549],
    ["201357",0.7562],
    ["201359",0.7574],
    ["2013510",0.7543],
    ["2013511",0.7510],
    ["2013512",0.7498],
  ]
  opts = {
    chart: {
      zoomType: 'x',
      height: 600,
      width: 900,
    },
    title: {
      text: 'USD to EUR exchange rate over time'
    },
    subtitle: {
      text: "document.ontouchstart === undefined ?
      'Click and drag in the plot area to zoom in' : 'Pinch the chart to zoom in'".js_code
    },
    # xAxis: {
    #   type: 'datetime' # for date time in javascript. currently not
    # working
    # },
    yAxis: {
      title: {
        text: 'Exchange rate'
      }
    },
    legend: {
      enabled: true
    },
    rangeSelector: {
      selected: 1
    },
    plotOptions: {
      area: {
        # Vertical gradient fill from the theme's first color to transparent.
        fillColor: {
          linearGradient: {
            x1: 0,
            y1: 0,
            x2: 0,
            y2: 1
          },
          stops: [
            [0, "Highcharts.getOptions().colors[0]".js_code],
            [1, "Highcharts.Color(Highcharts.getOptions().colors[0]).setOpacity(0).get('rgba')".js_code]
          ]
        },
        marker: {
          radius: 2
        },
        lineWidth: 1,
        states: {
          hover: {
            lineWidth: 10
          }
        },
        threshold: nil
      }
    },
  }
  series_dt = ([{
    type: 'area',
    name: 'USD to EUR',
    data: contents
  }])
  line_3 = Daru::View::Plot.new([], adapter: :highcharts)
  line_3.chart.options = opts;
  line_3.chart.series_data = series_dt
  [line_3.chart.series_data, line_3.div]
end
# Live-updating Highcharts spline: a JS load handler appends a random point
# every second. Strings tagged with .js_code are emitted as raw JavaScript.
# Returns [series_data, chart_div].
def highcharts_example_dynamic
  # dynamic-update/
  opts = {
    chart: {
      type: 'spline',
      animation: "Highcharts.svg".js_code, # don't animate in old IE
      marginRight: 10,
      events: {
        load: "function () {
          // set up the updating of the chart each second
          var series = this.series[0];
          setInterval(function () {
            var x = (new Date()).getTime(), // current time
            y = Math.random();
            series.addPoint([x, y], true, true);
          }, 1000);
        }".js_code
      },
      height: 600,
      width: 900,
    },
    title: {
      text: 'Live random data'
    },
    xAxis: {
      type: 'datetime',
      tickPixelInterval: 150
    },
    yAxis: {
      title: {
        text: 'Value'
      },
      plotLines: [{
        value: 0,
        width: 1,
        color: '#808080'
      }]
    },
    tooltip: {
      formatter: "function () {
        return '<b>' + this.series.name + '</b><br/>' +
        Highcharts.dateFormat('%Y-%m-%d %H:%M:%S', this.x) + '<br/>' +
        Highcharts.numberFormat(this.y, 2);
      }".js_code
    },
    legend: {
      enabled: false
    },
    exporting: {
      enabled: false
    },
  }
  # Seed series: 20 random points covering the last 19 seconds, built in JS.
  series_dt = [
    {
      name: 'Random data',
      data: "(function () {
        // generate an array of random data
        var data = [],
        time = (new Date()).getTime(),
        i;
        for (i = -19; i <= 0; i += 1) {
          data.push({
            x: time + i * 1000,
            y: Math.random()
          });
        }
        return data;
      }())".js_code
    }
  ]
  dyn_update = Daru::View::Plot.new([], adapter: :highcharts)
  dyn_update.chart.options = opts;
  dyn_update.chart.series_data = series_dt
  [dyn_update.chart.series_data, dyn_update.div]
end
# Returns the <script> dependencies required to render Google Charts.
def googlecharts_dependent_js
  Daru::View.dependent_script(:googlecharts)
end
# Google Charts line chart of popularity over time, plus a paginated data
# table. Returns [table_div, line_chart_div].
def googlecharts_example_line
  # [time, popularity] pairs forming the data frame's rows.
  time_popularity = [
    [0, 0], [1, 10], [2, 23], [3, 17], [4, 18], [5, 9],
    [6, 11], [7, 27], [8, 33], [9, 40], [10, 32], [11, 35],
    [12, 30], [13, 40], [14, 42], [15, 47], [16, 44], [17, 48],
    [18, 52], [19, 54], [20, 42], [21, 55], [22, 56], [23, 57],
    [24, 60], [25, 50], [26, 52], [27, 51], [28, 49], [29, 53],
    [30, 55], [31, 60], [32, 61], [33, 59], [34, 62], [35, 65],
    [36, 62], [37, 58], [38, 55], [39, 61], [40, 64], [41, 65],
    [42, 63], [43, 66], [44, 67], [45, 69], [46, 69], [47, 70],
    [48, 72], [49, 68], [50, 66], [51, 65], [52, 67], [53, 70],
    [54, 71], [55, 72], [56, 73], [57, 75], [58, 70], [59, 68],
    [60, 64], [61, 60], [62, 65], [63, 67], [64, 68], [65, 69],
    [66, 70], [67, 72], [68, 75], [69, 80]
  ]
  df_tp = Daru::DataFrame.rows(time_popularity)
  # Time in X axis and Population in Y axis
  df_tp.vectors = Daru::Index.new(['Time', 'Population'])
  table = Daru::View::Table.new(df_tp, pageSize: 10, adapter: :googlecharts)
  line = Daru::View::Plot.new(
    table.table, type: :line, adapter: :googlecharts, height: 500, width: 800)
  [table.div, line.div]
end
# Google Charts geochart of population per country, plus a paginated data
# table. Returns [table_div, geochart_div].
def googlecharts_example_geo
  country_population = [
    ['Germany', 200],
    ['United States', 300],
    ['Brazil', 400],
    ['Canada', 500],
    ['France', 600],
    ['RU', 700]
  ]
  df_cp = Daru::DataFrame.rows(country_population)
  df_cp.vectors = Daru::Index.new(['Country', 'Population'])
  table = Daru::View::Table.new(df_cp, pageSize: 5, adapter: :googlecharts, height: 200, width: 200)
  geochart = Daru::View::Plot.new(
    table.table, type: :geo, adapter: :googlecharts, height: 500, width: 800)
  [table.div, geochart.div]
end
| 30.146179 | 114 | 0.461759 |
5dd9e7e3c6c455ece56e5034743a975f812bb854
| 9,814 |
# encoding: utf-8
require "avro"
require "open-uri"
require "schema_registry"
require "schema_registry/client"
require "logstash/codecs/base"
require "logstash/namespace"
require "logstash/event"
require "logstash/timestamp"
require "logstash/util"
require "base64"
# Confluent wire-format magic byte: the first byte of every registry-framed
# Avro message (followed by a 4-byte big-endian schema id).
MAGIC_BYTE = 0
# == Logstash Codec - Avro Schema Registry
#
# This plugin is used to serialize Logstash events as
# Avro datums, as well as deserializing Avro datums into
# Logstash events.
#
# Decode/encode Avro records as Logstash events using the
# associated Avro schema from a Confluent schema registry.
# (https://github.com/confluentinc/schema-registry)
#
#
# ==== Decoding (input)
#
# When this codec is used to decode the input, you may pass the following options:
# - ``endpoint`` - always required.
# - ``username`` - optional.
# - ``password`` - optional.
#
# If the input stream is binary encoded, you should use the ``ByteArrayDeserializer``
# in the Kafka input config.
#
# ==== Encoding (output)
#
# This codec uses the Confluent schema registry to register a schema and
# encode the data in Avro using schema_id lookups.
#
# When this codec is used to encode, you may pass the following options:
# - ``endpoint`` - always required.
# - ``username`` - optional.
# - ``password`` - optional.
# - ``schema_id`` - when provided, no other options are required.
# - ``subject_name`` - required when there is no ``schema_id``.
# - ``schema_version`` - when provided, the schema will be looked up in the registry.
# - ``schema_uri`` - when provided, JSON schema is loaded from URL or file.
# - ``schema_string`` - required when there is no ``schema_id``, ``schema_version`` or ``schema_uri``
# - ``check_compatibility`` - will check schema compatibility before encoding.
# - ``register_schema`` - will register the JSON schema if it does not exist.
# - ``binary_encoded`` - will output the encoded event as a ByteArray.
# Requires the ``ByteArraySerializer`` to be set in the Kafka output config.
# - ``client_certificate`` - Client TLS certificate for mutual TLS
# - ``client_key`` - Client TLS key for mutual TLS
# - ``ca_certificate`` - CA Certificate
# - ``verify_mode`` - SSL Verify modes. Valid options are `verify_none`, `verify_peer`, `verify_client_once`,
# and `verify_fail_if_no_peer_cert`. Default is `verify_peer`
#
# ==== Usage
# Example usage with Kafka input and output.
#
# [source,ruby]
# ----------------------------------
# input {
# kafka {
# ...
# codec => avro_wapiti {
# endpoint => "http://schemas.example.com"
# }
# value_deserializer_class => "org.apache.kafka.common.serialization.ByteArrayDeserializer"
# }
# }
# filter {
# ...
# }
# output {
# kafka {
# ...
# codec => avro_wapiti {
# endpoint => "http://schemas.example.com"
# subject_name => "my_kafka_subject_name"
# schema_uri => "/app/my_kafka_subject.avsc"
# register_schema => true
# }
# value_serializer => "org.apache.kafka.common.serialization.ByteArraySerializer"
# }
# }
# ----------------------------------
#
# Using signed certificate for registry authentication
#
# [source,ruby]
# ----------------------------------
# output {
# kafka {
# ...
# codec => avro_wapiti {
# endpoint => "http://schemas.example.com"
# schema_id => 47
# client_key => "./client.key"
# client_certificate => "./client.crt"
# ca_certificate => "./ca.pem"
# verify_mode => "verify_peer"
# }
# value_serializer => "org.apache.kafka.common.serialization.ByteArraySerializer"
# }
# }
# ----------------------------------
# Logstash codec that (de)serializes events as Confluent-framed Avro datums,
# resolving writer/reader schemas through a Confluent schema registry.
# Wire format: MAGIC_BYTE + 4-byte big-endian schema id + Avro binary body.
class LogStash::Codecs::AvroWapiti < LogStash::Codecs::Base
  config_name "avro_wapiti"

  # Event fields never copied into the encoded Avro record.
  EXCLUDE_ALWAYS = [ "@timestamp", "@version" ]

  # schema registry endpoint and credentials
  config :endpoint, :validate => :string, :required => true
  config :username, :validate => :string, :default => nil
  config :password, :validate => :string, :default => nil

  # Writer-schema selection (encode): either a fixed schema_id, or a
  # subject_name plus version / local schema JSON (uri or inline string).
  config :schema_id, :validate => :number, :default => nil
  config :subject_name, :validate => :string, :default => nil
  config :schema_version, :validate => :number, :default => nil
  config :schema_uri, :validate => :string, :default => nil
  config :schema_string, :validate => :string, :default => nil
  config :check_compatibility, :validate => :boolean, :default => false
  config :register_schema, :validate => :boolean, :default => false
  # true: emit raw bytes (Kafka ByteArraySerializer); false: Base64 text.
  config :binary_encoded, :validate => :boolean, :default => true
  # Mutual-TLS options for the registry connection.
  config :client_certificate, :validate => :string, :default => nil
  config :client_key, :validate => :string, :default => nil
  config :ca_certificate, :validate => :string, :default => nil
  config :verify_mode, :validate => :string, :default => 'verify_peer'

  public
  # Builds the registry client (with mutual TLS when a client certificate is
  # configured) and initializes the per-codec schema cache.
  def register
    @client = if client_certificate != nil
      SchemaRegistry::Client.new(endpoint, username, password, SchemaRegistry::Client.connection_options(
        client_certificate: client_certificate,
        client_key: client_key,
        ca_certificate: ca_certificate,
        verify_mode: verify_mode
      ))
    else
      SchemaRegistry::Client.new(endpoint, username, password)
    end
    @schemas = Hash.new       # schema_id => parsed Avro::Schema cache
    @write_schema_id = nil    # resolved lazily on first encode
  end

  # Fetches (and memoizes) the parsed Avro schema for schema_id.
  def get_schema(schema_id)
    unless @schemas.has_key?(schema_id)
      @schemas[schema_id] = Avro::Schema.parse(@client.schema(schema_id))
    end
    @schemas[schema_id]
  end

  # Loads the Avro schema JSON from schema_uri (file path or URL) or the
  # inline schema_string; logs an error when neither is configured.
  # NOTE(review): Kernel#open relies on open-uri's patch for URL support,
  # which Ruby 3 removed — likely needs URI.open; confirm target Ruby.
  def load_schema_json()
    if @schema_uri
      open(@schema_uri).read
    elsif @schema_string
      @schema_string
    else
      @logger.error('you must supply a schema_uri or schema_string in the config')
    end
  end

  # Resolves the writer schema id, in priority order:
  #   1. explicit schema_id config,
  #   2. subject_name + schema_version lookup in the registry,
  #   3. local schema JSON, optionally compatibility-checked and registered.
  def get_write_schema_id()
    # If schema id is passed, just use that
    if @schema_id
      @schema_id
    else
      # subject_name is required
      if @subject_name == nil
        @logger.error('requires a subject_name')
      else
        subject = @client.subject(@subject_name)
        # If schema_version, load from subject API
        if @schema_version != nil
          schema = subject.version(@schema_version)
        # Otherwise, load schema json and check with registry
        else
          schema_json = load_schema_json
          # If not compatible, raise error
          if @check_compatibility
            unless subject.compatible?(schema_json)
              @logger.error('the schema json is not compatible with the subject. you should fix your schema or change the compatibility level.')
            end
          end
          if @register_schema
            subject.register_schema(schema_json) unless subject.schema_registered?(schema_json)
          end
          schema = subject.verify_schema(schema_json)
        end
        schema.id
      end
    end
  end

  public
  # Decodes one registry-framed payload into Logstash event(s); accepts
  # strict-Base64 text (tried first) or raw bytes. Yields decoded events.
  def decode(data)
    if data.length < 5
      @logger.error('message is too small to decode')
    else
      datum = StringIO.new(Base64.strict_decode64(data)) rescue StringIO.new(data)
      # Frame header: 1 signed byte + 4-byte big-endian unsigned schema id.
      magic_byte, schema_id = datum.read(5).unpack("cI>")

      if magic_byte != MAGIC_BYTE
        @logger.error('message does not start with magic byte')
      else
        schema = get_schema(schema_id)
        decoder = Avro::IO::BinaryDecoder.new(datum)
        datum_reader = Avro::IO::DatumReader.new(schema)
        avdat = datum_reader.read(decoder)
        # Envelope fields carried through to the event's [@metadata][wapiti].
        wapiti_metadata = {
          "submitted_from" => avdat["submitted_from"],
          "originating_host" => avdat["originating_host"],
          "vertical" => avdat["vertical"],
          "environment" => avdat["environment"],
          "processing_key" => avdat["processing_key"],
          "message_format" => avdat["message_format"]
        }
        case avdat["message_format"]
        when "json"
          ev = LogStash::Event.new(JSON.parse(avdat["message"]))
          ev.set("[@metadata][wapiti]", wapiti_metadata)
          yield ev
        when "binary"
          # Binary payloads are not handled yet; emit a tagged placeholder.
          @logger.error('FIXME: not implemented')
          yield LogStash::Event.new("@metadata" => wapiti_metadata, "tags" => ["_wapitiwarning"])
        else
          @logger.error('Message does not have a message_format field in the AVRO record. Treating as plain.')
          ev = LogStash::Event.new(avdat["message"])
          ev.set("[@metadata][wapiti]", wapiti_metadata)
          ev.tag("_wapitiwarning")
          yield ev
        end
      end
    end
  end

  public
  # Encodes a Logstash event as a registry-framed Avro record. Envelope
  # fields come from [@metadata][wapiti] with host-based fallbacks; the
  # event body is embedded as JSON (default) or raw "message" bytes.
  def encode(event)
    @write_schema_id ||= get_write_schema_id
    schema = get_schema(@write_schema_id)
    dw = Avro::IO::DatumWriter.new(schema)
    buffer = StringIO.new
    buffer.write(MAGIC_BYTE.chr)
    buffer.write([@write_schema_id].pack("I>"))
    encoder = Avro::IO::BinaryEncoder.new(buffer)
    eh = {}
    eh["submitted_from"] = event.get("[@metadata][wapiti][submitted_from]") || event.get("[host][name]") || event.get("[host][hostname]") || event.get("[host][ip]") || event.get("host") || "local"
    eh["originating_host"] = event.get("[@metadata][wapiti][originating_host]") || event.get("[host][name]") || event.get("[host][hostname]") || event.get("[host][ip]") || event.get("host") || "local"
    eh["vertical"] = event.get("[@metadata][wapiti][vertical]") || "unknown"
    eh["environment"] = event.get("[@metadata][wapiti][environment]") || "unknown"
    eh["processing_key"] = event.get("[@metadata][wapiti][processing_key]") || "none"
    eh["message_format"] = event.get("[@metadata][wapiti][message_format]") || "json"
    case eh["message_format"]
    when "binary"
      eh["message"] = event.get("message")
    when "json"
      eh["message"] = event.to_json
    end
    eh.delete_if { |key, _| EXCLUDE_ALWAYS.include? key }
    dw.write(eh, encoder)
    if @binary_encoded
      @on_event.call(event, buffer.string)
    else
      @on_event.call(event, Base64.strict_encode64(buffer.string))
    end
  end
end
| 33.958478 | 200 | 0.642857 |
4a20cfdaf5f663530c4660c1ec9da8f5d5f91fdc
| 5,140 |
require 'spec_helper'
require 'ddtrace/sampling/rule_sampler'
require 'ddtrace/sampling/rule'
require 'ddtrace/sampling/rate_limiter'
# Specs for Datadog::Sampling::RuleSampler: constructor defaults, rule-based
# sampling with rate limiting (#sample!), and the unsupported #sample?.
RSpec.describe Datadog::Sampling::RuleSampler do
  let(:rule_sampler) { described_class.new(rules, rate_limiter: rate_limiter, default_sampler: default_sampler) }
  let(:rules) { [] }
  let(:rate_limiter) { instance_double(Datadog::Sampling::RateLimiter) }
  let(:default_sampler) { instance_double(Datadog::RateByServiceSampler) }
  let(:effective_rate) { 0.9 }
  let(:allow?) { true }
  let(:span) { Datadog::Span.new(nil, 'dummy') }

  before do
    allow(default_sampler).to receive(:sample?).with(span).and_return(nil)
    allow(rate_limiter).to receive(:effective_rate).and_return(effective_rate)
    allow(rate_limiter).to receive(:allow?).with(1).and_return(allow?)
  end

  context '#initialize' do
    subject(:rule_sampler) { described_class.new(rules) }

    # Defaults: token-bucket limiter and per-service rate sampler.
    it { expect(rule_sampler.rate_limiter).to be_a(Datadog::Sampling::TokenBucket) }
    it { expect(rule_sampler.default_sampler).to be_a(Datadog::RateByServiceSampler) }

    context 'with rate_limit ENV' do
      before do
        allow(Datadog.configuration.sampling).to receive(:rate_limit)
          .and_return(20.0)
      end

      it { expect(rule_sampler.rate_limiter).to be_a(Datadog::Sampling::TokenBucket) }
    end

    context 'with default_sample_rate ENV' do
      before do
        allow(Datadog.configuration.sampling).to receive(:default_rate)
          .and_return(0.5)
      end

      it { expect(rule_sampler.default_sampler).to be_a(Datadog::RateSampler) }
    end

    context 'with rate_limit' do
      subject(:rule_sampler) { described_class.new(rules, rate_limit: 1.0) }

      it { expect(rule_sampler.rate_limiter).to be_a(Datadog::Sampling::TokenBucket) }
    end

    context 'with nil rate_limit' do
      subject(:rule_sampler) { described_class.new(rules, rate_limit: nil) }

      it { expect(rule_sampler.rate_limiter).to be_a(Datadog::Sampling::UnlimitedLimiter) }
    end

    context 'with default_sample_rate' do
      subject(:rule_sampler) { described_class.new(rules, default_sample_rate: 1.0) }

      it { expect(rule_sampler.default_sampler).to be_a(Datadog::RateSampler) }
    end
  end

  # A single stubbed rule that matches `span` with the given decision.
  shared_context 'matching rule' do
    let(:rules) { [rule] }
    let(:rule) { instance_double(Datadog::Sampling::Rule) }
    let(:sample_rate) { 0.8 }

    before do
      allow(rule).to receive(:match?).with(span).and_return(true)
      allow(rule).to receive(:sample?).with(span).and_return(sampled)
      allow(rule).to receive(:sample_rate).with(span).and_return(sample_rate)
    end
  end

  describe '#sample!' do
    subject(:sample) { rule_sampler.sample!(span) }

    # Asserts the sampling decision plus the metrics written onto the span.
    shared_examples 'a sampled! span' do
      before { subject }

      it { is_expected.to eq(expected_sampled) }

      it 'sets `span.sampled` flag' do
        expect(span.sampled).to eq(expected_sampled)
      end

      it 'sets rule metrics' do
        expect(span.get_metric(Datadog::Ext::Sampling::RULE_SAMPLE_RATE)).to eq(sample_rate)
      end

      it 'sets limiter metrics' do
        expect(span.get_metric(Datadog::Ext::Sampling::RATE_LIMITER_RATE)).to eq(effective_rate)
      end
    end

    context 'with matching rule' do
      include_context 'matching rule'

      context 'and sampled' do
        let(:sampled) { true }

        context 'and not rate limited' do
          let(:allow?) { true }

          it_behaves_like 'a sampled! span' do
            let(:expected_sampled) { true }
          end
        end

        context 'and rate limited' do
          let(:allow?) { false }

          it_behaves_like 'a sampled! span' do
            let(:expected_sampled) { false }
          end
        end
      end

      context 'and not sampled' do
        let(:sampled) { false }

        it_behaves_like 'a sampled! span' do
          let(:expected_sampled) { false }
          let(:effective_rate) { nil } # Rate limiter was not evaluated
        end
      end
    end

    context 'with no matching rule' do
      # With no rule match, sampling is delegated to the default sampler.
      let(:delegated) { double }

      before do
        allow(default_sampler).to receive(:sample!).with(span).and_return(delegated)
      end

      it { is_expected.to eq(delegated) }

      it 'skips metrics' do
        expect(span.get_metric(Datadog::Ext::Sampling::RULE_SAMPLE_RATE)).to be_nil
        expect(span.get_metric(Datadog::Ext::Sampling::RATE_LIMITER_RATE)).to be_nil
      end

      context 'when the default sampler is a RateByServiceSampler' do
        let(:default_sampler) { Datadog::RateByServiceSampler.new }
        let(:sample_rate) { rand }

        it 'sets the agent rate metric' do
          expect(default_sampler).to receive(:sample_rate)
            .with(span)
            .and_return(sample_rate)

          sample

          expect(span.get_metric(described_class::AGENT_RATE_METRIC_KEY)).to eq(sample_rate)
        end
      end
    end
  end

  describe '#sample?' do
    subject(:sample) { rule_sampler.sample?(span) }

    # Side-effect-free sampling is intentionally unsupported.
    it { expect { subject }.to raise_error(StandardError, 'RuleSampler cannot be evaluated without side-effects') }
  end
end
| 30.778443 | 115 | 0.663035 |
e80b5aae85beff4e420a59155612857765b725b6
| 9,971 |
=begin
#Xero Payroll NZ
#This is the Xero Payroll API for orgs in the NZ region.
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'time'
require 'date'
module XeroRuby::PayrollNz
require 'bigdecimal'
class LeaveEarningsLine
# Xero identifier for payroll earnings line
attr_accessor :earnings_line_id
# Xero identifier for payroll leave earnings rate
attr_accessor :earnings_rate_id
# name of earnings rate for display in UI
attr_accessor :display_name
# Rate per unit for leave earnings line
attr_accessor :rate_per_unit
# Leave earnings number of units
attr_accessor :number_of_units
# Leave earnings fixed amount. Only applicable if the EarningsRate RateType is Fixed
attr_accessor :fixed_amount
# The amount of the earnings line.
attr_accessor :amount
# Identifies if the leave earnings is taken from the timesheet. False for leave earnings line
attr_accessor :is_linked_to_timesheet
# Identifies if the earnings is using an average daily pay rate
attr_accessor :is_average_daily_pay_rate
    # Flag to identify whether the earnings line is system generated or not.
attr_accessor :is_system_generated
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'earnings_line_id' => :'earningsLineID',
:'earnings_rate_id' => :'earningsRateID',
:'display_name' => :'displayName',
:'rate_per_unit' => :'ratePerUnit',
:'number_of_units' => :'numberOfUnits',
:'fixed_amount' => :'fixedAmount',
:'amount' => :'amount',
:'is_linked_to_timesheet' => :'isLinkedToTimesheet',
:'is_average_daily_pay_rate' => :'isAverageDailyPayRate',
:'is_system_generated' => :'isSystemGenerated'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'earnings_line_id' => :'String',
:'earnings_rate_id' => :'String',
:'display_name' => :'String',
:'rate_per_unit' => :'BigDecimal',
:'number_of_units' => :'BigDecimal',
:'fixed_amount' => :'BigDecimal',
:'amount' => :'BigDecimal',
:'is_linked_to_timesheet' => :'Boolean',
:'is_average_daily_pay_rate' => :'Boolean',
:'is_system_generated' => :'Boolean'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `XeroRuby::PayrollNz::LeaveEarningsLine` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `XeroRuby::PayrollNz::LeaveEarningsLine`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'earnings_line_id')
self.earnings_line_id = attributes[:'earnings_line_id']
end
if attributes.key?(:'earnings_rate_id')
self.earnings_rate_id = attributes[:'earnings_rate_id']
end
if attributes.key?(:'display_name')
self.display_name = attributes[:'display_name']
end
if attributes.key?(:'rate_per_unit')
self.rate_per_unit = attributes[:'rate_per_unit']
end
if attributes.key?(:'number_of_units')
self.number_of_units = attributes[:'number_of_units']
end
if attributes.key?(:'fixed_amount')
self.fixed_amount = attributes[:'fixed_amount']
end
if attributes.key?(:'amount')
self.amount = attributes[:'amount']
end
if attributes.key?(:'is_linked_to_timesheet')
self.is_linked_to_timesheet = attributes[:'is_linked_to_timesheet']
end
if attributes.key?(:'is_average_daily_pay_rate')
self.is_average_daily_pay_rate = attributes[:'is_average_daily_pay_rate']
end
if attributes.key?(:'is_system_generated')
self.is_system_generated = attributes[:'is_system_generated']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# This model declares no constraints, so the list is always empty.
# @return Array for valid properties with the reasons
def list_invalid_properties
  []
end
# Check to see if the all the properties in the model are valid
# No per-attribute validation rules apply to this model, so it is always valid.
# @return true if the model is valid
def valid?
  true
end
# Checks equality by comparing each attribute.
# @param [Object] o Object to be compared
# @return [Boolean] true when +o+ is the same class and every attribute matches
def ==(o)
  return true if self.equal?(o)
  self.class == o.class &&
      earnings_line_id == o.earnings_line_id &&
      earnings_rate_id == o.earnings_rate_id &&
      display_name == o.display_name &&
      rate_per_unit == o.rate_per_unit &&
      number_of_units == o.number_of_units &&
      fixed_amount == o.fixed_amount &&
      amount == o.amount &&
      is_linked_to_timesheet == o.is_linked_to_timesheet &&
      is_average_daily_pay_rate == o.is_average_daily_pay_rate &&
      is_system_generated == o.is_system_generated
end
# @see the `==` method
# @param [Object] o Object to be compared
# @return [Boolean] delegates to #== so both comparisons stay consistent
def eql?(o)
  self == o
end
# Calculates hash code according to all attributes.
# Kept consistent with #== / #eql? so instances behave correctly as Hash keys.
# @return [Integer] Hash code
def hash
  [earnings_line_id, earnings_rate_id, display_name, rate_per_unit, number_of_units, fixed_amount, amount, is_linked_to_timesheet, is_average_daily_pay_rate, is_system_generated].hash
end
# Builds the object from hash
# Convenience class-level entry point; delegates to the instance method.
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
  new.build_from_hash(attributes)
end
# Builds the object from hash
# Walks the declared openapi_types and deserializes each matching key from
# +attributes+ onto this instance via the generated setters.
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself (or nil when input is not a Hash)
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)
  self.class.openapi_types.each_pair do |key, type|
    if type =~ /\AArray<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
    end # or else data not found in attributes(hash), not an issue as the data can be optional
  end

  self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
  case type.to_sym
  when :DateTime
    # Dates may arrive in the legacy "/Date(ms)/" form; parse_date normalizes first.
    DateTime.parse(parse_date(value))
  when :Date
    Date.parse(parse_date(value))
  when :String
    value.to_s
  when :Integer
    value.to_i
  when :Float
    value.to_f
  when :BigDecimal
    BigDecimal(value.to_s)
  when :Boolean
    # Accept common truthy spellings; anything else is false.
    if value.to_s =~ /\A(true|t|yes|y|1)\z/i
      true
    else
      false
    end
  when :Object
    # generic object (usually a Hash), return directly
    value
  when /\AArray<(?<inner_type>.+)>\z/
    # Recurse element-wise for typed arrays.
    inner_type = Regexp.last_match[:inner_type]
    value.map { |v| _deserialize(inner_type, v) }
  when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
    # Recurse for both keys and values of typed hashes.
    k_type = Regexp.last_match[:k_type]
    v_type = Regexp.last_match[:v_type]
    {}.tap do |hash|
      value.each do |k, v|
        hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
      end
    end
  else # model
    # Any other type name is assumed to be a generated model class in this namespace.
    XeroRuby::PayrollNz.const_get(type).build_from_hash(value)
  end
end
# Returns the string representation of the object
# @return [String] String presentation of the object (hash form)
def to_s
  to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
  to_hash
end
# Returns the object in the form of hash, skipping nil attributes.
# @param downcase [Boolean] when true (default) keys are the snake_case
#   attribute symbols; when false the original API parameter names are used
# @return [Hash] Returns the object in the form of hash
def to_hash(downcase: true)
  self.class.attribute_map.each_with_object({}) do |(attr, param), result|
    value = send(attr)
    unless value.nil?
      result[downcase ? attr : param] = _to_hash(value)
    end
  end
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# Arrays are compacted and converted element-wise; hashes value-wise.
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
  case value
  when Array
    value.compact.map { |element| _to_hash(element) }
  when Hash
    value.each_with_object({}) { |(k, v), out| out[k] = _to_hash(v) }
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
# Normalizes API date strings to ISO-8601 text.
# Handles both the legacy MS-JSON form "/Date(<millis>)/" and plain
# date/time strings parseable by Time.parse.
def parse_date(datestring)
  if datestring.include?('Date')
    # First run of digits is milliseconds since the Unix epoch.
    millis = datestring.scan(/[0-9]+/).first.to_i
    Time.at(millis / 1000.0).utc.strftime('%Y-%m-%dT%H:%M:%S%z').to_s
  else # handle date 'types' for small subset of payroll API's
    Time.parse(datestring).strftime('%Y-%m-%dT%H:%M:%S').to_s
  end
end
end
end
| 33.016556 | 216 | 0.64146 |
28afe2257ac4db79ede7e64688f3e9f21d5d5787
| 769 |
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# Homebrew formula for the PECL LZF de/compression extension for PHP 5.4.
class Php54Lzf < AbstractPhp54Extension
  init
  desc "handles LZF de/compression"
  homepage "http://pecl.php.net/package/lzf"
  url "http://pecl.php.net/get/LZF-1.6.3.tgz"
  sha256 "42ec458ea10128a801e8d39736b519ba81fa75d2a617d2777b7d6b3276601a5d"
  head "http://svn.php.net/repository/pecl/lzf/trunk/"

  def install
    # PECL tarballs unpack into a versioned subdirectory; HEAD checkouts do not.
    Dir.chdir "LZF-#{version}" unless build.head?

    ENV.universal_binary if build.universal?

    safe_phpize
    system "./configure", "--prefix=#{prefix}",
                          phpconfig
    system "make"
    prefix.install "modules/lzf.so"
    write_config_file if build.with? "config-file"
  end

  test do
    # BUG FIX: a bare `.include?` return value is ignored by the test runner,
    # so the old test could never fail; assert so a missing module raises.
    assert shell_output("php -m").include?("lzf")
  end
end
| 27.464286 | 75 | 0.693108 |
4a6ec9a19be7f8aed572b51247c7f757fad65fb9
| 2,015 |
RSpec.describe NxtSchema do
  subject { schema.apply(input: input) }

  # Exercises NxtSchema.any_of: the input is valid when it satisfies at least
  # one of the alternative contact schemas (female or male variant).
  context 'any of multiple schemas' do
    let(:schema) do
      NxtSchema.any_of(:contacts) do |contact|
        contact.schema do
          required(:first_name, :String)
          required(:last_name, :String)
          required(:female, :Bool)
        end

        contact.schema do
          required(:first_name, :String)
          required(:last_name, :String)
          required(:male, :Bool)
        end
      end
    end

    context 'when the input matches one of the schemas' do
      let(:input) do
        { first_name: 'Andy', last_name: 'Superstar', male: true }
      end

      it { expect(subject).to be_valid }

      it 'returns the correct output' do
        expect(subject.output).to eq(input)
      end
    end

    context 'when the input does not match one of the schemas' do
      let(:input) { {} }

      it { expect(subject).to_not be_valid }

      # Errors are reported for every failed alternative, keyed by
      # "<node>.<alternative index>[.<attribute>]".
      it 'returns the correct schema errors' do
        expect(subject.errors).to eq(
          "contacts.0"=>["The following keys are missing: [:first_name, :last_name, :female]"],
          "contacts.0.first_name"=>["NxtSchema::Undefined violates constraints (type?(String, NxtSchema::Undefined) failed)"],
          "contacts.0.last_name"=>["NxtSchema::Undefined violates constraints (type?(String, NxtSchema::Undefined) failed)"],
          "contacts.0.female"=>["NxtSchema::Undefined violates constraints (type?(FalseClass, NxtSchema::Undefined) failed)"],
          "contacts.1"=>["The following keys are missing: [:first_name, :last_name, :male]"],
          "contacts.1.first_name"=>["NxtSchema::Undefined violates constraints (type?(String, NxtSchema::Undefined) failed)"],
          "contacts.1.last_name"=>["NxtSchema::Undefined violates constraints (type?(String, NxtSchema::Undefined) failed)"],
          "contacts.1.male"=>["NxtSchema::Undefined violates constraints (type?(FalseClass, NxtSchema::Undefined) failed)"]
        )
      end
    end
  end
end
| 38.018868 | 126 | 0.636725 |
6a0b23c1f71fb4edd3dd4d3c2a1cd321b1916520
| 7,151 |
require 'openssl'
class Puppet::Provider::AcmeCertificate < Puppet::Provider
# Puppet resource existence check: the certificate "exists" only when it is
# present, matches the private key and CSR subject/SANs, and is not due for
# renewal. Any mismatch returns false so create() re-issues it.
def exists?
  Puppet.debug("Checking existence of #{resource[:certificate_path]}")
  if !File.exist? resource[:private_key_path]
    Puppet.debug("Private key #{resource[:private_key_path]} does not exist, so the certificate cannot be valid")
    false
  elsif !File.exist? resource[:certificate_path]
    Puppet.debug("Certificate #{resource[:certificate_path]} does not exist")
    false
  else
    cert = ::OpenSSL::X509::Certificate.new(File.read resource[:certificate_path])
    key = ::OpenSSL::PKey::RSA.new(File.read resource[:private_key_path])
    if cert.public_key.to_der != key.public_key.to_der
      # DER comparison proves the cert was issued for exactly this key.
      Puppet.debug("Certificate #{resource[:certificate_path]} does not match private key #{resource[:private_key_path]}")
      false
    elsif cert.subject.to_s != csr.csr.subject.to_s
      Puppet.debug("Certificate #{resource[:certificate_path]} has subject '#{cert.subject}', expecting '#{csr.csr.subject}'")
      false
    elsif cert.not_after - resource[:renew_within_days] * 60 * 60 * 24 < Time.now
      # Treat a certificate inside its renewal window as absent to trigger renewal.
      Puppet.debug("Certificate #{resource[:certificate_path]} will expire at '#{cert.not_after}', which is within #{resource[:renew_within_days]} days")
      false
    else
      # Compare subjectAltName entries against the CSR's DNS names (order-insensitive).
      cert_alternate_names = cert.extensions.select {|e| e.oid == "subjectAltName"}.map { |e| e.value.split(',').map(&:strip) }.first || []
      csr_alternate_names = csr.names.map { |name| "DNS:#{name}" }
      if cert_alternate_names.sort != csr_alternate_names.sort
        Puppet.debug("Certificate #{resource[:certificate_path]} has alternative names #{cert_alternate_names}, but wanted #{csr_alternate_names}")
        false
      else
        true
      end
    end
  end
end
# Obtains a certificate from the ACME server and writes the private key,
# certificate, and (optionally) the issuer chain to their configured paths.
def create
  Puppet.debug("Creating certificate #{resource[:certificate_path]}")
  private_key_existed = File.exist? resource[:private_key_path]

  register_client
  order = authorize_domains
  finalize(order)

  # Split the returned PEM bundle into the leaf certificate and issuer chain.
  certificate, chain = order.certificate.split(/(?<=-----END CERTIFICATE-----)/, 2).map(&:strip)

  # Only persist a generated key; never overwrite a pre-existing key file.
  if resource.generate_private_key? && !private_key_existed
    Puppet.debug("Writing private key to #{resource[:private_key_path]}")
    File.write(resource[:private_key_path], csr.private_key.to_pem, perm: resource[:private_key_mode])
  end

  cert_content = if resource.combine_certificate_and_chain?
    "#{certificate}\n#{chain}"
  else
    certificate
  end
  Puppet.debug("Writing certificate to #{resource[:certificate_path]}")
  File.write(resource[:certificate_path], cert_content, perm: resource[:certificate_mode])

  if resource[:certificate_chain_path]
    Puppet.debug("Writing certificate chain to #{resource[:certificate_chain_path]}")
    # BUG FIX: the mode was previously passed positionally, which File.write
    # treats as a byte *offset*; it must be the :perm keyword, matching the
    # other writes above.
    File.write(resource[:certificate_chain_path], chain, perm: resource[:certificate_chain_mode])
  end
end
protected
# Subclass hook: publish whatever the ACME authorization requires (e.g. a DNS
# record or HTTP token) and return the challenge object being used.
def handle_authorization(_authorization)
  raise "Subclass hook handle_authorization not implemented"
end
# Subclass hook: tear down whatever handle_authorization set up for the
# given authorization/challenge pair.
def clean_authorization(_authorization, _challenge)
  raise "Subclass hook clean_authorization not implemented"
end
private
# Registers/looks up the ACME account, refusing to proceed unless the
# operator has explicitly agreed to the server's terms-of-service URL.
def register_client
  terms_of_service_uri = acme_client.terms_of_service
  if terms_of_service_uri
    if terms_of_service_uri == resource[:agree_to_terms_url]
      terms_of_service_agreed = true
    else
      fail "ACME Server requires you to agree to the terms of service at #{terms_of_service_uri}.\n" \
        'If you accept the terms, please set the agree_to_terms_url parameter to this URL'
    end
  end
  # When the server publishes no ToS, terms_of_service_agreed stays nil here.
  acme_client.new_account(contact: resource[:contact], terms_of_service_agreed: terms_of_service_agreed)
end
# Creates an ACME order for all CSR names and completes the challenge for
# every authorization that is not already valid. Returns the order.
# NOTE(review): the `challenges` local collects the map result but is never
# read afterwards — the block is executed purely for its side effects.
def authorize_domains
  acme_client.new_order(identifiers: csr.names).tap do |order|
    challenges = order.authorizations.map do |auth|
      next if auth.status == 'valid'
      Puppet.debug("Authorizing domain '#{auth.domain}'")
      # Delegate challenge setup to the provider subclass (DNS/HTTP/etc.).
      challenge = handle_authorization auth

      begin
        challenge.request_validation
        begin
          Puppet.debug("Waiting for domain '#{auth.domain}' to be authorized")
          Timeout::timeout(resource[:authorization_timeout]) do
            # Poll the challenge, then the authorization, until both settle.
            wait_while(challenge, %w(pending processing), "Domain '#{auth.domain}' not yet authorized")
            fail "Domain '#{auth.domain}' has unexpected authorization status '#{challenge.status}'. Error: '#{challenge.error}'" unless challenge.status == 'valid'
            auth.reload
            wait_while(auth, %w(pending processing), "Authorization for domain '#{auth.domain}' is not yet valid")
            fail "Authorization for domain '#{auth.domain}' has unexpected status '#{auth.status}'" unless auth.status == 'valid'
          end
        rescue Timeout::Error
          fail "Timed out waiting for ACME server to verify domain '#{auth.domain}' after #{resource[:authorization_timeout]} seconds"
        end
      ensure
        # Always remove published challenge material, even on failure.
        clean_authorization auth, challenge
      end

      Puppet.debug("Domain '#{auth.domain}' successfully authorized")
    end
  end
end
# Submits the CSR for the authorized order and waits (bounded by
# order_timeout) for the ACME server to finalize it.
def finalize(order)
  order.finalize(csr: csr)
  begin
    Puppet.debug("Waiting for order to be finalized for #{csr.names}")
    Timeout::timeout(resource[:order_timeout]) do
      wait_while(order, %w(pending processing), "Order for #{csr.names} is still processing")
      fail "Order for #{csr.names} has unexpected status '#{order.status}'" unless order.status == 'valid'
    end
  rescue Timeout::Error
    # BUG FIX: the message previously reported authorization_timeout, but the
    # timeout actually applied above is order_timeout.
    fail "Timed out waiting for ACME server to finalize order for #{csr.names} after #{resource[:order_timeout]} seconds"
  end
end
# Polls +obj+ (reloading once per second) until its status leaves the given
# set of transitional statuses, logging +msg+ on every iteration.
def wait_while(obj, statuses, msg)
  loop do
    break unless statuses.include?(obj.status)
    Puppet.debug msg
    sleep 1
    obj.reload
  end
end
# Memoized ACME client bound to the configured directory URL and account key.
def acme_client
  @acme_client ||= ::Acme::Client.new(private_key: acme_private_key, directory: resource[:directory])
end
# Memoized certificate request for the resource's common name and SANs.
# Reuses an existing private key when present; otherwise the CertificateRequest
# generates one (only permitted when generate_private_key? is set).
def csr
  @csr ||= begin
    csr_params = {
      common_name: resource[:common_name],
      names: Array(resource[:alternate_names]),
    }
    if File.exist? resource[:private_key_path]
      csr_params[:private_key] = ::OpenSSL::PKey::RSA.new(File.read resource[:private_key_path])
    elsif !resource.generate_private_key?
      fail "Could not generate certificate #{resource[:certificate_path]}: private key #{resource[:private_key_path]} does not exist"
    end
    ::Acme::Client::CertificateRequest.new(csr_params)
  end
end
# Get the private key to use with the ACME client.
# Wraps any read/parse failure (StandardError) in a clearer message.
def acme_private_key
  ::OpenSSL::PKey::RSA.new(File.read(acme_private_key_path))
rescue => e
  fail "Could not load puppet private key from #{acme_private_key_path} to register with ACME server: #{e}"
end
# Path to the private key to use with the ACME client. Defaults to the puppet
# agent's private key when the parameter is unset or blank.
def acme_private_key_path
  configured = resource[:acme_private_key_path]
  if configured.nil? || configured.empty?
    Puppet[:hostprivkey]
  else
    configured
  end
end
end
| 40.40113 | 166 | 0.688295 |
6a6a11a52ec7ddec7d94985a956ad807550ff1ec
| 310 |
module RubyJmeter
  class ExtendedDSL < DSL
    # Attaches a Gaussian random timer node with the given constant delay and
    # random range (milliseconds), then yields to the block for nesting.
    def random_timer(delay = 0, range = 0, &block)
      params = { delay: delay, range: range }
      node = RubyJmeter::GaussianRandomTimer.new(params)
      attach_node(node, &block)
    end

    # `think_time` reads more naturally in test plans.
    alias think_time random_timer
  end
end
| 20.666667 | 56 | 0.654839 |
abaa2c9c331c4b75adfa1e06574e484e39df48ea
| 503 |
# Adds per-price-policy subsidy amounts (usage, reservation, overage),
# each a decimal with precision 9 / scale 2.
class AddSubsidies < ActiveRecord::Migration
  SUBSIDY_COLUMNS = [:usage_subsidy, :reservation_subsidy, :overage_subsidy].freeze

  def self.up
    SUBSIDY_COLUMNS.each do |column|
      add_column :price_policies, column, :decimal, precision: 9, scale: 2
    end
  end

  def self.down
    SUBSIDY_COLUMNS.each { |column| remove_column :price_policies, column }
  end
end
| 31.4375 | 86 | 0.761431 |
ac7770e67855365d95e0570fe987297d4f603afe
| 284 |
module CampusSolutions
  # Mixin for updater models: forwards params to a Campus Solutions proxy
  # and expires the cached work-experience feed for the user afterwards.
  module WorkExperienceUpdatingModel
    def passthrough(model_name, params)
      response = model_name.new(user_id: @uid, params: params).get
      HubEdos::StudentApi::V2::Feeds::WorkExperiences.expire @uid
      response
    end
  end
end
| 25.818182 | 65 | 0.707746 |
62daf24143ac2369b398d12b1179a177c9ea21a3
| 9,307 |
require 'mk4rb_test_helper'
# These tests are adapted from :
#// tstore3.cpp -- Regression test program, storage tests, part 3
#// $Id: tstore3.cpp 1230 2007-03-09 15:58:53Z jcw $
#// This is part of Metakit, the homepage is http://www.equi4.com/metakit.html
class Store3_Test < MetakitBaseTest
def test_s20_View_outlives_storage
W('s20a');
p1 = Metakit::IntProp.new("p1");
v1 = Metakit::View.new
Metakit::Storage.open("s20a", 1) {|s1|
v1 = s1.get_as("a[p1:I,p2:S]");
v1.add(p1[123]);
}
#// 19990916 - semantics changed, rows kept but no properties
# //A(p1 (v1[0]) == 123);
assert_equal 1, v1.get_size
assert_equal 0, v1.num_properties
#D('s20a');
R('s20a');
end
def test_s21_Test_demo_scenario
W('s21a');
p1, p2 = Metakit::StringProp[:p1, :p2]
Metakit::Storage.open("s21a", 1) {|storage|
storage.set_structure("a[p1:S,p2:S]");
v1 = Metakit::View.new;
r1 = Metakit::Row.new
p1.set(r1, "One")
p2.set(r1, "Un")
v1.add(r1);
assert_equal 1, v1.get_size()
p1.set(r1, "Two")
p2.set(r1, "Deux")
v1.add(r1);
assert_equal 2, v1.get_size()
#// changed 2000-03-15: Store is gone
#//v1 = storage.Store("a", v1);
v1 = storage.view_and_assign("a", v1)
assert_equal 2, v1.get_size
assert_equal "Two", p1.get(v1[1])
assert_equal "Deux", p2.get(v1[1])
assert_equal "One", p1.get(v1[0])
assert_equal "Un", p2.get(v1[0])
storage.commit();
assert_equal 2, v1.get_size
assert_equal "Two", p1.get(v1[1])
assert_equal "Deux", p2.get(v1[1])
assert_equal "One", p1.get(v1[0])
assert_equal "Un", p2.get(v1[0])
s1 = p1.get(v1[1])
s2 = p2.get(v1[1])
assert_equal "Two", s1
assert_equal "Deux", s2
storage.commit();
v1.add(p1["Three"] + p2["Trois"]);
storage.commit();
assert_equal 3, v1.get_size
assert_equal "Trois", p2.get(v1[2])
v1 = storage.get_as("a[p1:S,p2:S,p3:I]");
assert_equal 3, v1.get_size
assert_equal "Trois", p2.get(v1[2])
p3 = Metakit::IntProp.new("p3");
p3.set(v1[1], 123)
storage.commit();
assert_equal 3, v1.get_size()
assert_equal "Trois", p2.get(v1[2])
v2 = storage.get_as("b[p4:I]");
p4 = Metakit::IntProp.new("p4");
v2.add(p4[234]);
storage.commit();
assert_equal 3, v1.get_size
assert_equal "Trois", p2.get(v1[2])
p4a = Metakit::IntProp.new("p4");
v1.insert_row_at(2, p1["Four"] + p4a[345]);
storage.commit();
assert_equal 4, v1.get_size()
assert_equal "One", p1.get(v1[0])
assert_equal "Two", p1.get(v1[1])
assert_equal "Four", p1.get(v1[2])
assert_equal "Three", p1.get(v1[3])
assert_equal "Trois", p2.get(v1[3])
assert_equal 1, v2.get_size
assert_equal 234, p4.get(v2[0])
}
Metakit::Storage.open("s21a", 0) {|storage|
v1 = storage.view("a");
assert_equal 4, v1.get_size()
assert_equal "One", p1.get(v1[0])
assert_equal "Two", p1.get(v1[1])
assert_equal "Four", p1.get(v1[2])
assert_equal "Three", p1.get(v1[3])
v2 = storage.view("b");
p4 = Metakit::IntProp.new("p4");
assert_equal 1, v2.get_size
assert_equal 234, p4.get(v2[0])
}
#D(s21a);
R('s21a');
end
def test_s22_Double_storage
W('s22a');
p1 = Metakit::DoubleProp.new("p1");
Metakit::Storage.open("s22a", 1) {|s1|
s1.set_structure("a[p1:D]");
v1 = s1.view("a");
v1.add(p1[1234.5678]);
v1.add(p1[2345.6789]);
v1.insert_row_at(1, p1[3456.7890]);
s1.commit();
}
#D(s22a);
R('s22a');
end
def test_s23_Find_absent_record
W('s23a');
Metakit::Storage.open("s23a", 1) {|s1|
s1.set_structure("v[h:S,p:I,a:I,b:I,c:I,d:I,e:I,f:I,g:I,x:I]");
view = s1.view("v");
h = Metakit::StringProp.new("h");
p = Metakit::IntProp.new("p");
row = Metakit::Row.new
h.set(row, "someString")
p.set(row, 99)
x = view.find(row);
assert_equal x, - 1
}
#D(s23a);
R('s23a');
end
def test_s24_Bitwise_storage
W('s24a');
p1 = Metakit::IntProp.new("p1");
m = 9;
#// insert values in front, but check fractional sizes at each step
m.times {|n|
Metakit::Storage.open("s24a", 1) {|s1|
s1.set_structure("a1[p1:I],a2[p1:I],a3[p1:I],a4[p1:I]");
s1.autocommit();# // new feature in 1.6
v1 = s1.view("a1");
v2 = s1.view("a2");
v3 = s1.view("a3");
v4 = s1.view("a4");
row = Metakit::Row.new
k = ~n;
p1.set(row, k &0x01)
v1.insert_row_at(0, row);
p1.set(row, k &0x03)
v2.insert_row_at(0, row);
p1.set(row, k &0x0F)
v3.insert_row_at(0, row);
p1.set(row, k &0x7F)
v4.insert_row_at(0, row);
}
#// the following checks that all tiny size combinations work
Metakit::Storage.open("s24a", 0) {|s1|
v1 = s1.view("a1");
v2 = s1.view("a2");
v3 = s1.view("a3");
v4 = s1.view("a4");
assert_equal n + 1, v1.get_size
assert_equal n + 1, v2.get_size
assert_equal n + 1, v3.get_size
assert_equal n + 1, v4.get_size
}
}
Metakit::Storage.open("s24a", 0) {|s1|
v1 = s1.view("a1");
v2 = s1.view("a2");
v3 = s1.view("a3");
v4 = s1.view("a4");
assert_equal m, v1.get_size()
assert_equal m, v2.get_size()
assert_equal m, v3.get_size()
assert_equal m, v4.get_size()
#// now check that the inserted values are correct
m.times {|i|
j = m - i - 1;
k = ~i;
assert_equal(p1.get(v1[j]), (k &0x01))
assert_equal(p1.get(v2[j]), (k &0x03));
assert_equal(p1.get(v3[j]), (k &0x0F));
assert_equal(p1.get(v4[j]), (k &0x7F));
}
}
#D(s24a);
R('s24a');
end
def test_s25_Bytes_storage
W('s25a');
hi = Metakit::Bytes.new("hi", 2);
gday = Metakit::Bytes.new("gday", 4);
hello = Metakit::Bytes.new("hello", 5);
p1 = Metakit::BytesProp.new("p1");
Metakit::Storage.open("s25a", 1) {|s1|
s1.set_structure("a[p1:B]");
v1 = s1.view("a");
v1.add(p1[hi]);
assert_equal hi, p1.get(v1[0])
v1.add(p1[hello]);
assert_equal hi, p1.get(v1[0])
assert_equal hello, p1.get(v1[1])
v1.insert_row_at(1, p1[gday]);
assert_equal hi, p1.get(v1[0])
assert_equal gday, p1.get(v1[1])
assert_equal hello, p1.get(v1[2])
s1.commit();
assert_equal hi, p1.get(v1[0])
assert_equal gday, p1.get(v1[1])
assert_equal hello, p1.get(v1[2])
}
#D(s25a);
R('s25a');
end
def test_s26_Bitwise_autosizing
W('s26a');
p1, p2, p3, p4 = Metakit::IntProp[:p1, :p2, :p3, :p4]
Metakit::Storage.open("s26a", 1) {|s1|
s1.set_structure("a[p1:I,p2:I,p3:I,p4:I]");
v1 = s1.view("a");
v1.add(p1[1] + p2[3] + p3[15] + p4[127]);
assert_equal 1, p1.get(v1[0])
assert_equal 3, p2.get(v1[0])
assert_equal 15, p3.get(v1[0])
assert_equal 127, p4.get(v1[0])
p1.set(v1[0], 100000)
p2.set(v1[0], 100000)
p3.set(v1[0], 100000)
p4.set(v1[0], 100000)
#// these failed in 1.61
assert_equal 100000, p1.get(v1[0])
assert_equal 100000, p2.get(v1[0])
assert_equal 100000, p3.get(v1[0])
assert_equal 100000, p4.get(v1[0])
s1.commit();
}
#D(s26a);
R('s26a');
end
def test_s27_Bytes_restructuring
W('s27a');
test = Metakit::Bytes.new("test", 4);
p1 = Metakit::BytesProp.new("p1");
Metakit::Storage.open("s27a", 1) {|s1|
row = Metakit::Row.new
p1.set(row, test)
v1 = Metakit::View.new
v1.add(row);
#// changed 2000-03-15: Store is gone
#//s1.Store("a", v1); // asserts in 1.61
v2 = s1.get_as("a[p1:B]");
v2.insert_view_at(0, v1);
s1.commit();
}
#D(s27a);
R('s27a');
end
def test_s28_Doubles_added_later
W('s28a');
p1 = Metakit::FloatProp.new("p1");
p2 = Metakit::DoubleProp.new("p2");
p3 = Metakit::ViewProp.new("p3");
Metakit::Storage.open("s28a", 1) {|s1|
s1.set_structure("a[p1:F,p2:D,p3[p1:F,p2:D]]");
v1 = s1.view("a");
r1 = Metakit::Row.new
p1.set(r1, 123)
p2.set(r1, 123)
v2 = Metakit::View.new
v2.add(p1[234] + p2[234]);
p3.set(r1, v2)
v1.add(r1);
x1 = p1.get(v1[0]);
assert_equal x1, p2.get(v1[0])
v2 = p3.get(v1[0]);
x2 = p1.get(v2[0]);
assert_equal x2, p2.get(v2[0]) # // fails in 1.6
s1.commit();
}
#D(s28a);
R('s28a');
end
def test_s29_Delete_bytes_property
W('s29a');
Metakit::Storage.open("s29a", 1) {| s1|
p1 = Metakit::BytesProp.new("p1");
s1.set_structure("a[p1:B]");
v1 = s1.view("a");
data = "\x63\x00\x00\x00";
v1.add(p1[Metakit::Bytes.new(data, 4)]);
s1.commit();
}
Metakit::Storage.open("s29a", 1) {|s1|
v1 = s1.view("a");
v1.remove_at(0); #// asserts in 1.7
s1.commit();
}
#D(s29a);
R('s29a');
end
end
| 23.92545 | 78 | 0.541313 |
2616033a8ccc7b1c49c009f7eaeca4ee3babd472
| 2,418 |
# encoding: UTF-8
require 'logstash/devutils/rspec/spec_helper'
require 'logstash/logging/logger'
require 'logstash/inputs/oss'
java_import 'com.aliyun.oss.model.GetObjectRequest'
# This file contains the common logic used by integration tests
# Shared RSpec context: wires the OSS input plugin configuration from
# environment variables so integration tests can run against real
# OSS/MNS endpoints.
shared_context "plugin initialize" do
  let(:endpoint) { ENV['OSS_ENDPOINT'] }
  let(:bucket) { ENV['OSS_BUCKET'] }
  let(:access_key_id) { ENV['OSS_ACCESS_KEY'] }
  let(:access_key_secret) { ENV['OSS_SECRET_KEY'] }
  let(:backup_add_prefix) { 'input-oss/' }
  let(:backup_to_bucket) { ENV['BACKUP_BUCKET'] }
  let(:backup_to_dir) { ENV['BACKUP_DIR'] }

  # Baseline plugin settings shared by all integration examples.
  # stop_for_test makes the plugin's run loop terminate on its own.
  let(:common_configurations) do
    {
      "endpoint" => endpoint,
      "bucket" => bucket,
      "access_key_id" => access_key_id,
      "access_key_secret" => access_key_secret,
      "stop_for_test" => true,
      "include_object_properties" => true,
      "mns_settings" => {
        "endpoint" => ENV['MNS_ENDPOINT'],
        "queue" => ENV['MNS_QUEUE'],
        "poll_interval_seconds" => 3,
        "wait_seconds" => 3
      }
    }
  end

  # Opt into verbose plugin logging by exporting DEBUG.
  LogStash::Logging::Logger::configure_logging("debug") if ENV["DEBUG"]

  # Java OSS SDK client used by the helper methods below.
  let(:oss) { OSSClientBuilder.new().build(endpoint, access_key_id, access_key_secret) }
end
# Runs the OSS input plugin with +settings+ until it stops on its own
# (stop_for_test), then asserts that exactly +size+ events were queued.
def fetch_events(settings, size)
  queue = []
  input = LogStash::Inputs::OSS.new(settings)
  input.register
  thread = Thread.start do
    input.run(queue)
  end
  thread.join
  expect(queue.size).to eq(size)
end
# Removes every object in +bucket+ (lists with an empty prefix, so nothing
# is excluded).
def clean_bucket(bucket)
  oss.listObjects(bucket, "").getObjectSummaries().each do |objectSummary|
    oss.deleteObject(bucket, objectSummary.getKey())
  end
end
# Deletes +bucket+ itself; the bucket must already be empty.
def delete_bucket(bucket)
  oss.deleteBucket(bucket)
end
# Returns the keys of all objects in +bucket+ whose key starts with +prefix+.
def list_remote_files(bucket, prefix)
  oss.listObjects(bucket, prefix).getObjectSummaries().map(&:getKey)
end
# Uploads a fixture file (path relative to this spec file) to the test
# bucket under +remote_file_name+.
def upload_local_file(local_file, remote_file_name)
  file = File.join(File.dirname(__FILE__), local_file)
  oss.putObject(bucket, remote_file_name, java.io.File.new(file))
end
# Seeds the bucket with the sample fixtures used by the read tests: a
# directory-style key, plain keys, and prefixed/excluded variants.
# NOTE(review): the third call uploads the *uncompressed* sample under a
# ".gz" key — presumably deliberate for exclude/decompress tests; confirm.
def upload(prefix)
  upload_local_file('../sample/uncompressed.log', "uncompressed.log/")
  upload_local_file('../sample/uncompressed.log', "uncompressed.log")
  upload_local_file('../sample/uncompressed.log', "uncompressed.log.1.gz")
  upload_local_file('../sample/uncompressed.log', "#{prefix}uncompressed.log")
  upload_local_file('../sample/uncompressed.log.1.gz', "#{prefix}exclude/uncompressed.log.1.gz")
end
| 30.607595 | 96 | 0.70306 |
3897187b795cd49f47cf34080a2ee25df5ed617a
| 125 |
# Adds an optional free-text ethnicity attribute to the people table.
class AddEthnicityToPerson < ActiveRecord::Migration[6.0]
  def change
    add_column(:people, :ethnicity, :string)
  end
end
4ad0124c844e312fddecadc98cb77fba319b4d59
| 742 |
# The player-controlled pod. The `it_is`/`it_has`/`shape`/`attach` calls are
# class-level DSL macros inherited from Thing (game framework) — semantics
# defined there; confirm against the framework.
class Player < Thing
  it_is Controllable
  it_has UserInterface
  it_is_a Pod
#  shape :poly, [CP::Vec2.new(-16,-16), CP::Vec2.new(-16,16), CP::Vec2.new(16,16), CP::Vec2.new(16,-16)]
  shape :circle, 16
  mass 80
  moment 0.1
  friction 10.0
  collision_type :player
  random_rotation
  attach Helper, 0, 0

  # Number of souls this player has rescued so far.
  attr_accessor :souls_saved

  def initialize *args
    super *args
    self.souls_saved = 0
  end

  # Increment the rescue counter by one.
  def save_soul
    self.souls_saved += 1
  end

  # Per-frame update: add a small random spin, accelerate along the local
  # gravity vector, and bounce at the window borders.
  def move
    self.rotation += rand / 1000
    self.speed += window.gravity_vector_for(self) / 1000
    bounce_off_border_x
    bounce_off_border_y
  end

  # Rendered size scales with local gravity strength — units/scale factor
  # (length/30) presumably tuned by eye; confirm against the renderer.
  def current_size
    size = window.gravity_vector_for(self).length/30
    [size, size]
  end
end
| 17.666667 | 105 | 0.657682 |
393ed6f07db563667241a6cfd5a99305a5f32865
| 5,390 |
# Build requirement: LLDB can only be built when a code-signing identity
# named "lldb_codesign" is available to the current user.
class CodesignRequirement < Requirement
  include FileUtils
  fatal true

  satisfy(:build_env => false) do
    # Probe for the identity with a dry-run codesign of a scratch file,
    # so nothing is actually signed.
    mktemp do
      touch "llvm_check.txt"
      quiet_system "/usr/bin/codesign", "-s", "lldb_codesign", "--dryrun", "llvm_check.txt"
    end
  end

  def message
    <<-EOS.undent
      lldb_codesign identity must be available to build with LLDB.
      See: https://llvm.org/svn/llvm-project/lldb/trunk/docs/code-signing.txt
    EOS
  end
end
# Homebrew formula for LLVM 3.6.2 with optional Clang/LLD/LLDB components.
class Llvm < Formula
  desc "llvm (Low Level Virtual Machine): a next-gen compiler infrastructure"
  homepage "http://llvm.org/"

  stable do
    url "http://llvm.org/releases/3.6.2/llvm-3.6.2.src.tar.xz"
    sha256 "f60dc158bfda6822de167e87275848969f0558b3134892ff54fced87e4667b94"

    # Optional sub-projects are fetched as resources and unpacked into the
    # LLVM source tree during install.
    resource "clang" do
      url "http://llvm.org/releases/3.6.2/cfe-3.6.2.src.tar.xz"
      sha256 "ae9180466a23acb426d12444d866b266ff2289b266064d362462e44f8d4699f3"
    end

    resource "libcxx" do
      url "http://llvm.org/releases/3.6.2/libcxx-3.6.2.src.tar.xz"
      sha256 "52f3d452f48209c9df1792158fdbd7f3e98ed9bca8ebb51fcd524f67437c8b81"
    end

    resource "lld" do
      url "http://llvm.org/releases/3.6.2/lld-3.6.2.src.tar.xz"
      sha256 "43f553c115563600577764262f1f2fac3740f0c639750f81e125963c90030b33"
    end

    resource "lldb" do
      url "http://llvm.org/releases/3.6.2/lldb-3.6.2.src.tar.xz"
      sha256 "940dc96b64919b7dbf32c37e0e1d1fc88cc18e1d4b3acf1e7dfe5a46eb6523a9"
    end

    resource "clang-tools-extra" do
      url "http://llvm.org/releases/3.6.2/clang-tools-extra-3.6.2.src.tar.xz"
      sha256 "6a0ec627d398f501ddf347060f7a2ccea4802b2494f1d4fd7bda3e0442d04feb"
    end
  end

  bottle do
    cellar :any
    sha256 "a0ec4b17ae8c1c61071e603d0dcf3e1c39a5aae63c3f8237b4363a06701a3319" => :yosemite
    sha256 "17a62c19d119c88972fa3dce920cfbc6150af8892ba8e29ce551ae7e2e84f42e" => :mavericks
    sha256 "6d780faae2647ebce704b2f0a246b52d4037ebf4a2f796644814607e7751af93" => :mountain_lion
  end

  head do
    url "http://llvm.org/git/llvm.git"

    resource "clang" do
      url "http://llvm.org/git/clang.git"
    end

    resource "libcxx" do
      url "http://llvm.org/git/libcxx.git"
    end

    resource "lld" do
      url "http://llvm.org/git/lld.git"
    end

    resource "lldb" do
      url "http://llvm.org/git/lldb.git"
    end

    resource "clang-tools-extra" do
      url "http://llvm.org/git/clang-tools-extra.git"
    end
  end

  option :universal
  option "with-clang", "Build Clang support library"
  option "with-lld", "Build LLD linker"
  option "with-lldb", "Build LLDB debugger"
  option "with-rtti", "Build with C++ RTTI"
  option "with-python", "Build Python bindings against Homebrew Python"
  option "without-assertions", "Speeds up LLVM, but provides less debug information"

  deprecated_option "rtti" => "with-rtti"
  deprecated_option "disable-assertions" => "without-assertions"

  if MacOS.version <= :snow_leopard
    depends_on :python
  else
    depends_on :python => :optional
  end
  depends_on "cmake" => :build

  if build.with? "lldb"
    depends_on "swig"
    depends_on CodesignRequirement
  end

  # macOS ships its own LLVM; don't link this one into /usr/local.
  keg_only :provided_by_osx

  # Apple's libstdc++ is too old to build LLVM
  fails_with :gcc
  fails_with :llvm

  def install
    # Apple's libstdc++ is too old to build LLVM
    ENV.libcxx if ENV.compiler == :clang

    if build.with?("lldb") && build.without?("clang")
      raise "Building LLDB needs Clang support library."
    end

    # Splice the optional sub-project sources into the LLVM tree.
    if build.with? "clang"
      (buildpath/"projects/libcxx").install resource("libcxx")
      (buildpath/"tools/clang").install resource("clang")
      (buildpath/"tools/clang/tools/extra").install resource("clang-tools-extra")
    end

    (buildpath/"tools/lld").install resource("lld") if build.with? "lld"
    (buildpath/"tools/lldb").install resource("lldb") if build.with? "lldb"

    args = %w[
      -DLLVM_OPTIMIZED_TABLEGEN=On
    ]
    args << "-DLLVM_ENABLE_RTTI=On" if build.with? "rtti"

    if build.with? "assertions"
      args << "-DLLVM_ENABLE_ASSERTIONS=On"
    else
      args << "-DCMAKE_CXX_FLAGS_RELEASE='-DNDEBUG'"
    end

    if build.universal?
      ENV.permit_arch_flags
      args << "-DCMAKE_OSX_ARCHITECTURES=#{Hardware::CPU.universal_archs.as_cmake_arch_flags}"
    end

    # Out-of-source CMake build in a scratch directory.
    mktemp do
      system "cmake", "-G", "Unix Makefiles", buildpath, *(std_cmake_args + args)
      system "make"
      system "make", "install"
    end

    if build.with? "clang"
      system "make", "-C", "projects/libcxx", "install",
        "DSTROOT=#{prefix}", "SYMROOT=#{buildpath}/projects/libcxx"

      # Ship the static-analyzer helper scripts, pointed at our clang.
      (share/"clang/tools").install Dir["tools/clang/tools/scan-{build,view}"]
      inreplace "#{share}/clang/tools/scan-build/scan-build", "$RealBin/bin/clang", "#{bin}/clang"
      bin.install_symlink share/"clang/tools/scan-build/scan-build", share/"clang/tools/scan-view/scan-view"
      man1.install_symlink share/"clang/tools/scan-build/scan-build.1"
    end

    # install llvm python bindings
    (lib/"python2.7/site-packages").install buildpath/"bindings/python/llvm"
    (lib/"python2.7/site-packages").install buildpath/"tools/clang/bindings/python/clang" if build.with? "clang"
  end

  def caveats
    <<-EOS.undent
      LLVM executables are installed in #{opt_bin}.
      Extra tools are installed in #{opt_share}/llvm.
    EOS
  end

  test do
    system "#{bin}/llvm-config", "--version"
  end
end
| 29.944444 | 112 | 0.687941 |
d5156391f19a0d5d5c9da71645e4cfceef862284
| 1,821 |
=begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for DatadogAPIClient::V1::SyntheticsAssertionJSONPathTarget
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Generated spec skeleton for the SyntheticsAssertionJSONPathTarget model.
# Only instantiation is actually asserted; the per-attribute examples are
# generator-provided stubs whose assertions are still TODO.
describe DatadogAPIClient::V1::SyntheticsAssertionJSONPathTarget do
  let(:instance) { DatadogAPIClient::V1::SyntheticsAssertionJSONPathTarget.new }

  describe 'test an instance of SyntheticsAssertionJSONPathTarget' do
    it 'should create an instance of SyntheticsAssertionJSONPathTarget' do
      expect(instance).to be_instance_of(DatadogAPIClient::V1::SyntheticsAssertionJSONPathTarget)
    end
  end

  describe 'test attribute "operator"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "property"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "target"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "type"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 32.517857 | 107 | 0.76112 |
ffa33fa435c978f587d1da6a34df24ccebacda2c
| 3,869 |
require "rails_helper"
# Specs for Pages::Page, the markdown-backed page model: lookup by path,
# featured-page selection, and parent/ancestor traversal, which is capped
# by MAX_TRAVERSAL_DEPTH to guard against runaway recursion.
RSpec.describe Pages::Page do
  include_context "with fixture markdown pages"

  # Shared assertions for a fully-populated page instance.
  shared_examples "a page" do |title, path, template|
    it { is_expected.to be_instance_of described_class }
    it { is_expected.to have_attributes title: title }
    it { is_expected.to have_attributes path: path }
    it { is_expected.to have_attributes template: template }
    it { is_expected.to have_attributes data: instance_of(Pages::Data) }
  end

  describe ".find" do
    context "with markdown page" do
      subject { described_class.find "/page1" }
      it_behaves_like "a page", "Hello World 1 Upwards", "/page1", "content/page1"
    end

    context "with non markdown page" do
      it "raises an exception" do
        expect { described_class.find "/unknown" }.to raise_error(Pages::Page::PageNotFoundError)
      end
    end
  end

  describe ".featured" do
    subject { described_class.featured }
    it_behaves_like "a page", "Hello World 1 Upwards", "/page1", "content/page1"

    # Exactly one page may carry featured: true in its frontmatter.
    context "with multiple featured stories" do
      before { allow(Pages::Frontmatter).to receive(:select).and_return pages }
      let(:pages) do
        {
          "/stories/featured" => { featured: true, title: "Featured page" },
          "/stories/second" => { featured: true, title: "Second page" },
        }
      end
      it { expect { subject }.to raise_exception Pages::Page::MultipleFeatured }
    end
  end

  describe "#parent" do
    context "when the page depth does not exceed the max traversal depth" do
      subject { described_class.find(path).parent&.path }

      context "when a top-level page" do
        let(:path) { "/first" }
        it { is_expected.to be_nil }
      end

      context "when a sub-page" do
        context "when the sub-page has a parent" do
          let(:path) { "/subfolder/page2" }
          it { is_expected.to eq("/subfolder") }
        end

        # A missing intermediate folder is skipped over, resolving to the
        # nearest existing ancestor page.
        context "when the sub-page does not have an immediate parent" do
          let(:path) { "/subfolder/transient/page3" }
          it { is_expected.to eq("/subfolder") }
        end
      end
    end

    context "when the page depth exceeds the max traversal depth" do
      let(:deep_path) { (described_class::MAX_TRAVERSAL_DEPTH + 5).times.collect { "/path" }.join }
      before do
        page = described_class.find("/subfolder/page2")
        allow(page).to receive(:path) { deep_path }
        allow(described_class).to receive(:find).and_raise(described_class::PageNotFoundError)
        page.parent
      end
      it "only tries to traverse to a maximum depth of #{described_class::MAX_TRAVERSAL_DEPTH}" do
        expect(described_class).to have_received(:find).exactly(described_class::MAX_TRAVERSAL_DEPTH).times
      end
    end
  end

  describe "#ancestors" do
    context "when the page depth does not exceed the max traversal depth" do
      subject { described_class.find(path).ancestors.map(&:path) }

      context "when a top-level page" do
        let(:path) { "/first" }
        it { is_expected.to be_empty }
      end

      context "when a sub-page" do
        context "when the sub-page has a parent" do
          let(:path) { "/subfolder/page2" }
          it { is_expected.to eq(["/subfolder"]) }
        end

        # Ancestors are ordered nearest-first.
        context "when the sub-page has multiple parents" do
          let(:path) { "/subfolder/other/page4" }
          it { is_expected.to eq(["/subfolder/other", "/subfolder"]) }
        end
      end
    end

    context "when the page depth exceeds the max traversal depth" do
      let(:page) { described_class.find("/subfolder/page2") }
      it "only tries to traverse to a maximum depth of #{described_class::MAX_TRAVERSAL_DEPTH}" do
        expect(page).to receive(:parent).exactly(described_class::MAX_TRAVERSAL_DEPTH).times.and_return(page)
        page.ancestors
      end
    end
  end
end
| 31.455285 | 109 | 0.643319 |
9149c4995010f47d2d1e89c3829de71e870187b7
| 97 |
# frozen_string_literal: true
# Standalone entry point: run the CommonMetrics database importer once
# (e.g. invoked from a rake task or deployment step).
::Glimmer::DatabaseImporters::CommonMetrics::Importer.new.execute
| 24.25 | 65 | 0.824742 |
18bbdd1dcd7497663c3bd611314122b7bacbf6f5
| 4,344 |
# Homebrew formula for ABINIT, an atomic-scale first-principles (DFT)
# simulation package, built with MPI compiler wrappers and optional
# ScaLAPACK / FFTW / libXC / NetCDF / ETSF_IO / GSL support.
class Abinit < Formula
  desc "Atomic-scale first-principles simulation software"
  homepage "http://www.abinit.org"
  url "http://ftp.abinit.org/abinit-7.10.5.tar.gz"
  sha256 "e9376a3e34790bce90992f28e5fa8554b51ba467bf5709c7fd25d300e7c4f56a"

  bottle do
    cellar :any
    sha256 "1b8837dc6dd908ad27e3225a849887e28cd9d8ed4464363dc24a7830d53c4a25" => :el_capitan
    sha256 "241aeffb5599e19d5492c450d3393dd12f6f233c291838d8d80941bb57becf59" => :yosemite
    sha256 "1dca7b47daa3f5a8420fe44059bbd6e745d4db710047ebca3d31b89f3709b178" => :mavericks
  end

  option "without-check", "Skip build-time tests (not recommended)"
  option "with-testsuite", "Run full test suite (time consuming)"

  depends_on :mpi => [:cc, :cxx, :f77, :f90]
  depends_on :fortran
  depends_on "veclibfort"
  depends_on "scalapack" => :recommended
  depends_on "fftw" => ["with-mpi", "with-fortran", :recommended]
  depends_on "libxc" => :recommended
  depends_on "netcdf" => ["with-fortran", :recommended]
  depends_on "etsf_io" => :recommended
  depends_on "gsl" => :recommended

  def install
    # Environment variables CC, CXX, etc. will be ignored.
    # Clear them so configure only sees the MPI wrappers passed below.
    ENV.delete "CC"
    ENV.delete "CXX"
    ENV.delete "F77"
    ENV.delete "FC"

    args = %W[CC=#{ENV["MPICC"]}
              CXX=#{ENV["MPICXX"]}
              F77=#{ENV["MPIF77"]}
              FC=#{ENV["MPIFC"]}
              --prefix=#{prefix}
              --enable-mpi=yes
              --with-mpi-prefix=#{HOMEBREW_PREFIX}
              --enable-optim=safe
              --enable-openmp=no
              --enable-gw-dpc
            ]

    dft_flavor = "none"
    trio_flavor = "none"

    # Linear algebra: always vecLibFort; add ScaLAPACK when requested.
    if build.with? "scalapack"
      args << "--with-linalg-flavor=custom+scalapack"
      args << "--with-linalg-libs=-L#{Formula["veclibfort"].opt_lib} -lvecLibFort -L#{Formula["scalapack"].opt_lib} -lscalapack"
    else
      args << "--with-linalg-flavor=custom"
      args << "--with-linalg-libs=-L#{Formula["veclibfort"].opt_lib} -lvecLibFort"
    end

    # I/O flavor: etsf_io implies netcdf; netcdf alone is also supported.
    if build.with? "etsf_io"
      raise "Building with etsf_io support requires netcdf" if build.without? "netcdf"
      trio_flavor = "netcdf+etsf_io"
      args << "--with-etsf-io-incs=-I#{Formula["etsf_io"].opt_include}"
      args << "--with-etsf-io-libs=-L#{Formula["etsf_io"].opt_lib} -letsf_io_low_level -letsf_io_utils -letsf_io"
      args << "--with-netcdf-incs=-I#{Formula["netcdf"].opt_include}"
      args << "--with-netcdf-libs=-L#{Formula["netcdf"].opt_lib} -lnetcdff -lnetcdf"
    elsif build.with? "netcdf"
      trio_flavor = "netcdf"
      args << "--with-netcdf-incs=-I#{Formula["netcdf"].opt_include}"
      args << "--with-netcdf-libs=-L#{Formula["netcdf"].opt_lib} -lnetcdff -lnetcdf"
    end

    if build.with? "gsl"
      args << "--with-math-flavor=gsl"
      args << "--with-math-incs=-I#{Formula["gsl"].opt_include}"
      args << "--with-math-libs=-L#{Formula["gsl"].opt_lib} -lgsl"
    end

    if build.with? "libxc"
      dft_flavor = "libxc"
      args << "--with-libxc-incs=-I#{Formula["libxc"].opt_include}"
      args << "--with-libxc-libs=-L#{Formula["libxc"].opt_lib} -lxc -lxcf90"
      # Patch to make libXC 2.2+ supported by Abinit 7.10;
      # libXC 2.2 will be supported in Abinit 8.0
      inreplace "configure", "(major != 2) || (minor < 0) || (minor > 1)",
                             "(major != 2) || (minor < 2) || (minor > 3)"
    end

    # need to link against single precision as well, see https://trac.macports.org/ticket/45617 and http://forum.abinit.org/viewtopic.php?f=3&t=2631
    if build.with? "fftw"
      args << "--with-fft-flavor=fftw3"
      args << "--with-fft-incs=-I#{Formula["fftw"].opt_include}"
      args << "--with-fft-libs=-L#{Formula["fftw"].opt_lib} -lfftw3 -lfftw3f -lfftw3_mpi -lfftw3f_mpi"
    end

    args << "--with-dft-flavor=#{dft_flavor}"
    args << "--with-trio-flavor=#{trio_flavor}"

    system "./configure", *args
    system "make"

    # Optional build-time tests; full suite only with --with-testsuite.
    if build.with? "check"
      cd "tests"
      if build.with? "testsuite"
        system "./runtests.py 2>&1 | tee make-check.log"
      else
        system "./runtests.py built-in fast 2>&1 | tee make-check.log"
      end
      ohai `grep ", succeeded:" "make-check.log"`.chomp
      prefix.install "make-check.log"
      cd ".."
    end

    system "make", "install"
  end

  # Minimal smoke test: print the abinit build info banner.
  test do
    system "#{bin}/abinit", "-b"
  end
end
| 37.128205 | 148 | 0.622007 |
033ca6c55a0bba42db47b15adefe205ca3a3a8c9
| 1,768 |
#================================================================
# The MIT License
# Copyright (c) 2020 biud436
# ---------------------------------------------------------------
# Free for commercial and non commercial use.
#================================================================
# ======================================================================
# Name : Anti-Aliasing Picture
# Author : biud436
# Desc :
# This script allows you to apply the anti-aliasing to your picture.
#
# Notice that this script must require below stuff.
#
# DirectX Implementation of RGSS3
# Link : https://forums.rpgmakerweb.com/index.php?threads/rgd-directx-implementation-of-rgss3.95228/
#
# ======================================================================
# Register a bilinearly-filtered pixel shader and its technique pass with
# the RGD runtime (Graphics.add_shader is provided by the RGD DirectX
# implementation of RGSS3, not by stock RGSS3 -- see header above).
# The two string arguments are HLSL source and are passed through verbatim.
Graphics.add_shader("
texture my_tex;
sampler2D mySampler = sampler_state
{
Texture = <my_tex>;
AddressU = Wrap;
AddressV = Wrap;
MinFilter = Linear;
MagFilter = Linear;
MipFilter = Linear;
};
PS_OUTPUT PS_AntiAliasing(float4 color : COLOR0,
float2 tex : TEXCOORD0) : COLOR
{
float4 res = tex2D(mySampler, tex);
return GetOutput(res * color);
}", "
pass AntiAliasing
{
AlphaBlendEnable = true;
SeparateAlphaBlendEnable = false;
BlendOp = MAX;
SrcBlend = ONE;
DestBlend = ONE;
PixelShader = compile ps_2_0 PS_AntiAliasing();
}
")
# Applies the "AntiAliasing" shader pass (registered above) to every
# picture sprite shown by the engine.
class Sprite_Picture < Sprite
  alias iuiu_initialize initialize

  # Hook initialization to select the anti-aliasing effect for this sprite.
  def initialize(viewport, picture)
    iuiu_initialize(viewport, picture)
    self.effect_name = "AntiAliasing"
  end

  # Refresh the sprite's bitmap from the picture name, feeding the same
  # bitmap to the shader's `my_tex` sampler.
  def update_bitmap
    if @picture.name.empty?
      self.bitmap = nil
    else
      # Fetch from Cache once: the original called Cache.picture twice,
      # assigning the second result to a dead local variable.
      bmp = Cache.picture(@picture.name)
      self.bitmap = bmp
      set_effect_param("my_tex", bmp)
    end
  end
end
| 27.625 | 100 | 0.567873 |
1d75215bc4497e75702805cb1bf7930e47724239
| 4,747 |
# Application user. Authenticates locally via Devise and remotely via
# Concord AuthPortal OmniAuth providers; owns activities, sequences,
# runs, imports, and per-provider Authentication records.
class User < ActiveRecord::Base
  # Include default devise modules. Others available are:
  # :token_authenticatable, :confirmable,
  # :lockable, :timeoutable and :omniauthable
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :trackable, :validatable, :timeoutable,
         :token_authenticatable, :bearer_token_authenticatable
  devise :omniauthable, :omniauth_providers => Concord::AuthPortal.all_strategy_names

  has_many :activities, :class_name => LightweightActivity
  has_many :sequences
  has_many :runs
  has_many :imports

  # Setup accessible (or protected) attributes for your model
  attr_accessible :email, :password, :password_confirmation, :remember_me,
                  :is_admin, :is_author, :can_export,
                  :provider, :uid, :authentication_token, :api_key, :has_api_key

  has_many :authentications, :dependent => :delete_all

  # Token authentication reads the key from the `api_key` request param.
  self.token_authentication_key = "api_key"

  def self.find_for_token_authentication(condition)
    self.where(condition).first
  end

  # access cancan outside of current_user
  # see https://github.com/ryanb/cancan/wiki/ability-for-other-users
  def ability
    @ability ||= Ability.new(self)
  end
  delegate :can?, :cannot?, :to => :ability

  def admin?
    return is_admin
  end

  def author?
    return is_author
  end

  def most_recent_authentication
    authentications.order("updated_at desc").first
  end

  # Token for the given provider, or for the most recently updated
  # authentication when no provider is named. Returns nil when absent.
  def authentication_token(provider=nil)
    # TODO: token expiration
    auth = nil
    if provider
      auth = authentications.find_last_by_provider provider
    else
      auth = most_recent_authentication
    end
    auth ? auth.token : nil
  end

  def self.get_random_password
    Devise.friendly_token[0,20]
  end

  def self.possibly_make_user_author(auth, user)
    # assign the author role if user is author on portal but not locally
    # the chain of tests are needed because under rspec the auth object is generated inside OmniAuth and it does not include the extra info
    if !user.is_author && (auth.respond_to? :extra) && (auth.extra.roles.include? "author")
      user.is_author = true
      user.save
    end
  end

  # Resolve (or create) the local user for a Concord portal OAuth callback:
  # reuse an existing Authentication, attach one to a matching-email user,
  # or create a brand-new user with a random password.
  def self.find_for_concord_portal_oauth(auth, signed_in_resource=nil)
    authentication = Authentication.find_by_provider_and_uid auth.provider, auth.uid
    if authentication
      # update the authentication token for this user to make sure it stays fresh
      authentication.update_attribute(:token, auth.credentials.token)
      possibly_make_user_author(auth, authentication.user)
      return authentication.user
    end

    # there is no authentication for this provider and uid
    # see if we should create a new authentication for an existing user
    # or make a whole new user
    email = auth.info.email || "#{Devise.friendly_token[0,20]}@example.com"

    # the devise validatable model enforces unique emails, so no need find_all
    existing_user_by_email = User.find_by_email email
    if existing_user_by_email
      if existing_user_by_email.authentications.find_by_provider auth.provider
        # BUGFIX: this previously used Kernel#throw with a string message.
        # `throw` is for catch/throw control flow, not errors; with no
        # matching `catch` it produced an UncaughtThrowError rather than
        # the intended failure. Raise an exception instead.
        raise "Can't have duplicate email addresses: #{email}. " +
              "There is an user with an authentication for this provider #{auth.provider} " +
              "and the same email already."
      end
      # There is no authentication for this provider and user
      user = existing_user_by_email
    else
      # no user with this email, so make a new user with a random password
      user = User.create(
        email: email,
        password: User.get_random_password
      )
    end
    possibly_make_user_author(auth, user)

    # create new authentication for this user that we found or created
    user.authentications.create(
      provider: auth.provider,
      uid: auth.uid,
      token: auth.credentials.token
    )
    user
  end

  # Return a list of providers for this user by checking previous authorizations
  # and available runs
  def auth_providers
    ( authentications.map { |auth| auth.provider.upcase } + runs.map { |run| run.get_auth_provider } ).uniq
  end

  # Delete session data before we logout.
  # Removes run_key, and user info from the browsers session.
  # So that logged-in indicator will match actual logged in status.
  def clear_session_data(rack_session)
    rack_session.delete "portal_username"
    rack_session.delete "portal_user_id"
    rack_session.delete "portal_domain"
    rack_session.delete "user_return_to"
  end

  def has_api_key
    return api_key.present?
  end

  # Virtual checkbox attribute: "1" generates a fresh UUID API key,
  # anything else clears it.
  def has_api_key=(newvalue)
    if newvalue == "1"
      if api_key.blank?
        update_attribute(:api_key, UUIDTools::UUID.random_create.to_s)
      end
    else
      update_attribute(:api_key, nil)
    end
  end
end
| 32.292517 | 139 | 0.721298 |
ff3fea5b1f092f55c2f3851d52e74610f548eae6
| 2,754 |
# Base class for member deposits. Drives the deposit life cycle through
# an AASM state machine, credits the member's account on acceptance, and
# mirrors every change to the member's private Pusher channel.
class Deposit < ActiveRecord::Base
  # All life-cycle states; see the aasm block below for transitions.
  STATES = [:submitting, :cancelled, :submitted, :rejected, :accepted, :checked, :warning]

  extend Enumerize
  include AASM
  include AASM::Locking
  include Currencible

  has_paper_trail on: [:update, :destroy]

  enumerize :aasm_state, in: STATES, scope: true

  alias_attribute :sn, :id
  delegate :name, to: :member, prefix: true
  delegate :id, to: :channel, prefix: true
  delegate :coin?, :fiat?, to: :currency_obj

  belongs_to :member
  belongs_to :account

  validates_presence_of \
    :amount, :account, \
    :member, :currency
  validates_numericality_of :amount, greater_than: 0

  scope :recent, -> { order('id DESC')}

  # Push create/update/destroy events to the member's browser.
  after_update :sync_update
  after_create :sync_create
  after_destroy :sync_destroy

  aasm :whiny_transitions => false do
    # Fee is computed when entering the initial state.
    state :submitting, initial: true, before_enter: :set_fee
    state :cancelled
    state :submitted
    state :rejected
    # Acceptance credits the account and emails the member after commit.
    state :accepted, after_commit: [:do, :send_mail]
    state :checked
    state :warning

    event :submit do
      transitions from: :submitting, to: :submitted
    end

    event :cancel do
      transitions from: :submitting, to: :cancelled
    end

    event :reject do
      transitions from: :submitted, to: :rejected
    end

    event :accept do
      transitions from: :submitted, to: :accepted
    end

    event :check do
      transitions from: :accepted, to: :checked
    end

    event :warn do
      transitions from: :accepted, to: :warning
    end
  end

  # Display form of the transaction id; subclasses may decorate.
  def txid_desc
    txid
  end

  class << self
    # DepositChannel registered under this class's underscored name.
    def channel
      DepositChannel.find_by_key(name.demodulize.underscore)
    end

    def resource_name
      name.demodulize.underscore.pluralize
    end

    def params_name
      name.underscore.gsub('/', '_')
    end

    # Rails route helper name for this deposit type's "new" page.
    def new_path
      "new_#{params_name}_path"
    end
  end

  def channel
    self.class.channel
  end

  def update_confirmations(data)
    update_column(:confirmations, data)
  end

  # Truncated txid for display in tight layouts.
  def txid_text
    txid && txid.truncate(40)
  end

  private

  # Credit the member's account; invoked after :accepted commits.
  def do
    account.lock!.plus_funds amount, reason: Account::DEPOSIT, ref: self
  end

  def send_mail
    DepositMailer.accepted(self.id).deliver if self.accepted?
  end

  # Applied before entering :submitting; lets subclasses deduct a fee.
  def set_fee
    amount, fee = calc_fee
    self.amount = amount
    self.fee = fee
  end

  # Default fee policy: no fee. Subclasses override and return
  # [net_amount, fee].
  def calc_fee
    [amount, 0]
  end

  def sync_update
    ::Pusher["private-#{member.sn}"].trigger_async('deposits', { type: 'update', id: self.id, attributes: self.changes_attributes_as_json })
  end

  def sync_create
    ::Pusher["private-#{member.sn}"].trigger_async('deposits', { type: 'create', attributes: self.as_json })
  end

  def sync_destroy
    ::Pusher["private-#{member.sn}"].trigger_async('deposits', { type: 'destroy', id: self.id })
  end
end
| 20.706767 | 140 | 0.673566 |
e20bf73e330a44e2060e97b55e83ad51dee1e9ec
| 1,069 |
# Copyright 2006-2011 Stanislav Senotrusov <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'thread'
require 'socket'
# Demonstration script: shows what a blocked TCPServer#accept does when
# another thread closes the listening socket (the rescue below observes
# Errno::EBADF on this Ruby build).
s=TCPServer.new("127.0.0.1", 9999)

t = Thread.new do
  begin
    loop do
      puts "ACCEPTING"
      puts s.accept.inspect
      puts "ACCEPTED"
    end
    # NOTE(review): unreachable -- the loop above only exits by raising.
    puts "DONE"
  rescue Exception => ex
    puts "EX"
    puts ex.inspect #<Errno::EBADF: Bad file descriptor>
  end
end

c = Thread.new do
  s.close
  puts "Closed"
  t.run # Without that there are no error raised from accept()
end

t.join
c.join
1d40df8af146d4800910a58157d3ae0016de791f
| 1,170 |
# encoding: utf-8
require File.expand_path('../lib/open_civic_data/version', __FILE__)
# Gem specification for open_civic_data, a Ruby wrapper around the
# Sunlight Foundation's Open Civic Data API.
Gem::Specification.new do |spec|
  # Runtime dependencies (HTTP stack + hash/JSON conveniences).
  spec.add_dependency 'faraday', '~> 0.8.7'
  spec.add_dependency 'faraday_middleware', '~> 0.9.0'
  spec.add_dependency 'hashie', '~> 2.0'
  spec.add_dependency 'json', '~> 1.8'
  spec.add_dependency 'rash', '~> 0.4'
  spec.add_development_dependency 'bundler', '~> 1.0'
  spec.author = 'Sunny Juneja'
  spec.description = %q{Ruby wrapper for the Sunlight Open Civic Data API, is an collaborative effort to define schemas and provide tools for gathering information on government organizations, people, legislation, and events provided by the Sunlight Foundation.}
  spec.email = '[email protected]'
  # Package every git-tracked file; tests come from the usual spec dirs.
  spec.files = `git ls-files`.split("\n")
  spec.homepage = 'https://github.com/whatasunnyday/open_civic_data'
  spec.licenses = ['MIT']
  spec.name = 'open_civic_data'
  spec.require_paths = ['lib']
  spec.summary = %q{Ruby wrapper for the Sunlight Open Civic Data API}
  spec.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  spec.version = OpenCivicData::VERSION
end
| 50.869565 | 264 | 0.686325 |
03b97926871f2e8f9f009d1781c871126821fe16
| 1,199 |
class Fastly
  # Access Control List configuration
  class ACL < BelongsToServiceAndVersion
    attr_accessor :id, :service_id, :name

    ##
    # :attr: service_id
    #
    # The id of the service this belongs to.

    ##
    # :attr: version
    #
    # The number of the version this belongs to.

    ##
    # :attr: name
    #
    # The name for the ACL.

    ##
    # List ACL entries that belong to the ACL
    # (delegates to the client's fetcher).
    def list_entries
      fetcher.list_acl_entries(:service_id => service_id, :acl_id => id)
    end

    ##
    # Create an ACL entry and add it to the ACL
    #
    # Accepts :ip, :negated, :subnet and :comment options.
    def create_entry(opts = {})
      fetcher.create_acl_entry(
        service_id: service_id,
        acl_id: id,
        ip: opts[:ip],
        negated: opts[:negated],
        subnet: opts[:subnet],
        comment: opts[:comment]
      )
    end

    ##
    # Retrieve an ACL entry
    #
    def get_entry(entry_id)
      fetcher.get_acl_entry(service_id, id, entry_id)
    end

    ##
    # Update an ACL entry
    #
    def update_entry(entry)
      fetcher.update_acl_entry(entry)
    end

    ##
    # Delete an ACL entry
    #
    def delete_entry(entry)
      fetcher.delete_acl_entry(entry)
    end
  end
end
| 19.031746 | 72 | 0.586322 |
d555170f22b6515734f76a8494cfc3c832539f06
| 372 |
require_dependency "whodat/application_controller"
module Whodat
  # Engine dashboard controller; exposes current-user helpers to views.
  class DashboardController < ApplicationController
    helper_method :current_user, :user_signed_in?

    # Dashboard landing page (rendering only).
    def index
    end

    private

    # BUGFIX: these two methods were previously `def`-ed *inside* #index,
    # so they were only defined on the class after #index had executed
    # once -- any view or other action calling them first raised
    # NoMethodError. Define them at class level (private, as is
    # conventional for controller helpers) so they always exist.

    # Memoized lookup of the signed-in engine user from the session.
    def current_user
      @current_user ||= Whodat::User.find_by( id: session[:user_id] )
    end

    # Truthy (the user record) when signed in, nil otherwise.
    def user_signed_in?
      current_user
    end
  end
end
| 19.578947 | 71 | 0.682796 |
f89cd7035a03d30623c68879ed80624db7c0e42b
| 404 |
# Join table for self-referential gem/package dependencies:
# dependent_id depends on depended_id.
class CreateDependencies < ActiveRecord::Migration[5.0]
  def change
    create_table :dependencies do |t|
      t.integer :dependent_id
      t.integer :depended_id

      t.timestamps
    end
    add_index :dependencies, :dependent_id
    add_index :dependencies, :depended_id
    # A given (dependent, depended) pair may only be recorded once.
    add_index :dependencies, [:dependent_id, :depended_id], unique: true, name: 'index_dependencies_on_uniqueness'
  end
end
| 28.857143 | 114 | 0.732673 |
bfc8a7ff3a1b9befa67c2ed18145902f09ddc45a
| 98 |
#
# Cookbook:: nginx
# Recipe:: configure
#
# Copyright:: 2018, The Authors, All Rights Reserved.
| 16.333333 | 53 | 0.693878 |
e838206abdfccfa0aed843e7d94ea363eac36c7e
| 976 |
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
# Specs for Kernel#exec: method visibility, failure behaviour for
# unexecutable commands, and process replacement (verified by running a
# child ruby interpreter and inspecting its output).
describe "Kernel#exec" do
  it "is a private method" do
    Kernel.should have_private_instance_method(:exec)
  end

  it "raises a SystemCallError if cmd cannot execute" do
    lambda { exec "" }.should raise_error(SystemCallError)
  end

  it "raises a SystemCallError if cmd cannot execute and contains '-'" do
    lambda { exec 'cmd-plugin' }.should raise_error(SystemCallError)
  end

  # exec replaces the process image, so the trailing `puts "fail"` must
  # never run in the child interpreter.
  it "runs the specified command, replacing current process" do
    result = `#{RUBY_EXE} -e 'exec "echo hello"; puts "fail"'`
    result.should == "hello\n"
  end

  ruby_version_is "1.9.2" do
    it "passes environment vars to the child environment" do
      result = `#{RUBY_EXE} -e 'exec({"FOO" => "BAR"}, "echo $FOO"); puts "fail"'`
      result.should == "BAR\n"
    end
  end
end

describe "Kernel.exec" do
  it "needs to be reviewed for spec completeness"
end
| 29.575758 | 82 | 0.686475 |
e87c6580fa7a3e231612a06627ff4ea9864b8a15
| 264 |
java_import "com.deflatedpickle.barque.Barque"
java_import "javax.swing.JPanel"
# Minimal JRuby/Swing widget: attaches a fresh JPanel to one of the
# Barque application's shells (see java_import lines above).
class Widget
  def initialize(shell_index)
    @shell = Barque.INSTANCE.shellList.get(shell_index)
    @composite = JPanel.new
    @shell.add @composite
  end

  # Intentionally a no-op here; presumably overridden by subclasses --
  # NOTE(review): confirm against concrete widget implementations.
  def update
  end
end
| 18.857143 | 55 | 0.746212 |
217bd8238b076f6915c0a694ecd0807439280592
| 886 |
# Chef recipe: install EDB Postgres Advanced Server 9.6 and configure
# host/local access for the enterprisedb superuser and an edb_user role.
edb_server_install 'EDB Postgres Advanced Server' do
  password '12345'
  port 5444
  setup_repo true
  version '9.6'
  action [:install, :create]
end

# Allow md5-authenticated host connections for enterprisedb on loopback.
edb_access 'enterprisedb host superuser' do
  access_type 'host'
  access_db 'all'
  access_user 'enterprisedb'
  access_addr '127.0.0.1/32'
  access_method 'md5'
  notifies :reload, 'service[edb-as9.6-server]'
end

postgresql_user 'edb_user' do
  superuser true
  password 'EDB123'
  sensitive false
end

# Local (unix-socket) md5 access for edb_user.
postgresql_access 'a edb_user local superuser' do
  access_type 'local'
  access_db 'all'
  access_user 'edb_user'
  access_method 'md5'
  access_addr nil
  notifies :reload, 'service[edb-as9.6-server]'
end

# Service resource exists only to receive the reload notifications above;
# the platform-specific name is resolved lazily via the cookbook helpers.
service 'edb-as9.6-server' do
  extend EnterprisedbCookbook::Helpers
  service_name lazy { platform_service_name }
  supports restart: true, status: true, reload: true
  action :nothing
end
6a539df20d60c02887c2482e27648ef52e5c7319
| 5,985 |
# Homebrew formula for borgmatic, installed into a virtualenv with all of
# its (pinned) Python dependencies vendored as resources.
class Borgmatic < Formula
  include Language::Python::Virtualenv

  desc "Simple wrapper script for the Borg backup software"
  homepage "https://torsion.org/borgmatic/"
  url "https://files.pythonhosted.org/packages/41/7a/f5554b18226f6e05b0ce4dc3a652fee30c350cd2c7e647c8adc174a407d0/borgmatic-1.5.18.tar.gz"
  sha256 "757d54d73cdaf333210e24cb13e0e0b4de912d17222e4b383433ae929287fe9a"
  license "GPL-3.0-or-later"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "4878b473eab0b3df714a2ea1535d34f86c20777ec6981fb3a734c5ae3930f384"
    sha256 cellar: :any_skip_relocation, big_sur: "3624e4d6f5b2070fada6cb58e4e1ceb9b293907b791f7b5c63985d531a588e44"
    sha256 cellar: :any_skip_relocation, catalina: "98b369061ccf3c7eaa068eda2fda8eb753d0abae5634e36f49e87d031ad5ef24"
    sha256 cellar: :any_skip_relocation, mojave: "0ffe59404b696738490d87e7e9dd0800779ab09e39a6e5166f2e9412c74ccd0f"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "c4a88275ada074a211907cfedac68ccd0ff5a801ec484d9801f4240f1f42bb1b" # linuxbrew-core
  end

  depends_on "libyaml"
  depends_on "[email protected]"

  # --- Pinned PyPI dependencies, installed into the virtualenv ---

  resource "attrs" do
    url "https://files.pythonhosted.org/packages/ed/d6/3ebca4ca65157c12bd08a63e20ac0bdc21ac7f3694040711f9fd073c0ffb/attrs-21.2.0.tar.gz"
    sha256 "ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"
  end

  resource "certifi" do
    url "https://files.pythonhosted.org/packages/6d/78/f8db8d57f520a54f0b8a438319c342c61c22759d8f9a1cd2e2180b5e5ea9/certifi-2021.5.30.tar.gz"
    sha256 "2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"
  end

  resource "charset-normalizer" do
    url "https://files.pythonhosted.org/packages/e7/4e/2af0238001648ded297fb54ceb425ca26faa15b341b4fac5371d3938666e/charset-normalizer-2.0.4.tar.gz"
    sha256 "f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
  end

  resource "colorama" do
    url "https://files.pythonhosted.org/packages/1f/bb/5d3246097ab77fa083a61bd8d3d527b7ae063c7d8e8671b1cf8c4ec10cbe/colorama-0.4.4.tar.gz"
    sha256 "5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/cb/38/4c4d00ddfa48abe616d7e572e02a04273603db446975ab46bbcd36552005/idna-3.2.tar.gz"
    sha256 "467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"
  end

  resource "jsonschema" do
    url "https://files.pythonhosted.org/packages/69/11/a69e2a3c01b324a77d3a7c0570faa372e8448b666300c4117a516f8b1212/jsonschema-3.2.0.tar.gz"
    sha256 "c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
  end

  resource "pyrsistent" do
    url "https://files.pythonhosted.org/packages/f4/d7/0fa558c4fb00f15aabc6d42d365fcca7a15fcc1091cd0f5784a14f390b7f/pyrsistent-0.18.0.tar.gz"
    sha256 "773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/e7/01/3569e0b535fb2e4a6c384bdbed00c55b9d78b5084e0fb7f4d0bf523d7670/requests-2.26.0.tar.gz"
    sha256 "b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
  end

  resource "ruamel.yaml" do
    url "https://files.pythonhosted.org/packages/83/59/4f576abb336d30b3d47061717224be714a9dc39c774061c34cb41c1795cf/ruamel.yaml-0.17.10.tar.gz"
    sha256 "106bc8d6dc6a0ff7c9196a47570432036f41d556b779c6b4e618085f57e39e67"
  end

  resource "ruamel.yaml.clib" do
    url "https://files.pythonhosted.org/packages/8b/25/08e5ad2431a028d0723ca5540b3af6a32f58f25e83c6dda4d0fcef7288a3/ruamel.yaml.clib-0.2.6.tar.gz"
    sha256 "4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz"
    sha256 "1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"
  end

  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/4f/5a/597ef5911cb8919efe4d86206aa8b2658616d676a7088f0825ca08bd7cb8/urllib3-1.26.6.tar.gz"
    sha256 "f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
  end

  def install
    virtualenv_install_with_resources
  end

  # End-to-end test driven through a stub `borg` binary that records the
  # commands borgmatic issues, then asserts the exact command sequence.
  test do
    borg = (testpath/"borg")
    config_path = testpath/"config.yml"
    repo_path = testpath/"repo"
    log_path = testpath/"borg.log"

    # Create a fake borg executable to log requested commands
    borg.write <<~EOS
      #!/bin/sh
      echo $@ >> #{log_path}
      # Return error on info so we force an init to occur
      if [ "$1" = "info" ]; then
        exit 2
      fi
    EOS
    borg.chmod 0755

    # Generate a config
    system bin/"generate-borgmatic-config", "--destination", config_path

    # Replace defaults values
    config_content = File.read(config_path)
                         .gsub(/# ?local_path: borg1/, "local_path: #{borg}")
                         .gsub(/user@backupserver:sourcehostname.borg/, repo_path)
                         .gsub("- user@backupserver:{fqdn}", "")
                         .gsub("- /var/log/syslog*", "")
                         .gsub("- /home/user/path with spaces", "")
    File.open(config_path, "w") { |file| file.puts config_content }

    # Initialize Repo
    system bin/"borgmatic", "-v", "2", "--config", config_path, "--init", "--encryption", "repokey"

    # Create a backup
    system bin/"borgmatic", "--config", config_path

    # See if backup was created
    system bin/"borgmatic", "--config", config_path, "--list", "--json"

    # Read in stored log
    log_content = File.read(log_path)

    # Assert that the proper borg commands were executed
    assert_equal <<~EOS, log_content
      info --debug #{repo_path}
      init --encryption repokey --debug #{repo_path}
      prune --keep-daily 7 --prefix {hostname}- #{repo_path}
      create #{repo_path}::{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f} /etc /home
      check --prefix {hostname}- #{repo_path}
      list --json #{repo_path}
    EOS
  end
end
| 43.369565 | 148 | 0.753718 |
1c21a2f8bf857e5bcc0af95ddb1722e7fea55d81
| 2,242 |
require_relative 'collections/pages'
# Nested (rather than compact) module form so the file also loads when
# VirtFS has not been defined yet; behavior is otherwise unchanged.
module VirtFS
  module ReFS
    # A single on-disk ReFS page (PAGE_SIZE bytes). Knows how to locate
    # metadata pages on the device and parse their header/attributes.
    class Page
      attr_accessor :fs
      attr_accessor :id
      attr_accessor :contents
      attr_accessor :sequence
      attr_accessor :virtual_page_number
      attr_accessor :attributes
      attr_accessor :object_id
      attr_accessor :entries

      def initialize
        @attributes ||= []
      end

      # Scan the device page by page, keeping only metadata pages --
      # those whose stored id equals their position on disk.
      def self.extract_all(fs)
        page_id = PAGES[:first]
        pages = Pages.new
        fs.device.seek(page_id * PAGE_SIZE)
        while contents = fs.device.read(PAGE_SIZE)
          extracted_id = id_from_contents(contents)
          is_metadata = extracted_id == page_id
          pages[page_id] = Page.parse(fs, page_id, contents) if is_metadata
          page_id += 1
        end
        pages
      end

      # The first 16-bit word of a page's raw bytes is its page id.
      def self.id_from_contents(contents)
        contents.unpack('S').first
      end

      # Byte offset of this page on the device.
      def offset
        id * PAGE_SIZE
      end

      # Byte offset of the first attribute record within this page.
      def attr_start
        offset + ADDRESSES[:first_attr]
      end

      def root?
        virtual_page_number == PAGES[:root]
      end

      def object_table?
        virtual_page_number == PAGES[:object_table]
      end

      # Build a Page from raw bytes, reading the sequence number and
      # virtual page number from fixed header offsets.
      # NOTE(review): store_pos/restore_pos (device position save/restore)
      # are assumed to come from the including environment -- confirm.
      def self.parse(fs, id, contents)
        store_pos
        page = new
        page.fs = fs
        page.id = id
        page.contents = contents
        fs.device.seek(page.offset + ADDRESSES[:page_sequence])
        page.sequence = fs.device.read(4).unpack('L').first
        fs.device.seek(page.offset + ADDRESSES[:virtual_page_number])
        page.virtual_page_number = fs.device.read(4).unpack('L').first
        unless page.root? || page.object_table?
          # TODO:
          #page.parse_attributes
          #page.parse_metadata
        end
        restore_pos
        page
      end

      # BUGFIX: this predicate's body had been corrupted by text mangling
      # ("[email protected]? && [email protected]?"); restore the intended
      # nil-and-empty check.
      def has_attributes?
        !@attributes.nil? && !@attributes.empty?
      end

      # Read attribute records from attr_start until an empty one.
      def parse_attributes
        fs.device.seek(attr_start)
        while true
          attribute = Attribute.read(fs)
          break if attribute.empty?
          @attributes << attribute
        end
      end

      def parse_metadata
        @object_id = @attributes.first.unpack("C*")[ADDRESSES[:object_id]]
        @entries = @attributes.first.unpack("C*")[ADDRESSES[:num_objects]]
      end
    end # class Page
  end # module ReFS
end # module VirtFS
3954905ff8643975befc3a2f06c02f365d267857
| 300 |
# Base controller for the JSON API namespace. CSRF protection is
# downgraded to null_session since token-authenticated clients do not
# carry CSRF tokens.
class Api::ApplicationController < ApplicationController
  protect_from_forgery with: :null_session

  private

  # before_action-style guard: resolves the ApiKey named by the
  # "APIAuthorization" request header and halts with 401 unless the key
  # exists and its owner is an admin.
  def authenticate_from_access_token
    api_key = ApiKey.find_by_access_token(request.headers["APIAuthorization"])
    head :unauthorized unless api_key && api_key.owner.admin?
  end
end
| 27.272727 | 78 | 0.803333 |
26614afc55932a64ef02cfcece9409d576c53a20
| 1,515 |
require 'ruby-graphviz'
require 'fileutils'
require 'date'
module ActiveInteractionMapper
  module Output
    # Graphviz output backend: records push/pop notifications from the
    # tracer as graph nodes/edges and renders a PNG under ./tmp on #done.
    class Dot
      # folder_name/file_name default to timestamped values; when
      # show_duplicated_path is false, a repeated caller->callee pair is
      # collapsed into a single edge.
      def initialize(folder_name:'', file_name:'', show_duplicated_path: false)
        d = DateTime.now
        d_string = d.strftime("%d%m%Y_%H%M%S")

        if folder_name.empty?
          @folder_name = "dir_#{d_string}"
        else
          @folder_name = "#{folder_name}"
        end

        if file_name.empty?
          @file_name = "img_#{d_string}.png"
        else
          @file_name = "#{file_name}_#{d_string}.png"
        end

        @show_duplicated_path = show_duplicated_path
        @edges = []
        @stack = []
        @graph = GraphViz.new('CodeMapper')
      end

      # Record entry into +normalized_class_name+: add a node and connect
      # it to the node currently on top of the stack.
      def push(tp, normalized_class_name)
        node = @graph.add_node("#{normalized_class_name}")
        unless @stack.empty?
          if @show_duplicated_path
            @graph.add_edge(@stack.last, node)
          else
            edge_key = "#{@stack.last};#{node}"
            # BUGFIX: the de-duplication test had been corrupted by text
            # mangling ("[email protected]? ..."); restore the intended
            # @edges.include? check so each edge is drawn only once.
            unless @edges.include?(edge_key)
              @graph.add_edge(@stack.last, node)
              @edges << edge_key
            end
          end
        end
        @stack << node
      end

      # Record exit from a traced call.
      def pop(tp, normalized_class_name)
        @stack.pop
      end

      # Render the accumulated graph to ./tmp/<folder>/<file>, creating
      # the directory if necessary.
      def done
        full_path = "./tmp/#{@folder_name}/#{@file_name}"
        dirname = File.dirname(full_path)
        unless File.directory?(dirname)
          FileUtils.mkdir_p(dirname)
        end
        @graph.output( :png => full_path)
      end
    end
  end
end
| 24.435484 | 79 | 0.545875 |
116e00985740abc6b27938e5f40723d5063bf6e0
| 145 |
# Adds the quotation's final price (after credits and pledges are
# applied), defaulting to 0 for existing rows.
class AddPriceAfterCreditAndPledge < ActiveRecord::Migration
  def change
    add_column :quotations, :final_price, :float, default: 0
  end
end
| 24.166667 | 60 | 0.77931 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.