hexsha stringlengths 40 40 | size int64 2 1.01M | content stringlengths 2 1.01M | avg_line_length float64 1.5 100 | max_line_length int64 2 1k | alphanum_fraction float64 0.25 1 |
---|---|---|---|---|---|
1a2e9de56615d5237ea342f1aa30511217028166 | 2,453 | class Pcl < Formula
desc "Library for 2D/3D image and point cloud processing"
homepage "http://www.pointclouds.org/"
url "https://github.com/PointCloudLibrary/pcl/archive/pcl-1.8.1.tar.gz"
sha256 "5a102a2fbe2ba77c775bf92c4a5d2e3d8170be53a68c3a76cfc72434ff7b9783"
head "https://github.com/PointCloudLibrary/pcl.git"
bottle do
sha256 "18efc3f7b897d0f646d8d1dd6512f41cc6abfba296b1dfed2927823e78b60b81" => :high_sierra
sha256 "46229b4eb3d168ecaff4f83dcfff95642a05d0ff989ab89adb63ba0397c4a909" => :sierra
sha256 "176fb1d15c2dfbb323eb29d28929624bcf342a09dcb848f610af652e89ba5ec2" => :el_capitan
sha256 "bd0b24e10b74ba20d63c09af7a1292d70d8c9ff8f7ffae98fabf4591993c09d4" => :yosemite
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "cminpack"
depends_on "eigen"
depends_on "flann"
depends_on "glew"
depends_on "libusb"
depends_on "qhull"
depends_on "vtk"
depends_on "homebrew/science/openni" => :optional
depends_on "homebrew/science/openni2" => :optional
def install
args = std_cmake_args + %w[
-DBUILD_SHARED_LIBS:BOOL=ON
-DBUILD_apps=AUTO_OFF
-DBUILD_apps_3d_rec_framework=AUTO_OFF
-DBUILD_apps_cloud_composer=AUTO_OFF
-DBUILD_apps_in_hand_scanner=AUTO_OFF
-DBUILD_apps_optronic_viewer=AUTO_OFF
-DBUILD_apps_point_cloud_editor=AUTO_OFF
-DBUILD_examples:BOOL=ON
-DBUILD_global_tests:BOOL=OFF
-DBUILD_outofcore:BOOL=AUTO_OFF
-DBUILD_people:BOOL=AUTO_OFF
-DBUILD_simulation:BOOL=AUTO_OFF
-DWITH_CUDA:BOOL=OFF
-DWITH_DOCS:BOOL=OFF
-DWITH_QT:BOOL=FALSE
-DWITH_TUTORIALS:BOOL=OFF
]
if build.head?
args << "-DBUILD_apps_modeler=AUTO_OFF"
else
args << "-DBUILD_apps_modeler:BOOL=OFF"
end
if build.with? "openni"
args << "-DOPENNI_INCLUDE_DIR=#{Formula["openni"].opt_include}/ni"
else
args << "-DCMAKE_DISABLE_FIND_PACKAGE_OpenNI:BOOL=TRUE"
end
if build.with? "openni2"
ENV.append "OPENNI2_INCLUDE", "#{Formula["openni2"].opt_include}/ni2"
ENV.append "OPENNI2_LIB", "#{Formula["openni2"].opt_lib}/ni2"
args << "-DBUILD_OPENNI2:BOOL=ON"
end
mkdir "build" do
system "cmake", "..", *args
system "make", "install"
prefix.install Dir["#{bin}/*.app"]
end
end
test do
assert_match "tiff files", shell_output("#{bin}/pcl_tiff2pcd -h", 255)
end
end
| 31.857143 | 93 | 0.714635 |
6274657f881a147618edee1600eb424c6b7cf273 | 226 | Rails.application.routes.draw do
resources :tasks, except: [:show, :index]
resources :projects do
member do
get 'tasks_feed'
end
end
devise_for :users
get 'welcome/index'
root 'welcome#index'
end
| 14.125 | 43 | 0.672566 |
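A quick sketch of the URL helpers the routing block above produces, assuming Rails' resourceful defaults and Devise's standard session routes; `project` and `task` are placeholder records, not part of the original file.
# Helper methods generated by the routes above (illustrative only)
new_task_path                      # :show and :index are excluded for tasks
edit_task_path(task)
project_path(project)
tasks_feed_project_path(project)   # the custom member route on projects
new_user_session_path              # added by devise_for :users
welcome_index_path                 # from get 'welcome/index'
root_path                          # routed to welcome#index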
f84b24cd60ff166ede1107b64280cbc5ef6684d4 | 955 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: xsuportal/services/admin/dashboard.proto
require 'google/protobuf'
require 'xsuportal/resources/leaderboard_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("xsuportal/services/admin/dashboard.proto", :syntax => :proto3) do
add_message "xsuportal.proto.services.admin.DashboardRequest" do
end
add_message "xsuportal.proto.services.admin.DashboardResponse" do
optional :leaderboard, :message, 1, "xsuportal.proto.resources.Leaderboard"
end
end
end
module Xsuportal
module Proto
module Services
module Admin
DashboardRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("xsuportal.proto.services.admin.DashboardRequest").msgclass
DashboardResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("xsuportal.proto.services.admin.DashboardResponse").msgclass
end
end
end
end
| 35.37037 | 145 | 0.769634 |
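A minimal usage sketch for the generated message classes above. The require path and the Leaderboard constant are assumptions inferred from the proto package names; the values are illustrative.
require 'xsuportal/services/admin/dashboard_pb' # assumed filename of this generated file

# Build a response and round-trip it through the protobuf wire format.
response = Xsuportal::Proto::Services::Admin::DashboardResponse.new(
  leaderboard: Xsuportal::Proto::Resources::Leaderboard.new # assumed class from leaderboard_pb
)
encoded = Xsuportal::Proto::Services::Admin::DashboardResponse.encode(response)
decoded = Xsuportal::Proto::Services::Admin::DashboardResponse.decode(encoded)
decoded.leaderboard # => the (empty) Leaderboard message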
334e960a9f7f6efe641ed5b6133dd8b81e778366 | 3,154 | require "redis"
require "redis-namespace"
require_relative "./rails_performance/version.rb"
require_relative "rails_performance/rails/query_builder.rb"
require_relative "rails_performance/rails/middleware.rb"
require_relative "rails_performance/models/base_record.rb"
require_relative "rails_performance/models/request_record.rb"
require_relative "rails_performance/models/sidekiq_record.rb"
require_relative "rails_performance/models/delayed_job_record.rb"
require_relative "rails_performance/models/grape_record.rb"
require_relative "rails_performance/models/trace_record.rb"
require_relative "rails_performance/models/rake_record.rb"
require_relative "rails_performance/models/custom_record.rb"
require_relative "rails_performance/data_source.rb"
require_relative "rails_performance/utils.rb"
require_relative "rails_performance/reports/base_report.rb"
require_relative "rails_performance/reports/requests_report.rb"
require_relative "rails_performance/reports/crash_report.rb"
require_relative "rails_performance/reports/response_time_report.rb"
require_relative "rails_performance/reports/throughput_report.rb"
require_relative "rails_performance/reports/recent_requests_report.rb"
require_relative "rails_performance/reports/breakdown_report.rb"
require_relative "rails_performance/reports/trace_report.rb"
require_relative "rails_performance/extensions/trace.rb"
require_relative "rails_performance/thread/current_request.rb"
module RailsPerformance
FORMAT = "%Y%m%dT%H%M"
mattr_accessor :redis
@@redis = Redis::Namespace.new("{#{::Rails.env}-rails-performance}", redis: Redis.new)
mattr_accessor :duration
@@duration = 4.hours
mattr_accessor :debug
@@debug = false
mattr_accessor :enabled
@@enabled = true
mattr_accessor :rake_tasks_performance
@@rake_tasks_performance = true
# default path where to mount gem
mattr_accessor :mount_at
@@mount_at = "/rails/performance"
# Enable http basic authentication
mattr_accessor :http_basic_authentication_enabled
@@http_basic_authentication_enabled = false
# Enable http basic authentication
mattr_accessor :http_basic_authentication_user_name
@@http_basic_authentication_user_name = 'rails_performance'
# Enable http basic authentication
mattr_accessor :http_basic_authentication_password
@@http_basic_authentication_password = 'password12'
# If you want to enable access by specific conditions
mattr_accessor :verify_access_proc
@@verify_access_proc = proc { |controller| true }
mattr_reader :ignored_endpoints
def RailsPerformance.ignored_endpoints=(endpoints)
@@ignored_endpoints = Set.new(endpoints)
end
@@ignored_endpoints = []
# skip requests if it's inside Rails Performance view
mattr_accessor :skip
@@skip = false
def RailsPerformance.setup
yield(self)
end
def RailsPerformance.log(message)
return # NOTE: this early return leaves the logging branches below unreachable
if ::Rails.logger
# puts(message)
::Rails.logger.debug(message)
else
puts(message)
end
end
end
require "rails_performance/engine"
require_relative './rails_performance/gems/custom_ext.rb'
RailsPerformance.send :extend, RailsPerformance::Gems::CustomExtension
| 32.854167 | 88 | 0.811985 |
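A hypothetical initializer showing how the configuration accessors defined above are usually set through RailsPerformance.setup; every value below is illustrative rather than a recommended default.
# config/initializers/rails_performance.rb (illustrative values)
RailsPerformance.setup do |config|
  config.enabled  = !Rails.env.test?
  config.duration = 12.hours            # how long collected data is kept
  config.debug    = false
  config.mount_at = "/rails/performance"
  config.http_basic_authentication_enabled   = true
  config.http_basic_authentication_user_name = "rails_performance"
  config.http_basic_authentication_password  = ENV.fetch("RP_PASSWORD", "password12")
  config.ignored_endpoints = ["Admin::HealthController#show"] # hypothetical endpoint
end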
6a858cd99cf57700d4fc77e9904acb395e6de7da | 313 | cask :v1 => 'spideroak' do
version :latest
sha256 :no_check
url 'https://spideroak.com/getbuild?platform=mac'
name 'SpiderOak'
homepage 'http://spideroak.com'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'SpiderOak.app'
end
| 26.083333 | 115 | 0.722045 |
1ab031a4288f645d8c56690442a1509c5c819a1b | 137 | #!/usr/bin/env ruby
$:.unshift(File.dirname(__FILE__) + '/../lib')
require 'test/unit'
require 'active_shipping'
require 'mocha/setup'
| 17.125 | 46 | 0.70073 |
ab60dc6a5f00563b329116281880baa3228022f9 | 646 | # frozen_string_literal: true
# helper methods for Location-related views:
# ListCountries, ListLocations, ShowLocation
module LocationHelper
def country_link(country, count = nil)
str = country + (count ? ": #{count}" : "")
link_to(str, action: :list_by_country, country: country)
end
# title of a link to Observations at a location, with observation count
# Observations at this Location(nn)
def show_obs_link_title_with_count(location)
"#{:show_location_observations.t} (#{location.observations.count})"
end
def calc_counts(locations)
Observation.where(location: locations).group(:location_id).count
end
end
| 30.761905 | 73 | 0.744582 |
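Hypothetical call sites for the helpers above; `location` and `locations` are placeholder records and the result comments only sketch the expected shape.
# In a view rendered with LocationHelper mixed in (placeholders throughout):
country_link("Mexico", 12)
# => link_to("Mexico: 12", action: :list_by_country, country: "Mexico")
show_obs_link_title_with_count(location)
# => e.g. "Observations at this Location (42)"
calc_counts(locations)
# => { 7 => 13, 9 => 2 }   location_id => observation count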
268e50a1f2c994c75a0aabdafd504f5c01f559ed | 1,055 | require 'spec_helper'
describe Runreg::Event do
describe '#initialize' do
subject { Runreg::Event.new hash }
context 'when has contains fields with keys containing \'Date\'' do
let(:hash) do
{
"StartDate" => 'start date',
"StartTime" => 'start time',
"EndDate" => 'end date'
}
end
it 'converts them' do
expect(Runreg::Event).to receive(:convert_datetime).with('start date') { 'converted start date' }
expect(Runreg::Event).to receive(:convert_datetime).with('end date') { 'converted end date' }
expect(Runreg::Event).not_to receive(:convert_datetime).with('start time')
expect(subject.StartDate).to eq('converted start date')
expect(subject.EndDate).to eq('converted end date')
expect(subject.StartTime).to eq('start time')
end
end
end
describe '.convert_datetime' do
subject { Runreg::Event.convert_datetime "/Date(1592625600000-0400)/" }
it { is_expected.to eq('2020-06-20T00:00:00-04:00') }
end
end
| 30.142857 | 105 | 0.63128 |
38af1bfe4b21d90bb504b5920df12f2dd5ba34bd | 829 | class ExtendedProfile < ActiveRecord::Migration
def self.up
add_column :users, :city, :string
add_column :users, :state, :integer
add_column :users, :zip, :string
add_column :users, :country, :integer
add_column :users, :phone, :string
add_column :users, :phone2, :string
add_column :users, :msn, :string
add_column :users, :skype, :string
add_column :users, :yahoo, :string
end
def self.down
remove_column :users, :city, :string
remove_column :users, :state, :integer
remove_column :users, :zip, :string
remove_column :users, :country, :integer
remove_column :users, :phone, :string
remove_column :users, :phone2, :string
remove_column :users, :msn, :string
remove_column :users, :skype, :string
remove_column :users, :yahoo, :string
end
end
| 30.703704 | 47 | 0.681544 |
5df9757764d36f4f9291d48a3a07f5cfd0359218 | 1,142 | require 'test_helper'
class CategoriesControllerTest < ActionDispatch::IntegrationTest
setup do
@category = categories(:one)
end
test "should get index" do
get categories_url
assert_response :success
end
test "should get new" do
get new_category_url
assert_response :success
end
test "should create category" do
assert_difference('Category.count') do
post categories_url, params: { category: { name: @category.name, parent: @category.parent } }
end
assert_redirected_to category_url(Category.last)
end
test "should show category" do
get category_url(@category)
assert_response :success
end
test "should get edit" do
get edit_category_url(@category)
assert_response :success
end
test "should update category" do
patch category_url(@category), params: { category: { name: @category.name, parent: @category.parent } }
assert_redirected_to category_url(@category)
end
test "should destroy category" do
assert_difference('Category.count', -1) do
delete category_url(@category)
end
assert_redirected_to categories_url
end
end
| 23.306122 | 107 | 0.721541 |
ede9c2c7e8db393db4b3f737081695f5c8eb937b | 1,599 | require File.dirname(__FILE__) + '/helper'
describe 'Registering extensions' do
module FooExtensions
def foo
end
private
def im_hiding_in_ur_foos
end
end
module BarExtensions
def bar
end
end
module BazExtensions
def baz
end
end
module QuuxExtensions
def quux
end
end
it 'will add the methods to the DSL for the class in which you register them and its subclasses' do
Sinatra::Base.register FooExtensions
assert Sinatra::Base.respond_to?(:foo)
Sinatra::Default.register BarExtensions
assert Sinatra::Default.respond_to?(:bar)
assert Sinatra::Default.respond_to?(:foo)
assert !Sinatra::Base.respond_to?(:bar)
end
it 'allows extending by passing a block' do
Sinatra::Base.register {
def im_in_ur_anonymous_module; end
}
assert Sinatra::Base.respond_to?(:im_in_ur_anonymous_module)
end
it 'will make sure any public methods added via Default#register are delegated to Sinatra::Delegator' do
Sinatra::Default.register FooExtensions
assert Sinatra::Delegator.private_instance_methods.include?("foo")
assert !Sinatra::Delegator.private_instance_methods.include?("im_hiding_in_ur_foos")
end
it 'will not delegate methods on Base#register' do
Sinatra::Base.register QuuxExtensions
assert !Sinatra::Delegator.private_instance_methods.include?("quux")
end
it 'will extend the Sinatra::Default application by default' do
Sinatra.register BazExtensions
assert !Sinatra::Base.respond_to?(:baz)
assert Sinatra::Default.respond_to?(:baz)
end
end
| 25.790323 | 106 | 0.730457 |
5df6bd341852eee9abfef703513318086876ebc3 | 2,750 | class Import::FogbugzController < Import::BaseController
before_action :verify_fogbugz_import_enabled
before_action :user_map, only: [:new_user_map, :create_user_map]
rescue_from Fogbugz::AuthenticationException, with: :fogbugz_unauthorized
def new
end
def callback
begin
res = Gitlab::FogbugzImport::Client.new(import_params.symbolize_keys)
rescue
# If the URI is invalid various errors can occur
return redirect_to new_import_fogbugz_path, alert: 'Could not connect to FogBugz, check your URL'
end
session[:fogbugz_token] = res.get_token
session[:fogbugz_uri] = params[:uri]
redirect_to new_user_map_import_fogbugz_path
end
def new_user_map
end
def create_user_map
user_map = params[:users]
unless user_map.is_a?(Hash) && user_map.all? { |k, v| !v[:name].blank? }
flash.now[:alert] = 'All users must have a name.'
return render 'new_user_map'
end
session[:fogbugz_user_map] = user_map
flash[:notice] = 'The user map has been saved. Continue by selecting the projects you want to import.'
redirect_to status_import_fogbugz_path
end
def status
unless client.valid?
return redirect_to new_import_fogbugz_path
end
@repos = client.repos
@already_added_projects = current_user.created_projects.where(import_type: 'fogbugz')
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos.reject! { |repo| already_added_projects_names.include? repo.name }
end
def jobs
jobs = current_user.created_projects.where(import_type: 'fogbugz').to_json(only: [:id, :import_status])
render json: jobs
end
def create
@repo_id = params[:repo_id]
repo = client.repo(@repo_id)
fb_session = { uri: session[:fogbugz_uri], token: session[:fogbugz_token] }
@target_namespace = current_user.namespace
@project_name = repo.name
namespace = @target_namespace
umap = session[:fogbugz_user_map] || client.user_map
@project = Gitlab::FogbugzImport::ProjectCreator.new(repo, fb_session, namespace, current_user, umap).execute
end
private
def client
@client ||= Gitlab::FogbugzImport::Client.new(token: session[:fogbugz_token], uri: session[:fogbugz_uri])
end
def user_map
@user_map ||= begin
user_map = client.user_map
stored_user_map = session[:fogbugz_user_map]
user_map.update(stored_user_map) if stored_user_map
user_map
end
end
def fogbugz_unauthorized(exception)
redirect_to new_import_fogbugz_path, alert: exception.message
end
def import_params
params.permit(:uri, :email, :password)
end
def verify_fogbugz_import_enabled
render_404 unless fogbugz_import_enabled?
end
end
| 26.699029 | 113 | 0.728 |
d54d4eaa705199bf79e25461a8d23ccf0c14d5ba | 1,249 | # == Schema Information
#
# Table name: partners
#
# id :integer not null, primary key
# name :string
# email :string
# created_at :datetime
# updated_at :datetime
# organization_id :integer
# status :string
#
class Partner < ApplicationRecord
require "csv"
belongs_to :organization
has_many :distributions
validates :organization, presence: true
validates :name, :email, presence: true, uniqueness: true
validates :email, format: { with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, on: :create }
scope :for_csv_export, ->(organization) {
where(organization: organization)
.order(:name)
}
include DiaperPartnerClient
after_create :update_diaper_partner
def self.import_csv(filepath, organization_id)
data = File.read(filepath, encoding: "BOM|UTF-8")
CSV.parse(data, headers: true) do |row|
loc = Partner.new(row.to_hash)
loc.organization_id = organization_id
loc.save!
end
end
def self.csv_export_headers
%w{Name Email}
end
def csv_export_attributes
[name, email]
end
private
def update_diaper_partner
update(status: "Pending")
DiaperPartnerClient.post(attributes)
end
end
| 22.303571 | 96 | 0.653323 |
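A hedged sketch of how the class methods above might be exercised; the CSV path and `organization` are placeholders, not values from the original model.
# Importing partners from a file for one organization (illustrative):
Partner.import_csv(Rails.root.join("tmp", "partners.csv"), organization.id)

# Exporting the same organization's partners back out as CSV:
csv = CSV.generate do |rows|
  rows << Partner.csv_export_headers
  Partner.for_csv_export(organization).each { |partner| rows << partner.csv_export_attributes }
end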
4a2c0ebfdb3fc03b555b5077ce791481323ec7be | 948 | module GroupMethods
protected
def get_group
@group = Group.find_by_url_key(params[:group_id]) || Group.find_by_url_key(params[:id]) || Group.find(params[:group_id]) || Group.find(params[:id])
if !@group
flash[:notice] = "取得群組訊息時出現問題,請重新嘗試一次"
permission_denied
end
end
def membership_required
return true if admin?
if !@group.can_participate?(logged_in_user)
flash[:notice] = "你必須先成為這個群組的成員" # "You must first become a member of this group"
permission_denied
false
end
end
# TODO decide if we need to keep this method or if we need to use authorized? from authenticated_system instead
def authorization_required
return true if admin?
if !@group.can_edit?(logged_in_user)
flash[:notice] = "你沒有權限執行這個動作" # "You do not have permission to perform this action"
permission_denied
@group = nil
false
end
end
def permission_denied
respond_to do |format|
format.html do
redirect_to index_path
end
end
end
end | 22.046512 | 151 | 0.668776 |
abd0b32e6e0e4b8785b933a94d74b8137a351ed8 | 2,045 | # Copyright (c) 2020-2021 Andy Maleh
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This is a sample for the c_combo widget, a more customizable version of combo
class HelloCCombo
class Person
attr_accessor :country, :country_options
def initialize
self.country_options = ['', 'Canada', 'US', 'Mexico']
reset_country!
end
def reset_country!
self.country = 'Canada'
end
end
include Glimmer::UI::CustomShell
before_body do
@person = Person.new
end
body {
shell {
row_layout(:vertical) {
fill true
}
text 'Hello, C Combo!'
c_combo(:read_only) {
selection <=> [@person, :country] # also binds to country_options by convention
font height: 45 # unlike `combo`, `c_combo` changes height when setting the font height
}
button {
text 'Reset Selection'
on_widget_selected do
@person.reset_country!
end
}
}
}
end
HelloCCombo.launch
| 30.073529 | 95 | 0.692421 |
e8388825718710e5b53fbff0952ba7bfef9a43fb | 132 | class User < Sequel::Model
# def validate
# super
# errors.add(:name, 'cannot be empty') if !name || name.empty?
# end
end | 16.5 | 65 | 0.628788 |
3924775cb9e39d9b7b0067d10de04147c8581526 | 751 | cask "font-iosevka-ss06" do
version "5.0.5"
sha256 "c7c24d6cd732cf3b4614abe328673f8760f0f2b7e2855c2f3e875dd912a1132c"
url "https://github.com/be5invis/Iosevka/releases/download/v#{version}/ttc-iosevka-ss06-#{version}.zip"
appcast "https://github.com/be5invis/Iosevka/releases.atom"
name "Iosevka SS06"
desc "Sans-serif, slab-serif, monospace and quasi‑proportional typeface family"
homepage "https://github.com/be5invis/Iosevka/"
font "iosevka-ss06-bold.ttc"
font "iosevka-ss06-extrabold.ttc"
font "iosevka-ss06-extralight.ttc"
font "iosevka-ss06-heavy.ttc"
font "iosevka-ss06-light.ttc"
font "iosevka-ss06-medium.ttc"
font "iosevka-ss06-regular.ttc"
font "iosevka-ss06-semibold.ttc"
font "iosevka-ss06-thin.ttc"
end
| 35.761905 | 105 | 0.757656 |
ff7248edc0616dd72a34d9d8f50e9cff7da22d6e | 6,261 | # frozen_string_literal: false
require 'test/unit'
class TestDefined < Test::Unit::TestCase
class Foo
def foo
p :foo
end
protected :foo
def bar(f)
yield(defined?(self.foo))
yield(defined?(f.foo))
end
def baz(f)
end
attr_accessor :attr
def attrasgn_test
yield(defined?(self.attr = 1))
end
end
def defined_test
return !defined?(yield)
end
def test_defined
$x = nil
assert(defined?($x)) # global variable
assert_equal('global-variable', defined?($x))# returns description
assert_nil(defined?(foo)) # undefined
foo=5
assert(defined?(foo)) # local variable
assert(defined?(Array)) # constant
assert(defined?(::Array)) # toplevel constant
assert(defined?(File::Constants)) # nested constant
assert(defined?(Object.new)) # method
assert(defined?(Object::new)) # method
assert(!defined?(Object.print)) # private method
assert(defined?(1 == 2)) # operator expression
f = Foo.new
assert_nil(defined?(f.foo)) # protected method
f.bar(f) { |v| assert(v) }
assert_nil(defined?(f.quux)) # undefined method
assert_nil(defined?(f.baz(x))) # undefined argument
x = 0
assert(defined?(f.baz(x)))
assert_nil(defined?(f.quux(x)))
assert(defined?(print(x)))
assert_nil(defined?(quux(x)))
assert(defined?(f.attr = 1))
f.attrasgn_test { |v| assert(v) }
assert(defined_test) # not iterator
assert(!defined_test{}) # called as iterator
/a/ =~ ''
assert_equal nil, defined?($&)
assert_equal nil, defined?($`)
assert_equal nil, defined?($')
assert_equal nil, defined?($+)
assert_equal nil, defined?($1)
assert_equal nil, defined?($2)
/a/ =~ 'a'
assert_equal 'global-variable', defined?($&)
assert_equal 'global-variable', defined?($`)
assert_equal 'global-variable', defined?($') # '
assert_equal nil, defined?($+)
assert_equal nil, defined?($1)
assert_equal nil, defined?($2)
/(a)/ =~ 'a'
assert_equal 'global-variable', defined?($&)
assert_equal 'global-variable', defined?($`)
assert_equal 'global-variable', defined?($') # '
assert_equal 'global-variable', defined?($+)
assert_equal 'global-variable', defined?($1)
assert_equal nil, defined?($2)
/(a)b/ =~ 'ab'
assert_equal 'global-variable', defined?($&)
assert_equal 'global-variable', defined?($`)
assert_equal 'global-variable', defined?($') # '
assert_equal 'global-variable', defined?($+)
assert_equal 'global-variable', defined?($1)
assert_equal nil, defined?($2)
assert_equal("nil", defined?(nil))
assert_equal("true", defined?(true))
assert_equal("false", defined?(false))
assert_equal("expression", defined?(1))
bug8224 = '[ruby-core:54024] [Bug #8224]'
(1..3).each do |level|
expr = "("*level+")"*level
assert_equal("nil", eval("defined? #{expr}"), "#{bug8224} defined? #{expr}")
assert_equal("nil", eval("defined?(#{expr})"), "#{bug8224} defined?(#{expr})")
end
end
def test_defined_impl_specific
feature7035 = '[ruby-core:47558]' # not spec
assert_predicate(defined?(Foo), :frozen?, feature7035)
assert_same(defined?(Foo), defined?(Array), feature7035)
end
class TestAutoloadedSuperclass
autoload :A, "a"
end
class TestAutoloadedSubclass < TestAutoloadedSuperclass
def a?
defined?(A)
end
end
def test_autoloaded_subclass
bug = "[ruby-core:35509]"
x = TestAutoloadedSuperclass.new
class << x
def a?; defined?(A); end
end
assert_equal("constant", x.a?, bug)
assert_equal("constant", TestAutoloadedSubclass.new.a?, bug)
end
class TestAutoloadedNoload
autoload :A, "a"
def a?
defined?(A)
end
def b?
defined?(A::B)
end
end
def test_autoloaded_noload
loaded = $".dup
$".clear
loadpath = $:.dup
$:.clear
x = TestAutoloadedNoload.new
assert_equal("constant", x.a?)
assert_nil(x.b?)
assert_equal([], $")
ensure
$".replace(loaded)
$:.replace(loadpath)
end
def test_exception
bug5786 = '[ruby-dev:45021]'
assert_nil(defined?(raise("[Bug#5786]")::A), bug5786)
end
def test_define_method
bug6644 = '[ruby-core:45831]'
a = Class.new do
def self.def_f!;
singleton_class.send(:define_method, :f) { defined? super }
end
end
aa = Class.new(a)
a.def_f!
assert_nil(a.f)
assert_nil(aa.f)
aa.def_f!
assert_equal("super", aa.f, bug6644)
assert_nil(a.f, bug6644)
end
def test_super_in_included_method
c0 = Class.new do
def m
end
end
m1 = Module.new do
def m
defined?(super)
end
end
c = Class.new(c0) do include m1
def m
super
end
end
assert_equal("super", c.new.m)
end
def test_super_in_block
bug8367 = '[ruby-core:54769] [Bug #8367]'
c = Class.new do
def x; end
end
m = Module.new do
def b; yield; end
def x; b {return defined?(super)}; end
end
o = c.new
o.extend(m)
assert_equal("super", o.x, bug8367)
end
def test_super_toplevel
assert_separately([], "assert_nil(defined?(super))")
end
class ExampleRespondToMissing
attr_reader :called
def initialize
@called = false
end
def respond_to_missing? *args
@called = true
false
end
def existing_method
end
def func_defined_existing_func
defined?(existing_method())
end
def func_defined_non_existing_func
defined?(non_existing_method())
end
end
def test_method_by_respond_to_missing
bug_11211 = '[Bug #11211]'
obj = ExampleRespondToMissing.new
assert_equal("method", defined?(obj.existing_method), bug_11211)
assert_equal(false, obj.called, bug_11211)
assert_equal(nil, defined?(obj.non_existing_method), bug_11211)
assert_equal(true, obj.called, bug_11211)
bug_11212 = '[Bug #11212]'
obj = ExampleRespondToMissing.new
assert_equal("method", obj.func_defined_existing_func, bug_11212)
assert_equal(false, obj.called, bug_11212)
assert_equal(nil, obj.func_defined_non_existing_func, bug_11212)
assert_equal(true, obj.called, bug_11212)
end
end
| 24.944223 | 84 | 0.63089 |
1cf5f4b33eda0ac6848ce1f2c247ab19d03553b4 | 8,961 | require "test_helper"
class Api::V1::DeletionsControllerTest < ActionController::TestCase
context "with yank rubygem api key scope" do
setup do
@api_key = create(:api_key, key: "12345", yank_rubygem: true)
@user = @api_key.user
@request.env["HTTP_AUTHORIZATION"] = "12345"
end
context "for a gem SomeGem with a version 0.1.0" do
setup do
@rubygem = create(:rubygem, name: "SomeGem")
@v1 = create(:version, rubygem: @rubygem, number: "0.1.0", platform: "ruby")
@ownership = create(:ownership, user: @user, rubygem: @rubygem)
RubygemFs.instance.store("gems/#{@v1.full_name}.gem", "")
end
context "when mfa for UI and API is enabled" do
setup do
@user.enable_mfa!(ROTP::Base32.random_base32, :ui_and_api)
end
context "ON DELETE to create for existing gem version without OTP" do
setup do
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :unauthorized
end
context "ON DELETE to create for existing gem version with incorrect OTP" do
setup do
@request.env["HTTP_OTP"] = (ROTP::TOTP.new(@user.mfa_seed).now.to_i.succ % 1_000_000).to_s
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :unauthorized
end
context "ON DELETE to create for existing gem version with correct OTP" do
setup do
@request.env["HTTP_OTP"] = ROTP::TOTP.new(@user.mfa_seed).now
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :success
should "keep the gem, deindex, keep owner" do
assert_equal 1, @rubygem.versions.count
assert @rubygem.versions.indexed.count.zero?
end
should "record the deletion" do
assert_not_nil Deletion.where(user: @user,
rubygem: @rubygem.name,
number: @v1.number).first
end
end
end
context "when mfa for UI only is enabled" do
setup do
@user.enable_mfa!(ROTP::Base32.random_base32, :ui_only)
end
context "api key has mfa enabled" do
setup do
@api_key.mfa = true
@api_key.save!
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :unauthorized
end
context "api key does not have mfa enabled" do
setup do
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :success
end
end
context "when mfa is required" do
setup do
@v1.metadata = { "rubygems_mfa_required" => "true" }
@v1.save!
end
context "when user has mfa enabled" do
setup do
@user.enable_mfa!(ROTP::Base32.random_base32, :ui_and_api)
@request.env["HTTP_OTP"] = ROTP::TOTP.new(@user.mfa_seed).now
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :success
should "keep the gem, deindex, keep owner" do
assert_equal 1, @rubygem.versions.count
assert @rubygem.versions.indexed.count.zero?
end
should "record the deletion" do
assert_not_nil Deletion.where(user: @user,
rubygem: @rubygem.name,
number: @v1.number).first
end
end
context "when user has not mfa enabled" do
setup do
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :forbidden
end
end
context "ON DELETE to create for existing gem version" do
setup do
create(:global_web_hook, user: @user, url: "http://example.org")
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :success
should "keep the gem, deindex, keep owner" do
assert_equal 1, @rubygem.versions.count
assert @rubygem.versions.indexed.count.zero?
end
should "record the deletion" do
assert_not_nil Deletion.where(user: @user,
rubygem: @rubygem.name,
number: @v1.number).first
end
should "have enqueued a webhook" do
assert_instance_of Notifier, Delayed::Job.last.payload_object
end
end
context "and a version 0.1.1" do
setup do
@v2 = create(:version, rubygem: @rubygem, number: "0.1.1", platform: "ruby")
end
context "ON DELETE to create for version 0.1.1" do
setup do
delete :create, params: { gem_name: @rubygem.to_param, version: @v2.number }
end
should respond_with :success
should "keep the gem, deindex it, and keep the owners" do
assert_equal 2, @rubygem.versions.count
assert_equal 1, @rubygem.versions.indexed.count
assert_equal 1, @rubygem.ownerships.count
end
should "record the deletion" do
assert_not_nil Deletion.where(user: @user,
rubygem: @rubygem.name,
number: @v2.number).first
end
end
end
context "and a version 0.1.1 and platform x86-darwin-10" do
setup do
@v2 = create(:version, rubygem: @rubygem, number: "0.1.1", platform: "x86-darwin-10")
end
context "ON DELETE to create for version 0.1.1 and x86-darwin-10" do
setup do
delete :create, params: { gem_name: @rubygem.to_param, version: @v2.number, platform: @v2.platform }
end
should respond_with :success
should "keep the gem, deindex it, and keep the owners" do
assert_equal 2, @rubygem.versions.count
assert_equal 1, @rubygem.versions.indexed.count
assert_equal 1, @rubygem.ownerships.count
end
should "show platform in response" do
assert_equal "Successfully deleted gem: SomeGem (0.1.1-x86-darwin-10)", @response.body
end
should "record the deletion" do
assert_not_nil Deletion.where(
user: @user,
rubygem: @rubygem.name,
number: @v2.number,
platform: @v2.platform
).first
end
end
end
context "ON DELETE to create for existing gem with invalid version" do
setup do
delete :create, params: { gem_name: @rubygem.to_param, version: "0.2.0" }
end
should respond_with :not_found
should "not modify any versions" do
assert_equal 1, @rubygem.versions.count
assert_equal 1, @rubygem.versions.indexed.count
end
should "not record the deletion" do
assert_equal 0, @user.deletions.count
end
end
context "ON DELETE to create for someone else's gem" do
setup do
other_user = create(:user)
other_rubygem = create(:rubygem, name: "SomeOtherGem")
create(:version, rubygem: other_rubygem, number: "0.1.0", platform: "ruby")
create(:ownership, user: other_user, rubygem: other_rubygem)
delete :create, params: { gem_name: other_rubygem.to_param, version: "0.1.0" }
end
should respond_with :forbidden
should "not record the deletion" do
assert_equal 0, @user.deletions.count
end
end
context "ON DELETE to create for an already deleted gem" do
setup do
Deletion.create!(user: @user, version: @v1)
delete :create, params: { gem_name: @rubygem.to_param, version: @v1.number }
end
should respond_with :unprocessable_entity
should "not re-record the deletion" do
assert_equal 1, Deletion.where(user: @user,
rubygem: @rubygem.name,
number: @v1.number).count
end
end
end
end
context "without yank rubygem api key scope" do
setup do
api_key = create(:api_key, key: "12342")
@request.env["HTTP_AUTHORIZATION"] = "12342"
rubygem = create(:rubygem, number: "1.0.0", owners: [api_key.user])
delete :create, params: { gem_name: rubygem.to_param, version: "1.0.0" }
end
should respond_with :forbidden
end
end
| 37.651261 | 112 | 0.572369 |
1dfa60819e92b4a6c76f26b104f2612d7f2ba1cc | 238 | class ApplicationController < ActionController::Base
protect_from_forgery with: :null_session
include DeviseTokenAuth::Concerns::SetUserByToken
include ActionController::ImplicitRender
include ActionController::Serialization
end
| 29.75 | 52 | 0.848739 |
03fbe4a71edf554638ea0f7137300acf78d8603b | 10,882 | # frozen_string_literal: true
require "rails_helper"
require File.expand_path("config_shared_examples", __dir__)
module ActiveAdmin
RSpec.describe Resource do
it_should_behave_like "ActiveAdmin::Resource"
around do |example|
with_resources_during(example) { namespace.register Category }
end
let(:application) { ActiveAdmin::Application.new }
let(:namespace) { Namespace.new(application, :admin) }
def config(options = {})
@config ||= Resource.new(namespace, Category, options)
end
it { respond_to :resource_class }
describe "#resource_table_name" do
it "should return the resource's table name" do
expect(config.resource_table_name).to eq '"categories"'
end
context "when the :as option is given" do
it "should return the resource's table name" do
expect(config(as: "My Category").resource_table_name).to eq '"categories"'
end
end
end
describe "#resource_column_names" do
it "should return the resource's column names" do
expect(config.resource_column_names).to eq Category.column_names
end
end
describe "#decorator_class" do
it "returns nil by default" do
expect(config.decorator_class).to eq nil
end
context "when a decorator is defined" do
around do |example|
with_resources_during(example) { resource }
end
let(:resource) { namespace.register(Post) { decorate_with PostDecorator } }
specify "#decorator_class_name should return PostDecorator" do
expect(resource.decorator_class_name).to eq "::PostDecorator"
end
it "returns the decorator class" do
expect(resource.decorator_class).to eq PostDecorator
end
end
end
describe "controller name" do
it "should return a namespaced controller name" do
expect(config.controller_name).to eq "Admin::CategoriesController"
end
context "when non namespaced controller" do
let(:namespace) { ActiveAdmin::Namespace.new(application, :root) }
it "should return a non namespaced controller name" do
expect(config.controller_name).to eq "CategoriesController"
end
end
end
describe "#include_in_menu?" do
subject { resource }
around do |example|
with_resources_during(example) { resource }
end
context "when regular resource" do
let(:resource) { namespace.register(Post) }
it { is_expected.to be_include_in_menu }
end
context "when menu set to false" do
let(:resource) { namespace.register(Post) { menu false } }
it { is_expected.not_to be_include_in_menu }
end
end
describe "#belongs_to" do
it "should build a belongs to configuration" do
expect(config.belongs_to_config).to eq nil
config.belongs_to :posts
expect(config.belongs_to_config).to_not eq nil
end
it "should not set the target menu to the belongs to target" do
expect(config.navigation_menu_name).to eq ActiveAdmin::DEFAULT_MENU
config.belongs_to :posts
expect(config.navigation_menu_name).to eq ActiveAdmin::DEFAULT_MENU
end
end
describe "scoping" do
context "when using a block" do
before do
@resource = application.register Category do
scope_to do
"scoped"
end
end
end
it "should call the proc for the begin of association chain" do
begin_of_association_chain = @resource.controller.new.send(:begin_of_association_chain)
expect(begin_of_association_chain).to eq "scoped"
end
end
context "when using a symbol" do
before do
@resource = application.register Category do
scope_to :current_user
end
end
it "should call the method for the begin of association chain" do
controller = @resource.controller.new
expect(controller).to receive(:current_user).and_return(true)
begin_of_association_chain = controller.send(:begin_of_association_chain)
expect(begin_of_association_chain).to eq true
end
end
describe "getting the method for the association chain" do
context "when a simple registration" do
before do
@resource = application.register Category do
scope_to :current_user
end
end
it "should return the pluralized collection name" do
expect(@resource.controller.new.send(:method_for_association_chain)).to eq :categories
end
end
context "when passing in the method as an option" do
before do
@resource = application.register Category do
scope_to :current_user, association_method: :blog_categories
end
end
it "should return the method from the option" do
expect(@resource.controller.new.send(:method_for_association_chain)).to eq :blog_categories
end
end
end
end
describe "sort order" do
class MockResource
end
context "when resource class responds to primary_key" do
it "should sort by primary key desc by default" do
expect(MockResource).to receive(:primary_key).and_return("pk")
config = Resource.new(namespace, MockResource)
expect(config.sort_order).to eq "pk_desc"
end
end
context "when resource class does not respond to primary_key" do
it "should default to id" do
config = Resource.new(namespace, MockResource)
expect(config.sort_order).to eq "id_desc"
end
end
it "should be set-able" do
config.sort_order = "task_id_desc"
expect(config.sort_order).to eq "task_id_desc"
end
end
describe "adding a scope" do
it "should add a scope" do
config.scope :published
expect(config.scopes.first).to be_a(ActiveAdmin::Scope)
expect(config.scopes.first.name).to eq "Published"
expect(config.scopes.first.show_count).to eq true
end
context "when show_count disabled" do
it "should add a scope show_count = false" do
namespace.scopes_show_count = false
config.scope :published
expect(config.scopes.first.show_count).to eq false
end
end
it "should retrive a scope by its id" do
config.scope :published
expect(config.get_scope_by_id(:published).name).to eq "Published"
end
it "should retrieve the default scope by proc" do
config.scope :published, default: proc { true }
config.scope :all
expect(config.default_scope.name).to eq "Published"
end
end
describe "#csv_builder" do
context "when no csv builder set" do
it "should return a default column builder with id and content columns" do
expect(config.csv_builder.exec_columns.size).to eq @config.content_columns.size + 1
end
end
context "when csv builder set" do
it "shuld return the csv_builder we set" do
csv_builder = CSVBuilder.new
config.csv_builder = csv_builder
expect(config.csv_builder).to eq csv_builder
end
end
end
describe "#breadcrumb" do
subject { config.breadcrumb }
context "when no breadcrumb is set" do
it { is_expected.to eq(namespace.breadcrumb) }
end
context "when breadcrumb is set" do
context "when set to true" do
before { config.breadcrumb = true }
it { is_expected.to eq true }
end
context "when set to false" do
before { config.breadcrumb = false }
it { is_expected.to eq false }
end
end
end
describe "#find_resource" do
let(:post) { double }
around do |example|
with_resources_during(example) { resource }
end
context "without a decorator" do
let(:resource) { namespace.register(Post) }
it "can find the resource" do
allow(Post).to receive(:find_by).with("id" => "12345") { post }
expect(resource.find_resource("12345")).to eq post
end
end
context "with a decorator" do
let(:resource) { namespace.register(Post) { decorate_with PostDecorator } }
it "decorates the resource" do
allow(Post).to receive(:find_by).with("id" => "12345") { post }
expect(resource.find_resource("12345")).to eq PostDecorator.new(post)
end
it "does not decorate a not found resource" do
allow(Post).to receive(:find_by).with("id" => "54321") { nil }
expect(resource.find_resource("54321")).to equal nil
end
end
context "when using a nonstandard primary key" do
let(:resource) { namespace.register(Post) }
before do
allow(Post).to receive(:primary_key).and_return "something_else"
allow(Post).to receive(:find_by).with("something_else" => "55555") { post }
end
it "can find the post by the custom primary key" do
expect(resource.find_resource("55555")).to eq post
end
end
context "when using controller finder" do
let(:resource) do
namespace.register(Post) do
controller do
defaults finder: :find_by_title!
end
end
end
after do
Admin.send(:remove_const, :"PostsController")
end
it "can find the post by controller finder" do
allow(Post).to receive(:find_by_title!).with("title-name").and_return(post)
expect(resource.find_resource("title-name")).to eq post
end
end
end
describe "delegation" do
let(:controller) do
Class.new do
def method_missing(name, *args, &block)
"called #{name}"
end
end.new
end
let(:resource) { ActiveAdmin::ResourceDSL.new(double) }
before do
expect(resource).to receive(:controller).and_return(controller)
end
%w[
before_build after_build
before_create after_create
before_update after_update
before_save after_save
before_destroy after_destroy
skip_before_action skip_around_action skip_after_action
append_before_action append_around_action append_after_action
prepend_before_action prepend_around_action prepend_after_action
before_action around_action after_action
actions
].each do |method|
it "delegates #{method}" do
expect(resource.send(method)).to eq "called #{method}"
end
end
end
end
end
| 31.450867 | 103 | 0.631869 |
9132759931be6096e09d1241d834f5df8dfc3106 | 120 | module MyApp
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
end
end
| 20 | 54 | 0.8 |
1ac3fd7c64d5473853013963969f7dd2152156c2 | 2,128 | require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')
describe "include_text" do
describe "where target is a String" do
it 'should match submitted text using a string' do
string = 'foo'
string.should include_text('foo')
end
it 'should match if the text is contained' do
string = 'I am a big piece of text'
string.should include_text('big piece')
end
it 'should not match if text is not contained' do
string = 'I am a big piece of text'
string.should_not include_text('corey')
end
end
end
describe "include_text", :type => :controller do
['isolation','integration'].each do |mode|
if mode == 'integration'
integrate_views
end
describe "where target is a response (in #{mode} mode)" do
controller_name :render_spec
it "should pass with exactly matching text" do
post 'text_action'
response.should include_text("this is the text for this action")
end
it 'should pass with substring matching text' do
post 'text_action'
response.should include_text('text for this')
end
it "should fail with matching text" do
post 'text_action'
lambda {
response.should include_text("this is NOT the text for this action")
}.should fail_with("expected to find \"this is NOT the text for this action\" in \"this is the text for this action\"")
end
it "should fail when a template is rendered" do
post 'some_action'
failure_message = case mode
when 'isolation'
/expected to find \"this is the text for this action\" in \"render_spec\/some_action\"/
when 'integration'
/expected to find \"this is the text for this action\" in \"\"/
end
lambda {
response.should include_text("this is the text for this action")
}.should fail_with(failure_message)
end
it "should pass using should_not with incorrect text" do
post 'text_action'
response.should_not include_text("the accordian guy")
end
end
end
end
| 29.971831 | 127 | 0.642387 |
6a8fc7ee17b51c257f7a96f0ef45eb03243ff294 | 2,206 | begin
require 'stomp'
rescue LoadError
puts "Can't load stomp gem - all queue loops will be disabled!"
end
require 'timeout'
module Loops
class Queue < Base
def self.check_dependencies
raise "No stomp gem installed!" unless defined?(Stomp::Client)
end
def run
create_client
config['queue_name'] ||= "/queue/loops/#{name}"
config['prefetch_size'] ||= 1
debug "Subscribing for the queue #{config['queue_name']}..."
headers = { :ack => :client }
headers["activemq.prefetchSize"] = config['prefetch_size'] if config['prefetch_size']
@total_served = 0
@client.subscribe(config['queue_name'], headers) do |msg|
begin
if config['action_timeout']
timeout(config['action_timeout']) { process_message(msg) }
else
process_message(msg)
end
@client.acknowledge(msg)
@total_served += 1
if config['max_requests'] && @total_served >= config['max_requests'].to_i
disconnect_client_and_exit
end
rescue Exception => e
error "Exception from process message! We won't be ACKing the message."
error "Details: #{e} at #{e.backtrace.first}"
disconnect_client_and_exit
end
end
@client.join
rescue Exception => e
error "Closing queue connection because of exception: #{e} at #{e.backtrace.first}"
disconnect_client_and_exit
end
def process_message(msg)
raise "This method process_message(msg) should be overriden in the loop class!"
end
private
def create_client
config['port'] ||= config['port'].to_i == 0 ? 61613 : config['port'].to_i
config['host'] ||= 'localhost'
@client = Stomp::Client.open(config['user'], config['password'], config['host'], config['port'], true)
setup_signals
end
def disconnect_client_and_exit
debug "Unsubscribing..."
@client.unsubscribe(name) rescue nil
@client.close() rescue nil
exit(0)
end
def setup_signals
Signal.trap('INT') { disconnect_client_and_exit }
Signal.trap('TERM') { disconnect_client_and_exit }
end
end
end
| 27.924051 | 108 | 0.62602 |
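A minimal sketch of a concrete loop built on the base class above; the class name and body are invented, `debug` is assumed to come from Loops::Base, and process_message is the hook the base class expects subclasses to override.
# app/loops/emails_queue_loop.rb (illustrative)
class EmailsQueueLoop < Loops::Queue
  def process_message(msg)
    debug "received: #{msg.body.inspect}"
    # Parse the payload and do the real work here; raising an exception
    # makes the base class skip the ACK and shut the loop down.
  end
end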
e99e4056e70bcfd57021108141feddd91ac13953 | 1,915 | ExceptionHunter.setup do |config|
# == Enabling
#
# This flag allows disabling error tracking, it's set to track in
# any environment but development or test by default
#
# config.enabled = !(Rails.env.development? || Rails.env.test?)
# == Dashboard User
# Exception Hunter allows you to restrict users who can see the dashboard
# to the ones included in the database. You can change the table name in
# case you are not satisfied with the default one. You can also remove the
# configuration if you wish to have no access restrictions for the dashboard.
#
config.admin_user_class = 'AdminUser'
# == Current User
#
# Exception Hunter will include the user as part of the environment
# data, if it was to be available. The default configuration uses devise
# :current_user method. You can change it in case you named your user model
# in some other way (i.e. Member). You can also remove the configuration if
# you don't wish to track user data.
#
config.current_user_method = :current_user
# == Current User Attributes
#
# Exception Hunter will try to include the attributes defined here
# as part of the user information that is kept from the request.
#
config.user_attributes = [:id, :email]
# == Stale errors
#
# You can configure how long it takes for errors to go stale. This is
# taken into account when purging old error messages but nothing will
# happen automatically.
#
# config.errors_stale_time = 45.days
# == Slack notifications
#
# You can configure if you want to send notifications to slack for each error occurrence.
# You can enter multiple webhook urls.
# Default: []
#
# config.notifiers << {
# name: :slack,
# options: {
# webhook: 'SLACK_WEBHOOK_URL_1'
# }
# }
#
# config.notifiers << {
# name: :slack,
# options: {
# webhook: 'SLACK_WEBHOOK_URL_2'
# }
# }
end
| 30.887097 | 91 | 0.690862 |
1a13b828db62cb35f7b3c644c9d525d666851a3e | 3,166 | require 'support/aruba_support'
RSpec.describe 'Fail if no examples' do
include_context "aruba support"
before { clean_current_dir }
context 'when 1 passing example' do
def passing_example(fail_if_no_examples)
"
RSpec.configure { |c| c.fail_if_no_examples = #{fail_if_no_examples} }
RSpec.describe 'something' do
it 'succeeds' do
true
end
end
"
end
it 'succeeds if fail_if_no_examples set to true' do
write_file 'spec/example_spec.rb', passing_example(true)
run_command ""
expect(last_cmd_stdout).to include("1 example, 0 failures")
expect(last_cmd_exit_status).to eq(0)
end
it 'succeeds if fail_if_no_examples set to false' do
write_file 'spec/example_spec.rb', passing_example(false)
run_command ""
expect(last_cmd_stdout).to include("1 example, 0 failures")
expect(last_cmd_exit_status).to eq(0)
end
end
context 'when 1 failing example' do
def failing_example(fail_if_no_examples)
"
RSpec.configure { |c| c.fail_if_no_examples = #{fail_if_no_examples} }
RSpec.describe 'something' do
it 'fails' do
fail
end
end
"
end
it 'fails if fail_if_no_examples set to true' do
write_file 'spec/example_spec.rb', failing_example(true)
run_command ""
expect(last_cmd_stdout).to include("1 example, 1 failure")
expect(last_cmd_exit_status).to eq(1)
end
it 'fails if fail_if_no_examples set to false' do
write_file 'spec/example_spec.rb', failing_example(false)
run_command ""
expect(last_cmd_stdout).to include("1 example, 1 failure")
expect(last_cmd_exit_status).to eq(1)
end
end
context 'when 0 examples' do
def no_examples(fail_if_no_examples)
"
RSpec.configure { |c| c.fail_if_no_examples = #{fail_if_no_examples} }
RSpec.describe 'something' do
end
"
end
it 'fails if fail_if_no_examples set to true' do
write_file 'spec/example_spec.rb', no_examples(true)
run_command ""
expect(last_cmd_stdout).to include("0 examples, 0 failures")
expect(last_cmd_exit_status).to eq(1)
end
it 'succeeds if fail_if_no_examples set to false' do
write_file 'spec/example_spec.rb', no_examples(false)
run_command ""
expect(last_cmd_stdout).to include("0 examples, 0 failures")
expect(last_cmd_exit_status).to eq(0)
end
context 'when custom failure_exit_code set' do
def no_examples_custom_failure_exit_code(fail_if_no_examples)
"
RSpec.configure do |c|
c.fail_if_no_examples = #{fail_if_no_examples}
c.failure_exit_code = 15
end
RSpec.describe 'something' do
end
"
end
it 'fails if fail_if_no_examples set to true' do
write_file 'spec/example_spec.rb', no_examples_custom_failure_exit_code(true)
run_command ""
expect(last_cmd_stdout).to include("0 examples, 0 failures")
expect(last_cmd_exit_status).to eq(15)
end
end
end
end
| 29.045872 | 85 | 0.656033 |
b9afc037423a056549e62170df4e225eca7558ae | 3,734 | describe Spaceship::Client do
describe "UI" do
describe "#select_team" do
subject { Spaceship.client }
let(:username) { '[email protected]' }
let(:password) { 'so_secret' }
before do
Spaceship.login
client = Spaceship.client
end
it "uses the first team if there is only one" do
expect(subject.select_team).to eq("XXXXXXXXXX")
end
describe "Multiple Teams" do
before do
PortalStubbing.adp_stub_multiple_teams
end
it "Lets the user select the team if in multiple teams" do
allow($stdin).to receive(:gets).and_return("2")
expect(Spaceship::Client::UserInterface).to receive(:interactive?).and_return(true)
expect(subject.select_team).to eq("XXXXXXXXXX") # a different team
end
it "Falls back to user selection if team wasn't found" do
ENV["FASTLANE_TEAM_ID"] = "Not Here"
expect(Spaceship::Client::UserInterface).to receive(:interactive?).and_return(true)
allow($stdin).to receive(:gets).and_return("2")
expect(subject.select_team).to eq("XXXXXXXXXX") # a different team
end
it "Uses the specific team (1/2) using environment variables" do
ENV["FASTLANE_TEAM_ID"] = "SecondTeam"
expect(subject.select_team).to eq("SecondTeam") # a different team
end
it "Uses the specific team (2/2) using environment variables" do
ENV["FASTLANE_TEAM_ID"] = "XXXXXXXXXX"
expect(subject.select_team).to eq("XXXXXXXXXX") # a different team
end
it "Let's the user specify the team name using environment variables" do
ENV["FASTLANE_TEAM_NAME"] = "SecondTeamProfiName"
expect(subject.select_team).to eq("SecondTeam")
end
it "Uses the specific team (1/2) using method parameters" do
expect(subject.select_team(team_id: "SecondTeam")).to eq("SecondTeam") # a different team
end
it "Uses the specific team (2/2) using method parameters" do
expect(subject.select_team(team_id: "XXXXXXXXXX")).to eq("XXXXXXXXXX") # a different team
end
it "Let's the user specify the team name using method parameters" do
expect(subject.select_team(team_name: "SecondTeamProfiName")).to eq("SecondTeam")
end
it "Strips out spaces before and after the team name" do
ENV["FASTLANE_TEAM_NAME"] = " SecondTeamProfiName "
expect(subject.select_team).to eq("SecondTeam")
end
it "Asks for the team if the name couldn't be found (pick first)" do
ENV["FASTLANE_TEAM_NAME"] = "NotExistent"
expect(Spaceship::Client::UserInterface).to receive(:interactive?).and_return(true)
allow($stdin).to receive(:gets).and_return("1")
expect(subject.select_team).to eq("SecondTeam")
end
it "Asks for the team if the name couldn't be found (pick last)" do
ENV["FASTLANE_TEAM_NAME"] = "NotExistent"
expect(Spaceship::Client::UserInterface).to receive(:interactive?).and_return(true)
allow($stdin).to receive(:gets).and_return("2")
expect(subject.select_team).to eq("XXXXXXXXXX")
end
it "Raises an Error if shell is non interactive" do
expect(Spaceship::Client::UserInterface).to receive(:interactive?).and_return(false)
expect do
subject.select_team
end.to raise_error("Multiple Teams found; unable to choose, terminal not ineractive!")
end
after do
ENV.delete("FASTLANE_TEAM_ID")
ENV.delete("FASTLANE_TEAM_NAME")
end
end
end
end
end
| 38.895833 | 99 | 0.636047 |
624c3f9ba1280daa1053380246de39fd2955756f | 170 | class EnablePostgresDblink < ActiveRecord::Migration[5.1]
def change
if Rails.env.development? || Rails.env.test?
enable_extension 'dblink'
end
end
end
| 21.25 | 57 | 0.711765 |
4afea5d9f4b1e60546a14c6c52faf4f4276318da | 1,675 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Christopher Walters (<[email protected]>)
# Copyright:: Copyright 2008-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
module Mixin
module FromFile
# Loads a given ruby file, and runs instance_eval against it in the context of the current
# object.
#
# Raises an IOError if the file cannot be found, or is not readable.
def from_file(filename)
if File.exists?(filename) && File.readable?(filename)
instance_eval(IO.read(filename), filename, 1)
else
raise IOError, "Cannot open or read #{filename}!"
end
end
# Loads a given ruby file, and runs class_eval against it in the context of the current
# object.
#
# Raises an IOError if the file cannot be found, or is not readable.
def class_from_file(filename)
if File.exists?(filename) && File.readable?(filename)
class_eval(IO.read(filename), filename, 1)
else
raise IOError, "Cannot open or read #{filename}!"
end
end
end
end
end
| 32.843137 | 96 | 0.674627 |
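A small sketch of how the mixin above is typically consumed: include it in a class and evaluate a Ruby file against an instance. The class name and file path are made up for illustration.
# Illustrative consumer of Chef::Mixin::FromFile
class RoleLike
  include Chef::Mixin::FromFile
  attr_accessor :description
end

# Suppose /tmp/example_role.rb contains:   self.description = "web frontend"
role = RoleLike.new
role.from_file("/tmp/example_role.rb")   # contents run via instance_eval
role.description                         # => "web frontend"
# A missing or unreadable path raises IOError, as documented above.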
bbd4a540c9cfbddd422a2f8661a6f5eeaca96bec | 982 | #@author : John Paul.H
#Update a user with the company details
require "rubygems"
require "rest_client"
require "json"
contact_id = 3213088
#you can also use apikey instead of user/passwd
site = RestClient::Resource.new("http://domain.freshdesk.com/contacts/#{contact_id}.json","[email protected]","test")
#you can update the company details of the user using the customer_id/ customer attribute.
#Using customer attribute, pass the existing company name if not present new company will be created.
# site.put({:user=>{:name=>"newcustomer11",:email=>"[email protected]",:customer=>"abc"},:content_type=>"application/json"})
#Using customer_id attribute, there is no validation for existing customer id so if wrongly assigned you will not see any customer associated.
response = site.put({:user=>{:name=>"newcustomer11",:email=>"[email protected]",:customer_id=>300},:content_type=>"application/json"})
puts "response: #{response.code} \n #{response.body}" | 49.1 | 143 | 0.763747 |
1a92d2ea5385a1b9c24658ad2bf850a878e58928 | 4,037 | if defined?(AssetSync)
AssetSync.configure do |config|
<%- if aws? -%>
config.fog_provider = 'AWS'
config.aws_access_key_id = ENV['AWS_ACCESS_KEY_ID']
config.aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
config.aws_session_token = ENV['AWS_SESSION_TOKEN'] if ENV.key?('AWS_SESSION_TOKEN')
# To use AWS reduced redundancy storage.
# config.aws_reduced_redundancy = true
#
# Change AWS signature version. Default is 4
# config.aws_signature_version = 4
#
# Change canned ACL of uploaded object. Default is unset. Will override fog_public if set.
# Choose from: private | public-read | public-read-write | aws-exec-read |
# authenticated-read | bucket-owner-read | bucket-owner-full-control
# config.aws_acl = nil
#
# Change host option in fog (only if you need to)
# config.fog_host = "s3.amazonaws.com"
#
# Change port option in fog (only if you need to)
# config.fog_port = "9000"
#
# Use http instead of https. Default should be "https" (at least for fog-aws)
# config.fog_scheme = "http"
<%- elsif google? -%>
config.fog_provider = 'Google'
config.google_storage_access_key_id = ENV['GOOGLE_STORAGE_ACCESS_KEY_ID']
config.google_storage_secret_access_key = ENV['GOOGLE_STORAGE_SECRET_ACCESS_KEY']
<%- elsif rackspace? -%>
config.fog_provider = 'Rackspace'
config.rackspace_username = ENV['RACKSPACE_USERNAME']
config.rackspace_api_key = ENV['RACKSPACE_API_KEY']
# if you need to change rackspace_auth_url (e.g. if you need to use Rackspace London)
# config.rackspace_auth_url = "lon.auth.api.rackspacecloud.com"
<%- elsif azure_rm? -%>
config.fog_provider = 'AzureRM'
config.azure_storage_account_name = ENV['AZURE_STORAGE_ACCOUNT_NAME']
config.azure_storage_access_key = ENV['AZURE_STORAGE_ACCESS_KEY']
<%- elsif backblaze? -%>
config.fog_provider = 'Backblaze'
config.b2_key_id = ENV['B2_KEY_ID']
config.b2_key_token = ENV['B2_KEY_TOKEN']
config.b2_bucket_id = ENV['B2_BUCKET_ID']
# config.fog_directory specifies container name of Azure Blob storage
<%- end -%>
config.fog_directory = ENV['FOG_DIRECTORY']
# Invalidate a file on a cdn after uploading files
# config.cdn_distribution_id = "12345"
# config.invalidate = ['file1.js']
# Increase upload performance by configuring your region
# config.fog_region = 'eu-west-1'
#
# Set `public` option when uploading file depending on value,
# Setting to "default" makes asset sync skip setting the option
# Possible values: true, false, "default" (default: true)
# config.fog_public = true
#
# Don't delete files from the store
# config.existing_remote_files = "keep"
#
# Automatically replace files with their equivalent gzip compressed version
# config.gzip_compression = true
#
# Use the Rails generated 'manifest.yml' file to produce the list of files to
# upload instead of searching the assets directory.
# config.manifest = true
#
# Upload the manifest file also.
# config.include_manifest = false
#
# Upload files concurrently
# config.concurrent_uploads = false
#
# Path to cache file to skip scanning remote
# config.remote_file_list_cache_file_path = './.asset_sync_remote_file_list_cache.json'
#
# Fail silently. Useful for environments such as Heroku
# config.fail_silently = true
#
# Log silently. Default is `true`. But you can set it to false if more logging message are preferred.
# Logging messages are sent to `STDOUT` when `log_silently` is falsy
# config.log_silently = true
#
# Allow custom assets to be cacheable. Note: The base filename will be matched
# If you have an asset with name `app.0ba4d3.js`, only `app.0ba4d3` will need to be matched
# config.cache_asset_regexps = [ /\.[a-f0-9]{8}$/i, /\.[a-f0-9]{20}$/i ]
# config.cache_asset_regexp = /\.[a-f0-9]{8}$/i
end
end
| 41.618557 | 105 | 0.689621 |
33a7ac63388881a75ee817eef8d5a89e4c16aa22 | 432 | module IMS::LTI::Serializers::MembershipService
class LISPersonSerializer < IMS::LTI::Serializers::Base
set_attribute :id, key: :@id
set_attributes :name, :img, :email
set_attribute :family_name, key: :familyName
set_attribute :given_name, key: :givenName
set_attribute :result_sourced_id, key: :resultSourcedId
set_attribute :sourced_id, key: :sourcedId
set_attribute :user_id, key: :userId
end
end
| 36 | 59 | 0.743056 |
5d97f1e3039564c28812e71d7ca95b623eda7211 | 5,334 | =begin
Titan API
The ultimate, language agnostic, container based job processing framework.
OpenAPI spec version: 0.4.9
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'uri'
module IronTitan
class Configuration
# Defines url scheme
attr_accessor :scheme
# Defines url host
attr_accessor :host
# Defines url base path
attr_accessor :base_path
# Defines API keys used with API Key authentications.
#
# @return [Hash] key: parameter name, value: parameter value (API key)
#
# @example parameter name is "api_key", API key is "xxx" (e.g. "api_key=xxx" in query string)
# config.api_key['api_key'] = 'xxx'
attr_accessor :api_key
# Defines API key prefixes used with API Key authentications.
#
# @return [Hash] key: parameter name, value: API key prefix
#
# @example parameter name is "Authorization", API key prefix is "Token" (e.g. "Authorization: Token xxx" in headers)
# config.api_key_prefix['api_key'] = 'Token'
attr_accessor :api_key_prefix
# Defines the username used with HTTP basic authentication.
#
# @return [String]
attr_accessor :username
# Defines the password used with HTTP basic authentication.
#
# @return [String]
attr_accessor :password
# Defines the access token (Bearer) used with OAuth2.
attr_accessor :access_token
# Set this to enable/disable debugging. When enabled (set to true), HTTP request/response
# details will be logged with `logger.debug` (see the `logger` attribute).
    # Defaults to false.
#
# @return [true, false]
attr_accessor :debugging
# Defines the logger used for debugging.
    # Defaults to `Rails.logger` (when in Rails) or logging to STDOUT.
#
# @return [#debug]
attr_accessor :logger
# Defines the temporary folder to store downloaded files
# (for API endpoints that have file response).
    # Defaults to `Tempfile`.
#
# @return [String]
attr_accessor :temp_folder_path
# The time limit for HTTP request in seconds.
    # Defaults to 0 (never times out).
attr_accessor :timeout
### TLS/SSL
# Set this to false to skip verifying SSL certificate when calling API from https server.
    # Defaults to true.
#
# @note Do NOT set it to false in production code, otherwise you would face multiple types of cryptographic attacks.
#
# @return [true, false]
attr_accessor :verify_ssl
# Set this to customize the certificate file to verify the peer.
#
# @return [String] the path to the certificate file
#
# @see The `cainfo` option of Typhoeus, `--cert` option of libcurl. Related source code:
# https://github.com/typhoeus/typhoeus/blob/master/lib/typhoeus/easy_factory.rb#L145
attr_accessor :ssl_ca_cert
# Client certificate file (for client certificate)
attr_accessor :cert_file
# Client private key file (for client certificate)
attr_accessor :key_file
attr_accessor :inject_format
attr_accessor :force_ending_format
def initialize
@scheme = 'https'
@host = 'localhost:8080'
@base_path = '/v1'
@api_key = {}
@api_key_prefix = {}
@timeout = 0
@verify_ssl = true
@cert_file = nil
@key_file = nil
@debugging = false
@inject_format = false
@force_ending_format = false
@logger = defined?(Rails) ? Rails.logger : Logger.new(STDOUT)
yield(self) if block_given?
end
# The default Configuration object.
def self.default
@@default ||= Configuration.new
end
def configure
yield(self) if block_given?
end
def scheme=(scheme)
# remove :// from scheme
@scheme = scheme.sub(/:\/\//, '')
end
def host=(host)
# remove http(s):// and anything after a slash
@host = host.sub(/https?:\/\//, '').split('/').first
end
def base_path=(base_path)
# Add leading and trailing slashes to base_path
@base_path = "/#{base_path}".gsub(/\/+/, '/')
@base_path = "" if @base_path == "/"
end
def base_url
url = "#{scheme}://#{[host, base_path].join('/').gsub(/\/+/, '/')}".sub(/\/+\z/, '')
URI.encode(url)
end
# Gets API key (with prefix if set).
# @param [String] param_name the parameter name of API key auth
def api_key_with_prefix(param_name)
if @api_key_prefix[param_name]
"#{@api_key_prefix[param_name]} #{@api_key[param_name]}"
else
@api_key[param_name]
end
end
# Gets Basic Auth token string
def basic_auth_token
'Basic ' + ["#{username}:#{password}"].pack('m').delete("\r\n")
end
# Returns Auth Settings hash for api client.
def auth_settings
{
}
end
end
end
| 28.524064 | 120 | 0.660105 |
089d1286830cb842712d621d4f9b61a7ad9aadee | 1,174 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170410164557) do
create_table "microposts", force: :cascade do |t|
t.text "content"
t.integer "user_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "users", force: :cascade do |t|
t.string "name"
t.string "email"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 37.870968 | 86 | 0.74276 |
03bdea4756b09ee554ce9fe2f7d5bcab5668ae08 | 2,828 | require 'spec_helper'
module CMIS
describe Document do
context 'when creating a document with content' do
it 'has properties and content' do
document = repository.new_document
document.name = 'apple_document'
document.object_type_id = 'cmis:document'
document.content = { stream: StringIO.new('apple is a fruit'),
mime_type: 'text/apple',
filename: 'apple.txt' }
document = document.create_in_folder(repository.root)
expect(document.content?).to be true
expect(document.content_stream_mime_type).to eq('text/apple')
expect(document.content_stream_file_name).to eq('apple.txt')
expect(document.content).to eq('apple is a fruit')
document.delete
end
end
context 'when creating a document without content' do
it 'has properties and no content' do
document = repository.new_document
document.name = 'apple_document'
document.object_type_id = 'cmis:document'
document = document.create_in_folder(repository.root)
expect(document.content?).to be false
expect(document.content_stream_mime_type).to be_nil
expect(document.content_stream_file_name).to be_nil
expect(document.content).to be_nil
document.delete
end
end
context 'when creating a document and then setting the content' do
it 'has properties and content' do
document = repository.new_document
document.name = 'apple_document'
document.object_type_id = 'cmis:document'
document = document.create_in_folder(repository.root)
document.content = { stream: StringIO.new('apple is a fruit'),
mime_type: 'text/apple',
filename: 'apple.txt' }
document = document.refresh
expect(document.content?).to be true
expect(document.content_stream_mime_type).to eq('text/apple')
expect(document.content_stream_file_name).to eq('apple.txt')
expect(document.content).to eq('apple is a fruit')
document.delete
end
end
context 'when creating a document with json content' do
it 'has the json content' do
document = repository.new_document
document.name = 'json_document'
document.object_type_id = 'cmis:document'
document.content = { stream: '{ "foo" : "bar" }',
mime_type: 'application/json',
filename: 'foo.json' }
document = document.create_in_folder(repository.root)
expect(document.content_stream_mime_type).to eq('application/json')
expect(document.content).to eq '{ "foo" : "bar" }'
document.delete
end
end
end
end
| 35.797468 | 75 | 0.631188 |
ff28de8654a3badfc8c17cb74eed807fcc58f0ca | 960 | require "bundler/setup"
require 'pry'
require 'active_record'
require 'database_cleaner'
require "frp-eventsourcing"
ActiveRecord::Base.establish_connection adapter: "sqlite3", database: ":memory:"
load File.dirname(__FILE__) + '/support/schema.rb'
require File.dirname(__FILE__) + '/support/models.rb'
require File.dirname(__FILE__) + '/support/publish_events.rb'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
config.before(:suite) do
DatabaseCleaner.clean_with(:truncation)
end
config.before(:each) do
DatabaseCleaner.strategy = :transaction
end
config.before(:each) do
DatabaseCleaner.start
end
config.after(:each) do
DatabaseCleaner.clean
end
end
| 24 | 80 | 0.75 |
ac41372ae4fb976fa1d2b5b25cd97686b61cb6d4 | 2,248 | ###
#
# framework-util-svn
# --------------
#
# The class provides methods for parsing the SVN information in the framework directory
#
###
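#
# Illustrative usage (return values depend on the local .svn/entries file and are
# nil when the framework was not checked out from Subversion):
#
#   Msf::Util::SVN.revision               # => e.g. 12345
#   Msf::Util::SVN.last_updated_friendly  # => e.g. "3 days ago"
#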
require 'date'
module Msf
module Util
class SVN
def self.load_root
info = {}
path = ::File.join(::File.dirname(__FILE__), "..", "..", "..", ".svn", "entries")
if !::File.exists?(path)
return info
end
contents = ''
File.open(path, "rb") do |fd|
contents = fd.read(::File.size(path))
end
if contents.include? "<?xml"
require 'rexml/document'
rd = REXML::Document.new(contents).root
rd.elements.each { |e|
if e.attributes['name'] == ""
info[:root] = e.attributes['url']
info[:revision] = e.attributes['revision']
info[:updated] = e.attributes['committed-date']
break
end
}
else
ents = contents.split("\x0c")
ents[0].split("\n").each do |line|
line.strip!
next if line.empty?
case line
when /framework3/
info[:root] = line
when /^\d+$/
info[:revision] = line.to_i
when /^\d{4}-\d.*T/
info[:updated] = line
end
break if (info[:root] and info[:revision] and info[:updated])
end
end
info
end
def self.revision
@@info ||= load_root
@@info[:revision]
end
def self.updated
@@info ||= load_root
@@info[:updated]
end
def self.root
@@info ||= load_root
@@info[:root]
end
def self.days_since_update
@@info ||= load_root
svnt = @@info[:updated]
if(not svnt)
return
end
# Date.parse and Date.strptime are both broken beyond repair in
# ruby 1.8.6 and older. Just bail if the parsing doesn't work.
begin
diff = (Date.parse(Time.now.to_s) - Date.parse(svnt)).to_f
rescue ArgumentError
end
end
def self.last_updated_friendly
diff = self.days_since_update
case diff
when nil
"at an unknown date"
when -2.0 .. 1.0
"today"
when 1.0 .. 2.0
"yesterday"
else
if (diff.to_i > 7)
"%red#{diff.to_i} days ago%clr"
else
"#{diff.to_i} days ago"
end
end
end
def self.last_updated_date
@@info ||= load_root
svnt = @@info[:updated]
if(not svnt)
return
end
begin
Date.parse(@@info[:updated])
rescue ArgumentError
end
end
end
end
end
| 18.733333 | 87 | 0.59742 |
e884303a44e51128d76046ea5d48ede48113793a | 1,968 | class WorkampJobs::Cli
@@muted="\e[1;31m"
@@grn="\e[1;32m"
@@blu="\e[1;34m"
@@mag="\e[1;35m"
@@cyn="\e[1;36m"
@@white="\e[0m"
def call
puts "\nWelcome to Workamp jobs!\n"
@input = ""
until @input == "exit"
get_jobs
job_list
more_info
next_step
end
goodbye
end
def get_jobs #method to get job objects from the Jobs class.
#binding.pry
@jobs = WorkampJobs::Jobs.all
end
  def job_list #method to iterate through job objects and display the title and number of each one.
puts "\nHere is today's list of workamp jobs.\n"
@jobs.each.with_index(1) do |job, idx|
puts "#{idx}. #{job.title}"
#binding.pry
end
puts "please choose a number to see more info."
end
  def more_info #method to get the user's input and also check that it is valid.
input = gets.strip.to_i
if input.between?(1,WorkampJobs::Jobs.all.length)
job = WorkampJobs::Jobs.all[input - 1]
display_job_details(job) if valid_input(input, @jobs)
end
end
def valid_input(input, data) #this method is connected to more_info to check user input.
input.to_i <= data.length && input.to_i > 0
end
def display_job_details(job) #display method.
puts "--------------------------------------------------------------------------------"
puts "#{@@grn}#{job.title}#{@@white}"
puts "#{job.about}"
puts "\nlocation for job selected: #{@@cyn}#{job.location}.#{@@white}\n"
puts "\nThe job was posted on #{@@mag}#{job.post_date}.#{@@white}\n"
puts "--------------------------------------------------------------------------------"
puts "To see the full job post click on the link"
puts "#{@@blu}#{job.url}#{@@white}"
end
def next_step
puts "\nOr hit any key and enter to see the list or type 'exit' to exit.\n"
@input = gets.strip.downcase
end
def goodbye
puts "Thank you and come back soon to see new job listings"
end
end | 29.373134 | 94 | 0.573679 |
61b741c30ebc4b181be77d1d8b4bdb6c05de2fbe | 1,549 | class BalanceReport
extend ActiveModel::Naming
attr_accessor :user
def initialize(user, year)
@user = user
@years = @user.transactions.group_by{|t| t.year}.keys
@year = year
end
def exec
balance_report
yearly_total
my_current_balance
end
def get_instance_variable
hash = {}
hash[:year] = @year
hash[:years] = @years
hash[:results] = @results
hash[:total] = @total
hash[:yearly_total] = @yearly_total
return hash
end
private
def balance_report
@results = []
transactions = @user.transactions.where("year = ?", @year).group_by{|t| t.month}
(1..12).each do |month|
@results << {
month: month,
income: transactions[month].present? ? transactions[month].flatten.collect{|t| t.type_id == 2 ? t.amount : 0.0 }.sum : 0.0,
expense: transactions[month].present? ? transactions[month].flatten.collect{|t| t.type_id == 1 ? t.amount : 0.0 }.sum : 0.0,
investment: transactions[month].present? ? transactions[month].flatten.collect{|t| t.type_id == 3 ? t.amount : 0.0}.sum : 0.0
}
end
end
def yearly_total
income = @results.map{|t| t[:income]}.sum
expense = @results.map{|t| t[:expense]}.sum
investment = @results.map{|t| t[:investment]}.sum
@yearly_total = income - (expense + investment)
end
def my_current_balance
income = @user.transactions.incomes.map{|t| t.amount}.sum
expense = @user.transactions.expenses.map{|t| t.amount}.sum
investment = @user.transactions.investments.map{|t| t.amount}.sum
@total = income - (expense + investment)
end
end | 28.163636 | 129 | 0.671401 |
7a1a0da14a35bd52a7b95486d7ecfa7d1e8365bf | 1,282 | module VagrantPlugins
module Docker
class DockerInstaller
def initialize(machine, version)
@machine = machine
@version = version
end
# This handles verifying the Docker installation, installing it if it was
# requested, and so on. This method will raise exceptions if things are
# wrong.
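      #
      # Illustrative call site (the machine object and version string are assumptions
      # taken from the surrounding provisioner configuration):
      #   DockerInstaller.new(machine, "1.7.1").ensure_installed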
def ensure_installed
if [email protected]?(:docker_installed)
@machine.ui.warn(I18n.t("vagrant.docker_cant_detect"))
return
end
if [email protected](:docker_installed)
@machine.ui.info(I18n.t("vagrant.docker_installing", version: @version.to_s))
@machine.guest.capability(:docker_install, @version)
if [email protected](:docker_installed)
raise DockerError, :install_failed
end
end
if @machine.guest.capability?(:docker_configure_auto_start)
@machine.guest.capability(:docker_configure_auto_start)
else
@machine.env.ui.warn I18n.t('vagrant.docker_auto_start_not_available')
end
if @machine.guest.capability?(:docker_configure_vagrant_user)
@machine.guest.capability(:docker_configure_vagrant_user)
end
end
end
end
end
| 32.05 | 87 | 0.659906 |
33f779f11b914dece1a1ef381a8c86ca156f2f56 | 814 | # frozen_string_literal: true
require "dry/effects/providers/reader"
require "dry/effects/instructions/raise"
module Dry
module Effects
module Providers
class State < Reader[:state]
def write(value:)
case value
when state_type
@state = value
else
Instructions.Raise(Errors::InvalidValueError.new(state, value))
end
end
# Yield the block with the handler installed
#
# @api private
def call(state = Undefined)
r = super
[self.state, r]
end
# @param [Effect] effect
# @return [Boolean]
# @api public
def provide?(effect)
effect.type.equal?(:state) && scope.equal?(effect.scope)
end
end
end
end
end
| 22 | 75 | 0.558968 |
6a20b1650cebfdc10de892bb718a683814010cee | 1,091 | module Build
def self.root_path
Pathname.new(__FILE__).dirname.parent
end
def self.data_path
if ENV.key?('LOLSCHEDULE_DATA_DIR')
Pathname.new(ENV['LOLSCHEDULE_DATA_DIR'])
else
root_path + 'data'
end
end
def self.source_path
data_path + 'source.json'
end
def self.archived_source_path
data_path + 'archived.json'
end
def self.build_path
root_path + 'build'
end
def self.logos_path
build_path + 'logos'
end
def self.used_logos_path
build_path + 'used_logos'
end
def self.output_path
if ENV.key?('LOLSCHEDULE_OUTPUT_DIR')
Pathname.new(ENV['LOLSCHEDULE_OUTPUT_DIR'])
else
root_path + 'output'
end
end
def self.grouped_logos
group = {}
logos_path.children.each do |file|
hash = Magick::Image.read(file.to_s).first.signature
group[hash] ||= []
group[hash] << file
end
group.values
end
def self.write_with_gz(path:, data:)
path.write(data)
Pathname.new("#{path.to_s}.gz").write(Zlib.gzip(data, level: Zlib::BEST_COMPRESSION))
end
end | 18.810345 | 89 | 0.659945 |
0890587fe14d009c91baf564613e4eedec4d8f37 | 1,687 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ContainerRegistry::Mgmt::V2017_10_01
module Models
#
# The registry node that generated the event. Put differently, while the
# actor initiates the event, the source generates it.
#
class Source
include MsRestAzure
# @return [String] The IP or hostname and the port of the registry node
# that generated the event. Generally, this will be resolved by
# os.Hostname() along with the running port.
attr_accessor :addr
# @return [String] The running instance of an application. Changes after
# each restart.
attr_accessor :instance_id
#
# Mapper for Source class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Source',
type: {
name: 'Composite',
class_name: 'Source',
model_properties: {
addr: {
client_side_validation: true,
required: false,
serialized_name: 'addr',
type: {
name: 'String'
}
},
instance_id: {
client_side_validation: true,
required: false,
serialized_name: 'instanceID',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 27.655738 | 78 | 0.543568 |
0390b06eef424d350fe3dd5738558dd71796b12a | 788 | {
matrix_id: '1344',
name: 'rajat17',
group: 'Rajat',
description: 'Rajat/rajat17 circuit simulation matrix',
author: 'Rajat',
editor: 'T. Davis',
date: '2006',
kind: 'circuit simulation problem',
problem_2D_or_3D: '0',
num_rows: '94294',
num_cols: '94294',
nonzeros: '479246',
num_explicit_zeros: '161913',
num_strongly_connected_components: '5229',
num_dmperm_blocks: '8546',
structural_full_rank: 'true',
structural_rank: '94294',
pattern_symmetry: '0.990',
numeric_symmetry: '0.271',
rb_type: 'real',
structure: 'unsymmetric',
cholesky_candidate: 'no',
positive_definite: 'no',
image_files: 'rajat17.png,rajat17_dmperm.png,rajat17_scc.png,rajat17_APlusAT_graph.gif,rajat17_graph.gif,',
}
| 29.185185 | 111 | 0.664975 |
26d94e1f8730811e5812c3f59d8ec03109deb3b9 | 287 | FactoryBot.define do
  # Define your Spree extension's factories within this file so that applications and other extensions can use and override them.
  #
  # For example, requiring this in your spec_helper will load these factories for use:
# require 'spree_gem_configuration/factories'
end
| 41 | 130 | 0.794425 |
ed894aad27ab4fa80fff6552508871065b60b86b | 10,500 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
require_relative 'base_announcement'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# A summary representation of an announcement.
#
class AnnouncementsService::Models::AnnouncementSummary < AnnouncementsService::Models::BaseAnnouncement
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'id': :'id',
'type': :'type',
'reference_ticket_number': :'referenceTicketNumber',
'summary': :'summary',
'time_one_title': :'timeOneTitle',
'time_one_type': :'timeOneType',
'time_one_value': :'timeOneValue',
'time_two_title': :'timeTwoTitle',
'time_two_type': :'timeTwoType',
'time_two_value': :'timeTwoValue',
'services': :'services',
'affected_regions': :'affectedRegions',
'announcement_type': :'announcementType',
'lifecycle_state': :'lifecycleState',
'is_banner': :'isBanner',
'time_created': :'timeCreated',
'time_updated': :'timeUpdated',
'environment_name': :'environmentName',
'platform_type': :'platformType'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'id': :'String',
'type': :'String',
'reference_ticket_number': :'String',
'summary': :'String',
'time_one_title': :'String',
'time_one_type': :'String',
'time_one_value': :'DateTime',
'time_two_title': :'String',
'time_two_type': :'String',
'time_two_value': :'DateTime',
'services': :'Array<String>',
'affected_regions': :'Array<String>',
'announcement_type': :'String',
'lifecycle_state': :'String',
'is_banner': :'BOOLEAN',
'time_created': :'DateTime',
'time_updated': :'DateTime',
'environment_name': :'String',
'platform_type': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [String] :id The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#id #id} property
    # @option attributes [String] :reference_ticket_number The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#reference_ticket_number #reference_ticket_number} property
    # @option attributes [String] :summary The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#summary #summary} property
    # @option attributes [String] :time_one_title The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_one_title #time_one_title} property
    # @option attributes [String] :time_one_type The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_one_type #time_one_type} property
    # @option attributes [DateTime] :time_one_value The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_one_value #time_one_value} property
    # @option attributes [String] :time_two_title The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_two_title #time_two_title} property
    # @option attributes [String] :time_two_type The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_two_type #time_two_type} property
    # @option attributes [DateTime] :time_two_value The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_two_value #time_two_value} property
    # @option attributes [Array<String>] :services The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#services #services} property
    # @option attributes [Array<String>] :affected_regions The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#affected_regions #affected_regions} property
    # @option attributes [String] :announcement_type The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#announcement_type #announcement_type} property
    # @option attributes [String] :lifecycle_state The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#lifecycle_state #lifecycle_state} property
    # @option attributes [BOOLEAN] :is_banner The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#is_banner #is_banner} property
    # @option attributes [DateTime] :time_created The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_created #time_created} property
    # @option attributes [DateTime] :time_updated The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#time_updated #time_updated} property
    # @option attributes [String] :environment_name The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#environment_name #environment_name} property
    # @option attributes [String] :platform_type The value to assign to the {OCI::AnnouncementsService::Models::BaseAnnouncement#platform_type #platform_type} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
attributes['type'] = 'AnnouncementSummary'
super(attributes)
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
id == other.id &&
type == other.type &&
reference_ticket_number == other.reference_ticket_number &&
summary == other.summary &&
time_one_title == other.time_one_title &&
time_one_type == other.time_one_type &&
time_one_value == other.time_one_value &&
time_two_title == other.time_two_title &&
time_two_type == other.time_two_type &&
time_two_value == other.time_two_value &&
services == other.services &&
affected_regions == other.affected_regions &&
announcement_type == other.announcement_type &&
lifecycle_state == other.lifecycle_state &&
is_banner == other.is_banner &&
time_created == other.time_created &&
time_updated == other.time_updated &&
environment_name == other.environment_name &&
platform_type == other.platform_type
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[id, type, reference_ticket_number, summary, time_one_title, time_one_type, time_one_value, time_two_title, time_two_type, time_two_value, services, affected_regions, announcement_type, lifecycle_state, is_banner, time_created, time_updated, environment_name, platform_type].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 47.727273 | 285 | 0.698286 |
e8d6d8b0687cb97af3a0b0836c00e4fe9981b215 | 1,074 | module AutotestNotification
class Mac
@last_test_failed = false
class << self
def notify(title, msg, img, total = 1, failures = 0, priority = 0)
system "growlnotify -n autotest --image #{img} #{'-s ' if ((failures > 0) and STICKY)}-p #{priority} -m '#{msg}' -t #{title}"
play(SUCCESS_SOUND) unless SUCCESS_SOUND.empty? or failures > 0
play(FAILURE_SOUND) unless FAILURE_SOUND.empty? or failures == 0
say(total, failures) if SPEAKING
end
def say(total, failures)
if failures > 0
DOOM_EDITION ? Doom.play_sound(total, failures) : system("say #{failures} test#{'s' unless failures == 1} failed.")
@last_test_failed = true
elsif @last_test_failed
DOOM_EDITION ? Doom.play_sound(total, failures) : system("say All tests passed successfully.")
@last_test_failed = false
end
end
def play(sound_file)
`#{File.expand_path(File.dirname(__FILE__) + "/../../bin/")}/playsound #{sound_file}`
end
end
end
end
| 34.645161 | 133 | 0.608939 |
18b2d1706b7f2757848a8215368c49c56663b21a | 8,265 | module SmsTools
# UTF-8 to GSM-7 (GSM 03.38) mapping. Based on code from:
# https://github.com/threez/smspromote/blob/master/lib/smspromote/encoding.rb
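  #
  # Illustrative round trip (the euro sign goes through the extension table):
  #   gsm = SmsTools::GsmEncoding.from_utf8('Price: 5 €')
  #   SmsTools::GsmEncoding.to_utf8(gsm)  # => "Price: 5 €"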
module GsmEncoding
extend self
GSM_EXTENSION_TABLE_ESCAPE_CODE = "\x1B".freeze
UTF8_TO_GSM_BASE_TABLE = {
0x0040 => "\x00", # COMMERCIAL AT
0x00A3 => "\x01", # POUND SIGN
0x0024 => "\x02", # DOLLAR SIGN
0x00A5 => "\x03", # YEN SIGN
0x00E8 => "\x04", # LATIN SMALL LETTER E WITH GRAVE
0x00E9 => "\x05", # LATIN SMALL LETTER E WITH ACUTE
0x00F9 => "\x06", # LATIN SMALL LETTER U WITH GRAVE
0x00EC => "\x07", # LATIN SMALL LETTER I WITH GRAVE
0x00F2 => "\x08", # LATIN SMALL LETTER O WITH GRAVE
0x00C7 => "\x09", # LATIN CAPITAL LETTER C WITH CEDILLA
0x000A => "\x0A", # LINE FEED
0x00D8 => "\x0B", # LATIN CAPITAL LETTER O WITH STROKE
0x00F8 => "\x0C", # LATIN SMALL LETTER O WITH STROKE
0x000D => "\x0D", # CARRIAGE RETURN
0x00C5 => "\x0E", # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00E5 => "\x0F", # LATIN SMALL LETTER A WITH RING ABOVE
0x0394 => "\x10", # GREEK CAPITAL LETTER DELTA
0x005F => "\x11", # LOW LINE
0x03A6 => "\x12", # GREEK CAPITAL LETTER PHI
0x0393 => "\x13", # GREEK CAPITAL LETTER GAMMA
0x039B => "\x14", # GREEK CAPITAL LETTER LAMDA
0x03A9 => "\x15", # GREEK CAPITAL LETTER OMEGA
0x03A0 => "\x16", # GREEK CAPITAL LETTER PI
0x03A8 => "\x17", # GREEK CAPITAL LETTER PSI
0x03A3 => "\x18", # GREEK CAPITAL LETTER SIGMA
0x0398 => "\x19", # GREEK CAPITAL LETTER THETA
0x039E => "\x1A", # GREEK CAPITAL LETTER XI
nil => "\x1B", # ESCAPE TO EXTENSION TABLE or NON-BREAKING SPACE
0x00C6 => "\x1C", # LATIN CAPITAL LETTER AE
0x00E6 => "\x1D", # LATIN SMALL LETTER AE
0x00DF => "\x1E", # LATIN SMALL LETTER SHARP S (German)
0x00C9 => "\x1F", # LATIN CAPITAL LETTER E WITH ACUTE
0x0020 => "\x20", # SPACE
0x0021 => "\x21", # EXCLAMATION MARK
0x0022 => "\x22", # QUOTATION MARK
0x0023 => "\x23", # NUMBER SIGN
0x00A4 => "\x24", # CURRENCY SIGN
0x0025 => "\x25", # PERCENT SIGN
0x0026 => "\x26", # AMPERSAND
0x0027 => "\x27", # APOSTROPHE
0x0028 => "\x28", # LEFT PARENTHESIS
0x0029 => "\x29", # RIGHT PARENTHESIS
0x002A => "\x2A", # ASTERISK
0x002B => "\x2B", # PLUS SIGN
0x002C => "\x2C", # COMMA
0x002D => "\x2D", # HYPHEN-MINUS
0x002E => "\x2E", # FULL STOP
0x002F => "\x2F", # SOLIDUS
0x0030 => "\x30", # DIGIT ZERO
0x0031 => "\x31", # DIGIT ONE
0x0032 => "\x32", # DIGIT TWO
0x0033 => "\x33", # DIGIT THREE
0x0034 => "\x34", # DIGIT FOUR
0x0035 => "\x35", # DIGIT FIVE
0x0036 => "\x36", # DIGIT SIX
0x0037 => "\x37", # DIGIT SEVEN
0x0038 => "\x38", # DIGIT EIGHT
0x0039 => "\x39", # DIGIT NINE
0x003A => "\x3A", # COLON
0x003B => "\x3B", # SEMICOLON
0x003C => "\x3C", # LESS-THAN SIGN
0x003D => "\x3D", # EQUALS SIGN
0x003E => "\x3E", # GREATER-THAN SIGN
0x003F => "\x3F", # QUESTION MARK
0x00A1 => "\x40", # INVERTED EXCLAMATION MARK
0x0041 => "\x41", # LATIN CAPITAL LETTER A
0x0042 => "\x42", # LATIN CAPITAL LETTER B
0x0043 => "\x43", # LATIN CAPITAL LETTER C
0x0044 => "\x44", # LATIN CAPITAL LETTER D
0x0045 => "\x45", # LATIN CAPITAL LETTER E
0x0046 => "\x46", # LATIN CAPITAL LETTER F
0x0047 => "\x47", # LATIN CAPITAL LETTER G
0x0048 => "\x48", # LATIN CAPITAL LETTER H
0x0049 => "\x49", # LATIN CAPITAL LETTER I
0x004A => "\x4A", # LATIN CAPITAL LETTER J
0x004B => "\x4B", # LATIN CAPITAL LETTER K
0x004C => "\x4C", # LATIN CAPITAL LETTER L
0x004D => "\x4D", # LATIN CAPITAL LETTER M
0x004E => "\x4E", # LATIN CAPITAL LETTER N
0x004F => "\x4F", # LATIN CAPITAL LETTER O
0x0050 => "\x50", # LATIN CAPITAL LETTER P
0x0051 => "\x51", # LATIN CAPITAL LETTER Q
0x0052 => "\x52", # LATIN CAPITAL LETTER R
0x0053 => "\x53", # LATIN CAPITAL LETTER S
0x0054 => "\x54", # LATIN CAPITAL LETTER T
0x0055 => "\x55", # LATIN CAPITAL LETTER U
0x0056 => "\x56", # LATIN CAPITAL LETTER V
0x0057 => "\x57", # LATIN CAPITAL LETTER W
0x0058 => "\x58", # LATIN CAPITAL LETTER X
0x0059 => "\x59", # LATIN CAPITAL LETTER Y
0x005A => "\x5A", # LATIN CAPITAL LETTER Z
0x00C4 => "\x5B", # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00D6 => "\x5C", # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00D1 => "\x5D", # LATIN CAPITAL LETTER N WITH TILDE
0x00DC => "\x5E", # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00A7 => "\x5F", # SECTION SIGN
0x00BF => "\x60", # INVERTED QUESTION MARK
0x0061 => "\x61", # LATIN SMALL LETTER A
0x0062 => "\x62", # LATIN SMALL LETTER B
0x0063 => "\x63", # LATIN SMALL LETTER C
0x0064 => "\x64", # LATIN SMALL LETTER D
0x0065 => "\x65", # LATIN SMALL LETTER E
0x0066 => "\x66", # LATIN SMALL LETTER F
0x0067 => "\x67", # LATIN SMALL LETTER G
0x0068 => "\x68", # LATIN SMALL LETTER H
0x0069 => "\x69", # LATIN SMALL LETTER I
0x006A => "\x6A", # LATIN SMALL LETTER J
0x006B => "\x6B", # LATIN SMALL LETTER K
0x006C => "\x6C", # LATIN SMALL LETTER L
0x006D => "\x6D", # LATIN SMALL LETTER M
0x006E => "\x6E", # LATIN SMALL LETTER N
0x006F => "\x6F", # LATIN SMALL LETTER O
0x0070 => "\x70", # LATIN SMALL LETTER P
0x0071 => "\x71", # LATIN SMALL LETTER Q
0x0072 => "\x72", # LATIN SMALL LETTER R
0x0073 => "\x73", # LATIN SMALL LETTER S
0x0074 => "\x74", # LATIN SMALL LETTER T
0x0075 => "\x75", # LATIN SMALL LETTER U
0x0076 => "\x76", # LATIN SMALL LETTER V
0x0077 => "\x77", # LATIN SMALL LETTER W
0x0078 => "\x78", # LATIN SMALL LETTER X
0x0079 => "\x79", # LATIN SMALL LETTER Y
0x007A => "\x7A", # LATIN SMALL LETTER Z
0x00E4 => "\x7B", # LATIN SMALL LETTER A WITH DIAERESIS
0x00F6 => "\x7C", # LATIN SMALL LETTER O WITH DIAERESIS
0x00F1 => "\x7D", # LATIN SMALL LETTER N WITH TILDE
0x00FC => "\x7E", # LATIN SMALL LETTER U WITH DIAERESIS
0x00E0 => "\x7F", # LATIN SMALL LETTER A WITH GRAVE
}.freeze
UTF8_TO_GSM_EXTENSION_TABLE = {
      0xA => "\x1B\x0A", # FORM FEED
0x000C => "\x1B\x0A", # FORM FEED
0x005E => "\x1B\x14", # CIRCUMFLEX ACCENT
0x007B => "\x1B\x28", # LEFT CURLY BRACKET
0x007D => "\x1B\x29", # RIGHT CURLY BRACKET
0x005C => "\x1B\x2F", # REVERSE SOLIDUS
0x005B => "\x1B\x3C", # LEFT SQUARE BRACKET
0x007E => "\x1B\x3D", # TILDE
0x005D => "\x1B\x3E", # RIGHT SQUARE BRACKET
0x007C => "\x1B\x40", # VERTICAL LINE
0x20AC => "\x1B\x65" # EURO SIGN
}.freeze
UTF8_TO_GSM = UTF8_TO_GSM_BASE_TABLE.merge(UTF8_TO_GSM_EXTENSION_TABLE).freeze
GSM_TO_UTF8 = UTF8_TO_GSM.invert.freeze
def valid?(utf8_encoded_string)
utf8_encoded_string.unpack('U*').all? { |char| UTF8_TO_GSM[char] }
end
def double_byte?(char)
UTF8_TO_GSM_EXTENSION_TABLE[char.unpack('U').first]
end
def from_utf8(utf8_encoded_string)
gsm_encoded_string = ''
utf8_encoded_string.unpack('U*').each do |char|
if converted = UTF8_TO_GSM[char]
gsm_encoded_string << converted
else
raise "Unsupported symbol in GSM-7 encoding: 0x#{char.to_s(16).upcase}"
end
end
gsm_encoded_string
end
def to_utf8(gsm_encoded_string)
utf8_encoded_string = ''
escape = false
gsm_encoded_string.each_char do |char|
if char == GSM_EXTENSION_TABLE_ESCAPE_CODE
escape = true
elsif escape
escape = false
utf8_encoded_string << [fetch_utf8_char(GSM_EXTENSION_TABLE_ESCAPE_CODE + char)].pack('U')
else
utf8_encoded_string << [fetch_utf8_char(char)].pack('U')
end
end
utf8_encoded_string
end
private
def fetch_utf8_char(char)
GSM_TO_UTF8.fetch(char) { raise "Unsupported symbol in GSM-7 encoding: #{char}" }
end
end
end
| 40.514706 | 100 | 0.581972 |
f71407eb41e57cd3807112ac72e64247c7d78262 | 450 | # frozen_string_literal: true
class RenameTopicCustomFieldTopicPostEventStartsAtIndex < ActiveRecord::Migration[6.0]
def up
remove_index :topic_custom_fields, name: "idx_topic_custom_fields_post_event_starts_at"
add_index :topic_custom_fields,
%i[name topic_id],
name: :idx_topic_custom_fields_topic_post_event_starts_at,
unique: true,
where: "name = '#{DiscoursePostEvent::TOPIC_POST_EVENT_STARTS_AT}'"
end
end
| 32.142857 | 91 | 0.777778 |
4a0d4116d4bd9f7b2d5978b078b968943c5ebc1a | 764 | Types::BerryType = GraphQL::ObjectType.define do
name 'Berry'
description ''
field :id, types.ID, '', property: :id
field :item, Types::ItemType, '', property: :item
field :firmness, Types::BerryFirmnessType, '', property: :berry_firmness
field :naturalGiftPower, types.Int, '', property: :natural_gift_power
field :naturalGiftType, Types::TypeType, '', property: :type
field :size, types.Int, '', property: :size
field :maxHarvest, types.Int, '', property: :max_harvest
field :growthTime, types.Int, '', property: :growth_time
field :soilDryness, types.Int, '', property: :soil_dryness
field :smoothness, types.Int, '', property: :smoothness
field :berryFlavors, types[Types::BerryFlavorType], '',
property: :berry_flavors
end
| 40.210526 | 74 | 0.704188 |
d594b608fa01bf186ffff9477daf06b3d6dcec93 | 757 | # frozen_string_literal: true
module Admin
class RoomsController < Admin::BaseController
before_action :find_room, except: %i[index new create]
def index
@rooms = Room.all
end
def new
@room = Room.new
end
def create
@room = Room.new(room_params)
return redirect_to admin_rooms_path if @room.save
render :new
end
def edit; end
def update
return redirect_to admin_rooms_path if @room.update(room_params)
render :edit
end
def destroy
@room.destroy
redirect_to admin_rooms_path
end
private
def find_room
@room = Room.find(params[:id])
end
def room_params
params.require(:room).permit(:name, :order)
end
end
end
| 16.456522 | 70 | 0.638045 |
0134c887928cbffc92f785d477ba3814081d54c7 | 183 | Rails.application.routes.draw do
resources :microposts
resources :users
get 'static_pages/home'
get 'static_pages/help'
get 'static_pages/prueba'
root 'users#index'
end | 16.636364 | 32 | 0.748634 |
e2f3eff96f36d6c5f37037675b656c328a017b37 | 318 | module Instructions
class IncrementMilitaryOps < Instruction
fancy_accessor :player, :amount
needs :military_ops
def initialize(player:, amount:)
super
self.player = player
self.amount = amount
end
def action
military_ops.increment(player, amount)
end
end
end
| 15.142857 | 44 | 0.669811 |
18fea411283224602d93a6c2bb1dddbc63c7d7a3 | 3,165 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2019_07_01
module Models
#
# The source user image virtual hard disk. Only tags may be updated.
#
class ImageUpdate < UpdateResource
include MsRestAzure
# @return [SubResource] The source virtual machine from which Image is
# created.
attr_accessor :source_virtual_machine
# @return [ImageStorageProfile] Specifies the storage settings for the
# virtual machine disks.
attr_accessor :storage_profile
# @return [String] The provisioning state.
attr_accessor :provisioning_state
# @return [HyperVGenerationTypes] Gets the HyperVGenerationType of the
# VirtualMachine created from the image. Possible values include: 'V1',
# 'V2'
attr_accessor :hyper_vgeneration
#
# Mapper for ImageUpdate class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ImageUpdate',
type: {
name: 'Composite',
class_name: 'ImageUpdate',
model_properties: {
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
source_virtual_machine: {
client_side_validation: true,
required: false,
serialized_name: 'properties.sourceVirtualMachine',
type: {
name: 'Composite',
class_name: 'SubResource'
}
},
storage_profile: {
client_side_validation: true,
required: false,
serialized_name: 'properties.storageProfile',
type: {
name: 'Composite',
class_name: 'ImageStorageProfile'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
hyper_vgeneration: {
client_side_validation: true,
required: false,
serialized_name: 'properties.hyperVGeneration',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 30.728155 | 77 | 0.504897 |
e9ed6510c182e9172b94118992b9891764bcb2d6 | 6,118 | require 'support/helper'
describe 'MyJohnDeereApi::Client' do
include JD::ResponseHelpers
it 'includes the CaseConversion helper' do
client = JD::Client.new(api_key, api_secret)
assert_equal 'thisIsATest', client.send(:camelize, :this_is_a_test)
end
describe '#initialize(api_key, api_secret, options={})' do
it 'sets the api key/secret' do
client = JD::Client.new(api_key, api_secret)
assert_equal api_key, client.api_key
assert_equal api_secret, client.api_secret
end
it 'accepts token_hash' do
client = JD::Client.new(api_key, api_secret, token_hash: token_hash)
assert_equal token_hash, client.token_hash
end
it 'accepts the environment' do
environment = :sandbox
client = JD::Client.new(api_key, api_secret, environment: environment)
assert_equal environment, client.environment
end
it 'accepts a contribution_definition_id' do
client = JD::Client.new(api_key, api_secret, contribution_definition_id: contribution_definition_id)
assert_equal contribution_definition_id, client.contribution_definition_id
end
it 'accepts a list of parameters for NetHttpRetry' do
custom_retries = JD::NetHttpRetry::Decorator::DEFAULTS[:max_retries] + 10
VCR.use_cassette('catalog') do
new_client = JD::Client.new(
api_key,
api_secret,
contribution_definition_id: contribution_definition_id,
environment: :sandbox,
token_hash: token_hash,
http_retry: {max_retries: custom_retries}
)
assert_equal custom_retries, new_client.accessor.max_retries
end
end
end
describe '#contribution_definition_id' do
it 'can be set after instantiation' do
client = JD::Client.new(api_key, api_secret)
assert_nil client.contribution_definition_id
client.contribution_definition_id = '123'
assert_equal '123', client.contribution_definition_id
end
end
describe '#get' do
it 'returns the response as a Hash' do
response = VCR.use_cassette('get_organizations') { client.get('/platform/organizations') }
assert_kind_of Hash, response
assert_kind_of Integer, response['total']
assert response['values'].all?{|value| value['@type'] == 'Organization'}
assert response['values'].all?{|value| value.has_key?('links')}
end
it 'prepends the leading slash if needed' do
response = VCR.use_cassette('get_organizations') { client.get('platform/organizations') }
assert_kind_of Hash, response
assert_kind_of Integer, response['total']
assert response['values'].all?{|value| value['@type'] == 'Organization'}
assert response['values'].all?{|value| value.has_key?('links')}
end
end
describe '#post' do
let(:attributes) do
CONFIG.asset_attributes.merge(
links: [
{
'@type' => 'Link',
'rel' => 'contributionDefinition',
'uri' => "#{CONFIG.url}/contributionDefinitions/#{contribution_definition_id}"
}
]
)
end
it 'returns the response as a Hash' do
response = VCR.use_cassette('post_assets') do
client.post("/platform/organizations/#{organization_id}/assets", attributes)
end
assert_created response
assert_equal "#{base_url}/platform/assets/#{asset_id}", response.response.headers['Location']
end
it 'prepends the leading slash if needed' do
response = VCR.use_cassette('post_assets') do
client.post("platform/organizations/#{organization_id}/assets", attributes)
end
assert_created response
assert_equal "#{base_url}/platform/assets/#{asset_id}", response.response.headers['Location']
end
end
describe '#put' do
let(:new_title) { 'i REALLY like turtles!' }
let(:attributes) do
CONFIG.asset_attributes.slice(
:asset_category, :asset_type, :asset_sub_type, :links
).merge(
title: new_title,
links: [
{
'@type' => 'Link',
'rel' => 'contributionDefinition',
'uri' => "#{CONFIG.url}/contributionDefinitions/#{contribution_definition_id}"
}
]
)
end
it 'sends the request' do
response = VCR.use_cassette('put_asset') { client.put("/platform/assets/#{asset_id}", attributes) }
assert_no_content response
end
it 'prepends the leading slash if needed' do
response = VCR.use_cassette('put_asset') { client.put("platform/assets/#{asset_id}", attributes) }
assert_no_content response
end
end
describe '#delete' do
it 'sends the request' do
response = VCR.use_cassette('delete_asset') { client.delete("/platform/assets/#{asset_id}") }
assert_no_content response
end
it 'prepends the leading slash if needed' do
response = VCR.use_cassette('delete_asset') { client.delete("platform/assets/#{asset_id}") }
assert_no_content response
end
end
describe '#organizations' do
it 'returns a collection of organizations for this account' do
organizations = VCR.use_cassette('get_organizations') { client.organizations.all; client.organizations }
assert_kind_of JD::Request::Collection::Organizations, organizations
organizations.each do |organization|
assert_kind_of JD::Model::Organization, organization
end
end
end
describe '#contribution_products' do
it 'returns a collection of contribution products for this account' do
contribution_products = VCR.use_cassette('get_contribution_products') { client.contribution_products.all; client.contribution_products }
assert_kind_of JD::Request::Collection::ContributionProducts, contribution_products
contribution_products.each do |contribution_product|
assert_kind_of JD::Model::ContributionProduct, contribution_product
end
end
end
describe '#accessor' do
it 'returns an object that can make user-specific requests' do
assert_kind_of JD::NetHttpRetry::Decorator, accessor
end
end
end | 32.031414 | 142 | 0.681105 |
03490a554deb43f50f1eda4a6e7299a2ca43058f | 1,751 | raise "JRuby Required" unless defined?(JRUBY_VERSION)
#c = Solr::HTTP::Adapter::ApacheCommons.new('http://localhost:8983/solr')
#puts c.get('/select', :q=>'*:*')
#puts c.post('/update', '<commit/>', {}, {"Content-Type" => 'text/xml; charset=utf-8'})
require 'java'
proc {|files|
files.each do |f|
require File.join(File.dirname(__FILE__), f)
end
}.call(
%W(commons-codec-1.3.jar commons-httpclient-3.1.jar commons-logging-1.1.1.jar)
)
require 'uri'
class Solr::HTTP::Adapter::ApacheCommons
include Solr::HTTP::Util
attr :c, :uri
include_package 'org.apache.commons.httpclient'
include_package 'org.apache.commons.httpclient.methods'
include_package 'org.apache.commons.httpclient.params.HttpMethodParams'
include_package 'java.io'
def initialize(url)
@c = HttpClient.new
@uri = URI.parse(url)
end
def get(path, params={})
method = GetMethod.new(_build_url(path, params))
@c.executeMethod(method)
response = method.getResponseBodyAsString
method.releaseConnection()
response
end
def post(path, data, params={}, headers={})
method = PostMethod.new(_build_url(path, params))
method.setRequestBody(data)
headers.each_pair do |k,v|
method.addRequestHeader(k, v)
end
entity = StringRequestEntity.new(data);
method.setRequestEntity(entity);
@c.executeMethod(method)
#response = java.lang.String.new(method.getResponseBody)
response = method.getResponseBodyAsString
method.releaseConnection()
response
end
protected
def _build_url(path, params={})
url = @uri.scheme + '://' + @uri.host
url += ':' + @uri.port.to_s if @uri.port
url += @uri.path + path
build_url(url, params, @uri.query)
end
end | 26.134328 | 87 | 0.677327 |
332c761da02584367dbfda5cddbc3922babbf995 | 507 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_05_01
module Models
#
# Defines values for SecurityPartnerProviderConnectionStatus
#
module SecurityPartnerProviderConnectionStatus
Unknown = "Unknown"
PartiallyConnected = "PartiallyConnected"
Connected = "Connected"
NotConnected = "NotConnected"
end
end
end
| 26.684211 | 70 | 0.731755 |
acc5419167dcd9ba2b5c54f273f0c2174cae60f3 | 5,275 | require 'pty'
module Fastlane
module Actions
module SharedValues
LANE_NAME = :LANE_NAME
PLATFORM_NAME = :PLATFORM_NAME
ENVIRONMENT = :ENVIRONMENT
end
# Helper Methods
def self.git_author
s = `git log --name-status HEAD^..HEAD`
s = s.match(/Author:.*<(.*)>/)[1]
return s if s.to_s.length > 0
return nil
rescue
return nil
end
def self.last_git_commit
s = `git log -1 --pretty=%B`.strip
return s if s.to_s.length > 0
nil
end
# Returns the current git branch - can be replaced using the environment variable `GIT_BRANCH`
def self.git_branch
return ENV['GIT_BRANCH'] if ENV['GIT_BRANCH'].to_s.length > 0 # set by Jenkins
s = `git rev-parse --abbrev-ref HEAD`
return s.to_s.strip if s.to_s.length > 0
nil
end
def self.executed_actions
@executed_actions ||= []
end
# The shared hash can be accessed by any action and contains information like the screenshots path or beta URL
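    # Illustrative usage (the stored value is an assumption):
    #   Actions.lane_context[SharedValues::LANE_NAME] = 'ios beta'
    #   Actions.lane_context[SharedValues::LANE_NAME]  # => "ios beta"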
def self.lane_context
@lane_context ||= {}
end
# Used in tests to get a clear lane before every test
def self.clear_lane_context
@lane_context = nil
end
# Pass a block which should be tracked. One block = one testcase
# @param step_name (String) the name of the currently built code (e.g. snapshot, sigh, ...)
# This might be nil, in which case the step is not printed out to the terminal
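    # Illustrative usage (step name and block body are hypothetical):
    #   Actions.execute_action('deploy') { Actions.sh('echo uploading') }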
def self.execute_action(step_name)
start = Time.now # before the raise block, since `start` is required in the ensure block
raise 'No block given'.red unless block_given?
error = nil
exc = nil
begin
Helper.log_alert("Step: " + step_name) if step_name
yield
rescue => ex
exc = ex
error = caller.join("\n") + "\n\n" + ex.to_s
end
ensure
# This is also called, when the block has a return statement
if step_name
duration = Time.now - start
executed_actions << {
name: step_name,
error: error,
time: duration
}
end
raise exc if exc
end
# Execute a shell command
# This method will output the string and execute it
# Just an alias for sh_no_action
# @param log [boolean] should fastlane print out the executed command
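    # Illustrative usage: Actions.sh("ls -la", log: false)  # => the command output as a String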
def self.sh(command, log: true)
sh_no_action(command, log: log)
end
def self.sh_no_action(command, log: true)
# Set the encoding first, the user might have set it wrong
previous_encoding = [Encoding.default_external, Encoding.default_internal]
Encoding.default_external = Encoding::UTF_8
Encoding.default_internal = Encoding::UTF_8
command = command.join(' ') if command.is_a?(Array) # since it's an array of one element when running from the Fastfile
Helper.log.info ['[SHELL COMMAND]', command.yellow].join(': ') if log
result = ''
unless Helper.test?
exit_status = nil
status = IO.popen(command, err: [:child, :out]) do |io|
io.each do |line|
Helper.log.info ['[SHELL]', line.strip].join(': ')
result << line
end
io.close
exit_status = $?.to_i
end
if exit_status != 0
# this will also append the output to the exception (for the Jenkins reports)
raise "Exit status of command '#{command}' was #{exit_status} instead of 0. \n#{result}"
end
else
result << command # only for the tests
end
result
rescue => ex
raise ex
ensure
Encoding.default_external = previous_encoding.first
Encoding.default_internal = previous_encoding.last
end
# returns a list of official integrations
def self.get_all_official_actions
Dir[File.expand_path '*.rb', File.dirname(__FILE__)].collect do |file|
File.basename(file).gsub('.rb', '').to_sym
end
end
def self.load_default_actions
Dir[File.expand_path '*.rb', File.dirname(__FILE__)].each do |file|
require file
end
end
def self.load_external_actions(path)
raise 'You need to pass a valid path' unless File.exist?(path)
Dir[File.expand_path('*.rb', path)].each do |file|
require file
file_name = File.basename(file).gsub('.rb', '')
class_name = file_name.fastlane_class + 'Action'
class_ref = nil
begin
class_ref = Fastlane::Actions.const_get(class_name)
if class_ref.respond_to?(:run)
Helper.log.info "Successfully loaded custom action '#{file}'.".green
else
Helper.log.error "Could not find method 'run' in class #{class_name}.".red
Helper.log.error 'For more information, check out the docs: https://github.com/KrauseFx/fastlane'
raise "Plugin '#{file_name}' is damaged!"
end
rescue NameError => ex
# Action not found
Helper.log.error "Could not find '#{class_name}' class defined.".red
Helper.log.error 'For more information, check out the docs: https://github.com/KrauseFx/fastlane'
raise "Plugin '#{file_name}' is damaged!"
end
end
end
end
end
| 31.029412 | 125 | 0.620664 |
4a54558674fc4525abba085d3318c52f50035f5b | 1,501 | class BeersController < ApplicationController
before_action :find_user
before_action :authorized
def index
@beers = @user.beers
render json: @beers
end
def show
@beer = @user.beers.find_by(id: params[:id])
render json: @beer
end
def create
@beer = @user.beers.build(beer_params)
if @user.valid? && @beer.save
render json: @beer
else
render json: {error: "We cant create that", status: 400}
end
end
def update
@beer = Beer.find_by_id(params[:id])
if @beer.update(beer_params)
render json: @beer
else
render json: {error: "You can't update that beer", status:400}
end
end
def destroy
@beer = Beer.find_by_id(params[:id])
if @user.valid? && @beer
@beer.destroy
render json: { message: 'Beer was deleted'}, status: :accepted
else
render json: { error: 'Beer was not found'}, status: :not_acceptable
end
end
def showAllBeers
@beers = Beer.all
render json: @beers
end
private
def find_user
@user = User.find_by(id: params[:user_id])
end
def user_params
params.require(:user).permit(:name)
end
def beer_params
params.require(:beer).permit(:name, :malt_type, :malt_amount, :hop_type, :hop_amount, :yeast_type, :yeast_amount, :water_ph, :water_amount, :user_id)
end
end
| 24.209677 | 157 | 0.581612 |
61bc3edec2e7586bf1cfcdbc9920324fca6958c2 | 3,119 | class Sip < Formula
desc "Tool to create Python bindings for C and C++ libraries"
homepage "http://www.riverbankcomputing.co.uk/software/sip"
url "https://downloads.sf.net/project/pyqt/sip/sip-4.16.5/sip-4.16.5.tar.gz"
sha1 "d5d7b6765de8634eccf48a250dbd915f01b2a771"
bottle do
revision 1
sha1 "ac110e13e8b0f3f3c908fe4e9b4f6a010d483b64" => :yosemite
sha1 "d7cbfc32de5096fd1aa20a9123fba628ce546bf0" => :mavericks
sha1 "c67e51ac70a5258c16d6694010f2b9fd2363f346" => :mountain_lion
end
head "http://www.riverbankcomputing.co.uk/hg/sip", :using => :hg
depends_on :python => :recommended
depends_on :python3 => :optional
if build.without?("python3") && build.without?("python")
odie "sip: --with-python3 must be specified when using --without-python"
end
def install
if build.head?
# Link the Mercurial repository into the download directory so
# build.py can use it to figure out a version number.
ln_s cached_download + ".hg", ".hg"
# build.py doesn't run with python3
system "python", "build.py", "prepare"
end
Language::Python.each_python(build) do |python, version|
# Note the binary `sip` is the same for python 2.x and 3.x
system python, "configure.py",
"--deployment-target=#{MacOS.version}",
"--destdir=#{lib}/python#{version}/site-packages",
"--bindir=#{bin}",
"--incdir=#{include}",
"--sipdir=#{HOMEBREW_PREFIX}/share/sip"
system "make"
system "make", "install"
system "make", "clean"
end
end
def post_install
mkdir_p "#{HOMEBREW_PREFIX}/share/sip"
end
def caveats
"The sip-dir for Python is #{HOMEBREW_PREFIX}/share/sip."
end
test do
(testpath/"test.h").write <<-EOS.undent
#pragma once
class Test {
public:
Test();
void test();
};
EOS
(testpath/"test.cpp").write <<-EOS.undent
#include "test.h"
#include <iostream>
Test::Test() {}
void Test::test()
{
std::cout << "Hello World!" << std::endl;
}
EOS
(testpath/"test.sip").write <<-EOS.undent
%Module test
class Test {
%TypeHeaderCode
#include "test.h"
%End
public:
Test();
void test();
};
EOS
(testpath/"generate.py").write <<-EOS.undent
from sipconfig import SIPModuleMakefile, Configuration
m = SIPModuleMakefile(Configuration(), "test.build")
m.extra_libs = ["test"]
m.extra_lib_dirs = ["."]
m.generate()
EOS
(testpath/"run.py").write <<-EOS.undent
from test import Test
t = Test()
t.test()
EOS
system ENV.cxx, "-shared", "-o", "libtest.dylib", "test.cpp"
system "#{bin}/sip", "-b", "test.build", "-c", ".", "test.sip"
Language::Python.each_python(build) do |python, version|
ENV["PYTHONPATH"] = lib/"python#{version}/site-packages"
system python, "generate.py"
system "make", "-j1", "clean", "all"
system python, "run.py"
end
end
end
| 29.704762 | 78 | 0.599231 |
33375b8cee6a7fce01b065c595840b2905da64bb | 15,877 | require 'spec_helper'
describe Account do
it_should_behave_like "it has a logged-in user"
describe "creating an Account" do
before do
@account = Account.make
end
it "should allow currency to be set to a Currency object" do
currency = Currency.new("USD")
@account.currency = currency
@account.save!
@account.reload
@account.read_attribute(:currency).should == "USD"
end
end
describe "associations" do
it_should_behave_like 'it has a logged-in user'
before do
@account = Account.make(:user => current_user)
@account.balance = 0.0
end
it "should belong to a financial institution" do
@account.financial_inst.should == FinancialInst.find(@account.financial_inst_id)
end
it "should have many transactions" do
@tx1, @tx2 = Txaction.make(:account => @account), Txaction.make(:account => @account)
@account.txactions.should contain_same_elements_as([@tx1, @tx2])
end
it "should have many account balances" do
@ab1, @ab2 = AccountBalance.make(:account => @account), AccountBalance.make(:account => @account)
@account.account_balances.should include(@ab1)
@account.account_balances.should include(@ab2)
end
it "should return the newest balance as the last balance" do
@account.account_balances.destroy_all
yesterday = AccountBalance.make(:balance_date => 1.week.ago, :account_id => @account.id)
@account.last_balance(true).should == yesterday
today = AccountBalance.make(:balance_date => 1.day.ago, :account_id => @account.id)
@account.last_balance(true).should == today
end
it "should have and belong to many uploads" do
@u1 = Upload.make
@account.uploads << @u1
@account.uploads.should == [@u1]
end
end
describe "class" do
it "should provide the last 4 numbers of a given account number" do
Account.last4("123456789").should == "6789"
end
it "should strip any non-word characters before taking the last 4" do
Account.last4("12-3456-789").should == "6789"
end
it "should find the significant digits using the regex, if provided" do
Account.last4("12-3456-789-S.S000", '(\d{2})S+(\d+)').should == "89000"
end
it "should ignore a blank regex" do
Account.last4("12-3456-789-S.S000", '').should == "S000"
end
it "should raise an exception if no groupings are provided in the regex" do
lambda {
Account.last4("12-3456-789-S.S000", '\d{2}S+\d+')
}.should raise_error
end
end
shared_examples_for "a transaction account" do
it "should return a txaction hooked up to itself" do
@account.new_txaction.account.should == @account
end
end
shared_examples_for "a manual account" do
it_should_behave_like 'a transaction account'
it "should claim to be a manual account" do
@account.should be_a_manual_account
end
end
describe 'type CASH' do
before do
@account = Account.make(:cash)
end
it_should_behave_like 'a transaction account'
it "should claim to be a cash account" do
@account.should be_a_cash_account
end
it "should have no balance" do
@account.balance.should be_nil
@account.should_not have_balance
end
it "should not have an editable balance" do
@account.editable_balance?.should be_false
end
it "should raise on setting a balance" do
lambda { @account.balance = 0.0 }.
should raise_error(ArgumentError, "Cannot set balance on account with type Cash")
end
end
describe 'type MANUAL' do
before do
# given
@account = Account.new(:account_type_id => AccountType::MANUAL)
@txactions = [mock_model(Txaction, :calculate_balance! => 12.50, :amount => 2.50)]
end
it_should_behave_like 'a manual account'
it "should have a balance" do
@account.should have_balance
end
it "should have an editable balance" do
@account.editable_balance?.should be_true
end
end
describe 'type CHECKING' do
before do
@account = Account.new(:account_type_id => AccountType::CHECKING)
@new_balance = mock_model(AccountBalance, :balance_date => 6.hours.ago, :balance => 123.45)
@older_txaction = mock_model(Txaction, :date_posted => 1.day.ago, :calculate_balance! => 130.00)
@old_balance = mock_model(AccountBalance, :balance_date => 5.days.ago, :balance => 90.00)
end
it_should_behave_like 'a transaction account'
it "should not claim to be a cash account" do
@account.should_not be_a_cash_account
end
it "should not claim to be a manual account" do
@account.should_not be_a_manual_account
end
end
describe "callbacks" do
it_should_behave_like 'it has a logged-in user'
before do
@account = Account.make(:user => current_user)
end
it "should delete an account on destroy" do
@account.status.should == Constants::Status::ACTIVE
@account.destroy
Account.find_by_id(@account.id).should be_nil
end
it "should delete txactions on destroy" do
@tx = Txaction.make(:account => @account)
@account.destroy
Txaction.find_by_id(@tx.id).should be_nil
end
end
describe "created by SSU" do
before do
# given
@account = Account.make
@account_cred = mock_model(AccountCred)
@accounts = [@account]
@accounts.stub!(:count).and_return(1)
@account_cred.stub!(:accounts).and_return(@accounts)
@account_cred.stub!(:destroy)
@account.stub!(:account_cred).and_return(@account_cred)
@account.stub!(:ssu?).and_return(true)
@job = mock_model(SsuJob)
end
it "should set account status to disabled on destroy if its AccountCred has active accounts" do
# when
@accounts = [@account, @account]
@accounts.stub!(:count).and_return(2)
@account_cred.stub!(:accounts).and_return(@accounts)
@account.destroy
# then
Account.find(@account.id).status.should == Constants::Status::DISABLED
end
it "should destroy the account if its AccountCred has only this account" do
# when
@account.destroy
# then
lambda { Account.find(@account.id) }.should raise_error(ActiveRecord::RecordNotFound)
end
it "knows it was newly created if it has one upload and is listed in the job" do
# when
uploads = mock(:uploads, :count => 1)
@account.stub!(:uploads).and_return(uploads)
@job.stub!(:accounts).and_return([@account])
# then
@account.should be_newly_created_by(@job)
end
it "knows it was not newly created if it has more than one upload" do
# when
uploads = mock(:uploads, :count => 2)
@account.stub!(:uploads).and_return(uploads)
@job.stub!(:accounts).and_return([@account])
# then
@account.should_not be_newly_created_by(@job)
end
it "knows it was not newly created if the job does not list it" do
# when
uploads = mock(:uploads, :count => 1)
@account.stub!(:uploads).and_return(uploads)
@job.stub!(:accounts).and_return([])
# then
@account.should_not be_newly_created_by(@job)
end
it "knows it was newly created by a cred if it has one upload and is listed in the cred's job" do
# when
uploads = mock(:uploads, :count => 1)
@account.stub!(:uploads).and_return(uploads)
@job.stub!(:accounts).and_return([@account])
@cred = mock_model(AccountCred, :last_job => @job)
# then
@account.should be_newly_created_by(@cred)
end
end
describe "guids" do
before do
@account = Account.make
end
it "should generate a guid when created" do
@account.guid.should match(/[a-f0-9]{64}/)
end
it "should not generate a new guid when edited" do
lambda {
@account.update_attribute(:account_number, 1234)
}.should_not change(@account, :guid)
end
it "should not allow duplicate guids to be saved" do
@second = Account.make
@second.guid = @account.guid
lambda { @second.save! }.should raise_error
end
it "should be a protected attribute" do
@second = Account.new(:guid => "abc")
@second.guid.should_not == "abc"
end
end
describe Account do
before do
@account = Account.make
end
it "should be able to generate a guid" do
lambda { @account.send(:generate_guid) }.should change(@account, :guid)
end
it "should set account_type_id when account_type is set" do
@account.account_type = AccountType.find(3)
@account.account_type_id.should == 3
end
it "should provide its financial institution's name" do
@account.financial_inst = mock_model(FinancialInst, :name => "Citibank")
@account.financial_inst_name.should == "Citibank"
end
it "should provide the current balance" do
@account.should_receive(:last_balance).with(true).and_return(mock_model(AccountBalance, :balance => 25))
@account.balance.should == 25
end
it "should not provide a balance for cash accounts, which don't have a balance" do
@account.account_type = AccountType.find_by_raw_name("Cash")
@account.balance.should be_nil
end
it "should know if it is a cash account" do
@account.account_type = AccountType.find_by_raw_name("Cash")
@account.should be_cash_account
end
it "should know if it is a brokerage account" do
@account.account_type = AccountType.find_by_raw_name("Brokerage")
@account.should be_brokerage_account
end
it "should delete itself on destroy" do
@account.save; @account.destroy
Account.find_by_id(@account.id).should be_nil
end
it "should delete account balances on destroy" do
@account.save
account_balance = AccountBalance.make(:account => @account)
AccountBalance.find_by_id(account_balance.id).should_not be_nil
@account.destroy
AccountBalance.find_by_id(account_balance.id).should be_nil
end
it "should delete associated uploads on destroy if they don't reference other accounts" do
@account.save
@account.uploads << [Upload.make, Upload.make]
@account.uploads.count.should == 2
@account.destroy
@account.uploads.count.should == 0
end
it "should not delete associated uploads on destroy if they reference other accounts" do
account1 = Account.make
account2 = Account.make
upload1 = Upload.make
upload2 = Upload.make
account1.uploads << [upload1]
account2.uploads << [upload1, upload2]
account1.uploads.count.should == 1
account2.uploads.count.should == 2
account2.destroy
Upload.find_by_id(upload1.id).should_not be_nil
Upload.find_by_id(upload2.id).should be_nil
account1.destroy
Upload.find_by_id(upload1.id).should be_nil
end
it "should provide the last upload it received" do
@account.save
upload = Upload.make
AccountUpload.create(:upload => upload, :account => @account)
@account.last_upload.should == upload
end
describe "negate_balance! method" do
it "should not error if there is no last balance" do
lambda { @account.negate_balance! }.should_not raise_error
end
it "should set the negate balance flag on the account" do
lambda { @account.negate_balance! }.should change(@account, :negate_balance?)
end
it "should remove the negate balance flag if it is already set" do
lambda { @account.negate_balance! }.should change(@account, :negate_balance?)
end
it "should reverse the balance of the last balance" do
@account.last_balance = AccountBalance.new(:balance => 10.0)
@account.negate_balance!
@account.last_balance.balance.should == -10.0
end
end
describe "transaction disabling" do
it "disable_txactions_before_date should call change status with a date param" do
User.current = @account.user
@txaction_before_date = Txaction.make(:account => @account, :date_posted => Date.parse("05 July 2005"), :amount => 1, :status => Constants::Status::ACTIVE)
@txaction_after_date = Txaction.make(:account => @account, :date_posted => Date.parse("05 July 2008"), :amount => 1, :status => Constants::Status::ACTIVE)
@date = Date.parse("01 Jan 2007")
Txaction.should_receive(:change_status).with(@account.txactions.find_all_by_id(@txaction_before_date.id), Constants::Status::DISABLED)
@account.disable_txactions_before_date(@date)
end
end
describe "balance" do
before do
@account2 = Account.make
end
context "setting a balance" do
it "should cause the balance to to be set" do
@account2.balance=2.0
@account2.balance.should == 2.0
end
it "should adjust the balance date according to the user's time zone" do
user = User.make(:time_zone => "Eastern Time (US & Canada)")
User.stub!(:current).and_return(user)
time_now = Time.mktime(2010, 1, 17, 20, 46, 0)
Time.should_receive(:now).any_number_of_times.and_return(time_now)
@account2.balance = 3.0
@account2.reload.balance_date.should == time_now.in_time_zone("Eastern Time (US & Canada)")
end
end
end
end
describe "currency attribute" do
before(:each) do
@account = Account.make
@usd = Currency.new("USD")
end
it "allows assigning with a currency object" do
@account.currency = @usd
@account.currency.should == @usd
end
it "allows assigning with a string" do
@account.currency = "USD"
@account.currency.should == @usd
end
it "is not valid if the currency is unknown" do
@account[:currency] = "---"
lambda { @account.valid? }.should raise_error(Currency::UnknownCurrencyException)
end
it "returns a nil currency if currency is nil" do
@account.currency = nil
@account.currency.should be_nil
end
it "does not validate if currency is nil" do
@account.currency = nil
@account.should have(1).error_on(:currency)
end
end
describe "user-scoped unique id number" do
before do
@user = User.make
@account = Account.make(:user => @user)
@second_account = Account.make(:user => @user)
end
it "should exist as id_for_user" do
@account.should respond_to(:id_for_user)
end
it "should be provided when to_param is called" do
@account.to_param.should == 1
end
it "should be set when the account is saved" do
@account.id_for_user.should == 1
end
it "should increment to be bigger than the biggest id_for_user so far" do
@second_account.id_for_user.should == 2
end
it "should show up in the user's accounts" do
@user.accounts.should include(@account)
end
it "should not change for other accounts when an account is deleted" do
@account.destroy
@user.accounts.first.id_for_user.should == 2
end
it "should validate an id that is unique for its user" do
@second_account.should be_valid
end
it "should validate the the name is not blank" do
@account.name = ""
@account.should_not be_valid
@account.name = nil
@account.should_not be_valid
end
it "should fix ids that are not unique on validate" do
@second_account.id_for_user = @account.id_for_user
@second_account.should_not be_new_record
@second_account.save!
@second_account.id_for_user.should == 3
end
it "should be a protected attribute" do
@account = Account.new(:id_for_user => 1)
@account.id_for_user.should be_nil
end
end
end
| 30.650579 | 163 | 0.662216 |
e845d3394c7b64f242674a1102334af6ab81d055 | 5,312 | # powering tv for a period
# driving a car for a distance
# number of showers lasting 8 minutes
#
# Usage:
# equivalence = EnergyEquivalences.new(10000000000.0, :kwh)
# pounds = equivalence.value(:£)
# car_distance = equivalence.value(:ice_car_distance_km)
# car_distance, units, description = equivalence.verbose_value(:car_distance_km)
#
# research:
# https://www.epa.gov/energy/greenhouse-gas-equivalencies-calculator but wrong grid carbon
# 1000 kWh = 90,000 smartphones charged
# = 27 incandescent bulbs switched to LED
# = 12 tree seedlings grown for 10 years
# BBC
# driving a car X miles
# heating the average home for X days
# litres of shower water = X 8 minute showers
# https://en.wikipedia.org/wiki/Carbon_footprint
# flights, cars, trucks, rail, sea, cement
# other ideas:
# cycling
# homes, gas, electricity, tvs, tumbler dryers, washing machines, school dinners, lifetime emissions
# recycling boxes of recycling (B&NES), jam sandwiches, laptop for X years
# Carbon trust
# http://www.knowlton.org.uk/wp-content/files/Energy%20carbon%20conversions.pdf
# different sized cars, airplanes, rail
#
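# A minimal sketch of the class-level API defined below (the argument values and
# the :uk_average grid-intensity key are made up for illustration; the real type
# and unit keys come from the equivalence configuration):
#
#   equivalent, equivalence_text, calc_text, from_desc, to_desc =
#     EnergyEquivalences.convert(1000.0, :kwh, :electricity, :km, :ice_car, :co2, :uk_average)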
class EnergyEquivalences
attr_reader :kwh_deprecated, :fuel_type_deprecated
def initialize_deprecated(value, units, fuel_type, grid_intensity)
@fuel_type = fuel_type
@kwh = EnergyEquivalences.conversion_ratio(value, units, fuel_type, :kwh, fuel_type, units, grid_intensity)
end
def self.conversion_ratio_deprecated(value, from_unit, from_type, to_unit, to_type, via_unit, grid_intensity)
ratio, _from_desc, _to_desc = convert(value, from_unit, from_type, to_unit, to_type, via_unit, grid_intensity)
ratio
end
def self.convert(value, from_unit, from_type, to_unit, to_type, via_unit, grid_intensity)
# ap( ENERGY_EQUIVALENCES2)
check_got_co2_kwh_or_£(via_unit)
from_unit_conversion, from_conversion_description, from_type_description = equivalence_conversion_rate_and_description(from_type, via_unit, grid_intensity)
to_unit_conversion, to_conversion_description, to_type_description = equivalence_conversion_rate_and_description(to_type, via_unit, grid_intensity)
equivalent = value * from_unit_conversion / to_unit_conversion
equivalence = equivalence_description(value, from_unit, from_type_description, equivalent, to_unit, to_type_description)
calc = calculation_description(value, from_unit, from_unit_conversion, to_unit_conversion, equivalent, to_unit, via_unit)
[equivalent, equivalence, calc, from_conversion_description, to_conversion_description]
end
def self.equivalence_description(from_value, from_unit, from_type_description, to_value, to_unit, to_type_description)
equivalence = # commented out following CT request 27Mar2019; description(from_value, from_unit, from_type_description) +
'This saving is equivalent to ' +
description(to_value, to_unit, to_type_description)
end
def self.calculation_description(from_value, from_unit, from_unit_conversion, to_unit_conversion, to_value, to_unit, via_unit)
"Therefore " +
"#{FormatEnergyUnit.format(from_unit, from_value)} " +
(from_unit_conversion == 1.0 ? '' : " × #{FormatEnergyUnit.format(via_unit, from_unit_conversion)}/#{from_unit}") +
" ÷ #{FormatEnergyUnit.format(via_unit, to_unit_conversion)}/#{to_unit.to_s.humanize} "\
"= #{FormatEnergyUnit.format(to_unit, to_value)} "\
end
def self.description(value, unit, description)
description % FormatEnergyUnit.format(unit, value)
end
def self.random_equivalence_type_and_via_type(grid_intensity)
random_type = equivalence_types(false)[rand(equivalence_types(false).length)]
equivalence = equivalence_configuration(random_type, grid_intensity)
random_via_type = equivalence[:conversions].keys[rand(equivalence[:conversions].length)]
[random_type, random_via_type]
end
def self.equivalence_conversion_rate_and_description(type, via_unit, grid_intensity)
type = :electricity if type == :storage_heaters || type == :solar_pv
type_data = equivalence_configuration(type, grid_intensity)
type_description = type_data[:description]
rate = type_data[:conversions][via_unit][:rate]
description = type_data[:conversions][via_unit][:description]
[rate, description, type_description]
end
def self.check_got_co2_kwh_or_£(unit)
raise EnergySparksUnexpectedStateException.new('Unexpected nil unit for conversion from electricity or gas') if unit.nil?
unless [:kwh, :co2, :£].include?(unit)
raise EnergySparksUnexpectedStateException.new("Unexpected unit #{unit} for conversion from electricity or gas")
end
end
private
def value_units(value, units) # should be moved to separate class
case units
when :kwh
sprintf('%6.0f kWh', value)
when :£
sprintf('£%6.0f', value)
when :ice_car_distance_km, :bev_car_distance_km
sprintf('%6.1fkm', value)
else
unknown_type(units)
end
end
end
class ElectricityEquivalencekWh < EnergyEquivalences
def initialize(kwh)
super(kwh, :kwh, :electricity)
end
end
class GasEquivalencekWh < EnergyEquivalences
def initialize(kwh)
super(kwh, :kwh, :gas)
end
end
| 42.496 | 159 | 0.748494 |
2853852233685e6ae63427a25983a9eba6e2d304 | 949 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'ops_build/version'
Gem::Specification.new do |spec|
spec.name = 'ops_build'
spec.version = OpsBuild::VERSION
spec.authors = ['HAMSIK Adam']
spec.email = ['[email protected]']
spec.summary = %q(RSD Devops related build tool to run packer, berkshelf)
spec.description = %q(RSD Devops related build tool to run packer, berkshelf)
spec.homepage = 'http://www.rsd.com'
spec.license = 'BSD'
spec.required_ruby_version = '>= 2.1.0'
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_runtime_dependency 'thor', '~> 0.19.1'
spec.add_development_dependency 'bundler', '~> 1.6'
end
| 35.148148 | 81 | 0.643836 |
08c3031cde706dbcea225b4636293be6a61a39a2 | 941 | =begin
#Datadog API V2 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for DatadogAPIClient::V2::LogsSort
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe DatadogAPIClient::V2::LogsSort do
let(:instance) { DatadogAPIClient::V2::LogsSort.new }
describe 'test an instance of LogsSort' do
it 'should create an instance of LogsSort' do
expect(instance).to be_instance_of(DatadogAPIClient::V2::LogsSort)
end
end
end
| 29.40625 | 107 | 0.77577 |
33d691519dbc8a6829caf8f1ee4b6917f5b7c732 | 1,454 | require 'chronic'
module HbEshopReports
class Orders
def initialize(start__time, end__time)
@start_time = start__time
@end_time = end__time
@ledgers = Order.where("created_at >= ? AND created_at <= ?", start_time, end_time)
end
def finished_order_ids
@order_ids ||= Order.finished.where("created_at >= ? AND created_at <= ?", start_time, end_time).pluck(:id)
end
def quantity
@quantity ||= finished_order_ids.size
end
def taxes_collected
@taxes_collected ||= (Invoice.where("created_at >= ? AND created_at <= ?", start_time, end_time).where({:invoices => {:state => 'paid'}}).sum(:tax_amount)).to_f / 100.0
end
def total_amount
@total_amount ||= Invoice.where("created_at >= ? AND created_at <= ?", start_time, end_time).where({:invoices => {:state => 'paid'}}).sum(:amount).round_at(2)
end
def taxes_returned
@taxes_returned ||= (ReturnAuthorization.where("created_at >= ? AND created_at <= ?", start_time, end_time).where({:return_authorizations => {:state => 'complete'}}).sum(:tax_amount)).to_f / 100.0
end
def amount_returned
@amount_returned ||= ReturnAuthorization.where("created_at >= ? AND created_at <= ?", start_time, end_time).where({:return_authorizations => {:state => 'complete'}}).sum(:amount).round_at(2)
end
def start_time
@start_time
end
def end_time
@end_time
end
end
end
| 32.311111 | 204 | 0.650619 |
61a377c947642ca7fa9ce1845a0c46f57e67ebfe | 2,061 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Recipe to put in the necessary changes for limits.conf - allowing for more open files and processes.
# The deploy user is hard coded, but may want to be put in as a variable at some point.
include_recipe "mongodb"
file2append = '/etc/security/limits.conf'
if File.exists?(file2append)
file file2append do
additional_content = %Q{
# Automatically added to #{file2append}-mongodb
deploy soft nofile 65572
deploy hard nofile 65572
deploy soft noproc 16384
deploy hard noproc 16384
mongodb soft nofile 65572
mongodb hard nofile 65572
mongodb soft noproc 16384
mongodb hard noproc 16384
root soft nofile 65572
root hard nofile 65572
root soft noproc 16384
root hard noproc 16384
# End appending of #{file2append}-mongodb
}
only_if do
current_content = File.read(file2append)
current_content.index(additional_content).nil?
end
current_content = File.read(file2append)
orig_content = current_content.gsub(/\n# Automatically added to #{file2append}-mongodb(.|\n)*# End appending of #{file2append}-mongodb\n/, '')
owner "root"
group "root"
mode "0644"
content orig_content + additional_content
notifies :restart, "service[mongodb]", :immediately
end
end
| 32.714286 | 153 | 0.73605 |
1d8d217c7dcfa49f7df78f9084bf91accd6d0821 | 571 | class UsersController < ApplicationController
before_action :require_login, except: [:home, :new, :create]
def new
@user = User.new
end
def create
@user = User.create(user_params)
if @user.save
session[:user_id] = @user.id
redirect_to user_path(@user)
else
render :new
end
end
def show
@user = User.find_by(id: params[:id])
end
private
def user_params
params.require(:user).permit(:name, :email, :password, :expertise)
end
end
| 18.419355 | 74 | 0.567426 |
2617b25252f2e8ec7d36950de474f3c42c391f8e | 9,209 | =begin
#Influx OSS API Service
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Generated by: https://openapi-generator.tech
=end
require 'date'
require 'time'
module InfluxDB2::API
class ResourceOwner
attr_accessor :id
attr_accessor :oauth_id
attr_accessor :name
# If inactive the user is inactive.
attr_reader :status
attr_accessor :links
attr_reader :role
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id',
:'oauth_id' => :'oauthID',
:'name' => :'name',
:'status' => :'status',
:'links' => :'links',
:'role' => :'role',
}
end
# Attribute type mapping.
def self.openapi_types
{
:'id' => :'String',
:'oauth_id' => :'String',
:'name' => :'String',
:'status' => :'String',
:'links' => :'UserResponseLinks',
:'role' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# List of class defined in allOf (OpenAPI v3)
def self.openapi_all_of
[
:'ResourceOwnerAllOf',
:'UserResponse'
]
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `InfluxDB2::ResourceOwner` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `InfluxDB2::ResourceOwner`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'id')
self.id = attributes[:'id']
end
if attributes.key?(:'oauth_id')
self.oauth_id = attributes[:'oauth_id']
end
if attributes.key?(:'name')
self.name = attributes[:'name']
end
if attributes.key?(:'status')
self.status = attributes[:'status']
else
self.status = 'active'
end
if attributes.key?(:'links')
self.links = attributes[:'links']
end
if attributes.key?(:'role')
self.role = attributes[:'role']
else
self.role = 'owner'
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @name.nil?
invalid_properties.push('invalid value for "name", name cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @name.nil?
status_validator = EnumAttributeValidator.new('String', ["active", "inactive"])
return false unless status_validator.valid?(@status)
role_validator = EnumAttributeValidator.new('String', ["owner"])
return false unless role_validator.valid?(@role)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] status Object to be assigned
def status=(status)
validator = EnumAttributeValidator.new('String', ["active", "inactive"])
unless validator.valid?(status)
fail ArgumentError, "invalid value for \"status\", must be one of #{validator.allowable_values}."
end
@status = status
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] role Object to be assigned
def role=(role)
validator = EnumAttributeValidator.new('String', ["owner"])
unless validator.valid?(role)
fail ArgumentError, "invalid value for \"role\", must be one of #{validator.allowable_values}."
end
@role = role
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
oauth_id == o.oauth_id &&
name == o.name &&
status == o.status &&
links == o.links &&
role == o.role
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[id, oauth_id, name, status, links, role].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
InfluxDB2::API.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.868339 | 202 | 0.605386 |
039a66df4b1e484aeddc5ab6c04fccfd7bc9f306 | 640 | module Steps::Lateness
class InTimeForm < BaseForm
attribute :in_time, String
def self.choices
InTime.values.map(&:to_s)
end
validates_inclusion_of :in_time, in: choices
private
def in_time_value
InTime.new(in_time)
end
def changed?
tribunal_case.in_time != in_time_value
end
def persist!
raise 'No TribunalCase given' unless tribunal_case
return true unless changed?
tribunal_case.update(
in_time: in_time_value,
# The following are dependent attributes that need to be reset
lateness_reason: nil
)
end
end
end
| 20 | 70 | 0.654688 |
ffb6ccb0e890f5c98715e21fe01655c3e5cc9a2f | 3,121 | require 'spec_helper'
describe Specjour::CLI do
subject { Specjour::CLI.new }
let(:fake_pid) { 100_000_000 }
before do
stub(Specjour::CPU).cores.returns(27)
stub(Specjour::Dispatcher).new.returns(stub!)
stub(Specjour::Manager).new.returns(stub!)
stub(Specjour::Worker).new.returns(stub!)
end
describe "#listen" do
let(:manager) { NullObject.new }
before do
stub(Dir).pwd { '/home/someone/foo-bar' }
end
def manager_receives_options(options)
expected_options = hash_including(options)
mock(Specjour::Manager).new(expected_options).returns(manager)
end
it "defaults workers to system cores" do
manager_receives_options("worker_size" => 27)
Specjour::CLI.start %w(listen -p none)
end
it "accepts an array of projects to listen to" do
manager_receives_options("registered_projects" => %w(one two three))
Specjour::CLI.start %w(listen --projects one two three)
end
it "listens to the current path by default" do
manager_receives_options("registered_projects" => %w(foo-bar))
Specjour::CLI.start %w(listen)
end
end
describe "#dispatch" do
let(:dispatcher) { NullObject.new }
def dispatcher_receives_options(options)
expected_options = hash_including(options)
mock(Specjour::Dispatcher).new(expected_options).returns(dispatcher)
end
it "defaults path to the current directory" do
stub(Dir).pwd.returns("eh")
dispatcher_receives_options("project_path" => "eh")
Specjour::CLI.start %w(dispatch)
end
it "defaults workers to system cores" do
dispatcher_receives_options("worker_size" => 27)
Specjour::CLI.start %w(dispatch)
end
it "accepts a project alias" do
dispatcher_receives_options("project_alias" => "eh")
Specjour::CLI.start %w(dispatch --alias eh)
end
end
describe "#work" do
it "starts a worker with the required parameters" do
worker = NullObject.new
args = {'project_path' => "eh", 'printer_uri' => "specjour://1.1.1.1:12345", 'number' => 1, 'task' => 'run_tests'}
mock(Specjour::Worker).new(hash_including(args)).returns(worker)
Specjour::CLI.start %w(work --project-path eh --printer-uri specjour://1.1.1.1:12345 --number 1 --task run_tests)
end
end
describe "#handle_logging" do
before do
stub(subject).options.returns({})
end
it "enables logging" do
subject.options['log'] = true
mock(Specjour).new_logger(Logger::DEBUG).returns(stub!)
subject.send(:handle_logging)
end
it "doesn't enable logging" do
dont_allow(Specjour).new_logger
subject.send(:handle_logging)
end
end
describe "#prepare" do
let(:dispatcher) { NullObject.new }
def dispatcher_receives_options(options)
expected_options = hash_including(options)
mock(Specjour::Dispatcher).new(expected_options).returns(dispatcher)
end
it "sets the worker task to 'prepare'" do
dispatcher_receives_options("worker_task" => "prepare")
Specjour::CLI.start %w(prepare)
end
end
end
| 29.443396 | 120 | 0.674784 |
114a256a6091633273fb3c1dbb7de5a44b278440 | 38,610 | require 'redis'
require_relative '../../spec/spec_helper.rb'
rack_env = ENV['RACK_ENV']
describe DynamicDynamoDBManager do
before do
puts 'Creating all dynamoDB tables from an API list. Use December to test a new year'
Timecop.freeze Date.new(2014, 12, 24) do
@manager = DynamicDynamoDBManager.new(verbose: true)
@manager.get_all_required_tables(true)
@manager.get_all_tables(true)
@manager.create_tables
end
end
after do
@manager.get_all_tables(true, true).each do |t|
@manager.delete_table(t)
end
end
it 'returns a list of created tables' do
Timecop.freeze Date.new(2014, 12, 24) do
# Refresh tables
tables = @manager.get_all_tables(true)
# Daily with rotate 2 means 1 day ahead and 2 behind
expect(tables).to include("#{rack_env}.daily-purge2.20141225")
expect(tables).to include("#{rack_env}.daily-purge2.20141224")
expect(tables).to include("#{rack_env}.daily-purge2.20141223")
# Weekly with rotate 2 means 1 week ahead and 2 behind.
# Starting on the next monday for the one in the future.
expect(tables).to include("#{rack_env}.weekly-purge2.20141229")
expect(tables).to include("#{rack_env}.weekly-purge2.20141222")
expect(tables).to include("#{rack_env}.weekly-purge2.20141215")
# Monthly with rotate 4 means 1 month ahead and 4 behind.
# Starting on the first day of the month.
expect(tables).to include("#{rack_env}.monthly-purge4.20150101")
expect(tables).to include("#{rack_env}.monthly-purge4.20140901")
expect(tables).to include("#{rack_env}.monthly-purge4.20141001")
expect(tables).to include("#{rack_env}.monthly-purge4.20141101")
expect(tables).to include("#{rack_env}.monthly-purge4.20141201")
# daily with rotate 4 means 1 day ahead and 4 behind.
# The nopurge means they will not get deleted.
expect(tables).to include("#{rack_env}.daily-nopurge.20141225")
expect(tables).to include("#{rack_env}.daily-nopurge.20141224")
expect(tables).to include("#{rack_env}.daily-nopurge.20141223")
expect(tables).to include("#{rack_env}.daily-nopurge.20141222")
expect(tables).to include("#{rack_env}.daily-nopurge.20141221")
end
end
it 'sets primary and secondary redis keys' do
Timecop.freeze Date.new(2014, 12, 24) do
# Refresh tables
@manager.get_all_tables(true)
env_override = 'env_override_hash'
primary_field = 'PRIMARY_DYNAMODB_TABLE'
secondary_field = 'SECONDARY_DYNAMODB_TABLE'
redis_client = Redis.new(url: ENV['REDIS_URL'])
begin
redis_client.hdel(env_override, redis_client.hkeys(env_override))
rescue Redis::CommandError
# ignore
end
expect(redis_client.hlen(env_override)).to eql(0)
# Set Redis keys
@manager.update_redis
# Refresh and get tables
tables = @manager.get_all_required_tables(true)
primaries = %W(#{rack_env}.daily-purge2.20141224 #{rack_env}.daily-nopurge.20141224 #{rack_env}.weekly-purge2.20141222 #{rack_env}.monthly-purge4.20141201)
secondaries = %W(#{rack_env}.daily-purge2.20141223 #{rack_env}.daily-nopurge.20141223 #{rack_env}.weekly-purge2.20141215 #{rack_env}.monthly-purge4.20141101)
expect(redis_client.hlen(env_override)).to eql(primaries.length + secondaries.length)
tables.each do |table|
table_name = table['TableName']
clean_tablename = table_name.split(/\./)[1].gsub(/[^\w]/, '_').upcase
primary_field_name = "DYNAMODB_PRIMARY_#{clean_tablename}"
secondary_field_name = "DYNAMODB_SECONDARY_#{clean_tablename}"
if primaries.include?(table_name)
expect(table).to include(primary_field)
expect(table[primary_field]).to be true
expect(redis_client.hexists(env_override, primary_field_name)).to be true
expect(redis_client.hget(env_override, primary_field_name)).to eql(table_name)
end
if secondaries.include?(table_name)
expect(table).to include(secondary_field)
expect(table[secondary_field]).to be true
expect(redis_client.hexists(env_override, secondary_field_name)).to be true
expect(redis_client.hget(env_override, secondary_field_name)).to eql(table_name)
end
unless primaries.include?(table_name) or secondaries.include?(table_name)
expect(table).to_not include(primary_field)
expect(table).to_not include(secondary_field)
expect(redis_client.hget(env_override, primary_field_name)).to_not eql(table_name)
expect(redis_client.hget(env_override, secondary_field_name)).to_not eql(table_name)
end
end
end
Timecop.freeze Date.new(2015, 1, 1) do
# Refresh tables
@manager.get_all_tables(true)
env_override = 'env_override_hash'
primary_field = 'PRIMARY_DYNAMODB_TABLE'
secondary_field = 'SECONDARY_DYNAMODB_TABLE'
redis_client = Redis.new(url: ENV['REDIS_URL'])
# create tables
@manager.create_tables
@manager.get_all_tables(true)
# Set Redis keys
@manager.update_redis
# Refresh and get tables
tables = @manager.get_all_required_tables(true)
primaries = %W(#{rack_env}.daily-purge2.20150101 #{rack_env}.daily-nopurge.20150101 #{rack_env}.weekly-purge2.20141229 #{rack_env}.monthly-purge4.20150101)
secondaries = %W(#{rack_env}.daily-purge2.20141231 #{rack_env}.daily-nopurge.20141231 #{rack_env}.weekly-purge2.20141222 #{rack_env}.monthly-purge4.20141201)
expect(redis_client.hlen(env_override)).to eql(primaries.length + secondaries.length)
tables.each do |table|
table_name = table['TableName']
clean_tablename = table_name.split(/\./)[1].gsub(/[^\w]/, '_').upcase
primary_field_name = "DYNAMODB_PRIMARY_#{clean_tablename}"
secondary_field_name = "DYNAMODB_SECONDARY_#{clean_tablename}"
if primaries.include?(table_name)
expect(table).to include(primary_field)
expect(table[primary_field]).to be true
expect(redis_client.hexists(env_override, primary_field_name)).to be true
expect(redis_client.hget(env_override, primary_field_name)).to eql(table_name)
end
if secondaries.include?(table_name)
expect(table).to include(secondary_field)
expect(table[secondary_field]).to be true
expect(redis_client.hexists(env_override, secondary_field_name)).to be true
expect(redis_client.hget(env_override, secondary_field_name)).to eql(table_name)
end
unless primaries.include?(table_name) or secondaries.include?(table_name)
expect(table).to_not include(primary_field)
expect(table).to_not include(secondary_field)
expect(redis_client.hget(env_override, primary_field_name)).to_not eql(table_name)
expect(redis_client.hget(env_override, secondary_field_name)).to_not eql(table_name)
end
end
end
end
it 'sets the throughput to values that are cost-effective' do
Timecop.freeze Date.new(2014, 12, 24) do
# Refresh tables
@manager.get_all_tables(true)
tables = @manager.get_all_required_tables(true)
# Daily rotation for 20141224, and 20141225 should be set higher than any other table for the same prefix
#
# Weekly rotation with date 20141222 should be set higher than any other table for the same prefix.
# the week after, 20141229 should be set lower
#
# Monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should be set lower
high_tp = %W(#{rack_env}.daily-purge2.20141224 #{rack_env}.daily-purge2.20141225 #{rack_env}.weekly-purge2.20141222 #{rack_env}.monthly-purge4.20141201)
low_tp = %W(#{rack_env}.daily-purge2.20141223 #{rack_env}.weekly-purge2.20141215 #{rack_env}.weekly-purge2.20141229 #{rack_env}.monthly-purge4.20140901 #{rack_env}.monthly-purge4.20141001 #{rack_env}.monthly-purge4.20141101 #{rack_env}.monthly-purge4.20150101)
tables.each do |table|
table_name = table['TableName']
write_cap = table['Properties']['ProvisionedThroughput']['WriteCapacityUnits']
read_cap = table['Properties']['ProvisionedThroughput']['ReadCapacityUnits']
if low_tp.include?(table_name)
expect(read_cap).to eq(5)
expect(write_cap).to eq(30)
elsif high_tp.include?(table_name)
expect(read_cap).to eq(50)
expect(write_cap).to eq(600)
end
end
end
Timecop.freeze Date.new(2014, 12, 28) do
# Refresh tables
@manager.get_all_tables(true)
tables = @manager.get_all_required_tables(true)
# Daily rotation for 20141228, and 20141229 should be set higher than any other table for the same prefix
#
# weekly rotation with date 20141222 should be set higher than any other table for the same prefix.
# the week after, 20141229 should also be set higher
#
# monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should be set lower
low_tp = %W(#{rack_env}.daily-purge2.20141227 #{rack_env}.weekly-purge2.20141215 #{rack_env}.monthly-purge4.20140901 #{rack_env}.monthly-purge4.20141001 #{rack_env}.monthly-purge4.20141101 #{rack_env}.monthly-purge4.20150101)
high_tp = %W(#{rack_env}.daily-purge2.20141228 #{rack_env}.daily-purge2.20141229 #{rack_env}.weekly-purge2.20141222 #{rack_env}.weekly-purge2.20141229 #{rack_env}.monthly-purge4.20141201)
tables.each do |table|
table_name = table['TableName']
write_cap = table['Properties']['ProvisionedThroughput']['WriteCapacityUnits']
read_cap = table['Properties']['ProvisionedThroughput']['ReadCapacityUnits']
if low_tp.include?(table_name)
expect(read_cap).to eq(5)
expect(write_cap).to eq(30)
elsif high_tp.include?(table_name)
expect(read_cap).to eq(50)
expect(write_cap).to eq(600)
end
end
end
Timecop.freeze Date.new(2014, 12, 31) do
# Refresh tables
@manager.get_all_tables(true)
tables = @manager.get_all_required_tables(true)
# Daily rotation for 20141231, and 20150101 should be set higher than any other table for the same prefix
#
# weekly rotation with date 20141229 should be set higher than any other table for the same prefix.
# the week after, 20150105 should be set lower
#
# monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should also be set higher
low_tp = %W(#{rack_env}.daily-purge2.20141230 #{rack_env}.weekly-purge2.20141222 #{rack_env}.weekly-purge2.20150105 #{rack_env}.monthly-purge4.20140901 #{rack_env}.monthly-purge4.20141001 #{rack_env}.monthly-purge4.20141101)
high_tp = %W(#{rack_env}.daily-purge2.20141231 #{rack_env}.daily-purge2.20150101 #{rack_env}.weekly-purge2.20141229 #{rack_env}.monthly-purge4.20141201 #{rack_env}.monthly-purge4.20150101)
tables.each do |table|
table_name = table['TableName']
write_cap = table['Properties']['ProvisionedThroughput']['WriteCapacityUnits']
read_cap = table['Properties']['ProvisionedThroughput']['ReadCapacityUnits']
if low_tp.include?(table_name)
expect(read_cap).to eq(5)
expect(write_cap).to eq(30)
elsif high_tp.include?(table_name)
expect(read_cap).to eq(50)
expect(write_cap).to eq(600)
end
end
end
end
it 'Updates the throughput to correct values' do
Timecop.freeze Date.new(2014, 12, 24) do
# Refresh all tables
@manager.get_all_tables(true)
tables = @manager.get_all_required_tables(true)
tables.each do |table|
table_name = table['TableName']
table_info = @manager.dynamo_client.describe_table({:table_name => table_name})
if table['Properties'].include? 'OutdatedTableProvisionedThroughput'
read_cap = table['Properties']['ProvisionedThroughput']['ReadCapacityUnits']
write_cap = table['Properties']['ProvisionedThroughput']['WriteCapacityUnits']
old_read_cap = table['Properties']['OutdatedTableProvisionedThroughput']['ReadCapacityUnits']
old_write_cap = table['Properties']['OutdatedTableProvisionedThroughput']['WriteCapacityUnits']
actual_read_cap = table_info.table[:provisioned_throughput][:read_capacity_units]
actual_write_cap = table_info.table[:provisioned_throughput][:write_capacity_units]
# Daily rotation for 20141224, and 20141225 should be set higher than any other table for the same prefix
if table_name === "#{rack_env}.daily-purge2.20141223"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.daily-purge2.20141224"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.daily-purge2.20141225"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
# weekly rotation with date 20141222 should be set higher than any other table for the same prefix.
# the week after, 20141229 should be set lower
if table_name === "#{rack_env}.weekly-purge2.20141215"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.weekly-purge2.20141222"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.weekly-purge2.20141229"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
# monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should be set lower
if table_name === "#{rack_env}.monthly-purge4.20141101"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20141201"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150101"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
end
if table['Properties'].include? 'GlobalSecondaryIndexes'
if table['Properties']['GlobalSecondaryIndexes'][0].include? 'OutdatedTableProvisionedThroughput'
gsi_read_cap = table['Properties']['GlobalSecondaryIndexes'][0]['ProvisionedThroughput']['ReadCapacityUnits']
gsi_write_cap = table['Properties']['GlobalSecondaryIndexes'][0]['ProvisionedThroughput']['WriteCapacityUnits']
old_gsi_read_cap = table['Properties']['GlobalSecondaryIndexes'][0]['OutdatedTableProvisionedThroughput']['ReadCapacityUnits']
old_gsi_write_cap = table['Properties']['GlobalSecondaryIndexes'][0]['OutdatedTableProvisionedThroughput']['WriteCapacityUnits']
actual_gsi_read_cap = table_info.table[:global_secondary_indexes][0][:provisioned_throughput][:read_capacity_units]
actual_gsi_write_cap = table_info.table[:global_secondary_indexes][0][:provisioned_throughput][:write_capacity_units]
# monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should be set lower
if table_name === "#{rack_env}.monthly-purge4.20141101"
expect(actual_gsi_read_cap).to eq(old_gsi_read_cap)
expect(actual_gsi_write_cap).to eq(old_gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20141201"
expect(actual_gsi_read_cap).to eq(gsi_read_cap)
expect(actual_gsi_write_cap).to eq(gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150101"
expect(actual_gsi_read_cap).to eq(old_gsi_read_cap)
expect(actual_gsi_write_cap).to eq(old_gsi_write_cap)
end
end
end
end
end
Timecop.freeze Date.new(2014, 12, 31) do
# Refresh all tables
@manager.get_all_required_tables(true)
# Refresh all tables
@manager.get_all_tables(true)
# create some tables
@manager.create_tables
# Now we update the tables throughput
puts 'Updating throughput on tables'
@manager.update_tables
# Refresh all tables
tables = @manager.get_all_required_tables(true)
tables.each do |table|
table_name = table['TableName']
table_info = @manager.dynamo_client.describe_table({:table_name => table_name})
if table['Properties'].include? 'OutdatedTableProvisionedThroughput'
read_cap = table['Properties']['ProvisionedThroughput']['ReadCapacityUnits']
write_cap = table['Properties']['ProvisionedThroughput']['WriteCapacityUnits']
old_read_cap = table['Properties']['OutdatedTableProvisionedThroughput']['ReadCapacityUnits']
old_write_cap = table['Properties']['OutdatedTableProvisionedThroughput']['WriteCapacityUnits']
actual_read_cap = table_info.table[:provisioned_throughput][:read_capacity_units]
actual_write_cap = table_info.table[:provisioned_throughput][:write_capacity_units]
# Daily rotation for 20141231, and 20150101 should be set higher than any other table for the same prefix
if table_name === "#{rack_env}.daily-purge2.20141230"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.daily-purge2.20141231"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.daily-purge2.20150101"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
# weekly rotation with date 20141229 should be set higher than any other table for the same prefix.
# the week after, 20150105 should be set lower
if table_name === "#{rack_env}.weekly-purge2.20141222"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.weekly-purge2.20141229"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.weekly-purge2.20150105"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
# monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should also be set higher
if table_name === "#{rack_env}.monthly-purge4.20141101"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20141201"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150101"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
end
if table['Properties'].include? 'GlobalSecondaryIndexes'
if table['Properties']['GlobalSecondaryIndexes'][0].include? 'OutdatedTableProvisionedThroughput'
gsi_read_cap = table['Properties']['GlobalSecondaryIndexes'][0]['ProvisionedThroughput']['ReadCapacityUnits']
gsi_write_cap = table['Properties']['GlobalSecondaryIndexes'][0]['ProvisionedThroughput']['WriteCapacityUnits']
old_gsi_read_cap = table['Properties']['GlobalSecondaryIndexes'][0]['OutdatedTableProvisionedThroughput']['ReadCapacityUnits']
old_gsi_write_cap = table['Properties']['GlobalSecondaryIndexes'][0]['OutdatedTableProvisionedThroughput']['WriteCapacityUnits']
actual_gsi_read_cap = table_info.table[:global_secondary_indexes][0][:provisioned_throughput][:read_capacity_units]
actual_gsi_write_cap = table_info.table[:global_secondary_indexes][0][:provisioned_throughput][:write_capacity_units]
# monthly rotation with date 20141201 should be set higher than any other table for the same prefix.
# the month after, 20150101 should also be set higher
if table_name === "#{rack_env}.monthly-purge4.20141101"
expect(actual_gsi_read_cap).to eq(old_gsi_read_cap)
expect(actual_gsi_write_cap).to eq(old_gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20141201"
expect(actual_gsi_read_cap).to eq(gsi_read_cap)
expect(actual_gsi_write_cap).to eq(gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150101"
expect(actual_gsi_read_cap).to eq(gsi_read_cap)
expect(actual_gsi_write_cap).to eq(gsi_write_cap)
end
end
end
end
end
Timecop.freeze Date.new(2015, 1, 1) do
# Refresh all tables
@manager.get_all_required_tables(true)
# Refresh all tables
@manager.get_all_tables(true)
# create some tables
@manager.create_tables
# Now we update the tables throughput
puts 'Updating throughput on tables'
@manager.update_tables
# Refresh all tables
tables = @manager.get_all_required_tables(true)
tables.each do |table|
table_name = table['TableName']
table_info = @manager.dynamo_client.describe_table({:table_name => table_name})
if table['Properties'].include? 'OutdatedTableProvisionedThroughput'
read_cap = table['Properties']['ProvisionedThroughput']['ReadCapacityUnits']
write_cap = table['Properties']['ProvisionedThroughput']['WriteCapacityUnits']
old_read_cap = table['Properties']['OutdatedTableProvisionedThroughput']['ReadCapacityUnits']
old_write_cap = table['Properties']['OutdatedTableProvisionedThroughput']['WriteCapacityUnits']
actual_read_cap = table_info.table[:provisioned_throughput][:read_capacity_units]
actual_write_cap = table_info.table[:provisioned_throughput][:write_capacity_units]
            # Daily rotation for 20150101 and 20150102 should be set higher than any other table for the same prefix
if table_name === "#{rack_env}.daily-purge2.20141231"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.daily-purge2.20150101"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.daily-purge2.20150102"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
# weekly rotation with date 20141229 should be set higher than any other table for the same prefix.
# the week after, 20150105 should be set lower
if table_name === "#{rack_env}.weekly-purge2.20141222"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.weekly-purge2.20141229"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.weekly-purge2.20150105"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
# monthly rotation with date 20150101 should be set higher than any other table for the same prefix.
            # the month after, 20150201 should be set lower
if table_name === "#{rack_env}.monthly-purge4.20141101"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20141201"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150101"
expect(actual_read_cap).to eq(read_cap)
expect(actual_write_cap).to eq(write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150201"
expect(actual_read_cap).to eq(old_read_cap)
expect(actual_write_cap).to eq(old_write_cap)
end
end
if table['Properties'].include? 'GlobalSecondaryIndexes'
if table['Properties']['GlobalSecondaryIndexes'][0].include? 'OutdatedTableProvisionedThroughput'
gsi_read_cap = table['Properties']['GlobalSecondaryIndexes'][0]['ProvisionedThroughput']['ReadCapacityUnits']
gsi_write_cap = table['Properties']['GlobalSecondaryIndexes'][0]['ProvisionedThroughput']['WriteCapacityUnits']
old_gsi_read_cap = table['Properties']['GlobalSecondaryIndexes'][0]['OutdatedTableProvisionedThroughput']['ReadCapacityUnits']
old_gsi_write_cap = table['Properties']['GlobalSecondaryIndexes'][0]['OutdatedTableProvisionedThroughput']['WriteCapacityUnits']
actual_gsi_read_cap = table_info.table[:global_secondary_indexes][0][:provisioned_throughput][:read_capacity_units]
actual_gsi_write_cap = table_info.table[:global_secondary_indexes][0][:provisioned_throughput][:write_capacity_units]
# monthly rotation with date 20150101 should be set higher than any other table for the same prefix.
              # the month after, 20150201 should be set lower
if table_name === "#{rack_env}.monthly-purge4.20141101"
expect(actual_gsi_read_cap).to eq(old_gsi_read_cap)
expect(actual_gsi_write_cap).to eq(old_gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20141201"
expect(actual_gsi_read_cap).to eq(old_gsi_read_cap)
expect(actual_gsi_write_cap).to eq(old_gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150101"
expect(actual_gsi_read_cap).to eq(gsi_read_cap)
expect(actual_gsi_write_cap).to eq(gsi_write_cap)
end
if table_name === "#{rack_env}.monthly-purge4.20150201"
expect(actual_gsi_read_cap).to eq(old_gsi_read_cap)
expect(actual_gsi_write_cap).to eq(old_gsi_write_cap)
end
end
end
end
end
end
it 'successfully cleans up old tables' do
Timecop.freeze Date.new(2014, 9, 1) do
puts 'Creating all dynamoDB tables from an API list but with an older date'
# Refresh all tables
@manager.get_all_tables(true)
# Refresh all tables
@manager.get_all_required_tables(true)
# create some old tables
@manager.create_tables
# Verify we created the tables
puts 'Checking if all tables of that date were created'
tables = @manager.get_all_tables(true)
# Daily with rotate 2 means 1 day ahead and 2 behind
expect(tables).to include("#{rack_env}.daily-purge2.20140902")
expect(tables).to include("#{rack_env}.daily-purge2.20140901")
expect(tables).to include("#{rack_env}.daily-purge2.20140831")
# Weekly with rotate 2 means 1 week ahead and 2 behind.
# Starting on the next monday for the one in the future.
expect(tables).to include("#{rack_env}.weekly-purge2.20140908")
expect(tables).to include("#{rack_env}.weekly-purge2.20140901")
expect(tables).to include("#{rack_env}.weekly-purge2.20140825")
# Monthly with rotate 4 means 1 month ahead and 4 behind.
# Starting on the first day of the month.
expect(tables).to include("#{rack_env}.monthly-purge4.20140601")
expect(tables).to include("#{rack_env}.monthly-purge4.20140701")
expect(tables).to include("#{rack_env}.monthly-purge4.20140801")
expect(tables).to include("#{rack_env}.monthly-purge4.20140901")
expect(tables).to include("#{rack_env}.monthly-purge4.20141001")
# daily with rotate 4 means 1 day ahead and 4 behind.
# The nopurge means they will not get deleted.
expect(tables).to include("#{rack_env}.daily-nopurge.20140902")
expect(tables).to include("#{rack_env}.daily-nopurge.20140901")
expect(tables).to include("#{rack_env}.daily-nopurge.20140831")
expect(tables).to include("#{rack_env}.daily-nopurge.20140830")
expect(tables).to include("#{rack_env}.daily-nopurge.20140829")
end
# Set time to a newer data
Timecop.freeze Date.new(2014, 12, 24) do
# Refresh all tables
@manager.get_all_required_tables(true)
# Refresh all tables
@manager.get_all_tables(true)
# Now we cleanup the tables so that only the tables of today remain
puts 'Cleaning up unnecessary tables'
@manager.cleanup_tables
tables = @manager.get_all_tables(true)
# Daily with rotate 2 means 1 day ahead and 2 behind
expect(tables).to_not include("#{rack_env}.daily-purge2.20140902")
expect(tables).to_not include("#{rack_env}.daily-purge2.20140901")
expect(tables).to_not include("#{rack_env}.daily-purge2.20140831")
# Weekly with rotate 2 means 1 week ahead and 2 behind.
# Starting on the next monday for the one in the future.
expect(tables).to_not include("#{rack_env}.weekly-purge2.20140908")
expect(tables).to_not include("#{rack_env}.weekly-purge2.20140901")
expect(tables).to_not include("#{rack_env}.weekly-purge2.20140825")
# Monthly with rotate 4 means 1 month ahead and 4 behind.
# Starting on the first day of the month.
expect(tables).to_not include("#{rack_env}.monthly-purge4.20140501")
expect(tables).to_not include("#{rack_env}.monthly-purge4.20140601")
expect(tables).to_not include("#{rack_env}.monthly-purge4.20140701")
expect(tables).to_not include("#{rack_env}.monthly-purge4.20140801")
# daily with rotate 4 means 1 day ahead and 4 behind.
# The nopurge means they will not get deleted.
expect(tables).to include("#{rack_env}.daily-nopurge.20140902")
expect(tables).to include("#{rack_env}.daily-nopurge.20140901")
expect(tables).to include("#{rack_env}.daily-nopurge.20140831")
expect(tables).to include("#{rack_env}.daily-nopurge.20140830")
expect(tables).to include("#{rack_env}.daily-nopurge.20140829")
end
end
it 'returns an empty list after deleting all tables' do
Timecop.freeze Date.new(2014, 12, 24) do
tables = @manager.get_all_required_tables(true)
tables.each do |t|
@manager.delete_table(t['TableName'])
end
all_tables = @manager.get_all_tables(true)
tables.each do |table|
expect(all_tables).to_not include(table['TableName'])
end
end
end
it 'does not delete tables not listed in JSON' do
Timecop.freeze Date.new(2015, 1, 1) do
# Refresh tables
@manager.get_all_required_tables(true)
# Refresh all tables
@manager.get_all_tables(true)
# Create a few tables that should persist a cleanup
names = [
'DONT_DELETE_ME',
"#{rack_env}.DONT_DELETE_ME",
"#{rack_env}.DONT_DELETE_ME.20160101"
]
names.each do |n|
puts "Creating #{n}..."
@manager.dynamo_client.create_table({
table_name: n,
attribute_definitions: [
{
attribute_name: 'foo',
attribute_type: 'S'
},
{
attribute_name: 'bar',
attribute_type: 'N'
}
],
key_schema: [
{
attribute_name: 'foo',
key_type: 'HASH'
},
{
attribute_name: 'bar',
key_type: 'RANGE'
}
],
provisioned_throughput: {
read_capacity_units: 1,
write_capacity_units: 1
}
})
end
# cleanup tables
puts 'Cleaning up unnecessary tables'
@manager.cleanup_tables
tables = @manager.get_all_tables(true, true)
names.each do |n|
expect(tables).to include(n)
end
end
end
end
| 53.328729 | 272 | 0.581896 |
38fa3e31b356974241922c70e2cd9824d1135f2c | 698 | SS::Application.routes.draw do
Category::Initializer
concern :deletion do
get :delete, on: :member
end
content "category" do
get "/" => redirect { |p, req| "#{req.path}/nodes" }, as: :main
resources :nodes, concerns: :deletion
resources :pages, concerns: :deletion
end
node "category" do
get "node/(index.:format)" => "public#index", cell: "nodes/node"
get "node/rss.xml" => "public#rss", cell: "nodes/page", format: "xml"
get "page/(index.:format)" => "public#index", cell: "nodes/page"
get "page/rss.xml" => "public#rss", cell: "nodes/page", format: "xml"
end
part "category" do
get "node" => "public#index", cell: "parts/node"
end
end
| 25.851852 | 73 | 0.621777 |
38bdac52d08cc75f06705584c1610bf9986af0e4 | 388 | # Challenge 5
# Returns the number of decimal digits in +number+.
def number_length(number)
  Math.log10(number).to_i + 1
end

# A curious (automorphic) number is one whose square ends in the number itself.
def is_curious_number?(number)
  number ** 2 % (10 ** number_length(number)) == number
end
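
# Worked example of the check above: 76 ** 2 == 5776 and 5776 % 10 ** 2 == 76,
# so the square of 76 ends in 76 and the method returns true; for 30,
# 900 % 10 ** 2 == 0, which is not 30, so it returns false.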
puts is_curious_number?(25)
puts is_curious_number?(76)
puts is_curious_number?(212890625)
puts ""
puts is_curious_number?(1)
puts is_curious_number?(30)
puts is_curious_number?(10)
puts is_curious_number?(229348) | 20.421053 | 58 | 0.755155 |
33312ec5d6cd9db7c732920b2f61c9cfbe070806 | 5,621 | # This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'rack/test'
require 'rspec'
require 'byebug'
require 'webmock/rspec'
ENV['RACK_ENV'] = 'test'
require File.expand_path '../../my_account.rb', __FILE__
module RSpecMixin
include Rack::Test::Methods
def app() Sinatra::Application end
end
RSpec.configure do |config|
config.include RSpecMixin
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
def stub_alma_get_request(url:, body:, status: 200)
  stub_request(:get, "#{ENV['ALMA_API_HOST']}/almaws/v1/#{url}").with(
    headers: {
      accept: 'application/json',
      Authorization: "apikey #{ENV['ALMA_API_KEY']}",
      'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
      'User-Agent' => 'Ruby'
    }
  ).to_return(body: body, status: status, headers: { content_type: 'application/json' })
end
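
# Example use inside a spec (added sketch; the request path and body are
# illustrative assumptions, not part of the original suite):
#
#   stub_alma_get_request(url: "users/1234/loans", body: { item_loan: [] }.to_json)
#   # ...exercise code that calls the Alma API and assert on the result...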
| 44.259843 | 94 | 0.731365 |
ff95ea62c613189a55dff56330d1222ad7965ed6 | 35 | require './app'
run FixerData::App | 11.666667 | 18 | 0.714286 |
d537f268449cd4192a5fbd496459c0560ee136b9 | 232 | class AddAuthenticationTokenToUsers < ActiveRecord::Migration
def change
add_column :users, :auth_token, :string, default: "", unique: true
add_column :users, :password, :string
add_index :users, :auth_token
end
end
| 29 | 70 | 0.737069 |
610df82446faf13591d6d483cdada8e371bde832 | 740 | require 'fantasy_faker/utils/array_utils'
module FantasyFaker
module BaseModule
def const_missing(const_name)
file_content = File.readlines(data_path(const_name)).map(&:strip)
data = FantasyFaker::ArrayUtils.const_array(file_content)
const_set(const_name, data)
data
end
def data_path(file_name)
"#{FantasyFaker::BASE_LIB_PATH}/fantasy_faker/data/#{underscore(module_name)}/#{underscore(file_name.to_s)}"
end
def module_name
ancestors.first.to_s.split('::').last
end
def underscore(string)
string.gsub(/::/, '/').
gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
gsub(/([a-z\d])([A-Z])/,'\1_\2').
tr("-", "_").
downcase
end
end
end
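
# Usage sketch (added): a data module is expected to extend BaseModule so that
# referencing an undefined constant loads the matching data file. The module
# and constant names below are illustrative assumptions.
#
#   module FantasyFaker
#     module WitcherWorld
#       extend BaseModule
#     end
#   end
#
#   # Reads fantasy_faker/data/witcher_world/characters and caches the constant:
#   FantasyFaker::WitcherWorld::CHARACTERS.sample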
| 21.764706 | 114 | 0.614865 |
62b02cd6d8a0a240819a50ee107e52ad976e6b8e | 4,224 | # frozen_string_literal: true
require 'exception_notification/sidekiq'
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [:request_id]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "gemsurance_as_a_service_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV['RAILS_LOG_TO_STDOUT'].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
host = 'gaas.keltec.ch'
config.action_mailer.default_url_options = {host: host}
Rails.application.routes.default_url_options = {host: host}
Rails.application.config.middleware.use ExceptionNotification::Rack, email: {
email_prefix: '[Gemsurance As A Service] ',
sender_address: '"error notifier" <[email protected]>',
exception_recipients: Rails.application.secrets[:exception_notification][:recipient],
}
config.redis.database = 1
end
| 39.849057 | 100 | 0.758049 |
3905f2306792fde96667bcfcba38bca6d566058b | 1,048 | require_relative "../canvas_base_resolver"
require_relative "../../types/canvas/grade_change_event"
module LMSGraphQL
module Resolvers
module Canvas
class GradeChangeLogQueryByCourse < CanvasBaseResolver
type [LMSGraphQL::Types::Canvas::CanvasGradeChangeEvent], null: false
argument :get_all, Boolean, required: false
argument :course_id, ID, required: true
argument :start_time, LMSGraphQL::Types::DateTimeType, required: false
argument :end_time, LMSGraphQL::Types::DateTimeType, required: false
def resolve(course_id:, start_time: nil, end_time: nil, get_all: false)
result = context[:canvas_api].call("GRADE_CHANGE_LOG_QUERY_BY_COURSE").proxy(
"GRADE_CHANGE_LOG_QUERY_BY_COURSE",
{
"course_id": course_id,
"start_time": start_time,
"end_time": end_time },
nil,
get_all,
)
get_all ? result : result.parsed_response
end
end
end
end
end | 38.814815 | 87 | 0.64313 |
33594bc45da15652d78ccc34837db310630be7f6 | 4,024 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Course::Forum, type: :model do
it { is_expected.to have_many(:topics).inverse_of(:forum).dependent(:destroy) }
it { is_expected.to have_many(:subscriptions).inverse_of(:forum).dependent(:destroy) }
it { is_expected.to belong_to(:course).inverse_of(:forums) }
let!(:instance) { Instance.default }
with_tenant(:instance) do
let(:course) { create(:course) }
describe '#slug_candidates' do
let!(:first_forum) { create(:forum, name: 'slug', course: course) }
let!(:second_forum) { create(:forum, name: 'slug', course: course) }
context 'when name is unique' do
it 'generates a slug based on the name' do
expect(first_forum.slug).to eq('slug')
end
end
context 'when name is not unique but course id is unique' do
it 'generates a slug based on name and course_id' do
expect(second_forum.slug).to eq("slug-#{course.id}")
end
end
end
describe '.topic_count' do
let(:forum) { create(:forum, course: course) }
let!(:forum_topics) { create_list(:forum_topic, 2, forum: forum) }
it 'shows the correct count' do
expect(course.forums.calculated(:topic_count).first.topic_count).to eq(forum_topics.size)
end
end
describe '.topic_post_count' do
let(:forum) { create(:forum, course: course) }
let(:first_topic) { create(:forum_topic, forum: forum) }
let(:second_topic) { create(:forum_topic, forum: forum) }
let!(:first_topic_posts) do
create_list(:course_discussion_post, 2, topic: first_topic.acting_as)
end
let!(:second_topic_posts) do
create_list(:course_discussion_post, 1, topic: second_topic.acting_as)
end
it 'shows the correct count' do
expect(course.forums.calculated(:topic_post_count).first.topic_post_count).
to eq(first_topic_posts.size + second_topic_posts.size + 2)
end
end
describe '.topic_view_count' do
let(:forum) { create(:forum, course: course) }
let(:first_topic) { create(:forum_topic, forum: forum) }
let(:second_topic) { create(:forum_topic, forum: forum) }
context 'when the topic has views' do
let!(:first_topic_views) { create_list(:forum_topic_view, 2, topic: first_topic) }
let!(:second_topic_views) { create_list(:forum_topic_view, 1, topic: second_topic) }
it 'shows the sum of all views' do
expect(course.forums.calculated(:topic_view_count).first.topic_view_count).
to eq(first_topic_views.size + second_topic_views.size)
end
end
context 'when the topic has no views' do
it 'shows zero views' do
first_topic
second_topic
expect(course.forums.calculated(:topic_view_count).first.topic_view_count).to eq(0)
end
end
end
describe '.with_forum_statistics' do
let(:forum) { create(:forum, course: course) }
let(:first_topic) { create(:forum_topic, forum: forum) }
let(:second_topic) { create(:forum_topic, forum: forum) }
let!(:user) { create(:user) }
let!(:first_topic_posts) do
create_list(:course_discussion_post, 1, topic: first_topic.acting_as)
end
let!(:second_topic_posts) do
create_list(:course_discussion_post, 2, topic: second_topic.acting_as)
end
let!(:first_topic_views) { create_list(:forum_topic_view, 2, topic: first_topic) }
let!(:second_topic_views) { create_list(:forum_topic_view, 1, topic: second_topic) }
subject { course.forums.with_forum_statistics(user).first }
it 'shows the correct count' do
expect(subject.topic_count).to eq(2)
expect(subject.topic_post_count).to eq(first_topic_posts.size + second_topic_posts.size + 2)
expect(subject.topic_view_count).to eq(first_topic_views.size + second_topic_views.size)
expect(subject.topic_unread_count).to eq(2)
end
end
end
end
| 38.692308 | 100 | 0.666252 |
4aa28d6f5755b09d3d07c012c65de3f106ab0329 | 5,746 | # MIT License
#
# Copyright (c) 2018 Sebastian Katzer
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Parser for command line arguments.
class OptParser
# Initialize the parser.
#
# @return [ OptParser ]
def initialize
@flags, @opts, @tail = [], {}, []
@unknown = ->(opts) { raise "unknown option: #{opts.join ', '}" }
yield(self) if block_given?
end
# The tail of the argument list.
#
# @return [ Array<String> ]
attr_reader :tail
# Add a flag and a callback to invoke if flag is given later.
#
  # @param [ String ] opt  The name of the option.
  # @param [ Symbol ] type The type of the option value.
  #                        Possible values: object, string, int, float, bool
# @param [ Object ] dval The value to use if nothing else given.
# @param [ Proc ] blk The callback to be invoked.
#
# @return [ Void ]
def on(opt, type = :object, dval = nil, &blk)
if opt == :unknown
@unknown = blk
else
@opts[flag = opt.to_s] = [type, dval, blk]
@flags << flag[0] if type == :bool
end
end
alias add on
# Same as `on` however is does exit after the block has been called.
#
# @return [ Void ]
def on!(opt, type = :object, dval = nil)
on(opt, type, dval) do |val|
if opt_given? opt.to_s
puts yield(val)
exit
end
end
end
# Parse all given flags and invoke their callback.
#
# @param [ Array<String> ] args List of arguments to parse.
# @param [ Bool] ignore_unknown
#
# @return [ Hash<String, Object> ]
def parse(args, ignore_unknown = false)
params = {}
normalize_args(args)
@unknown.call(unknown_opts) if !ignore_unknown && unknown_opts.any?
@opts.each do |opt, opts|
type, dval, blk = opts
val = opt_value(opt, type, dval)
params[opt.to_sym] = val unless val.nil?
blk&.call(val)
end
params
end
# Returns a hash with all opts and their value.
#
# @return [ Hash<String, Object> ]
def opts
params = {}
@opts.each { |opt, opts| params[opt.to_sym] = opt_value(opt, *opts[0, 2]) }
params
end
# List of all unknown options.
#
# @return [ Array<String> ]
def unknown_opts
@args.reject { |opt| !opt.is_a?(String) || valid_flag?(opt) }
end
# If the specified flag is given in opts list.
#
# @param [ String ] name The (long) flag name.
#
# @return [ Boolean ]
def valid_flag?(flag)
if flag.length == 1
@opts.keys.any? { |opt| opt[0] == flag[0] }
else
@opts.include?(flag)
end
end
# If the specified flag is given in args list.
#
# @param [ String ] opt The (long) flag name.
#
# @return [ Boolean ]
def opt_given?(opt)
@args.any? do |arg|
if opt.length == 1 || arg.length == 1
true if arg[0] == opt[0]
else
arg == opt
end
end
end
# Extract the value of the specified options.
# Raises an error if the option has been specified but without an value.
#
# @param [ String ] opt The option to look for.
# @param [ Object ] dval The default value to use for unless specified.
#
# @return [ Object ]
def opt_value(opt, type = :object, dval = nil)
pos = @args.index(opt)
@args.each_index { |i| pos = i if !pos && opt[0] == @args[i][0] } unless pos
val = @args[pos + 1] if pos
case val
when Array then convert(val[0], type)
when nil then pos && type == :bool ? true : dval
else convert(val, type)
end
end
private
# rubocop:disable CyclomaticComplexity
# Convert the value into the specified type.
# Raises an error for unknown type.
#
# @param [ Object ] val The value to convert.
# @param [ Symbol ] type The type to convert into.
# Possible values: object, string, int, float, bool
#
# @return [ Object] The converted value.
def convert(val, type)
case type
when :object then val
when :string then val.to_s
when :int then val.to_i
when :float then val.to_f
    when :bool   then val && val != '0' && val != 'off'
else raise "Cannot convert #{val} into #{type}."
end
end
# rubocop:enable CyclomaticComplexity
# Removes all leading slashes or false friends from args.
#
# @param [ Array<String> ] args The arguments to normalize.
#
# @return [ Void ]
def normalize_args(args)
@args, @tail, flag = [], [], false
args.each do |opt|
if opt.to_s[0] == '-'
@args << (arg = opt[(opt[1] == '-' ? 2 : 1)..-1]) && flag = false
@args << [flag = true] if @flags.include?(arg[0])
elsif flag || @args.empty?
@tail << opt
else
@args << [opt] && flag = true
end
end
end
end
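
# Usage sketch (added; the option names and argument vector are illustrative
# assumptions):
#
#   parser = OptParser.new do |opts|
#     opts.add(:port, :int, 8080)   # --port takes an integer, defaults to 8080
#     opts.add(:verbose, :bool)     # --verbose (or -v) acts as a boolean flag
#   end
#
#   parser.parse(%w[deploy --port 3000 --verbose])
#   # => { port: 3000, verbose: true }
#   parser.tail
#   # => ["deploy"]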
| 28.029268 | 80 | 0.621128 |
bb8208dc27f30da0278bf1451a22b282e78b23f5 | 17,303 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe Chef::Provider::Git do
before(:each) do
STDOUT.stub!(:tty?).and_return(true)
Chef::Log.level = :info
@current_resource = Chef::Resource::Git.new("web2.0 app")
@current_resource.revision("d35af14d41ae22b19da05d7d03a0bafc321b244c")
@resource = Chef::Resource::Git.new("web2.0 app")
@resource.repository "git://github.com/opscode/chef.git"
@resource.destination "/my/deploy/dir"
@resource.revision "d35af14d41ae22b19da05d7d03a0bafc321b244c"
@node = Chef::Node.new
@events = Chef::EventDispatch::Dispatcher.new
@run_context = Chef::RunContext.new(@node, {}, @events)
@provider = Chef::Provider::Git.new(@resource, @run_context)
@provider.current_resource = @current_resource
end
context "determining the revision of the currently deployed checkout" do
before do
@stdout = mock("standard out")
@stderr = mock("standard error")
@exitstatus = mock("exitstatus")
end
it "sets the current revision to nil if the deploy dir does not exist" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").and_return(false)
@provider.find_current_revision.should be_nil
end
it "determines the current revision when there is one" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").and_return(true)
@stdout = "9b4d8dc38dd471246e7cfb1c3c1ad14b0f2bee13\n"
@provider.should_receive(:shell_out!).with('git rev-parse HEAD', {:cwd => '/my/deploy/dir', :returns => [0,128]}).and_return(mock("ShellOut result", :stdout => @stdout))
@provider.find_current_revision.should eql("9b4d8dc38dd471246e7cfb1c3c1ad14b0f2bee13")
end
it "gives the current revision as nil when there is no current revision" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").and_return(true)
@stderr = "fatal: Not a git repository (or any of the parent directories): .git"
@stdout = ""
@provider.should_receive(:shell_out!).with('git rev-parse HEAD', :cwd => '/my/deploy/dir', :returns => [0,128]).and_return(mock("ShellOut result", :stdout => "", :stderr => @stderr))
@provider.find_current_revision.should be_nil
end
end
it "creates a current_resource with the currently deployed revision when a clone exists in the destination dir" do
@provider.stub!(:find_current_revision).and_return("681c9802d1c62a45b490786c18f0b8216b309440")
@provider.load_current_resource
@provider.current_resource.name.should eql(@resource.name)
@provider.current_resource.revision.should eql("681c9802d1c62a45b490786c18f0b8216b309440")
end
it "keeps the node and resource passed to it on initialize" do
@provider.node.should equal(@node)
@provider.new_resource.should equal(@resource)
end
context "resolving revisions to a SHA" do
before do
@git_ls_remote = "git ls-remote git://github.com/opscode/chef.git "
end
it "returns resource.revision as is if revision is already a full SHA" do
@provider.target_revision.should eql("d35af14d41ae22b19da05d7d03a0bafc321b244c")
end
it "converts resource.revision from a tag to a SHA" do
@resource.revision "v1.0"
@stdout = "503c22a5e41f5ae3193460cca044ed1435029f53\trefs/heads/0.8-alpha\n"
@provider.should_receive(:shell_out!).with(@git_ls_remote + "v1.0", {:log_tag=>"git[web2.0 app]", :log_level=>:debug}).and_return(mock("ShellOut result", :stdout => @stdout))
@provider.target_revision.should eql("503c22a5e41f5ae3193460cca044ed1435029f53")
end
it "raises an invalid remote reference error if you try to deploy from ``origin'' and assertions are run" do
@resource.revision "origin/"
@provider.action = :checkout
@provider.define_resource_requirements
::File.stub!(:directory?).with("/my/deploy").and_return(true)
lambda {@provider.process_resource_requirements}.should raise_error(Chef::Exceptions::InvalidRemoteGitReference)
end
it "raises an unresolvable git reference error if the revision can't be resolved to any revision and assertions are run" do
@resource.revision "FAIL, that's the revision I want"
@provider.action = :checkout
@provider.should_receive(:shell_out!).and_return(mock("ShellOut result", :stdout => "\n"))
@provider.define_resource_requirements
lambda { @provider.process_resource_requirements }.should raise_error(Chef::Exceptions::UnresolvableGitReference)
end
it "does not raise an error if the revision can't be resolved when assertions are not run" do
@resource.revision "FAIL, that's the revision I want"
@provider.should_receive(:shell_out!).and_return(mock("ShellOut result", :stdout => "\n"))
@provider.target_revision.should == nil
end
it "does not raise an error when the revision is valid and assertions are run." do
@resource.revision "v1.0"
@stdout = "503c22a5e41f5ae3193460cca044ed1435029f53\trefs/heads/0.8-alpha\n"
@provider.should_receive(:shell_out!).with(@git_ls_remote + "v1.0", {:log_tag=>"git[web2.0 app]", :log_level=>:debug}).and_return(mock("ShellOut result", :stdout => @stdout))
@provider.action = :checkout
::File.stub!(:directory?).with("/my/deploy").and_return(true)
@provider.define_resource_requirements
lambda { @provider.process_resource_requirements }.should_not raise_error(RuntimeError)
end
it "gives the latest HEAD revision SHA if nothing is specified" do
@stdout =<<-SHAS
28af684d8460ba4793eda3e7ac238c864a5d029a\tHEAD
503c22a5e41f5ae3193460cca044ed1435029f53\trefs/heads/0.8-alpha
28af684d8460ba4793eda3e7ac238c864a5d029a\trefs/heads/master
c44fe79bb5e36941ce799cee6b9de3a2ef89afee\trefs/tags/0.5.2
14534f0e0bf133dc9ff6dbe74f8a0c863ff3ac6d\trefs/tags/0.5.4
d36fddb4291341a1ff2ecc3c560494e398881354\trefs/tags/0.5.6
9e5ce9031cbee81015de680d010b603bce2dd15f\trefs/tags/0.6.0
9b4d8dc38dd471246e7cfb1c3c1ad14b0f2bee13\trefs/tags/0.6.2
014a69af1cdce619de82afaf6cdb4e6ac658fede\trefs/tags/0.7.0
fa8097ff666af3ce64761d8e1f1c2aa292a11378\trefs/tags/0.7.2
44f9be0b33ba5c10027ddb030a5b2f0faa3eeb8d\trefs/tags/0.7.4
d7b9957f67236fa54e660cc3ab45ffecd6e0ba38\trefs/tags/0.7.8
b7d19519a1c15f1c1a324e2683bd728b6198ce5a\trefs/tags/0.7.8^{}
ebc1b392fe7e8f0fbabc305c299b4d365d2b4d9b\trefs/tags/chef-server-package
SHAS
@resource.revision ''
@provider.should_receive(:shell_out!).with(@git_ls_remote, {:log_tag=>"git[web2.0 app]", :log_level=>:debug}).and_return(mock("ShellOut result", :stdout => @stdout))
@provider.target_revision.should eql("28af684d8460ba4793eda3e7ac238c864a5d029a")
end
end
it "responds to :revision_slug as an alias for target_revision" do
@provider.should respond_to(:revision_slug)
end
it "runs a clone command with default git options" do
@resource.user "deployNinja"
@resource.ssh_wrapper "do_it_this_way.sh"
expected_cmd = "git clone git://github.com/opscode/chef.git /my/deploy/dir"
@provider.should_receive(:shell_out!).with(expected_cmd, :user => "deployNinja",
:environment =>{"GIT_SSH"=>"do_it_this_way.sh"}, :log_level => :info, :log_tag => "git[web2.0 app]", :live_stream => STDOUT)
@provider.clone
@provider.converge
end
it "runs a clone command with escaped destination" do
@resource.user "deployNinja"
@resource.destination "/Application Support/with/space"
@resource.ssh_wrapper "do_it_this_way.sh"
expected_cmd = "git clone git://github.com/opscode/chef.git /Application\\ Support/with/space"
@provider.should_receive(:shell_out!).with(expected_cmd, :user => "deployNinja",
:environment =>{"GIT_SSH"=>"do_it_this_way.sh"}, :log_level => :info, :log_tag => "git[web2.0 app]", :live_stream => STDOUT)
@provider.clone
@provider.converge
end
it "compiles a clone command using --depth for shallow cloning" do
@resource.depth 5
expected_cmd = 'git clone --depth 5 git://github.com/opscode/chef.git /my/deploy/dir'
@provider.should_receive(:shell_out!).with(expected_cmd, {:log_level => :info, :log_tag => "git[web2.0 app]", :live_stream => STDOUT})
@provider.clone
@provider.converge
end
it "compiles a clone command with a remote other than ``origin''" do
@resource.remote "opscode"
expected_cmd = 'git clone -o opscode git://github.com/opscode/chef.git /my/deploy/dir'
@provider.should_receive(:shell_out!).with(expected_cmd, {:log_level => :info, :log_tag => "git[web2.0 app]", :live_stream => STDOUT})
@provider.clone
@provider.converge
end
it "runs a checkout command with default options" do
expected_cmd = 'git checkout -b deploy d35af14d41ae22b19da05d7d03a0bafc321b244c'
@provider.should_receive(:shell_out!).with(expected_cmd, :cwd => "/my/deploy/dir", :log_level => :debug, :log_tag => "git[web2.0 app]")
@provider.checkout
@provider.converge
end
it "runs an enable_submodule command" do
@resource.enable_submodules true
expected_cmd = "git submodule update --init --recursive"
@provider.should_receive(:shell_out!).with(expected_cmd, :cwd => "/my/deploy/dir", :log_level => :info, :log_tag => "git[web2.0 app]", :live_stream => STDOUT)
@provider.enable_submodules
@provider.converge
end
it "does nothing for enable_submodules if resource.enable_submodules #=> false" do
@provider.should_not_receive(:shell_out!)
@provider.enable_submodules
@provider.converge
end
it "runs a sync command with default options" do
expected_cmd = "git fetch origin && git fetch origin --tags && git reset --hard d35af14d41ae22b19da05d7d03a0bafc321b244c"
@provider.should_receive(:shell_out!).with(expected_cmd, :cwd=> "/my/deploy/dir", :log_level => :debug, :log_tag => "git[web2.0 app]")
@provider.fetch_updates
@provider.converge
end
it "runs a sync command with the user and group specified in the resource" do
@resource.user("whois")
@resource.group("thisis")
expected_cmd = "git fetch origin && git fetch origin --tags && git reset --hard d35af14d41ae22b19da05d7d03a0bafc321b244c"
@provider.should_receive(:shell_out!).with(expected_cmd, :cwd => "/my/deploy/dir",
:user => "whois", :group => "thisis", :log_level => :debug, :log_tag => "git[web2.0 app]")
@provider.fetch_updates
@provider.converge
end
it "configures remote tracking branches when remote is not ``origin''" do
@resource.remote "opscode"
conf_tracking_branches = "git config remote.opscode.url git://github.com/opscode/chef.git && " +
"git config remote.opscode.fetch +refs/heads/*:refs/remotes/opscode/*"
@provider.should_receive(:shell_out!).with(conf_tracking_branches, :cwd => "/my/deploy/dir", :log_tag => "git[web2.0 app]", :log_level => :debug)
fetch_command = "git fetch opscode && git fetch opscode --tags && git reset --hard d35af14d41ae22b19da05d7d03a0bafc321b244c"
@provider.should_receive(:shell_out!).with(fetch_command, :cwd => "/my/deploy/dir", :log_level => :debug, :log_tag => "git[web2.0 app]")
@provider.fetch_updates
@provider.converge
end
it "raises an error if the git clone command would fail because the enclosing directory doesn't exist" do
@provider.stub!(:shell_out!)
lambda {@provider.run_action(:sync)}.should raise_error(Chef::Exceptions::MissingParentDirectory)
end
it "does a checkout by cloning the repo and then enabling submodules" do
# will be invoked in load_current_resource
::File.stub!(:exist?).with("/my/deploy/dir/.git").and_return(false)
::File.stub!(:exist?).with("/my/deploy/dir").and_return(true)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
::Dir.stub!(:entries).with("/my/deploy/dir").and_return(['.','..'])
@provider.should_receive(:clone)
@provider.should_receive(:checkout)
@provider.should_receive(:enable_submodules)
@provider.run_action(:checkout)
# Even though an actual run will cause an update to occur, the fact that we've stubbed out
# the actions above will prevent updates from registering
# @resource.should be_updated
end
# REGRESSION TEST: on some OSes, the entries from an empty directory will be listed as
# ['..', '.'] but this shouldn't change the behavior
it "does a checkout by cloning the repo and then enabling submodules when the directory entries are listed as %w{.. .}" do
::File.stub!(:exist?).with("/my/deploy/dir/.git").and_return(false)
::File.stub!(:exist?).with("/my/deploy/dir").and_return(false)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
::Dir.stub!(:entries).with("/my/deploy/dir").and_return(['..','.'])
@provider.should_receive(:clone)
@provider.should_receive(:checkout)
@provider.should_receive(:enable_submodules)
@provider.run_action(:checkout)
# @resource.should be_updated
end
it "should not checkout if the destination exists or is a non empty directory" do
# will be invoked in load_current_resource
::File.stub!(:exist?).with("/my/deploy/dir/.git").and_return(false)
::File.stub!(:exist?).with("/my/deploy/dir").and_return(true)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
::Dir.stub!(:entries).with("/my/deploy/dir").and_return(['.','..','foo','bar'])
@provider.should_not_receive(:clone)
@provider.should_not_receive(:checkout)
@provider.should_not_receive(:enable_submodules)
@provider.run_action(:checkout)
@resource.should_not be_updated
end
it "syncs the code by updating the source when the repo has already been checked out" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").and_return(true)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
@provider.should_receive(:find_current_revision).exactly(2).and_return('d35af14d41ae22b19da05d7d03a0bafc321b244c')
@provider.should_not_receive(:fetch_updates)
@provider.run_action(:sync)
@resource.should_not be_updated
end
it "marks the resource as updated when the repo is updated and gets a new version" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").and_return(true)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
# invoked twice - first time from load_current_resource
@provider.should_receive(:find_current_revision).exactly(2).and_return('d35af14d41ae22b19da05d7d03a0bafc321b244c')
@provider.stub!(:target_revision).and_return('28af684d8460ba4793eda3e7ac238c864a5d029a')
@provider.should_receive(:fetch_updates)
@provider.should_receive(:enable_submodules)
@provider.run_action(:sync)
# @resource.should be_updated
end
it "does not fetch any updates if the remote revision matches the current revision" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").and_return(true)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
@provider.stub!(:find_current_revision).and_return('d35af14d41ae22b19da05d7d03a0bafc321b244c')
@provider.stub!(:target_revision).and_return('d35af14d41ae22b19da05d7d03a0bafc321b244c')
@provider.should_not_receive(:fetch_updates)
@provider.run_action(:sync)
@resource.should_not be_updated
end
it "clones the repo instead of fetching it if the deploy directory doesn't exist" do
::File.stub!(:directory?).with("/my/deploy").and_return(true)
::File.should_receive(:exist?).with("/my/deploy/dir/.git").exactly(2).and_return(false)
@provider.should_receive(:action_checkout)
@provider.should_not_receive(:shell_out!)
@provider.run_action(:sync)
# @resource.should be_updated
end
it "clones the repo instead of fetching updates if the deploy directory is empty" do
::File.should_receive(:exist?).with("/my/deploy/dir/.git").exactly(2).and_return(false)
::File.stub!(:directory?).with("/my/deploy").and_return(true)
::File.stub!(:directory?).with("/my/deploy/dir").and_return(true)
@provider.stub!(:sync_command).and_return("huzzah!")
@provider.should_receive(:action_checkout)
@provider.should_not_receive(:shell_out!).with("huzzah!", :cwd => "/my/deploy/dir")
@provider.run_action(:sync)
#@resource.should be_updated
end
it "does an export by cloning the repo then removing the .git directory" do
@provider.should_receive(:action_checkout)
FileUtils.should_receive(:rm_rf).with(@resource.destination + "/.git")
@provider.run_action(:export)
@resource.should be_updated
end
end
| 49.016997 | 188 | 0.71843 |
f8ffde18541d7f4dc4497c692062a0df8649edce | 1,561 | module Featurable
extend ActiveSupport::Concern
QUERY_MODE = {
flag_query_mode: :bit_operator
}.freeze
FEATURE_LIST = YAML.safe_load(File.read(Rails.root.join('config/features.yml'))).freeze
FEATURES = FEATURE_LIST.each_with_object({}) do |feature, result|
result[result.keys.size + 1] = "feature_#{feature['name']}".to_sym
end
included do
include FlagShihTzu
has_flags FEATURES.merge(column: 'feature_flags').merge(QUERY_MODE)
before_create :enable_default_features
end
def enable_features(*names)
names.each do |name|
send("feature_#{name}=", true)
end
end
def enable_features!(*names)
enable_features(*names)
save
end
def disable_features(*names)
names.each do |name|
send("feature_#{name}=", false)
end
end
def disable_features!(*names)
disable_features(*names)
save
end
def feature_enabled?(name)
send("feature_#{name}?")
end
def all_features
FEATURE_LIST.map { |f| f['name'] }.index_with do |feature_name|
feature_enabled?(feature_name)
end
end
def enabled_features
all_features.select { |_feature, enabled| enabled == true }
end
def disabled_features
all_features.select { |_feature, enabled| enabled == false }
end
private
def enable_default_features
config = InstallationConfig.find_by(name: 'ACCOUNT_LEVEL_FEATURE_DEFAULTS')
return true if config.blank?
features_to_enabled = config.value.select { |f| f[:enabled] }.pluck(:name)
enable_features(*features_to_enabled)
end
end
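
# Usage sketch (added; the Account model and feature name are illustrative
# assumptions, any name listed in config/features.yml works):
#
#   account = Account.new
#   account.enable_features(:help_center)
#   account.feature_enabled?(:help_center)   # => true
#   account.disable_features!(:help_center)  # toggles the flag off and saves
#   account.disabled_features.keys           # feature names currently off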
| 21.985915 | 89 | 0.699552 |
03691ef039796e6532a6e1a8040f67609c1bcc78 | 1,897 | module Lita
module Handlers
class Mysql < Handler
namespace 'Mysql'
include ::MysqlHelper::Regex
include ::MysqlHelper::Utility
route(
/^mysql\salias\sadd\s#{ALIAS_PATTERN}\s#{FQDN_PATTERN}\s#{USERNAME_PATTERN}\s#{PASSWORD_PATTERN}$/,
:alias_add,
command: true,
restrict_to: [:mysql_admins],
help: {
t('help.alias_add.syntax') => t('help.alias_add.desc')
}
)
route(
/^mysql\salias\sremove\s#{ALIAS_PATTERN}$/,
:alias_remove,
command: true,
restrict_to: [:mysql_admins],
help: {
t('help.alias_remove.syntax') => t('help.alias_remove.desc')
}
)
route(
/^mysql\salias\slist$/,
:alias_list,
command: true,
help: {
t('help.alias_list.syntax') => t('help.alias_list.desc')
}
)
def alias_add(response)
a = response.match_data['alias']
fqdn = response.match_data['fqdn']
username = response.match_data['username']
password = response.match_data['password']
return response.reply(t('alias.exists', a: a)) if alias_exists?(a)
save_host(a, fqdn, username, password)
response.reply(t('alias.added', a: a))
end
def alias_remove(response)
a = response.match_data['alias']
return response.reply(t('alias.does_not_exist', a: a)) unless alias_exists?(a)
delete_host(a)
response.reply(t('alias.removed', a: a))
end
def alias_list(response)
aliases = list_aliases
return response.reply(t('alias.none_defined')) unless aliases.count > 0
aliases.each do |a|
info = fetch_alias(a)
response.reply(t('alias.info', a: a, fqdn: info['fqdn']))
end
end
end
Lita.register_handler(Mysql)
end
end
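
# Chat usage sketch (added): the routes above respond to messages like the
# following when addressed to the bot; the alias, host and credentials are
# illustrative assumptions.
#
#   mysql alias add reporting db01.example.com readonly s3cr3t
#   mysql alias list
#   mysql alias remove reporting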
| 27.897059 | 107 | 0.571429 |
fff63a518efcd31c00981b293c1bf4d00e0aca27 | 100 | class CampaignFeatureJoin < ActiveRecord::Base
belongs_to :campaign
belongs_to :feature
end
| 14.285714 | 46 | 0.78 |
1156fecca75d1ad6a4c955c5e696f78c4d152f03 | 5,211 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20200624135642) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "application_documents", force: :cascade do |t|
t.bigint "application_id"
t.bigint "document_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["application_id"], name: "index_application_documents_on_application_id"
t.index ["document_id"], name: "index_application_documents_on_document_id"
end
create_table "applications", force: :cascade do |t|
t.string "firstname"
t.string "lastname"
t.date "dob"
t.date "enrolled"
t.date "completed"
t.boolean "express", default: false
t.integer "quantity"
t.string "studentid"
t.string "department"
t.string "college"
t.text "school"
t.text "phone"
t.text "programme"
t.text "address"
t.text "reason"
t.boolean "processed", default: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "user_id"
t.boolean "delivered", default: false
t.bigint "institution_id"
t.string "acceptance_letter_file_name"
t.string "acceptance_letter_content_type"
t.integer "acceptance_letter_file_size"
t.datetime "acceptance_letter_updated_at"
t.string "student_id_file_name"
t.string "student_id_content_type"
t.integer "student_id_file_size"
t.datetime "student_id_updated_at"
t.float "longitude"
t.float "latitude"
t.integer "document"
t.bigint "faculty_id"
t.bigint "college_id"
t.index ["college_id"], name: "index_applications_on_college_id"
t.index ["faculty_id"], name: "index_applications_on_faculty_id"
t.index ["institution_id"], name: "index_applications_on_institution_id"
t.index ["user_id"], name: "index_applications_on_user_id"
end
create_table "colleges", force: :cascade do |t|
t.text "name"
t.bigint "institution_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["institution_id"], name: "index_colleges_on_institution_id"
end
create_table "documents", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "faculties", force: :cascade do |t|
t.text "name"
t.bigint "institution_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["institution_id"], name: "index_faculties_on_institution_id"
end
create_table "institutions", force: :cascade do |t|
t.text "name"
t.text "location"
t.text "contact"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "programmes", force: :cascade do |t|
t.text "name"
t.bigint "college_id"
t.bigint "faculty_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["college_id"], name: "index_programmes_on_college_id"
t.index ["faculty_id"], name: "index_programmes_on_faculty_id"
end
create_table "users", force: :cascade do |t|
t.boolean "admin", default: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.inet "current_sign_in_ip"
t.inet "last_sign_in_ip"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "agent", default: false
t.string "firstname"
t.string "lastname"
t.string "username"
t.string "phone"
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
add_foreign_key "application_documents", "applications"
add_foreign_key "application_documents", "documents"
add_foreign_key "applications", "colleges"
add_foreign_key "applications", "faculties"
add_foreign_key "applications", "institutions"
add_foreign_key "applications", "users"
add_foreign_key "colleges", "institutions"
add_foreign_key "faculties", "institutions"
add_foreign_key "programmes", "colleges"
add_foreign_key "programmes", "faculties"
end
| 36.697183 | 95 | 0.720975 |
628c9814bba2e35dd88b0ff1afd5202a057d2d54 | 241 | FactoryGirl.define do
factory :oauth_user, :class => 'User' do
before(:create) do |user, evaluator|
user.oauth_callback = true
end
if User.devise_modules.include? :confirmable
confirmed_at Time.now
end
end
end | 24.1 | 48 | 0.684647 |
1d863993a0b0f16e3b686e5c97a8e8dae1052174 | 1,878 | # frozen_string_literal: true
module ActiveRecord # :nodoc:
module ConnectionAdapters # :nodoc:
module PostGIS # :nodoc:
class TableDefinition < PostgreSQL::TableDefinition # :nodoc:
include ColumnMethods
# super: https://github.com/rails/rails/blob/master/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
def new_column_definition(name, type, **options)
if (info = PostGISAdapter.spatial_column_options(type.to_sym))
if (limit = options.delete(:limit)) && limit.is_a?(::Hash)
options.merge!(limit)
end
geo_type = ColumnDefinitionUtils.geo_type(options[:type] || type || info[:type])
base_type = info[:type] || (options[:geographic] ? :geography : :geometry)
options[:limit] = ColumnDefinitionUtils.limit_from_options(geo_type, options)
options[:spatial_type] = geo_type
column = super(name, base_type, **options)
else
column = super(name, type, **options)
end
column
end
end
module ColumnDefinitionUtils
class << self
def geo_type(type = "GEOMETRY")
g_type = type.to_s.delete("_").upcase
return "POINT" if g_type == "STPOINT"
return "POLYGON" if g_type == "STPOLYGON"
g_type
end
def limit_from_options(type, options = {})
spatial_type = geo_type(type)
spatial_type << "Z" if options[:has_z]
spatial_type << "M" if options[:has_m]
spatial_type << ",#{options[:srid] || default_srid(options)}"
spatial_type
end
def default_srid(options)
options[:geographic] ? 4326 : PostGISAdapter::DEFAULT_SRID
end
end
end
end
end
end
| 34.145455 | 141 | 0.592652 |
1aee803e219906dfd590b2dc252df790bc197106 | 2,418 | require_relative './config/boot'
require "json"
require "date"
require "active_support/all"
require "base64"
class Time
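  # Round a Time down to the nearest +seconds+ interval (used below to bucket cache timestamps).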
def floor(seconds = 60)
Time.at((self.to_f / seconds).floor * seconds).utc
end
end
set :root, File.dirname(__FILE__)
configure do
$diskcache = Diskcached.new(File.join(settings.root, 'cache'))
unless ENV["RACK_ENV"] == "development"
$diskcache.flush # ensure caches are empty on startup
end
end
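# The two previous months plus the current month, as { name:, date_str: } hashes.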
def last_3_months
current = Date.today.beginning_of_month
second = current - 1.month
first = current - 2.months
[first, second, current].map do |month|
{
name: month.strftime("%B %Y"),
date_str: month.strftime("%Y%m%d")
}
end
end
get "/" do
base_url = "https://#{ENV['HARVEST_SUBDOMAIN']}.harvestapp.com"
http = HTTP.persistent(base_url)
cache_ts = Time.now.floor(10.minutes).to_i
@last_3_months = last_3_months
date = Date.parse(last_3_months[1][:date_str])
date = Date.parse(params["date"]) if params["date"]
start_date = date.beginning_of_month.strftime("%Y%m%d")
end_date = date.end_of_month.strftime("%Y%m%d")
info = $diskcache.cache("company_info") { api_request("/account/who_am_i", http).parse }
@company_name = info["company"]["name"]
@date = date
@users = $diskcache.cache("users") do
api_request("/people", http).parse.inject({}) do |a, e|
a[e["user"]["id"]] = ActiveSupport::HashWithIndifferentAccess.new(e["user"])
a
end
end
projects = $diskcache.cache("projects-#{cache_ts}") do
api_request("/projects", http)
.parse
.map { |proj| proj["project"] }
end
@projects = projects.map do |project|
total_hours = 0.0
times = $diskcache.cache("project-#{project['id']}-#{start_date}-#{cache_ts}") do
api_request("/projects/#{project['id']}/entries?from=#{start_date}&to=#{end_date}", http).parse
end
next if times.empty?
times = times.map do |day|
total_hours += day["day_entry"]["hours"]
ActiveSupport::HashWithIndifferentAccess.new(day["day_entry"])
end
ActiveSupport::HashWithIndifferentAccess.new(project.merge(total_hours: total_hours, times: times))
end.compact
http.close
erb :home
end
def api_request(path, http)
api_token = Base64.strict_encode64("#{ENV['HARVEST_EMAIL']}:#{ENV['HARVEST_PASSWORD']}")
http
.auth("Basic #{api_token}")
.headers(accept: "application/json")
.get(path)
end
| 26 | 103 | 0.670389 |
6afc998218c9a8ebbf6889c55eb5b20122dcfbf3 | 1,014 | class ApartmentTherapy::CLI
def call
list_articles
menu
goodbye
end
def list_articles
    # This will grab the articles from the Articles class.
puts "Articles available:"
@articles = ApartmentTherapy::Articles.articles
@articles.each.with_index(1) do |a, i|
puts "#{i}. #{a.title}"
end
end
def menu
input = ""
while input != "exit"
puts "Please put the number of an article (1 - #{@articles.length}) to learn more."
puts "Otherwise type list to see the full list again or exit to leave."
input = gets.strip.downcase
if input.to_i > 0 && input.to_i <= @articles.length
the_article = @articles[input.to_i - 1]
puts the_article.title
puts the_article.author
puts the_article.category
puts the_article.date
elsif input == "list"
list_articles
else
puts "Please enter a valid response" unless input == "exit"
end
end
end
def goodbye
puts "Have a nice day!"
end
end
| 24.142857 | 89 | 0.627219 |
e9379a10193b01cb5ff94edfbf90027d412858dc | 8,793 | require "cases/helper"
require 'support/schema_dumping_helper'
module PostgresqlUUIDHelper
def connection
@connection ||= ActiveRecord::Base.connection
end
def drop_table(name)
connection.drop_table name, if_exists: true
end
end
class PostgresqlUUIDTest < ActiveRecord::TestCase
include PostgresqlUUIDHelper
include SchemaDumpingHelper
class UUIDType < ActiveRecord::Base
self.table_name = "uuid_data_type"
end
setup do
connection.create_table "uuid_data_type" do |t|
t.uuid 'guid'
end
end
teardown do
drop_table "uuid_data_type"
end
def test_change_column_default
@connection.add_column :uuid_data_type, :thingy, :uuid, null: false, default: "uuid_generate_v1()"
UUIDType.reset_column_information
column = UUIDType.columns_hash['thingy']
assert_equal "uuid_generate_v1()", column.default_function
@connection.change_column :uuid_data_type, :thingy, :uuid, null: false, default: "uuid_generate_v4()"
UUIDType.reset_column_information
column = UUIDType.columns_hash['thingy']
assert_equal "uuid_generate_v4()", column.default_function
ensure
UUIDType.reset_column_information
end
def test_data_type_of_uuid_types
column = UUIDType.columns_hash["guid"]
assert_equal :uuid, column.type
assert_equal "uuid", column.sql_type
assert_not column.array?
type = UUIDType.type_for_attribute("guid")
assert_not type.binary?
end
def test_treat_blank_uuid_as_nil
UUIDType.create! guid: ''
    assert_nil UUIDType.last.guid
end
def test_treat_invalid_uuid_as_nil
uuid = UUIDType.create! guid: 'foobar'
    assert_nil uuid.guid
end
def test_invalid_uuid_dont_modify_before_type_cast
uuid = UUIDType.new guid: 'foobar'
assert_equal 'foobar', uuid.guid_before_type_cast
end
def test_acceptable_uuid_regex
# Valid uuids
['A0EEBC99-9C0B-4EF8-BB6D-6BB9BD380A11',
'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11}',
'a0eebc999c0b4ef8bb6d6bb9bd380a11',
'a0ee-bc99-9c0b-4ef8-bb6d-6bb9-bd38-0a11',
'{a0eebc99-9c0b4ef8-bb6d6bb9-bd380a11}',
# The following is not a valid RFC 4122 UUID, but PG doesn't seem to care,
# so we shouldn't block it either. (Pay attention to "fb6d" – the "f" here
# is invalid – it must be one of 8, 9, A, B, a, b according to the spec.)
'{a0eebc99-9c0b-4ef8-fb6d-6bb9bd380a11}',
].each do |valid_uuid|
uuid = UUIDType.new guid: valid_uuid
assert_not_nil uuid.guid
end
# Invalid uuids
[['A0EEBC99-9C0B-4EF8-BB6D-6BB9BD380A11'],
Hash.new,
0,
0.0,
true,
'Z0000C99-9C0B-4EF8-BB6D-6BB9BD380A11',
'a0eebc999r0b4ef8ab6d6bb9bd380a11',
'a0ee-bc99------4ef8-bb6d-6bb9-bd38-0a11',
'{a0eebc99-bb6d6bb9-bd380a11}'].each do |invalid_uuid|
uuid = UUIDType.new guid: invalid_uuid
assert_nil uuid.guid
end
end
def test_uuid_formats
["A0EEBC99-9C0B-4EF8-BB6D-6BB9BD380A11",
"{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11}",
"a0eebc999c0b4ef8bb6d6bb9bd380a11",
"a0ee-bc99-9c0b-4ef8-bb6d-6bb9-bd38-0a11",
"{a0eebc99-9c0b4ef8-bb6d6bb9-bd380a11}"].each do |valid_uuid|
UUIDType.create(guid: valid_uuid)
uuid = UUIDType.last
assert_equal "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", uuid.guid
end
end
def test_schema_dump_with_shorthand
output = dump_table_schema "uuid_data_type"
assert_match %r{t\.uuid "guid"}, output
end
def test_uniqueness_validation_ignores_uuid
klass = Class.new(ActiveRecord::Base) do
self.table_name = "uuid_data_type"
validates :guid, uniqueness: { case_sensitive: false }
def self.name
"UUIDType"
end
end
record = klass.create!(guid: "a0ee-bc99-9c0b-4ef8-bb6d-6bb9-bd38-0a11")
duplicate = klass.new(guid: record.guid)
assert record.guid.present? # Ensure we actually are testing a UUID
assert_not duplicate.valid?
end
end
class PostgresqlUUIDGenerationTest < ActiveRecord::TestCase
include PostgresqlUUIDHelper
include SchemaDumpingHelper
class UUID < ActiveRecord::Base
self.table_name = 'pg_uuids'
end
setup do
enable_extension!('uuid-ossp', connection)
connection.create_table('pg_uuids', id: :uuid, default: 'uuid_generate_v1()') do |t|
t.string 'name'
t.uuid 'other_uuid', default: 'uuid_generate_v4()'
end
# Create custom PostgreSQL function to generate UUIDs
# to test dumping tables which columns have defaults with custom functions
connection.execute <<-SQL
CREATE OR REPLACE FUNCTION my_uuid_generator() RETURNS uuid
AS $$ SELECT * FROM uuid_generate_v4() $$
LANGUAGE SQL VOLATILE;
SQL
# Create such a table with custom function as default value generator
connection.create_table('pg_uuids_2', id: :uuid, default: 'my_uuid_generator()') do |t|
t.string 'name'
t.uuid 'other_uuid_2', default: 'my_uuid_generator()'
end
end
teardown do
drop_table "pg_uuids"
drop_table 'pg_uuids_2'
connection.execute 'DROP FUNCTION IF EXISTS my_uuid_generator();'
disable_extension!('uuid-ossp', connection)
end
if ActiveRecord::Base.connection.supports_extensions?
def test_id_is_uuid
assert_equal :uuid, UUID.columns_hash['id'].type
assert UUID.primary_key
end
def test_id_has_a_default
u = UUID.create
assert_not_nil u.id
end
def test_auto_create_uuid
u = UUID.create
u.reload
assert_not_nil u.other_uuid
end
def test_pk_and_sequence_for_uuid_primary_key
pk, seq = connection.pk_and_sequence_for('pg_uuids')
assert_equal 'id', pk
      assert_nil seq
end
def test_schema_dumper_for_uuid_primary_key
schema = dump_table_schema "pg_uuids"
assert_match(/\bcreate_table "pg_uuids", id: :uuid, default: "uuid_generate_v1\(\)"/, schema)
assert_match(/t\.uuid "other_uuid", default: "uuid_generate_v4\(\)"/, schema)
end
def test_schema_dumper_for_uuid_primary_key_with_custom_default
schema = dump_table_schema "pg_uuids_2"
assert_match(/\bcreate_table "pg_uuids_2", id: :uuid, default: "my_uuid_generator\(\)"/, schema)
assert_match(/t\.uuid "other_uuid_2", default: "my_uuid_generator\(\)"/, schema)
end
end
end
class PostgresqlUUIDTestNilDefault < ActiveRecord::TestCase
include PostgresqlUUIDHelper
include SchemaDumpingHelper
setup do
enable_extension!('uuid-ossp', connection)
connection.create_table('pg_uuids', id: false) do |t|
t.primary_key :id, :uuid, default: nil
t.string 'name'
end
end
teardown do
drop_table "pg_uuids"
disable_extension!('uuid-ossp', connection)
end
if ActiveRecord::Base.connection.supports_extensions?
def test_id_allows_default_override_via_nil
col_desc = connection.execute("SELECT pg_get_expr(d.adbin, d.adrelid) as default
FROM pg_attribute a
LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
WHERE a.attname='id' AND a.attrelid = 'pg_uuids'::regclass").first
assert_nil col_desc["default"]
end
def test_schema_dumper_for_uuid_primary_key_with_default_override_via_nil
schema = dump_table_schema "pg_uuids"
assert_match(/\bcreate_table "pg_uuids", id: :uuid, default: nil/, schema)
end
end
end
class PostgresqlUUIDTestInverseOf < ActiveRecord::TestCase
include PostgresqlUUIDHelper
class UuidPost < ActiveRecord::Base
self.table_name = 'pg_uuid_posts'
has_many :uuid_comments, inverse_of: :uuid_post
end
class UuidComment < ActiveRecord::Base
self.table_name = 'pg_uuid_comments'
belongs_to :uuid_post
end
setup do
enable_extension!('uuid-ossp', connection)
connection.transaction do
connection.create_table('pg_uuid_posts', id: :uuid) do |t|
t.string 'title'
end
connection.create_table('pg_uuid_comments', id: :uuid) do |t|
t.references :uuid_post, type: :uuid
t.string 'content'
end
end
end
teardown do
drop_table "pg_uuid_comments"
drop_table "pg_uuid_posts"
disable_extension!('uuid-ossp', connection)
end
if ActiveRecord::Base.connection.supports_extensions?
def test_collection_association_with_uuid
post = UuidPost.create!
comment = post.uuid_comments.create!
assert post.uuid_comments.find(comment.id)
end
def test_find_with_uuid
UuidPost.create!
assert_raise ActiveRecord::RecordNotFound do
UuidPost.find(123456)
end
end
def test_find_by_with_uuid
UuidPost.create!
assert_nil UuidPost.find_by(id: 789)
end
end
end
| 29.115894 | 107 | 0.7034 |
01774f95113803247d5e24c5eb0a557da96beb50 | 249 | require "lita"
Lita.load_locales Dir[File.expand_path(
File.join("..", "..", "locales", "*.yml"), __FILE__
)]
require "lita/handlers/sensu"
Lita::Handlers::Sensu.template_root File.expand_path(
File.join("..", "..", "templates"),
__FILE__
)
| 19.153846 | 53 | 0.662651 |
91de12a9dccf189d8163cd65a48773998a325057 | 12,153 | require 'spec_helper'
describe UsersController, :type => :controller do
render_views
describe "GET 'show'" do
before(:each) do
@user = FactoryBot.create(:user)
end
# Exercise 11.5.4:
it "should paginate microposts" do
35.times { FactoryBot.create(:micropost, :user => @user,
:content => "foo") }
FactoryBot.create(:micropost, :user => @user,
:content => "a" * 55)
get :show, :id => @user
expect(response).to have_selector('div.pagination')
end
it "should be successful" do
get :show, :id => @user
expect(response).to be_success
end
it "should have the right title" do
get :show, :id => @user
expect(assigns(:user)).to eq(@user)
end
it "should include the users name" do
get :show, :id => @user
expect(response).to have_selector("h1", :content => @user.name)
end
it "should have a profile image" do
get :show, :id => @user
expect(response).to have_selector("h1>img", :class => "gravatar")
end
end
describe "GET 'new'" do
it "should be successful" do
get 'new'
expect(response).to be_success
end
it "should have the right title" do
get 'new'
expect(response).to have_selector("title", :content => "Sign up")
end
it "should have a name field" do
get :new
expect(response).to have_selector(
"input[name='user[name]'][type='text']")
end
it "should have a email field" do
get :new
expect(response).to have_selector(
"input[name='user[email]'][type='text']")
end
it "should have a password field" do
get :new
expect(response).to have_selector(
"input[name='user[password]'][type='password']")
end
it "should have a password confirmation field" do
get :new
expect(response).to have_selector(
"input[name='user[password_confirmation]'][type='password']")
end
end
describe "POST 'create'" do
describe "failure" do
before(:each) do
@attr = { :name => "", :email => "", :password => "",
:password_confirmation => "" }
end
it "should not create a user" do
expect do
post :create, :user => @attr
end.not_to change(User, :count)
end
it "should have the right title" do
post :create, :user => @attr
expect(response).to have_selector("title", :content => "Sign up")
end
it "should render the 'new page'" do
post :create, :user => @attr
expect(response).to render_template('new')
end
end
describe "success" do
before(:each) do
@attr = { :name => "New User", :email => "[email protected]",
:password => "foobar", :password_confirmation => "foobar" }
end
it "should create a user" do
expect do
post :create, :user => @attr
end.to change(User, :count).by(1)
end
it "should redirect to the user show page" do
post :create, :user => @attr
expect(response).to redirect_to(user_path(assigns(:user)))
end
it "should have a welcome message" do
post :create, :user => @attr
expect(flash[:success]).to match(/welcome to the sample app/i)
end
it "should sign the user in" do
post :create, :user => @attr
expect(controller).to be_signed_in
end
end
end
describe "GET 'edit'" do
before(:each) do
@user = FactoryBot.create(:user)
test_sign_in(@user)
end
it "should be successful" do
get :edit, :id => @user
end
it "should have the right title" do
get :edit, :id => @user
expect(response).to have_selector("title", :content => "Edit user")
end
it "should have a link to change the Gravatar" do
get :edit, :id => @user
gravatar_url = "http://gravatar.com/emails"
expect(response).to have_selector("a", :href => gravatar_url,
:content => "change")
end
end # get/edit
describe "PUT 'update'" do
before(:each) do
@user = FactoryBot.create(:user)
test_sign_in(@user)
end
describe "failure" do
before(:each) do
@attr = { :name => "", :email => "",
:password => "", :password_confirmation => "" }
end
it "should render the 'edit' page" do
put :update, :id => @user, :user => @attr
expect(response).to render_template('edit')
end
it "should have the right title" do
put :update, :id => @user, :user => @attr
expect(response).to have_selector("title", :content => "Edit user")
end
end
describe "success" do
before(:each) do
@attr = { :name => "New User", :email => "[email protected]",
:password => "barbaz", :password_confirmation => "barbaz" }
end
it "should change the user's attributes" do
put :update, :id => @user, :user => @attr
@user.reload
expect(@user.name).to eq(@attr[:name])
expect(@user.email).to eq(@attr[:email])
end
it "should redirect to the user show page" do
put :update, :id => @user, :user => @attr
expect(response).to redirect_to(user_path(@user))
end
it "should have a flash message" do
put :update, :id => @user, :user => @attr
expect(flash[:success]).to match(/updated/)
end
end
end
describe "authentication of edit/update pages" do
before(:each) do
@user = FactoryBot.create(:user)
end
describe "for non-signed-in users" do
it "should deny access to 'edit'" do
get :edit, :id => @user
expect(response).to redirect_to(signin_path)
end
it "should deny access to 'update'" do
        put :update, :id => @user, :user => {}
expect(response).to redirect_to(signin_path)
end
end
describe "for signed-in users" do
before(:each) do
wrong_user = FactoryBot.create(:user,
:email => "[email protected]")
test_sign_in(wrong_user)
end
it "should require matching users for 'edit'" do
get :edit, :id => @user
expect(response).to redirect_to(root_path)
end
it "should require matching users for 'update'" do
        put :update, :id => @user, :user => {}
expect(response).to redirect_to(root_path)
end
end
end
describe "GET 'index'" do
describe "for non-signed-in users" do
it "should deny access" do
get :index
expect(response).to redirect_to(signin_path)
expect(flash[:notice]).to match(/sign in/i)
end
end
describe "for signed-in users" do
before(:each) do
@user = test_sign_in(FactoryBot.create(:user))
@second = FactoryBot.create(:user, :email => "[email protected]")
third = FactoryBot.create(:user, :email => "[email protected]")
@users = [@user, @second, third]
30.times do
@users << FactoryBot.create(:user,
:email => FactoryBot.generate(:email))
end
end
it "should be successful" do
get :index
expect(response).to be_success
end
it "should have the right title" do
get :index
expect(response).to have_selector("title", :content => "All users")
end
it "should have an element for each user" do
get :index
@users[0..2].each do |user|
expect(response).to have_selector("li", :content => user.name)
end
end
it "should paginate users" do
get :index
expect(response).to have_selector("div.pagination")
expect(response).to have_selector("span.disabled",
:content => "Previous")
expect(response).to have_selector("a", :content => "2")
#:href => "/users?page=2",
expect(response).to have_selector("a", :content => "Next")
# :href => "/users?page=2",
end
      # Exercise 11.5.2:
it "should display the micropost count" do
10.times { FactoryBot.create(:micropost,
:user => @user, :content => "foo") }
get :show, :id => @user
expect(response).to have_selector('td.sidebar',
:content => @user.microposts.count.to_s)
end
# Next two tests are for Chapter 10 (Exercise 4).
it "should not see delete links if not admin" do
get :index
expect(response).not_to have_selector("a", :href => "/users/2",
:content => "delete")
end
it "should show the user's microposts" do
mp1 = FactoryBot.create(:micropost, :user => @user,
:content => "Foo bar")
mp2 = FactoryBot.create(:micropost, :user => @user,
:content => "Baz guux")
get :show, :id => @user
expect(response).to have_selector("span.content",
:content => mp1.content)
expect(response).to have_selector("span.content",
:content => mp2.content)
end
# Exercise 11.5.6:
it "should not see micropost delete links of other people's microposts" do
mp3 = FactoryBot.create(:micropost, :user => @second,
:content => "Foo bar")
mp4 = FactoryBot.create(:micropost, :user => @second,
:content => "Baz guux")
get :show, :id => @user
expect(response).not_to have_selector("span.content",
:content => mp3.content)
expect(response).not_to have_selector("span.content",
:content => mp4.content)
get :show, :id => @second
expect(response).to have_selector("span.content",
:content => mp3.content)
expect(response).to have_selector("span.content",
:content => mp4.content)
end
end
end
describe "DELETE 'destory'" do
before(:each) do
@user = FactoryBot.create(:user)
end
describe "as a non-signed-in user" do
it "should deny access" do
delete :destroy, :id => @user
expect(response).to redirect_to(signin_path)
end
end
describe "as a non-admin user" do
it "should protect the page" do
test_sign_in(@user)
delete :destroy, :id => @user
expect(response).to redirect_to(root_path)
end
end
describe "as an admin user" do
before(:each) do
@admin = FactoryBot.create(:user, :email => "[email protected]",
:admin => true)
test_sign_in(@admin)
end
it "should destroy the user" do
expect do
delete :destroy, :id => @user
end.to change(User, :count).by(-1)
end
it "should redirect to the users page" do
delete :destroy, :id => @user
expect(flash[:success]).to match(/destroyed/)
expect(response).to redirect_to(users_path)
end
it "should not be able to destroy itself" do
expect do
delete :destroy, :id => @admin
end.to change(User, :count).by(0)
end
end
end # delete/destroy
describe "follow pages" do
describe "when not signed in" do
it "should protect 'following'" do
get :following, :id => 1
expect(response).to redirect_to(signin_path)
end
it "should protect 'followers'" do
get :followers, :id => 1
expect(response).to redirect_to(signin_path)
end
end
describe "when signed in" do
before(:each) do
@user = test_sign_in(FactoryBot.create(:user))
@other_user = FactoryBot.create(:user,
:email => FactoryBot.generate(:email))
@user.follow!(@other_user)
end
it "should show user following" do
get :following, :id => @user
expect(response).to have_selector("a", :href => user_path(@other_user),
:content => @other_user.name)
end
it "should show user followers" do
get :followers, :id => @other_user
expect(response).to have_selector("a", :href => user_path(@user),
:content => @user.name)
end
end
end
end
| 28.866983 | 80 | 0.573274 |
d561a73dc0d9ae0c53660efd3a7748a42809ff64 | 936 | class Pokemon < ApplicationRecord
has_many :team_pokemons
has_many :teams, through: :team_pokemons
scope :search, -> (query) { self.where("name LIKE ?", "%#{query}%") }
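  # Looks up the pokemon, fetches its stored API URL and collates name, abilities, types, moves and sprites into a hash.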
def self.pokemon_data(id)
information = {
"name" => [],
"abilities" => [],
"types" => [],
"moves" => [],
"sprites" => [] }
pokemon = Pokemon.find(id)
@info = HTTParty.get(pokemon.url)
information["name"] << pokemon.name
@info["abilities"].each do |x|
information["abilities"] << x["ability"]["name"]
end
@info["types"].each do |x|
information["types"] << x["type"]["name"]
end
@info["moves"].each do |x|
information["moves"] << x["move"]["name"]
end
@info["sprites"].each do |x|
information["sprites"] << x
end
return information
end
end
| 30.193548 | 73 | 0.491453 |
7ab87a1af091a70a2bf2c2fdd5eca5b4c2e3cb09 | 832 | # frozen_string_literal: true
# This is the original root class for service-related classes,
# and for historical reasons takes a project as its scope.
# Later, separate base classes for different scopes will be created,
# and existing services will use these one by one.
# After all are migrated, we can remove this class.
#
# New services should consider inheriting from:
#
# - BaseContainerService for services scoped by container (project or group)
# - BaseProjectService for services scoped to projects
#
# or, create a new base class and update this comment.
class BaseService
include BaseServiceUtility
attr_accessor :project, :current_user, :params
def initialize(project, user = nil, params = {})
@project = project
@current_user = user
@params = params.dup
end
delegate :repository, to: :project
end
| 29.714286 | 76 | 0.754808 |
e2be93cfb35b80305c082bf86cc81a77fd5320a9 | 3,537 | # encoding: utf-8
module Backup
module Packager
class Error < Backup::Error; end
class << self
include Utilities::Helpers
##
# Build the final package for the backup model.
def package!(model)
@package = model.package
@encryptor = model.encryptor
@splitter = model.splitter
@pipeline = Pipeline.new
Logger.info "Packaging the backup files..."
procedure.call
if @pipeline.success?
Logger.info "Packaging Complete!"
else
raise Error, "Failed to Create Backup Package\n" +
@pipeline.error_messages
end
end
private
##
# Builds a chain of nested Procs which adds each command to a Pipeline
# needed to package the final command to package the backup.
# This is done so that the Encryptor and Splitter have the ability
# to perform actions before and after the final command is executed.
# No Encryptors currently utilize this, however the Splitter does.
def procedure
stack = []
##
# Initial `tar` command to package the temporary backup folder.
# The command's output will then be either piped to the Encryptor
# or the Splitter (if no Encryptor), or through `cat` into the final
        # output file if neither is configured.
@pipeline.add(
"#{ utility(:tar) } -cf - " +
"-C '#{ Config.tmp_path }' '#{ @package.trigger }'",
tar_success_codes
)
##
# If an Encryptor was configured, it will be called first
# to add the encryption utility command to be piped through,
# and amend the final package extension.
        # Its output will then be either piped into a Splitter,
# or through `cat` into the final output file.
if @encryptor
stack << lambda do
@encryptor.encrypt_with do |command, ext|
@pipeline << command
@package.extension << ext
stack.shift.call
end
end
end
##
# If a Splitter was configured, the `split` utility command will be
# added to the Pipeline to split the final output into multiple files.
# Once the Proc executing the Pipeline has completed and returns back
# to the Splitter, it will check the final output files to determine
# if the backup was indeed split.
# If so, it will set the package's chunk_suffixes. If not, it will
# remove the '-aa' suffix from the only file created by `split`.
#
# If no Splitter was configured, the final file output will be
# piped through `cat` into the final output file.
if @splitter
stack << lambda do
@splitter.split_with do |command|
@pipeline << command
stack.shift.call
end
end
else
stack << lambda do
outfile = File.join(Config.tmp_path, @package.basename)
@pipeline << "#{ utility(:cat) } > #{ outfile }"
stack.shift.call
end
end
##
# Last Proc to be called runs the Pipeline the procedure built.
# Once complete, the call stack will unwind back through the
        # preceding Procs in the stack (if any)
stack << lambda { @pipeline.run }
stack.shift
end
def tar_success_codes
gnu_tar? ? [0, 1] : [0]
end
end
end
end
| 32.75 | 78 | 0.584676 |
916ed828ff199f4afbe86d3514b517098c919a5f | 3,178 | require 'test_helper'
require 'crud_test_model'
require 'custom_assertions'
class ListHelperTest < ActionView::TestCase
REGEXP_ROWS = /<tr.+?<\/tr>/m
REGEXP_HEADERS = /<th.+?<\/th>/m
REGEXP_SORT_HEADERS = /<th><a .*?sort_dir=asc.*?>.*?<\/a><\/th>/m
include StandardHelper
include CrudTestHelper
include CustomAssertions
attr_reader :entries
setup :reset_db, :setup_db, :create_test_data
teardown :reset_db
test "standard list table" do
@entries = CrudTestModel.all
t = with_test_routing do
list_table
end
assert_count 7, REGEXP_ROWS, t
assert_count 13, REGEXP_SORT_HEADERS, t
end
test "custom list table with attributes" do
@entries = CrudTestModel.all
t = with_test_routing do
list_table :name, :children, :companion_id
end
assert_count 7, REGEXP_ROWS, t
assert_count 3, REGEXP_SORT_HEADERS, t
end
test "custom list table with block" do
@entries = CrudTestModel.all
t = with_test_routing do
list_table do |t|
t.attrs :name, :children, :companion_id
t.col("head") {|e| content_tag :span, e.income.to_s }
end
end
assert_count 7, REGEXP_ROWS, t
assert_count 4, REGEXP_HEADERS, t
assert_count 0, REGEXP_SORT_HEADERS, t
assert_count 6, /<span>.+?<\/span>/, t
end
test "custom list table with attributes and block" do
@entries = CrudTestModel.all
t = with_test_routing do
list_table :name, :children, :companion_id do |t|
t.col("head") {|e| content_tag :span, e.income.to_s }
end
end
assert_count 7, REGEXP_ROWS, t
assert_count 3, REGEXP_SORT_HEADERS, t
assert_count 4, REGEXP_HEADERS, t
assert_count 6, /<span>.+?<\/span>/, t
end
test "standard list table with ascending sort params" do
def params
{:sort => 'children', :sort_dir => 'asc'}
end
@entries = CrudTestModel.all
t = with_test_routing do
list_table
end
assert_count 7, REGEXP_ROWS, t
assert_count 12, REGEXP_SORT_HEADERS, t
assert_count 1, /<th><a .*?sort_dir=desc.*?>Children<\/a> ↓<\/th>/, t
end
test "standard list table with descending sort params" do
def params
{:sort => 'children', :sort_dir => 'desc'}
end
@entries = CrudTestModel.all
t = with_test_routing do
list_table
end
assert_count 7, REGEXP_ROWS, t
assert_count 12, REGEXP_SORT_HEADERS, t
assert_count 1, /<th><a .*?sort_dir=asc.*?>Children<\/a> ↑<\/th>/, t
end
test "list table with custom column sort params" do
def params
{:sort => 'chatty', :sort_dir => 'asc'}
end
@entries = CrudTestModel.all
t = with_test_routing do
list_table :name, :children, :chatty
end
assert_count 7, REGEXP_ROWS, t
assert_count 2, REGEXP_SORT_HEADERS, t
assert_count 1, /<th><a .*?sort_dir=desc.*?>Chatty<\/a> ↓<\/th>/, t
end
test "default attributes do not include id" do
assert_equal [:name, :whatever, :children, :companion_id, :rating, :income,
:birthdate, :gets_up_at, :last_seen, :human, :remarks,
:created_at, :updated_at], default_attrs
end
end
| 24.828125 | 79 | 0.654185 |
5dc1c0bae41c4637c06a4bf0f406c53338ded193 | 1,193 | module Mautic
module Connections
class Oauth2 < Mautic::Connection
def client
@client ||= OAuth2::Client.new(client_id, secret, {
site: url,
authorize_url: 'oauth/v2/authorize',
token_url: 'oauth/v2/token',
raise_errors: false
})
end
def authorize
client.auth_code.authorize_url(redirect_uri: callback_url)
end
def get_code(code)
client.auth_code.get_token(code, redirect_uri: callback_url)
end
def connection
@connection ||= OAuth2::AccessToken.new(client, token, { refresh_token: refresh_token })
end
def refresh!
@connection = connection.refresh!
update(token: @connection.token, refresh_token: @connection.refresh_token)
@connection
end
def request(type, path, params = {})
@last_request = [type, path, params]
response = connection.request(type, path, params)
parse_response(response)
end
private
def callback_url
uri = super
uri.path = Mautic::Engine.routes.url_helpers.oauth2_connection_path(self)
uri.to_s
end
end
end
end
| 24.346939 | 96 | 0.616094 |
2115ce4eaafb94b006bbdf8f7c689bdd1b5fa064 | 2,640 | class Openmama < Formula
desc "Open source high performance messaging API for various Market Data sources"
homepage "https://openmama.finos.org"
url "https://github.com/finos/OpenMAMA/archive/OpenMAMA-6.3.1-release.tar.gz"
sha256 "43e55db00290bc6296358c72e97250561ed4a4bb3961c1474f0876b81ecb6cf9"
license "LGPL-2.1-only"
bottle do
rebuild 1
sha256 cellar: :any, arm64_monterey: "2e7a0417aeac01af231fb8302c4fbe3c212d81f2d657fbef9a54894aa0e3d52f"
sha256 cellar: :any, arm64_big_sur: "e68c9fee04206d5f5b21d84889bf645c7f4b2b2922a886e9bbcc226a05faa183"
sha256 cellar: :any, monterey: "af3c17bf4293979ab3fb40f6fbfaf6ffedb4dc5ec74eb1345770da6da7ff5597"
sha256 cellar: :any, big_sur: "b3d28de466d5f2d17ddb57b2f5004e3defc7f8d48922814f61f733aa7015639c"
sha256 cellar: :any, catalina: "50fe6f8436bd5d7729f9f20c21de8d398e50546754514708e24715a097bd21f1"
sha256 cellar: :any, mojave: "b0a5d95139fce5f6f72b5a2906c5e2e6b604aef25e2b76e7c448ab1bfcefe6d6"
sha256 cellar: :any_skip_relocation, x86_64_linux: "cec891fa1150c54fb96efaf979e1efbbaf50b0327978c55a59242c83cb96a73e"
end
depends_on "cmake" => :build
depends_on "apr"
depends_on "apr-util"
depends_on "libevent"
depends_on "qpid-proton"
uses_from_macos "flex" => :build
on_macos do
depends_on "ossp-uuid"
end
# UUID is provided by util-linux on Linux.
on_linux do
depends_on "util-linux"
end
def install
mkdir "build" do
system "cmake", "..", "-DAPR_ROOT=#{Formula["apr"].opt_prefix}",
"-DPROTON_ROOT=#{Formula["qpid-proton"].opt_prefix}",
"-DCMAKE_INSTALL_RPATH=#{rpath}",
"-DINSTALL_RUNTIME_DEPENDENCIES=OFF",
"-DWITH_TESTTOOLS=OFF",
*std_cmake_args
system "make", "install"
end
end
test do
system "#{bin}/mamalistenc", "-?"
(testpath/"test.c").write <<~EOS
#include <mama/mama.h>
#include <stdio.h>
int main() {
mamaBridge bridge;
fclose(stderr);
mama_status status = mama_loadBridge(&bridge, "qpid");
if (status != MAMA_STATUS_OK) return 1;
const char* version = mama_getVersion(bridge);
if (NULL == version) return 2;
printf("%s\\n", version);
return 0;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lmama", "-o", "test"
assert_includes shell_output("./test"), version.to_s
end
end
| 38.823529 | 123 | 0.640152 |
1c10d4783dfc7f76c39ad22352949d95fb48b071 | 395 | # frozen_string_literal: true
module Gruff
# @private
class Renderer::Bezier
def initialize(color:, width: 1.0)
@color = color
@width = width
end
def render(points)
draw = Renderer.instance.draw
draw.push
draw.stroke(@color)
draw.stroke_width(@width)
draw.fill_opacity(0.0)
draw.bezier(*points)
draw.pop
end
end
end
| 17.173913 | 38 | 0.61519 |