hexsha | size | content | avg_line_length | max_line_length | alphanum_fraction
---|---|---|---|---|---
1a2b6f0ec095daaf3bc286a604331cee87c2fb77 | 6,913 | require 'chef_helper'
default_dashboards_yml_output = <<-DASHBOARDYML
---
apiVersion: 1
providers:
- name: GitLab Omnibus
orgId: 1
folder: GitLab Omnibus
type: file
disableDeletion: true
updateIntervalSeconds: 600
options:
path: "/opt/gitlab/embedded/service/grafana-dashboards"
DASHBOARDYML
default_datasources_yml_output = <<-DATASOURCEYML
---
apiVersion: 1
datasources:
- name: GitLab Omnibus
type: prometheus
access: proxy
url: http://localhost:9090
DATASOURCEYML
describe 'monitoring::grafana' do
let(:chef_run) { ChefSpec::SoloRunner.new(step_into: %w(runit_service)).converge('gitlab::default') }
let(:default_vars) do
{
'SSL_CERT_DIR' => '/opt/gitlab/embedded/ssl/certs/',
}
end
before do
allow(Gitlab).to receive(:[]).and_call_original
allow_any_instance_of(PgHelper).to receive(:is_running?).and_return(true)
allow_any_instance_of(PgHelper).to receive(:database_exists?).and_return(true)
end
context 'when grafana is enabled' do
let(:config_template) { chef_run.template('/var/log/gitlab/grafana/config') }
before do
stub_gitlab_rb(
grafana: { enable: true },
prometheus: { enable: true }
)
end
it_behaves_like 'enabled runit service', 'grafana', 'root', 'root'
it 'creates necessary env variable files' do
expect(chef_run).to create_env_dir('/opt/gitlab/etc/grafana/env').with_variables(default_vars)
end
it 'populates the files with expected configuration' do
expect(config_template).to notify('ruby_block[reload_log_service]')
expect(chef_run).to render_file('/opt/gitlab/sv/grafana/run')
.with_content { |content|
expect(content).to match(/exec chpst -P/)
expect(content).to match(/\/opt\/gitlab\/embedded\/bin\/grafana-server/)
expect(content).to match(/-config '\/var\/opt\/gitlab\/grafana\/grafana.ini'/)
}
expect(chef_run).to render_file('/opt/gitlab/sv/grafana/log/run')
.with_content(/exec svlogd -tt \/var\/log\/gitlab\/grafana/)
end
it 'creates default set of directories' do
expect(chef_run).to create_directory('/var/log/gitlab/grafana').with(
owner: 'gitlab-prometheus',
group: nil,
mode: '0700'
)
end
it 'creates the configuration file' do
expect(chef_run).to render_file('/var/opt/gitlab/grafana/grafana.ini')
.with_content { |content|
expect(content).to match(/http_addr = localhost/)
expect(content).to match(/http_port = 3000/)
expect(content).to match(/root_url = http:\/\/localhost\/-\/grafana/)
expect(content).not_to match(/\[auth\.gitlab\]/)
}
end
it 'creates a default dashboards file' do
expect(chef_run).to render_file('/var/opt/gitlab/grafana/provisioning/dashboards/gitlab_dashboards.yml')
.with_content(default_dashboards_yml_output)
end
it 'creates a default datasources file' do
expect(chef_run).to render_file('/var/opt/gitlab/grafana/provisioning/datasources/gitlab_datasources.yml')
.with_content(default_datasources_yml_output)
end
end
context 'when grafana is enabled and prometheus is disabled' do
before do
stub_gitlab_rb(
external_url: 'http://gitlab.example.com',
prometheus: { enable: false },
grafana: { enable: true }
)
end
it 'should create the gitlab-prometheus account if prometheus is disabled' do
expect(chef_run).to create_account('Prometheus user and group').with_username('gitlab-prometheus')
end
end
context 'when log dir is changed' do
before do
stub_gitlab_rb(
external_url: 'http://gitlab.example.com',
grafana: {
log_directory: 'foo',
enable: true
}
)
end
it 'populates the files with expected configuration' do
expect(chef_run).to render_file('/opt/gitlab/sv/grafana/log/run')
.with_content(/exec svlogd -tt foo/)
end
end
it 'authorizes Grafana with gitlab' do
stub_gitlab_rb(external_url: 'http://gitlab.example.com')
allow(GrafanaHelper).to receive(:authorize_with_gitlab)
expect(chef_run).to run_ruby_block('authorize Grafana with GitLab')
.at_converge_time
expect(GrafanaHelper).to receive(:authorize_with_gitlab)
.with 'http://gitlab.example.com'
chef_run.ruby_block('authorize Grafana with GitLab').block.call
end
context 'with user provided settings' do
before do
stub_gitlab_rb(
external_url: 'https://trailingslash.example.com/',
grafana: {
http_addr: '0.0.0.0',
http_port: 3333,
enable: true,
gitlab_application_id: 'appid',
gitlab_secret: 'secret',
gitlab_auth_sign_up: false,
allowed_groups: %w[
allowed
also-allowed
],
env: {
'USER_SETTING' => 'asdf1234'
},
dashboards: [
{
name: 'GitLab Omnibus',
orgId: 1,
folder: 'GitLab Omnibus',
type: 'file',
disableDeletion: true,
updateIntervalSeconds: 600,
options: {
path: '/etc/grafana/dashboards',
},
},
],
}
)
end
it 'creates a custom dashboards file' do
expect(chef_run).to render_file('/var/opt/gitlab/grafana/provisioning/dashboards/gitlab_dashboards.yml')
.with_content { |content|
expect(content).to match(/options:\n path: "\/etc\/grafana\/dashboards"\n/)
}
end
it 'populates the files with expected configuration' do
expect(chef_run).to render_file('/var/opt/gitlab/grafana/grafana.ini')
.with_content { |content|
expect(content).to match(/http_addr = 0.0.0.0/)
expect(content).to match(/http_port = 3333/)
expect(content).to match(/root_url = https:\/\/trailingslash.example.com\/-\/grafana/)
expect(content).to match(/\[auth\.gitlab\]\nenabled = true\nallow_sign_up = false/)
expect(content).to match(/client_id = appid/)
expect(content).to match(/client_secret = secret/)
expect(content).to match(/auth_url = https:\/\/trailingslash.example.com\/oauth\/authorize/)
expect(content).to match(/token_url = https:\/\/trailingslash.example.com\/oauth\/token/)
expect(content).to match(/api_url = https:\/\/trailingslash.example.com\/api\/v4/)
expect(content).to match(/allowed_groups = allowed also-allowed/)
}
end
it 'creates necessary env variable files' do
expect(chef_run).to create_env_dir('/opt/gitlab/etc/grafana/env').with_variables(
default_vars.merge(
{
'USER_SETTING' => 'asdf1234'
}
)
)
end
end
end
| 32.608491 | 112 | 0.638363 |
390594f235d5f65125bd6eab6e8c73f368438991 | 259 | Rails.application.routes.draw do
resources :pages, only: [:show]
resources :posts, only: [:show]
namespace :admin do
root "static_pages#home"
resources :pages
resources :posts
resources :themes, only: [:index, :new, :create]
end
end
| 19.923077 | 52 | 0.675676 |
7aa607b085adfe7fc2bac521a087fb5e0a85dc75 | 1,798 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure public file server for tests with Cache-Control for performance.
config.public_file_server.enabled = true
config.public_file_server.headers = {
'Cache-Control' => 'public, max-age=3600'
}
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
config.action_mailer.perform_caching = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
config.secret_key_base = ENV['SECRET_KEY_BASE']
end
| 40.863636 | 85 | 0.773081 |
ac570017b4ed99e9a4ec1348efb3626f3ca83c3a | 310 | class AddRemoteIdIndexToRelayAccounts < ActiveRecord::Migration
def up
execute 'create unique index index_remote_id_on_person_relay_account '\
'on person_relay_accounts(lower(remote_id));'
end
def down
execute 'drop index if exists index_remote_id_on_person_relay_account'
end
end
| 28.181818 | 75 | 0.777419 |
26dd5b61b2f8e77d7875f6301a463a598d56342b | 6,627 | # frozen_string_literal: true
# Copyright (c) 2018 by Jiang Jinyang <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'ciri/rlp'
require 'socket'
require 'forwardable'
require_relative 'frame_io'
require_relative 'protocol_messages'
require_relative 'errors'
require_relative 'encryption_handshake'
module Ciri
module P2P
module RLPX
# RLPX::Connection implement RLPX protocol operations
# all operations end with bang(!)
class Connection
extend Forwardable
def_delegators :@frame_io, :read_msg, :write_msg, :send_data, :closed?, :close
class Error < RLPX::Error
end
class MessageOverflowError < Error
end
class UnexpectedMessageError < Error
end
class FormatError < Error
end
def initialize(io)
set_timeout(io)
@io = io
@frame_io = nil
end
# Encryption handshake, exchange keys with node, must been invoked before other operations
def encryption_handshake!(private_key:, remote_node_id: nil)
enc_handshake = EncryptionHandshake.new(private_key: private_key, remote_id: remote_node_id)
secrets = remote_node_id.nil? ? receiver_enc_handshake(enc_handshake) : initiator_enc_handshake(enc_handshake)
@frame_io = FrameIO.new(@io, secrets)
end
# protocol handshake
def protocol_handshake!(our_hs)
@frame_io.send_data(Code::HANDSHAKE, our_hs.rlp_encode)
remote_hs = read_protocol_handshake
# enable snappy compression if the remote peer supports it
@frame_io.snappy = remote_hs.version >= SNAPPY_PROTOCOL_VERSION
remote_hs
end
private
def receiver_enc_handshake(receiver)
auth_msg_binary, auth_packet = read_enc_handshake_msg(ENC_AUTH_MSG_LENGTH, receiver.private_key)
auth_msg = AuthMsgV4.rlp_decode(auth_msg_binary)
receiver.handle_auth_msg(auth_msg)
auth_ack_msg = receiver.auth_ack_msg
auth_ack_msg_plain_text = auth_ack_msg.rlp_encode
auth_ack_packet = if auth_msg.got_plain
raise NotImplementedError.new('not support pre eip8 plain text seal')
else
seal_eip8(auth_ack_msg_plain_text, receiver)
end
@io.write(auth_ack_packet)
@io.flush
receiver.extract_secrets(auth_packet, auth_ack_packet, initiator: false)
end
def initiator_enc_handshake(initiator)
initiator_auth_msg = initiator.auth_msg
auth_msg_plain_text = initiator_auth_msg.rlp_encode
# seal eip8
auth_packet = seal_eip8(auth_msg_plain_text, initiator)
@io.write(auth_packet)
@io.flush
auth_ack_mgs_binary, auth_ack_packet = read_enc_handshake_msg(ENC_AUTH_RESP_MSG_LENGTH, initiator.private_key)
auth_ack_msg = AuthRespV4.rlp_decode auth_ack_mgs_binary
initiator.handle_auth_ack_msg(auth_ack_msg)
initiator.extract_secrets(auth_packet, auth_ack_packet, initiator: true)
end
def read_enc_handshake_msg(plain_size, private_key)
packet = @io.read(plain_size)
decrypt_binary_msg = begin
private_key.ecies_decrypt(packet)
rescue Crypto::ECIESDecryptionError => e
nil
end
# pre eip old plain format
return decrypt_binary_msg if decrypt_binary_msg
# try decode eip8 format
prefix = packet[0...2]
size = Ciri::Utils.big_endian_decode(prefix)
raise FormatError.new("EIP8 format message size #{size} less than plain_size #{plain_size}") if size < plain_size
# continue read remain bytes
packet << @io.read(size - plain_size + 2)
# decrypt message
[private_key.ecies_decrypt(packet[2..-1], prefix), packet]
end
def read_protocol_handshake
msg = @frame_io.read_msg
if msg.size > BASE_PROTOCOL_MAX_MSG_SIZE
raise MessageOverflowError.new("message size #{msg.size} is too big")
end
if msg.code == Code::DISCONNECT
payload = RLP.decode(msg.payload)
raise UnexpectedMessageError.new("expected handshake, get disconnect, reason: #{payload}")
end
if msg.code != Code::HANDSHAKE
raise UnexpectedMessageError.new("expected handshake, get #{msg.code}")
end
ProtocolHandshake.rlp_decode(msg.payload)
end
def set_timeout(io)
timeout = HANDSHAKE_TIMEOUT
if io.is_a?(BasicSocket)
secs = Integer(timeout)
usecs = Integer((timeout - secs) * 1_000_000)
optval = [secs, usecs].pack("l_2")
io.setsockopt Socket::SOL_SOCKET, Socket::SO_RCVTIMEO, optval
io.setsockopt Socket::SOL_SOCKET, Socket::SO_SNDTIMEO, optval
end
end
def seal_eip8(encoded_msg, handshake)
# padding encoded message, make message distinguished from pre eip8
encoded_msg += "\x00".b * rand(100..300)
prefix = encoded_prefix(encoded_msg.size + ECIES_OVERHEAD)
enc = handshake.remote_key.ecies_encrypt(encoded_msg, prefix)
prefix + enc
end
# encode 16 uint prefix
def encoded_prefix(n)
prefix = Utils.big_endian_encode(n)
# pad to 2 bytes
prefix.ljust(2, "\x00".b)
end
end
end
end
end
| 36.213115 | 123 | 0.659725 |
21e7242b1273acf2fc4209f07014eb3f060e29af | 721 | # frozen_string_literal: true
class FilenameValidator < ActiveModel::Validator
def validate(record)
errors = record.errors[:name]
# / \ : * ? " < > | are not allowed
if record.name =~ /[\/\\:\*\?"<>\|]/
errors <<
I18n.t('activerecord.errors.messages.filename_validator.invalid_characters',
characters: '\ : * ? " < > |')
# Trailing dots are not allowed
elsif record.name =~ /\.+\z/
errors << I18n.t('activerecord.errors.messages.filename_validator.tailing_dots')
# Leading or trailing whitespace is not allowed
elsif record.name =~ /(\A\s+.*)|(.*\s+\z)/
errors << I18n.t('activerecord.errors.messages.filename_validator.whitespaces')
end
end
end
| 37.947368 | 86 | 0.631068 |
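The validator in the row above is a plain ActiveModel validator, so it is attached with validates_with. The sketch below is illustrative only; the Document model and its name attribute are assumptions, not part of the dataset.

# Illustrative sketch: a hypothetical model using the FilenameValidator above.
class Document < ApplicationRecord
  validates_with FilenameValidator
end

doc = Document.new(name: 'report?.txt')
doc.valid? # => false; a translated error is added under :name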
91d869eca6189acd6270f40b5e18107c8325c8e7 | 1,382 | # frozen_string_literal: true
# rubocop:disable RSpec/InstanceVariable
RSpec.describe SidekiqUniqueJobs::Server::Middleware do
let(:middleware) { described_class.new }
describe "#call" do
subject(:call) { middleware.call(worker_class, item, queue, &block) }
let(:block) { -> { @inside_block_value = true } }
let(:worker_class) { WhileExecutingJob }
let(:queue) { "working" }
let(:redis_pool) { nil }
let(:args) { [1] }
let(:item) do
{ "class" => worker_class,
"queue" => queue,
"args" => args }
end
let(:lock) { instance_spy(SidekiqUniqueJobs::Lock::WhileExecuting) }
before do
@inside_block_value = false
allow(middleware).to receive(:lock).and_return(lock)
allow(lock).to receive(:execute).and_yield
end
context "when unique is disabled" do
before do
allow(middleware).to receive(:unique_enabled?).and_return(false)
end
it "yields control" do
expect { call }.to change { @inside_block_value }.to(true)
end
end
context "when unique is enabled" do
before do
allow(middleware).to receive(:unique_enabled?).and_return(true)
end
it "yields control" do
expect { call }.to change { @inside_block_value }.to(true)
end
end
end
end
# rubocop:enable RSpec/InstanceVariable
| 27.64 | 73 | 0.629522 |
7a8ae0a9460e4f4c1a43cf6bb7648905cdf48e25 | 5,389 | =begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.5.1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXT
class NodeSummaryList
# List of Node Summary
attr_accessor :results
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'results' => :'results'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'results' => :'Array<NodeSummary>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'results')
if (value = attributes[:'results']).is_a?(Array)
self.results = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @results.nil?
invalid_properties.push('invalid value for "results", results cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @results.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
results == o.results
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[results].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXT.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 27.92228 | 107 | 0.613472 |
ff7cde0369da81937e62ecbd7e00115dba657803 | 5,539 | module ProviderInterface
class ReasonsForRejectionController < ProviderInterfaceController
before_action :set_application_choice
before_action :redirect_if_application_rejected_and_feedback_provided
def edit_initial_questions
@wizard = ReasonsForRejectionWizard.new(store, current_step: 'initial_questions')
@wizard.save_state!
end
def update_initial_questions
@wizard = ReasonsForRejectionWizard.new(store, reasons_for_rejection_params.merge(current_step: 'initial_questions'))
if @wizard.valid_for_current_step?
@wizard.save_state!
redirect_to next_redirect(@wizard)
else
render :edit_initial_questions
end
end
def edit_other_reasons
@wizard = ReasonsForRejectionWizard.new(store, current_step: 'other_reasons')
@wizard.save_state!
end
def update_other_reasons
@wizard = ReasonsForRejectionWizard.new(store, reasons_for_rejection_params.merge(current_step: 'other_reasons'))
if @wizard.valid_for_current_step?
@wizard.save_state!
redirect_to next_redirect(@wizard)
else
render :edit_other_reasons
end
end
def check
@wizard = ReasonsForRejectionWizard.new(store, current_step: 'check')
@back_link_path = if @wizard.needs_other_reasons?
provider_interface_reasons_for_rejection_other_reasons_path(@application_choice)
else
provider_interface_reasons_for_rejection_initial_questions_path(@application_choice)
end
@wizard.save_state!
end
def commit
@wizard = ReasonsForRejectionWizard.new(store)
if rbd_application_with_no_feedback?
service = RejectByDefaultFeedback.new(actor: current_provider_user, application_choice: @application_choice, structured_rejection_reasons: @wizard.to_model)
success_message = 'Feedback sent'
else
service = RejectApplication.new(actor: current_provider_user, application_choice: @application_choice, structured_rejection_reasons: @wizard.to_model)
success_message = 'Application rejected'
end
if service.save
@wizard.clear_state!
OfferWizard.new(offer_store).clear_state!
flash[:success] = success_message
redirect_to provider_interface_application_choice_feedback_path(@application_choice)
else
@wizard.errors.merge!(service.errors)
render :check
end
end
def next_redirect(wizard)
{
'other_reasons' => { action: :edit_other_reasons },
'check' => { action: :check },
}.fetch(wizard.next_step)
end
def reasons_for_rejection_params
params.require(:reasons_for_rejection)
.permit(:candidate_behaviour_y_n, :candidate_behaviour_other,
:candidate_behaviour_what_to_improve,
:quality_of_application_y_n, :quality_of_application_personal_statement_what_to_improve,
:quality_of_application_subject_knowledge_what_to_improve,
:quality_of_application_other_details, :quality_of_application_other_what_to_improve,
:qualifications_y_n, :qualifications_other_details,
:performance_at_interview_y_n, :performance_at_interview_what_to_improve,
:course_full_y_n,
:offered_on_another_course_y_n,
:offered_on_another_course_details,
:honesty_and_professionalism_y_n,
:honesty_and_professionalism_concerns_plagiarism_details,
:honesty_and_professionalism_concerns_information_false_or_inaccurate_details,
:honesty_and_professionalism_concerns_references_details,
:honesty_and_professionalism_concerns_other_details,
:safeguarding_y_n,
:safeguarding_concerns_candidate_disclosed_information_details,
:safeguarding_concerns_vetting_disclosed_information_details,
:safeguarding_concerns_other_details,
:other_advice_or_feedback_y_n,
:other_advice_or_feedback_details,
:why_are_you_rejecting_this_application,
honesty_and_professionalism_concerns: [],
safeguarding_concerns: [],
qualifications_which_qualifications: [],
quality_of_application_which_parts_needed_improvement: [],
candidate_behaviour_what_did_the_candidate_do: [])
end
def store
key = "reasons_for_rejection_wizard_store_#{current_provider_user.id}_#{@application_choice.id}"
WizardStateStores::RedisStore.new(key: key)
end
def redirect_if_application_rejected_and_feedback_provided
if @application_choice&.rejected? && !@application_choice.no_feedback?
if @application_choice.rejected_by_default?
flash[:warning] = 'The feedback for this application has already been provided.'
else
flash[:warning] = 'This application has already been rejected.'
end
redirect_to provider_interface_application_choice_feedback_path(@application_choice)
end
end
private
def rbd_application_with_no_feedback?
@application_choice.rejected_by_default? && @application_choice.no_feedback?
end
def offer_store
key = "offer_wizard_store_#{current_provider_user.id}_#{@application_choice.id}"
WizardStateStores::RedisStore.new(key: key)
end
end
end
| 39.848921 | 164 | 0.707528 |
d5203a7f39f285749b6fbe28f5531be9f12c7586 | 322 | # encoding: utf-8
require 'spec_helper'
describe Adapter::Mongo::Gateway, '.new' do
subject { object.new(adapter, relation) }
let(:adapter) { stub }
let(:relation) { stub }
let(:object) { described_class }
it { should be_instance_of(described_class) }
it { should be_frozen }
end
| 20.125 | 47 | 0.630435 |
2831ed05e95702a8a81adda2589bcc04902baa9a | 748 | require 'telegram/bot'
class Help
attr_reader :bot, :message
def initialize(bot, message)
@bot = bot
@message = message
@help = { start: 'Start remby_bot when you want',
stop: 'Send remby_bot to sleep',
help: 'You are seeing it!',
glasses: 'remby will ask you a question!',
'Why drink water is important?' => 'Remby will give you a useful link' }
end
def display_help
descriptions = ''
@help.each do |key, value|
descriptions += key.match('Why') ? "#{key}: #{value}\n\n" : "/#{key}: #{value}\n\n"
end
descriptions
end
def answer
commands = display_help
bot.api.send_message(chat_id: message.chat.id, text: commands.to_s)
end
end
| 25.793103 | 89 | 0.601604 |
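The Help class in the row above already relies on the telegram-bot-ruby client (bot.api.send_message), so a minimal wiring sketch under that assumption looks like this; the token lookup and the '/help' trigger are illustrative assumptions.

# Illustrative sketch; assumes a bot token in the environment.
require 'telegram/bot'

Telegram::Bot::Client.run(ENV['TELEGRAM_BOT_TOKEN']) do |bot|
  bot.listen do |message|
    Help.new(bot, message).answer if message.text == '/help'
  end
end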
ed6d8d0e0f47fc7f69974a82fbb03eeb619376dd | 1,333 | module EchoUploads
# Used by `EchoUploads::PrmFileWriting#echo_uploads_write_prm_file`.
class WritableFile
def close
# Using ActionDispatch::Http::UploadedFile is ugly. We use it because
# EchoUploads::File#persist! expects that. In version 1 of this gem, we should
# refactor. Everything should be more modular in general.
uploaded_file = ActionDispatch::Http::UploadedFile.new(
tempfile: @tempfile,
filename: @metadata.original_filename
)
ActiveRecord::Base.transaction do
# Dup the EchoUploads::File and destroy the prior one. This ensures that the
# old data is cleaned out if necessary.
new_metadata = @metadata.dup
@metadata.destroy
new_metadata.file = uploaded_file
new_metadata.persist! new_metadata.owner_attr, @options
@tempfile.close!
end
end
# Takes an EchoUploads::File.
def initialize(metadata, options)
tmp_name = SecureRandom.hex 10
@tempfile = Tempfile.new 'echo_uploads', Rails.root.join('tmp')
@metadata = metadata
@options = options
end
def method_missing(meth, *args)
if @tempfile.respond_to? meth
@tempfile.send meth, *args
else
super meth, *args
end
end
attr_reader :tempfile
end
end | 32.512195 | 86 | 0.662416 |
386e1390f1d9af0d58dcec9e24a6719630b639c5 | 339 | class CreateRecommendedMovieJob < ApplicationJob
queue_as :default
def perform(imdb)
crawler = CrawlerMovie.new.load(imdb)
movies = crawler[:recommended_movies].split(',')
movies.each do |movie|
RecommendedMovie.create(imdb: imdb, recommended_imdb: movie)
CreateMovieJob.perform_later(movie)
end
end
end
| 26.076923 | 66 | 0.731563 |
61b778d20bfa83eabffd8e273f4e9e67bcf19fa3 | 2,278 | require 'aws-sdk-s3'
require "fde/s3_client/version"
module FDE
module S3Client
class AWSAccessKeyIDNotDefinedError < StandardError; end
class AWSSecretAccessKeyNotDefinedError < StandardError; end
class AWSRegionNotDefinedError < StandardError; end
class AWSS3BucketNameNotDefinedError < StandardError; end
class BucketNameNotDefinedError < StandardError; end
class Config
attr_accessor :aws_access_key_id,
:aws_secret_access_key,
:aws_region,
:bucket_name
end
def self.config
@@config ||= Config.new
end
def self.configure
yield self.config
end
def self.s3
if self.config.aws_region.to_s.empty?
raise AWSRegionNotDefinedError
end
Aws::S3::Resource.new(
region: self.config.aws_region,
credentials: self.credentials
)
end
def self.credentials
if self.config.aws_access_key_id.to_s.empty?
raise AWSAccessKeyIDNotDefinedError
end
if self.config.aws_secret_access_key.to_s.empty?
raise AWSSecretAccessKeyNotDefinedError
end
Aws::Credentials.new(
config.aws_access_key_id,
config.aws_secret_access_key
)
end
def self.bucket
if self.config.bucket_name.to_s.empty?
raise BucketNameNotDefinedError
end
self.s3.bucket(self.config.bucket_name)
end
def self.upload_file(file_path, options = {})
if options[:key]
key = options[:folder].to_s + options[:key]
else
key = options[:folder].to_s + File.basename(file_path)
end
self.s3.bucket(self.config.bucket_name).object(key).upload_file(file_path)
end
def self.upload_content(key, content)
self.s3.bucket(self.config.bucket_name).object(key).put(body: content)
end
def self.delete(key)
object = self.bucket.object(key)
object.delete
end
def self.list
self.bucket.objects.collect(&:key)
end
def self.move(key, new_key)
object = self.bucket.object(key)
target = "#{object.bucket.name}/#{new_key}"
object.move_to(target)
end
def self.download(key, target)
object = self.bucket.object(key)
object.download_file(target)
end
end
end
| 24.494624 | 80 | 0.667252 |
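A minimal usage sketch for the FDE::S3Client module above: configure once, then call the module-level helpers. The credentials, region, bucket name, and file paths are placeholder assumptions.

# Illustrative sketch of configuring and using FDE::S3Client.
FDE::S3Client.configure do |config|
  config.aws_access_key_id     = ENV['AWS_ACCESS_KEY_ID']
  config.aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
  config.aws_region            = 'eu-central-1'
  config.bucket_name           = 'example-bucket'
end

FDE::S3Client.upload_file('/tmp/report.csv', folder: 'reports/')
FDE::S3Client.list                                            # => array of object keys
FDE::S3Client.download('reports/report.csv', '/tmp/copy.csv')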
ac6b9b604ebb6c6636ac68b9fb63545b7361a55a | 2,498 | class Gstreamer < Formula
desc "Development framework for multimedia applications"
homepage "https://gstreamer.freedesktop.org/"
url "https://gstreamer.freedesktop.org/src/gstreamer/gstreamer-1.18.5.tar.xz"
sha256 "55862232a63459bbf56abebde3085ca9aec211b478e891dacea4d6df8cafe80a"
license "LGPL-2.0-or-later"
head "https://gitlab.freedesktop.org/gstreamer/gstreamer.git"
livecheck do
url "https://gstreamer.freedesktop.org/src/gstreamer/"
regex(/href=.*?gstreamer[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
end
bottle do
sha256 arm64_monterey: "17eaa301f59d1fe050947a33170c19a740249d30071df7cfea328c07f73ef21d"
sha256 arm64_big_sur: "8a5e1cea56b64080ed4849d694c5c93d6e4379c06d371006e844bdfa78c1bdf7"
sha256 monterey: "623bec9c478267a5964e1bb0e6e9fa18212a3bdc8ad70978509059e6a501b2e1"
sha256 big_sur: "572b2ffe10da57213e900b3b23d12e2ba5be8f912e327ba46cbc7b40dcff9b02"
sha256 catalina: "33c77ef7ddd3b4d5daf1e5851efc0a293e4284726a35e732938c49f9bcbdfbf3"
sha256 x86_64_linux: "5bcd1f49326cd27c245157336af71b43da146e3824a6845cb10fdfe65098ee33"
end
depends_on "bison" => :build
depends_on "gobject-introspection" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "glib"
uses_from_macos "flex" => :build
def install
# Ban trying to chown to root.
# https://bugzilla.gnome.org/show_bug.cgi?id=750367
args = std_meson_args + %w[
-Dintrospection=enabled
-Dptp-helper-permissions=none
]
# Look for plugins in HOMEBREW_PREFIX/lib/gstreamer-1.0 instead of
# HOMEBREW_PREFIX/Cellar/gstreamer/1.0/lib/gstreamer-1.0, so we'll find
# plugins installed by other packages without setting GST_PLUGIN_PATH in
# the environment.
inreplace "meson.build",
"cdata.set_quoted('PLUGINDIR', join_paths(get_option('prefix'), get_option('libdir'), 'gstreamer-1.0'))",
"cdata.set_quoted('PLUGINDIR', '#{HOMEBREW_PREFIX}/lib/gstreamer-1.0')"
mkdir "build" do
system "meson", *args, ".."
system "ninja", "-v"
system "ninja", "install", "-v"
end
end
def caveats
<<~EOS
Consider also installing gst-plugins-base and gst-plugins-good.
The gst-plugins-* packages contain gstreamer-video-1.0, gstreamer-audio-1.0,
and other components needed by most gstreamer applications.
EOS
end
test do
system bin/"gst-inspect-1.0"
end
end
| 36.202899 | 111 | 0.725781 |
26390a1f8ce11ff325d5ad96d222c5b8df0ceeb3 | 3,423 | describe 'Accounting' do
let(:seller) { create(:member, :level_3, :barong) }
let(:buyer) { create(:member, :level_3, :barong) }
let :order_ask do
create :order_ask, \
bid: :usd,
ask: :btc,
market: Market.find(:btcusd),
state: :wait,
ord_type: :limit,
price: '1'.to_d,
volume: '10000.0',
origin_volume: '10000.0',
locked: '10000',
origin_locked: '10000',
member: seller
end
let :order_bid do
create :order_bid, \
bid: :usd,
ask: :btc,
market: Market.find(:btcusd),
state: :wait,
ord_type: :limit,
price: '1.2'.to_d,
volume: '10000',
origin_volume: '10000',
locked: '12000',
origin_locked: '12000',
member: buyer
end
let(:deposit_btc) { create(:deposit_btc, member: seller, amount: order_ask.locked, currency_id: :btc) }
let(:deposit_usd) { create(:deposit_usd, member: buyer, amount: order_bid.locked, currency_id: :usd) }
let :executor do
ask = Matching::LimitOrder.new(order_ask.to_matching_attributes)
bid = Matching::LimitOrder.new(order_bid.to_matching_attributes)
Matching::Executor.new \
action: 'execute',
trade: {
market_id: :btcusd,
maker_order_id: ask.id,
taker_order_id: bid.id,
strike_price: '1.2',
amount: '10000',
total: '12000'
}
end
let(:asset_balance) { Operations::Asset.balance }
let(:liability_balance) { Operations::Liability.balance }
let(:revenue_balance) { Operations::Revenue.balance }
let(:expense_balance) { Operations::Expense.balance }
before do
deposit_btc.accept!
deposit_btc.process!
deposit_btc.dispatch!
deposit_usd.accept!
order_bid.hold_account!.lock_funds!(order_bid.locked)
order_bid.record_submit_operations!
order_ask.hold_account!.lock_funds!(order_ask.locked)
order_ask.record_submit_operations!
executor.execute!
end
it 'assert that asset - liabilities = revenue - expense' do
expect(asset_balance.merge(liability_balance){ |k, a, b| a - b}).to eq (revenue_balance.merge(expense_balance){ |k, a, b| a - b})
end
it 'assert the balance is 15.0 / 18.0 $' do
balance = asset_balance.merge(liability_balance){ |k, a, b| a - b}
expect(balance.fetch(:btc)).to eq '15.0'.to_d
expect(balance.fetch(:usd)).to eq '18.0'.to_d
end
context 'withdraws' do
let(:btc_withdraw) { create(:btc_withdraw, sum: 1000.to_d, member: buyer ) }
before do
btc_withdraw.accept!
btc_withdraw.update(txid: 'a1a43ab7166f81059449f80a35abdc6febe62fe1f75a0cdb25d49ebae3fc10d9')
btc_withdraw.process!
btc_withdraw.dispatch!
btc_withdraw.success!
end
it 'after btc withdraw, assert that asset - liabilities = revenue - expense' do
expect(asset_balance.merge(liability_balance){ |k, a, b| a - b}).to eq (revenue_balance.merge(expense_balance){ |k, a, b| a - b})
end
it 'after btc withdraw (fee: 0.01) assert the balance is 15.01 / 18.0 $' do
balance = asset_balance.merge(liability_balance){ |k, a, b| a - b}
expect(balance.fetch(:btc)).to eq '15.01'.to_d
expect(balance.fetch(:usd)).to eq '18.0'.to_d
end
end
end
| 31.694444 | 135 | 0.614665 |
5dd8995000c23e2617e0c0fa3480e0ebabd04d8a | 5,632 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: bixby-common 0.7.1 ruby lib
Gem::Specification.new do |s|
s.name = "bixby-common"
s.version = "0.7.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Chetan Sarva"]
s.date = "2015-06-29"
s.description = "Bixby Common files/libs"
s.email = "[email protected]"
s.extra_rdoc_files = [
"LICENSE"
]
s.files = [
".coveralls.yml",
".document",
".travis.yml",
"Gemfile",
"Gemfile.lock",
"LICENSE",
"Rakefile",
"VERSION",
"bixby-common.gemspec",
"lib/bixby-common.rb",
"lib/bixby-common/api/api_channel.rb",
"lib/bixby-common/api/encrypted_json_request.rb",
"lib/bixby-common/api/http_channel.rb",
"lib/bixby-common/api/json_request.rb",
"lib/bixby-common/api/json_response.rb",
"lib/bixby-common/api/rpc_handler.rb",
"lib/bixby-common/api/signed_json_request.rb",
"lib/bixby-common/bixby.rb",
"lib/bixby-common/command_response.rb",
"lib/bixby-common/command_spec.rb",
"lib/bixby-common/exception/bundle_not_found.rb",
"lib/bixby-common/exception/command_exception.rb",
"lib/bixby-common/exception/command_not_found.rb",
"lib/bixby-common/exception/encryption_error.rb",
"lib/bixby-common/util/crypto_util.rb",
"lib/bixby-common/util/debug.rb",
"lib/bixby-common/util/hashify.rb",
"lib/bixby-common/util/http_client.rb",
"lib/bixby-common/util/jsonify.rb",
"lib/bixby-common/util/log.rb",
"lib/bixby-common/util/log/filtering_layout.rb",
"lib/bixby-common/util/log/logger.rb",
"lib/bixby-common/util/signal.rb",
"lib/bixby-common/util/thread_dump.rb",
"lib/bixby-common/util/thread_pool.rb",
"lib/bixby-common/util/thread_pool/task.rb",
"lib/bixby-common/util/thread_pool/worker.rb",
"lib/bixby-common/websocket/api_channel.rb",
"lib/bixby-common/websocket/async_response.rb",
"lib/bixby-common/websocket/message.rb",
"lib/bixby-common/websocket/request.rb",
"lib/bixby-common/websocket/response.rb",
"test/api/http_channel_test.rb",
"test/base.rb",
"test/bixby_common_test.rb",
"test/command_response_test.rb",
"test/command_spec_test.rb",
"test/helper.rb",
"test/sample_handler.rb",
"test/side_effect.rb",
"test/support/repo/vendor/test_bundle/bin/cat",
"test/support/repo/vendor/test_bundle/bin/cat.json",
"test/support/repo/vendor/test_bundle/bin/echo",
"test/support/repo/vendor/test_bundle/digest",
"test/support/repo/vendor/test_bundle/manifest.json",
"test/util/crypto_util_test.rb",
"test/util/http_client_test.rb",
"test/util/jsonify_test.rb",
"test/util/log_test.rb",
"test/util/signal_test.rb",
"test/util/thread_pool_test.rb",
"test/websocket/api_channel_test.rb",
"test/websocket/async_response_test.rb",
"test/websocket/request_test.rb",
"test/websocket/response_test.rb"
]
s.homepage = "http://github.com/chetan/bixby-common"
s.licenses = ["MIT"]
s.rubygems_version = "2.4.6"
s.summary = "Bixby Common"
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<bixby-auth>, ["~> 0.1"])
s.add_runtime_dependency(%q<faye-websocket>, ["~> 0.7"])
s.add_runtime_dependency(%q<multi_json>, ["~> 1.8"])
s.add_runtime_dependency(%q<httpi>, ["~> 2.1"])
s.add_runtime_dependency(%q<logging>, ["~> 1.8"])
s.add_runtime_dependency(%q<semver2>, ["~> 3.3"])
s.add_development_dependency(%q<yard>, ["~> 0.8"])
s.add_development_dependency(%q<bundler>, ["~> 1.1"])
s.add_development_dependency(%q<jeweler>, ["~> 2.0"])
s.add_development_dependency(%q<pry>, ["~> 0.9"])
s.add_development_dependency(%q<test_guard>, ["~> 0.2"])
s.add_development_dependency(%q<rb-inotify>, ["~> 0.9"])
s.add_development_dependency(%q<rb-fsevent>, ["~> 0.9"])
s.add_development_dependency(%q<rb-fchange>, ["~> 0.0"])
else
s.add_dependency(%q<bixby-auth>, ["~> 0.1"])
s.add_dependency(%q<faye-websocket>, ["~> 0.7"])
s.add_dependency(%q<multi_json>, ["~> 1.8"])
s.add_dependency(%q<httpi>, ["~> 2.1"])
s.add_dependency(%q<logging>, ["~> 1.8"])
s.add_dependency(%q<semver2>, ["~> 3.3"])
s.add_dependency(%q<yard>, ["~> 0.8"])
s.add_dependency(%q<bundler>, ["~> 1.1"])
s.add_dependency(%q<jeweler>, ["~> 2.0"])
s.add_dependency(%q<pry>, ["~> 0.9"])
s.add_dependency(%q<test_guard>, ["~> 0.2"])
s.add_dependency(%q<rb-inotify>, ["~> 0.9"])
s.add_dependency(%q<rb-fsevent>, ["~> 0.9"])
s.add_dependency(%q<rb-fchange>, ["~> 0.0"])
end
else
s.add_dependency(%q<bixby-auth>, ["~> 0.1"])
s.add_dependency(%q<faye-websocket>, ["~> 0.7"])
s.add_dependency(%q<multi_json>, ["~> 1.8"])
s.add_dependency(%q<httpi>, ["~> 2.1"])
s.add_dependency(%q<logging>, ["~> 1.8"])
s.add_dependency(%q<semver2>, ["~> 3.3"])
s.add_dependency(%q<yard>, ["~> 0.8"])
s.add_dependency(%q<bundler>, ["~> 1.1"])
s.add_dependency(%q<jeweler>, ["~> 2.0"])
s.add_dependency(%q<pry>, ["~> 0.9"])
s.add_dependency(%q<test_guard>, ["~> 0.2"])
s.add_dependency(%q<rb-inotify>, ["~> 0.9"])
s.add_dependency(%q<rb-fsevent>, ["~> 0.9"])
s.add_dependency(%q<rb-fchange>, ["~> 0.0"])
end
end
| 39.111111 | 105 | 0.639205 |
bf8bd7fe8b04d50b27171e6de76fe81b8d491b60 | 120 | require 'test_helper'
class VgroupTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15 | 42 | 0.7 |
111b80b97ca87fbf6f9e9951dacf79eb28be5abf | 2,479 | class Pari < Formula
desc "Computer algebra system designed for fast computations in number theory"
homepage "https://pari.math.u-bordeaux.fr/"
url "https://pari.math.u-bordeaux.fr/pub/pari/unix/pari-2.13.3.tar.gz"
sha256 "ccba7f1606c6854f1443637bb57ad0958d41c7f4753f8ae8459f1d64c267a1ca"
license "GPL-2.0-or-later"
livecheck do
url "https://pari.math.u-bordeaux.fr/pub/pari/unix/"
regex(/href=.*?pari[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 cellar: :any, arm64_monterey: "4953a24a3d70dd39a57c987aef382c0e150bb185ce35c25c4410cb8685a8e059"
sha256 cellar: :any, arm64_big_sur: "96c77a5772be062e5f819af0526ad34c9060de50d8017b2ca0946e883f6e0c56"
sha256 cellar: :any, monterey: "ebee4731dc1f7a83c27e7d15af51bcf130330f5ebc59726cf632a29f65b7fff6"
sha256 cellar: :any, big_sur: "1425bcb4a39b7ce00dcd17ff0adf6180f51efe71b320bb2e8c0d082a45945cb3"
sha256 cellar: :any, catalina: "c107ebaa7fbadf071954735b41ca3f56764db05b80da3d7bef4277a3fb517fc4"
sha256 cellar: :any_skip_relocation, x86_64_linux: "5b505207b83024ef3997c9ed4fe380a5d2e0c98b2b97db619821d82403fb8b91"
end
depends_on "gmp"
depends_on "readline"
def install
readline = Formula["readline"].opt_prefix
gmp = Formula["gmp"].opt_prefix
system "./Configure", "--prefix=#{prefix}",
"--with-gmp=#{gmp}",
"--with-readline=#{readline}",
"--graphic=ps",
"--mt=pthread"
# Explicitly set datadir to HOMEBREW_PREFIX/share/pari to allow for external packages to be found
# We do this here rather than in configure because we still want the actual files to be installed to the Cellar
objdir = Utils.safe_popen_read("./config/objdir").chomp
inreplace %W[#{objdir}/pari.cfg #{objdir}/paricfg.h], pkgshare, "#{HOMEBREW_PREFIX}/share/pari"
# make needs to be done in two steps
system "make", "all"
system "make", "install"
# Avoid references to Homebrew shims
inreplace lib/"pari/pari.cfg", Superenv.shims_path, "/usr/bin"
end
def caveats
<<~EOS
If you need the graphical plotting functions you need to install X11 with:
brew install --cask xquartz
EOS
end
test do
(testpath/"math.tex").write "$k_{n+1} = n^2 + k_n^2 - k_{n-1}$"
system bin/"tex2mail", testpath/"math.tex"
end
end
| 42.016949 | 123 | 0.669221 |
b9865078a1fa573c29b7fedcc9df777f993ecdd1 | 3,181 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Web::Mgmt::V2018_02_01
module Models
#
# The source control OAuth token.
#
class SourceControl < ProxyOnlyResource
include MsRestAzure
# @return [String] OAuth access token.
attr_accessor :token
# @return [String] OAuth access token secret.
attr_accessor :token_secret
# @return [String] OAuth refresh token.
attr_accessor :refresh_token
# @return [DateTime] OAuth token expiration.
attr_accessor :expiration_time
#
# Mapper for SourceControl class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'SourceControl',
type: {
name: 'Composite',
class_name: 'SourceControl',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
kind: {
client_side_validation: true,
required: false,
serialized_name: 'kind',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
token: {
client_side_validation: true,
required: false,
serialized_name: 'properties.token',
type: {
name: 'String'
}
},
token_secret: {
client_side_validation: true,
required: false,
serialized_name: 'properties.tokenSecret',
type: {
name: 'String'
}
},
refresh_token: {
client_side_validation: true,
required: false,
serialized_name: 'properties.refreshToken',
type: {
name: 'String'
}
},
expiration_time: {
client_side_validation: true,
required: false,
serialized_name: 'properties.expirationTime',
type: {
name: 'DateTime'
}
}
}
}
}
end
end
end
end
| 27.903509 | 70 | 0.447972 |
1177bb1c7b60aad91a2beeb867621fb616fe0b83 | 1,087 | module Producer
module Core
class Action
INSPECT_ARGUMENTS_SUM_LEN = 68
extend Forwardable
def_delegators :@env, :input, :output, :error_output, :remote
def_delegators :remote, :fs
attr_reader :env, :arguments, :options
def initialize env, *args, **options
@env = env
@arguments = args
@options = options
setup if respond_to? :setup
end
def name
self.class.name.split('::').last.downcase
end
def to_s
[name, inspect_arguments].join ' '
end
private
def inspect_arguments
@arguments.inspect[0, INSPECT_ARGUMENTS_SUM_LEN - name.length]
end
def check_arguments_size! size
return if arguments.compact.size == size
fail ArgumentError, '`%s\' action requires %d arguments' % [name, size]
end
def convert_options conversions
conversions.each do |original, convertion|
options[convertion] = options.delete original if options.key? original
end
end
end
end
end
| 23.630435 | 80 | 0.614535 |
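Concrete actions are built by subclassing the Action class above. The Echo action below is a hypothetical sketch showing the intended hooks: setup for argument checks, plus a method the runner would invoke, named apply here purely for illustration.

# Illustrative sketch: a hypothetical action subclass.
module Producer
  module Core
    module Actions
      class Echo < Action
        def setup
          check_arguments_size! 1
        end

        def apply
          output.puts arguments.first
        end
      end
    end
  end
end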
5d147950e21e3c9e187965ea331dbef5b3a741db | 1,258 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module ReactProjectBackend
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 34.944444 | 82 | 0.77663 |
338afdc382c0404ac580099c2dc378e9a8126c3a | 2,127 | name 'supermarket'
version '3.2.1'
maintainer 'Chef Software, Inc.'
maintainer_email '[email protected]'
license 'Apache v2.0'
description 'Stands up the Supermarket application stack'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
%w(yum apt build-essential python nodejs postgresql redisio git nginx runit rubies packagecloud).each do |dep|
depends dep
end
supports 'ubuntu'
supports 'centos'
recipe 'supermarket::default',
'Installs Supermarket and all dependencies for production'
recipe 'supermarket::vagrant',
'Installs Supermarket and all dependencies for development'
provides 'service[nginx]'
provides 'service[postgresql]'
provides 'service[redis-server]'
provides 'service[unicorn]'
grouping 'postgres', :title => 'PostgreSQL options'
attribute 'postgres/user',
:display_name => 'PostgreSQL username',
:type => 'string',
:default => 'supermarket'
attribute 'postgres/database',
:display_name => 'PostgreSQL database name',
:type => 'string',
:default => 'supermarket_production'
attribute 'postgresql/version',
:display_name => 'PostgreSQL server version',
:type => 'string',
:default => '9.3'
grouping 'redis', :title => 'Redis server options'
attribute 'redis/maxmemory',
:display_name => 'Maximum memory used by redis server',
:type => 'string',
:default => '64mb'
grouping 'supermarket', :title => 'Supermarket options'
attribute 'supermarket/home',
:display_name => 'Directory to deploy Supermarket application',
:type => 'string',
:default => '/srv/supermarket'
attribute 'supermarket/host',
:display_name => 'Hostname of Supermarket application',
:type => 'string',
:default => 'supermarket.getchef.com'
attribute 'supermarket/sidekiq/concurrency',
:display_name => 'Number of concurrent jobs executed by sidekiq',
:type => 'string',
:default => '25'
| 31.746269 | 110 | 0.637047 |
f76ab85efc0584068f37a79e3592c6803202b906 | 46 | require "sapwood/version"
module Sapwood
end
| 9.2 | 25 | 0.804348 |
61b3e0fe660d058588c43b37e7609d26413cfc0c | 125 | require 'test_helper'
class TopicToWordTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.625 | 47 | 0.712 |
18881453032114be8ced73837807c0a27b60ebab | 24 | module RailsDecycle
end
| 8 | 19 | 0.875 |
1adf317a3f98468e62d0daea6a2aec758fbcefc4 | 1,607 | class Bde < Formula
desc "Basic Development Environment: foundational C++ libraries used at Bloomberg"
homepage "https://github.com/bloomberg/bde"
url "https://github.com/bloomberg/bde/archive/3.54.1.0.tar.gz"
sha256 "d157050476b869d68fcf9abb4bfa05f018d633e5be91b35a7572ead7def76cee"
bottle do
cellar :any_skip_relocation
sha256 "43d51a6bc15681fa341ae48b96381f12830889906d68ae9a2f59897584a3746e" => :catalina
sha256 "3222a847660e401e38df83a2d674b30460999991d320517972529cb8d578c5f3" => :mojave
sha256 "a96b2ea04a958d1011ff02c93f1613c2894e81805782c29b6deffb3572d28303" => :high_sierra
end
depends_on "cmake" => :build
depends_on "ninja" => :build
resource "bde-tools" do
url "https://github.com/bloomberg/bde-tools/archive/v1.1.tar.gz"
sha256 "c5d77d5e811e79f824816ee06dbf92a2a7e3eb0b6d9f27088bcac8c06d930fd5"
end
def install
buildpath.install resource("bde-tools")
ENV.cxx11
system "python", "./bin/waf", "configure", "--prefix=#{prefix}"
system "python", "./bin/waf", "build"
system "python", "./bin/waf", "install"
end
test do
# bde tests are incredibly performance intensive
# test below does a simple sanity check for linking against bsl.
(testpath/"test.cpp").write <<~EOS
#include <bsl/bsl_string.h>
#include <bsl/bslma_default.h>
int main() {
using namespace BloombergLP;
bsl::string string(bslma::Default::globalAllocator());
return 0;
}
EOS
system ENV.cxx, "-I#{include}/bsl", "test.cpp", "-L#{lib}", "-lbsl", "-o", "test"
system "./test"
end
end
| 33.479167 | 93 | 0.70504 |
38301a2085a2f09e214730083c0358c4dd1b6788 | 158 | require File.expand_path('../../../global_helper', __FILE__)
require 'minitest/autorun'
class MiniTest::Unit::TestCase
include RR::Adapters::MiniTest
end
| 19.75 | 60 | 0.746835 |
ffda68e025cd14e684b71e4f9124eb1e59c8a6ee | 4,062 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: developer.proto
require 'google/protobuf'
require 'google/protobuf/empty_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "developer.Developer" do
optional :id, :string, 1
optional :user_id, :string, 2
optional :mobile, :string, 3
optional :email, :string, 4
optional :id_card_number, :string, 5
optional :real_name, :string, 6
optional :private_key, :string, 7
optional :public_key, :string, 8
optional :access_key, :string, 9
optional :create_at, :uint64, 10
optional :update_at, :uint64, 11
optional :active, :bool, 12
optional :verified, :bool, 13
end
add_message "developer.Session" do
optional :session_id, :string, 1
optional :developer_id, :string, 2
optional :secret_key, :string, 3
optional :app_id, :string, 4
optional :crypto_type, :string, 5
end
add_message "developer.SessionRequest" do
optional :access_key, :string, 1
optional :crypto_type, :string, 2
end
add_message "developer.AppBank" do
optional :id, :string, 1
optional :developer_id, :string, 2
optional :crypto_type, :string, 3
end
add_message "developer.AppBankCurrency" do
optional :id, :string, 1
optional :bank_id, :string, 2
optional :coin_type, :string, 3
optional :balance, :string, 4
optional :main_net, :string, 5
optional :gas_rate, :double, 6
optional :gas, :uint32, 7
optional :address_balance, :string, 8
optional :address, :string, 9
optional :decimals, :uint32, 10
optional :crypto_type, :string, 11
end
add_message "developer.AllocateGas" do
optional :id, :string, 1
optional :main_net, :string, 2
optional :gas_type, :string, 3
optional :decimal, :uint32, 4
optional :session_id, :string, 5
end
add_message "developer.UnAllocateGas" do
optional :id, :string, 1
optional :main_net, :string, 2
optional :gas_type, :string, 3
optional :decimal, :uint32, 4
optional :session_id, :string, 5
end
add_message "developer.AppBankBalance" do
repeated :bank_currencies, :message, 1, "developer.AppBankCurrency"
end
add_message "developer.BankTx" do
optional :id, :string, 1
optional :bank_id, :string, 2
optional :tx_type, :string, 3
optional :app_id, :string, 4
optional :app_user_id, :string, 5
optional :coin_type, :string, 6
optional :amount, :string, 7
optional :main_net, :string, 8
optional :trade_at, :uint64, 9
optional :session_id, :string, 10
optional :currency_id, :string, 11
optional :decimals, :uint32, 12
optional :tx_status, :uint32, 13
optional :crypto_type, :string, 14
end
add_message "developer.BankAddress" do
optional :id, :string, 1
optional :address, :string, 2
optional :owner, :string, 3
optional :private_key, :string, 4
optional :main_net, :string, 5
optional :crypto_type, :string, 6
end
end
module Developer
Developer = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.Developer").msgclass
Session = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.Session").msgclass
SessionRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.SessionRequest").msgclass
AppBank = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.AppBank").msgclass
AppBankCurrency = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.AppBankCurrency").msgclass
AllocateGas = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.AllocateGas").msgclass
UnAllocateGas = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.UnAllocateGas").msgclass
AppBankBalance = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.AppBankBalance").msgclass
BankTx = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.BankTx").msgclass
BankAddress = Google::Protobuf::DescriptorPool.generated_pool.lookup("developer.BankAddress").msgclass
end
| 37.962617 | 112 | 0.724028 |
3902b1ccfce41a7e5032a102d1f6445b83a292fc | 158 | #
# Cookbook Name:: windev
# Recipe:: default
#
# Author:: Vassilis Rizopoulos (<[email protected]>)
#
# Copyright (c) 2014-2018 Zühlke, All Rights Reserved.
#
| 17.555556 | 54 | 0.689873 |
bfc1cb5a54f2bd66435791b7eaf40acfe7d3ce3c | 719 | module Dandelion
class Tree
attr_reader :commit
def initialize(repo, commit)
@repo = repo
@commit = commit
end
def is_symlink?(path)
# https://github.com/libgit2/libgit2/blob/development/include/git2/types.h
@commit.tree.path(path)[:filemode] == 0120000
end
def data(path)
submodule = @repo.submodules[path]
if submodule
# TODO
nil
else
info, obj = object(path)
blob_content(obj)
end
end
private
def object(path)
info = @commit.tree.path(path)
object = @repo.lookup(info[:oid])
[info, object]
end
def blob_content(object)
object.read_raw.data
end
end
end
| 17.536585 | 80 | 0.589708 |
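The Tree class above calls Rugged-style repository APIs (lookup, submodules, commit.tree.path), so a usage sketch under that assumption looks like this; the repository path and file names are placeholders.

# Illustrative sketch; assumes the repo object is a Rugged::Repository.
require 'rugged'

repo = Rugged::Repository.new('/path/to/site.git')
tree = Dandelion::Tree.new(repo, repo.last_commit)

tree.is_symlink?('current') # => true or false
tree.data('index.html')     # => raw blob contents at that commit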
338ed89711cbb68c5bd94f58f292397d66dcd959 | 4,701 | require 'spec_helper'
require 'invitational/services/service_helper'
describe Invitational::Invitation do
Given {no_invitations_exist}
Given(:user1) { setup_user "[email protected]" }
Given(:user2) { setup_user "[email protected]" }
Given(:user3) { setup_user "[email protected]" }
Given(:user4) { setup_user "[email protected]" }
Given(:user5) { setup_user "[email protected]" }
Given(:entity1) { setup_entity "Test entity 1"}
Given(:entity2) { setup_entity "Test entity 2"}
Given(:entity3) { setup_entity "Test entity 3"}
Given (:unclaimed) {invite_by_email user1.email, entity1, :user}
Given (:claimed) {invite_user user2, entity2, :admin}
Given (:uber_admin) {invite_uber_admin user3}
context "Initialization" do
context "Creates Claim hash and date sent on creation" do
Given(:new_invite) {Invitation.new(email: "[email protected]", invitable: entity1, role: :user)}
When {new_invite.save}
Then {new_invite.claim_hash.should_not be_nil}
And {new_invite.date_sent.should_not be_nil}
end
context "Generates Claim hash that is not predictable" do
Given(:first_invite) {Invitation.new(email: "[email protected]", invitable: entity1, role: :user)}
Given(:second_invite) {Invitation.new(email: "[email protected]", invitable: entity2, role: :user)}
When {first_invite.save && second_invite.save}
Then {first_invite.claim_hash != second_invite.claim_hash}
end
end
context "Role Title" do
context "Standard Role" do
Then {unclaimed.role_title.should == "User"}
end
context "Uber Admin" do
Then {uber_admin.role_title.should == "Uber Admin"}
end
end
context "Type" do
context "Standard Role" do
Then {unclaimed.uberadmin?.should_not be_truthy}
And {claimed.uberadmin?.should_not be_truthy}
end
context "Uber Admin" do
Then {uber_admin.uberadmin?.should be_truthy}
end
end
context "Claim Status" do
context "Unclaimed" do
Then {unclaimed.claimed?.should_not be_truthy}
And {unclaimed.unclaimed?.should be_truthy}
end
context "Claimed" do
Then {claimed.claimed?.should be_truthy}
And {claimed.unclaimed?.should_not be_truthy}
end
end
context "Claiming" do
context "By Hash" do
When (:result) {Invitation.claim unclaimed.claim_hash, user1}
Then { result.id.should == unclaimed.id}
And { user1.invitations.should include(result) }
end
context "All for a given user" do
Given {invite_by_email user4.email, entity3, :user}
When (:result) {Invitation.claim_all_for user4}
Then {user4.entities.should include(entity3)}
end
end
context "Invites Uberadmin" do
context "By email" do
When (:result) {Invitation.invite_uberadmin user4.email}
Then {result.should_not be_nil}
And {result.invitable.should be_nil}
And {result.email.should == user4.email}
And {result.role.should == :uberadmin }
And {result.unclaimed?.should be_truthy}
end
context "Existing user" do
When (:result) {Invitation.invite_uberadmin user4}
Then {result.should_not be_nil}
And {result.invitable.should be_nil}
And {result.email.should == user4.email}
And {result.role.should == :uberadmin}
And {result.claimed?.should be_truthy}
And {result.user.should == user4 }
end
context "When already invited" do
Given {invite_uber_admin user4}
When (:result) {Invitation.invite_uberadmin user4}
Then { expect(result).to have_failed(Invitational::AlreadyInvitedError) }
end
context "Invites to System Role" do
context "By email" do
When (:result) {Invitation.invite_system_user user4.email, :employer}
Then {result.should_not be_nil}
And {result.invitable.should be_nil}
And {result.email.should == user4.email}
And {result.role.should == :employer }
And {result.unclaimed?.should be_truthy}
end
context "Existing user" do
When (:result) {Invitation.invite_system_user user4, :employer}
Then {result.should_not be_nil}
And {result.invitable.should be_nil}
And {result.email.should == user4.email}
And {result.role.should == :employer}
And {result.claimed?.should be_truthy}
And {result.user.should == user4 }
end
context "When already invited" do
Given {invite_system_role user4, :employer}
When (:result) {Invitation.invite_system_user user4, :employer}
Then { expect(result).to have_failed(Invitational::AlreadyInvitedError) }
end
end
end
end
| 30.329032 | 102 | 0.66773 |
ffd085a96414410321a061aa9b9f610769dd9534 | 570 | describe Ppl::Adapter::Color::Colored do
before(:each) do
@adapter = Ppl::Adapter::Color::Colored.new
end
describe "#colorize" do
it "should colorize the string using the monkeypatched String method" do
string = "example"
expect(string).to receive(:red).and_return("red example")
expect(@adapter.colorize(string, "red")).to eq "red example"
end
it "should only attempt to colorize the string if the color exists" do
string = "example"
expect(@adapter.colorize(string, "neon")).to eq "example"
end
end
end
| 23.75 | 76 | 0.668421 |
91ba288c8a917a1e3e719835dc126f0e547e71a9 | 356 | module AllegroApi
module ResponseHelpers
def process_items_response(response, item_klass)
return [] unless response && response[:item]
if response[:item].is_a? Array
response[:item].map do |data|
item_klass.from_api(data)
end
else
[item_klass.from_api(response[:item])]
end
end
end
end
| 23.733333 | 52 | 0.63764 |
61388938334b1461a0e70ee986d615d91e8d11e5 | 4,703 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/servicedirectory/v1/lookup_service_pb"
require "google/cloud/servicedirectory/v1/lookup_service_services_pb"
require "google/cloud/service_directory/v1/lookup_service"
class ::Google::Cloud::ServiceDirectory::V1::LookupService::ClientTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_resolve_service
# Create GRPC objects.
grpc_response = ::Google::Cloud::ServiceDirectory::V1::ResolveServiceResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
max_endpoints = 42
endpoint_filter = "hello world"
resolve_service_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :resolve_service, name
assert_kind_of ::Google::Cloud::ServiceDirectory::V1::ResolveServiceRequest, request
assert_equal "hello world", request.name
assert_equal 42, request.max_endpoints
assert_equal "hello world", request.endpoint_filter
refute_nil options
end
Gapic::ServiceStub.stub :new, resolve_service_client_stub do
# Create client
client = ::Google::Cloud::ServiceDirectory::V1::LookupService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.resolve_service({ name: name, max_endpoints: max_endpoints, endpoint_filter: endpoint_filter }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.resolve_service name: name, max_endpoints: max_endpoints, endpoint_filter: endpoint_filter do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.resolve_service ::Google::Cloud::ServiceDirectory::V1::ResolveServiceRequest.new(name: name, max_endpoints: max_endpoints, endpoint_filter: endpoint_filter) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.resolve_service({ name: name, max_endpoints: max_endpoints, endpoint_filter: endpoint_filter }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.resolve_service ::Google::Cloud::ServiceDirectory::V1::ResolveServiceRequest.new(name: name, max_endpoints: max_endpoints, endpoint_filter: endpoint_filter), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, resolve_service_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::ServiceDirectory::V1::LookupService::Client.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::ServiceDirectory::V1::LookupService::Client::Configuration, config
end
end
| 36.176923 | 208 | 0.72996 |
2612c6fa43e63cf6e979e3c401a1556381712232 | 611 | # encoding: utf-8
module Adhearsion
class CallController
module MenuDSL
class StringMatchCalculator < MatchCalculator
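# Exact match when the query equals the pattern; potential match when the pattern merely begins with the query entered so far.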
def match(query)
args = { :query => query, :exact_matches => nil, :potential_matches => nil }
pattern_string = pattern.to_s
query_string = query.to_s
if pattern_string == query_string
args[:exact_matches] = [pattern]
elsif pattern_string.start_with? query_string
args[:potential_matches] = [pattern]
end
new_calculated_match args
end
end
end
end
end
| 23.5 | 86 | 0.610475 |
5d3a6e3f5bcb0ef5dcf6c868b551af7c7c5737c2 | 1,153 | require 'test_helper'
class IntroductionsControllerTest < ActionDispatch::IntegrationTest
setup do
@introduction = introductions(:one)
end
test "should get index" do
get introductions_url
assert_response :success
end
test "should get new" do
get new_introduction_url
assert_response :success
end
test "should create introduction" do
assert_difference('Introduction.count') do
post introductions_url, params: { introduction: { } }
end
assert_redirected_to introduction_url(Introduction.last)
end
test "should show introduction" do
get introduction_url(@introduction)
assert_response :success
end
test "should get edit" do
get edit_introduction_url(@introduction)
assert_response :success
end
test "should update introduction" do
patch introduction_url(@introduction), params: { introduction: { } }
assert_redirected_to introduction_url(@introduction)
end
test "should destroy introduction" do
assert_difference('Introduction.count', -1) do
delete introduction_url(@introduction)
end
assert_redirected_to introductions_url
end
end
| 23.530612 | 73 | 0.741544 |
1d8433b254ddecbda1782368731a7b2ca6d4d6af | 2,581 | require 'daemons/exceptions'
module Daemons
require 'daemons/daemonize'
class Monitor
def self.find(dir, app_name)
pid = PidFile.find_files(dir, app_name, false)[0]
if pid
pid = PidFile.existing(pid)
unless PidFile.running?(pid.pid)
begin; pid.cleanup; rescue ::Exception; end
return
end
monitor = allocate
monitor.instance_variable_set(:@pid, pid)
return monitor
end
nil
end
def initialize(an_app)
@app = an_app
@app_name = an_app.group.app_name + '_monitor'
if an_app.pidfile_dir
@pid = PidFile.new(an_app.pidfile_dir, @app_name, false)
else
@pid = PidMem.new
end
end
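# Poll the group's applications, zapping and restarting any that are no longer running; sleep between passes to keep the monitor cheap.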
def watch(application_group)
sleep(5)
loop do
application_group.applications.each do |a|
unless a.running?
a.zap!
sleep(1)
Process.detach(fork { a.start(restart = true) })
sleep(5)
end
end
sleep(30)
end
end
private :watch
def start_with_pidfile(application_group)
fork do
Daemonize.daemonize(nil, @app_name)
begin
@pid.pid = Process.pid
watch(application_group)
rescue ::Exception => e
begin
File.open(@app.logfile, 'a') do |f|
f.puts Time.now
f.puts e
f.puts e.backtrace.inspect
end
ensure
begin; @pid.cleanup; rescue ::Exception; end
exit!
end
end
end
end
private :start_with_pidfile
def start_without_pidfile(application_group)
Thread.new { watch(application_group) }
end
private :start_without_pidfile
def start(application_group)
return if application_group.applications.empty?
if @pid.kind_of?(PidFile)
start_with_pidfile(application_group)
else
start_without_pidfile(application_group)
end
end
def stop
begin
pid = @pid.pid
Process.kill(Application::SIGNAL, pid)
Timeout.timeout(5, TimeoutError) do
while Pid.running?(pid)
sleep(0.1)
end
end
rescue ::Exception => e
puts "exception while trying to stop monitor process #{pid}: #{e}"
puts 'deleting pid-file.'
end
# We try to remove the pid-files ourselves, in case the monitor
# didn't clean them up.
begin; @pid.zap; rescue ::Exception; end
end
end
end
| 21.872881 | 74 | 0.567997 |
262ba5606c85c8b6434bd8de72d2f6e728787f55 | 101 | # desc "Explaining what the task does"
# task :administrate_field_aasm do
# # Task goes here
# end
| 20.2 | 38 | 0.722772 |
f7b2d5a8389709669c198fc15d2466943b030767 | 1,589 | # frozen_string_literal: true
module Stupidedi
using Refinements
module Values
#
# @see X222.pdf B.1.1.3.12.2 Data Segment Groups
# @see X222.pdf B.1.1.3.12.4 Loops of Data Segments
#
class LoopVal < AbstractVal
include SegmentValGroup
# @return [LoopDef]
attr_reader :definition
# @return [Array<SegmentVal, LoopVal>]
attr_reader :children
def_delegators "@children.head", :position
def initialize(definition, children)
@definition, @children =
definition, children
end
# @return [LoopVal]
def copy(changes = {})
LoopVal.new \
changes.fetch(:definition, @definition),
changes.fetch(:children, @children)
end
# (see AbstractVal#loop?)
# @return true
def loop?
true
end
# @return [void]
def pretty_print(q)
id = @definition.try do |d|
ansi.bold("[#{d.id.to_s}]")
end
q.text(ansi.loop("LoopVal#{id}"))
q.group(2, "(", ")") do
q.breakable ""
@children.each do |e|
unless q.current_group.first?
q.text ","
q.breakable
end
q.pp e
end
end
end
# @return [String]
def inspect
ansi.loop("Loop") + "(#{@children.map(&:inspect).join(', ')})"
end
# @return [Boolean]
def ==(other)
eql?(other) or
(other.definition == @definition and
other.children == @children)
end
end
end
end
| 21.186667 | 70 | 0.525488 |
6ae9956ade5f61ea16aa2318a2dbd9614bcb2812 | 2,462 | class Todo < ActiveRecord::Base
include Sortable
ASSIGNED = 1
MENTIONED = 2
BUILD_FAILED = 3
MARKED = 4
APPROVAL_REQUIRED = 5 # This is an EE-only feature
ACTION_NAMES = {
ASSIGNED => :assigned,
MENTIONED => :mentioned,
BUILD_FAILED => :build_failed,
MARKED => :marked,
APPROVAL_REQUIRED => :approval_required
}
belongs_to :author, class_name: "User"
belongs_to :note
belongs_to :project
belongs_to :target, polymorphic: true, touch: true
belongs_to :user
delegate :name, :email, to: :author, prefix: true, allow_nil: true
validates :action, :project, :target_type, :user, presence: true
validates :target_id, presence: true, unless: :for_commit?
validates :commit_id, presence: true, if: :for_commit?
default_scope { reorder(id: :desc) }
scope :pending, -> { with_state(:pending) }
scope :done, -> { with_state(:done) }
state_machine :state, initial: :pending do
event :done do
transition [:pending] => :done
end
state :pending
state :done
end
after_save :keep_around_commit
class << self
def sort(method)
method == "priority" ? order_by_labels_priority : order_by(method)
end
# Order by priority depending on which issue/merge request the Todo belongs to.
# Todos with the highest priority come first, then the oldest todos.
# We need to order by created_at last because of differences between MySQL and Postgres when joining by type "MergeRequest"/"Issue".
def order_by_labels_priority
highest_priority = highest_label_priority(["Issue", "MergeRequest"], "todos.target_id").to_sql
select("#{table_name}.*, (#{highest_priority}) AS highest_priority").
order(Gitlab::Database.nulls_last_order('highest_priority', 'ASC')).
order('todos.created_at')
end
end
def build_failed?
action == BUILD_FAILED
end
def action_name
ACTION_NAMES[action]
end
def body
if note.present?
note.note
else
target.title
end
end
def for_commit?
target_type == "Commit"
end
# override to return commits, which are not active record
def target
if for_commit?
project.commit(commit_id) rescue nil
else
super
end
end
def target_reference
if for_commit?
target.short_id
else
target.to_reference
end
end
private
def keep_around_commit
project.repository.keep_around(self.commit_id)
end
end
| 23.226415 | 126 | 0.675873 |
ed0338b64040b45ae96bad5a6679b08ebf8a2b4e | 660 | module SoberSwag
module Reporting
##
# Namespace modules for the various "reporters," or things that provide error handling.
module Report
autoload :Base, 'sober_swag/reporting/report/base'
autoload :Either, 'sober_swag/reporting/report/either'
autoload :Error, 'sober_swag/reporting/report/error'
autoload :Object, 'sober_swag/reporting/report/object'
autoload :Output, 'sober_swag/reporting/report/output'
autoload :MergedObject, 'sober_swag/reporting/report/merged_object'
autoload :Value, 'sober_swag/reporting/report/value'
autoload :List, 'sober_swag/reporting/report/list'
end
end
end
| 38.823529 | 91 | 0.730303 |
f869afca3101d4f30133a6dd89f7b7ae2c5a76a7 | 916 | module SecureHeaders
describe XDownloadOptions do
specify { expect(XDownloadOptions.new.name).to eq(XDO_HEADER_NAME)}
specify { expect(XDownloadOptions.new.value).to eq("noopen")}
specify { expect(XDownloadOptions.new('noopen').value).to eq('noopen')}
specify { expect(XDownloadOptions.new(:value => 'noopen').value).to eq('noopen') }
context "invalid configuration values" do
it "accepts noopen" do
expect {
XDownloadOptions.new("noopen")
}.not_to raise_error
expect {
XDownloadOptions.new(:value => "noopen")
}.not_to raise_error
end
it "accepts nil" do
expect {
XDownloadOptions.new
}.not_to raise_error
end
it "doesn't accept anything besides noopen" do
expect {
XDownloadOptions.new("open")
}.to raise_error(XDOBuildError)
end
end
end
end
| 27.757576 | 86 | 0.629913 |
f83eb87beaecce214f6b23bea43dc5ee8c7a411c | 355 | # frozen_string_literal: true
#
# Namespace for the Socializer engine
#
module Socializer
module Circles
#
# Contacts controller
#
class ContactsController < ApplicationController
before_action :authenticate_user
# GET /circles/contacts
def index
@contacts = current_user.contacts
end
end
end
end
| 16.904762 | 52 | 0.684507 |
91049f825fc021e27ed62dfe0ffa9620f9289707 | 1,671 | # -*- encoding: utf-8 -*-
# stub: html-pipeline 2.7.1 ruby lib
Gem::Specification.new do |s|
s.name = "html-pipeline".freeze
s.version = "2.7.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Ryan Tomayko".freeze, "Jerry Cheung".freeze]
s.date = "2017-09-21"
s.description = "GitHub HTML processing filters and utilities".freeze
s.email = ["[email protected]".freeze, "[email protected]".freeze]
s.homepage = "https://github.com/jch/html-pipeline".freeze
s.licenses = ["MIT".freeze]
s.post_install_message = "-------------------------------------------------\nThank you for installing html-pipeline!\nYou must bundle Filter gem dependencies.\nSee html-pipeline README.md for more details.\nhttps://github.com/jch/html-pipeline#dependencies\n-------------------------------------------------\n".freeze
s.rubygems_version = "2.7.3".freeze
s.summary = "Helpers for processing content through a chain of filters".freeze
s.installed_by_version = "2.7.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<nokogiri>.freeze, [">= 1.4"])
s.add_runtime_dependency(%q<activesupport>.freeze, [">= 2"])
else
s.add_dependency(%q<nokogiri>.freeze, [">= 1.4"])
s.add_dependency(%q<activesupport>.freeze, [">= 2"])
end
else
s.add_dependency(%q<nokogiri>.freeze, [">= 1.4"])
s.add_dependency(%q<activesupport>.freeze, [">= 2"])
end
end
| 45.162162 | 319 | 0.652902 |
61d003cf617b5e5b3ddfdc03189ae556c22624e9 | 3,080 | class CreateTeamReposJob < CourseJob
@job_name = "Create Team Repositories"
@job_short_name = "create_team_repos"
@job_description = "Creates a repository named with the provided name pattern for each team matching a specified team name pattern"
def attempt_job(options)
@visibility = options[:visibility].upcase
@permission_level = options[:permission_level].downcase
@org_id = get_org_node_id
matching_teams = OrgTeam.where(course_id: @course.id).where("name ~* ?", options[:team_pattern])
repos_created = 0
repos_permissions_updated = 0
matching_teams.each do |team|
repo_name = options[:repo_pattern].sub("{team}", team.slug)
repos_created += create_team_repo(repo_name, team)
repos_permissions_updated += update_permissions_team_repo(repo_name, team)
end
"#{pluralize repos_created, "repository"} created and permissions updated for #{pluralize repos_permissions_updated, "repository"} with team permission level #{@permission_level}."
end
def create_team_repo(repo_name, team)
begin
response = github_machine_user.post '/graphql', { query: create_team_repo_query(repo_name, team.team_id) }.to_json
if !response.respond_to?(:data) || response.respond_to?(:errors)
return 0
end
new_repo_full_name = get_repo_name_and_create_record(response, team.id)
rescue Exception => e
puts "CREATION ERROR with #{repo_name} for team #{team} #{e}"
return 0
end
1
end
def update_permissions_team_repo(repo_name, team)
begin
github_machine_user.put("/orgs/#{@course.course_organization}/teams/#{team.slug}/repos/#{@course.course_organization}/#{repo_name}", {"permission": "#{@permission_level}"})
rescue Exception => e
puts "PERMISSION ERROR with #{repo_name} for team #{team} #{e}"
end
1
end
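# GraphQL mutation that creates a repository owned by the organization (ownerId) and attached to the given team (teamId).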
def create_team_repo_query(repo_name, team_id)
<<-GRAPHQL
mutation {
createRepository(input:{
visibility: #{@visibility}
ownerId:"#{@org_id}"
name:"#{repo_name}"
teamId:"#{team_id}"
}) {
repository {
name
url
nameWithOwner
databaseId
}
}
}
GRAPHQL
end
def get_repo_name_and_create_record(response, team_record_id)
repoInfo = response.data.createRepository.repository
new_repo = GithubRepo.create(name: repoInfo.name, url: repoInfo.url, full_name: repoInfo.nameWithOwner,
course_id: @course.id, visibility: @visibility.downcase, repo_id: repoInfo.databaseId)
RepoTeamContributor.create(org_team_id: team_record_id, github_repo_id: new_repo.id, permission_level: @permission_level)
new_repo.full_name
end
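# Fetches the GraphQL node id of the course organization, used as ownerId when creating repositories.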
def get_org_node_id
response = github_machine_user.post '/graphql', { query: org_node_id_query }.to_json
response.data.organization.id
end
def org_node_id_query
<<-GRAPHQL
query {
organization(login:"#{@course.course_organization}") {
id
}
}
GRAPHQL
end
end | 34.606742 | 184 | 0.681494 |
5d2134a0543595a33262fb0cccdd7713f1568edf | 5,622 | require 'spec_helper'
require 'yaml'
require 'mspec/commands/mspec'
describe MSpecMain, "#options" do
before :each do
@options, @config = new_option
MSpecOptions.stub(:new).and_return(@options)
@script = MSpecMain.new
@script.stub(:config).and_return(@config)
@script.stub(:load)
end
it "enables the configure option" do
@options.should_receive(:configure)
@script.options
end
it "provides a custom action (block) to the config option" do
@script.options ["-B", "config"]
@config[:options].should include("-B", "config")
end
it "loads the file specified by the config option" do
@script.should_receive(:load).with("config")
@script.options ["-B", "config"]
end
it "enables the target options" do
@options.should_receive(:targets)
@script.options
end
it "sets config[:options] to all argv entries that are not registered options" do
@options.on "-X", "--exclude", "ARG", "description"
@script.options [".", "-G", "fail", "-X", "ARG", "--list", "unstable", "some/file.rb"]
@config[:options].should == [".", "-G", "fail", "--list", "unstable", "some/file.rb"]
end
it "calls #custom_options" do
@script.should_receive(:custom_options).with(@options)
@script.options
end
end
describe MSpecMain, "#run" do
before :each do
@options, @config = new_option
MSpecOptions.stub(:new).and_return(@options)
@script = MSpecMain.new
@script.stub(:config).and_return(@config)
@script.stub(:exec)
@err = $stderr
$stderr = IOStub.new
end
after :each do
$stderr = @err
end
it "uses exec to invoke the runner script" do
@script.should_receive(:exec).with("ruby", "#{MSPEC_HOME}/bin/mspec-run")
@script.options []
@script.run
end
it "shows the command line on stderr" do
@script.should_receive(:exec).with("ruby", "#{MSPEC_HOME}/bin/mspec-run")
@script.options []
@script.run
$stderr.to_s.should == "$ ruby #{Dir.pwd}/bin/mspec-run\n"
end
it "adds config[:launch] to the exec options" do
@script.should_receive(:exec).with("ruby",
"-Xlaunch.option", "#{MSPEC_HOME}/bin/mspec-run")
@config[:launch] << "-Xlaunch.option"
@script.options []
@script.run
$stderr.to_s.should == "$ ruby -Xlaunch.option #{Dir.pwd}/bin/mspec-run\n"
end
it "calls #multi_exec if the command is 'ci' and the multi option is passed" do
@script.should_receive(:multi_exec).and_return do |argv|
argv.should == ["ruby", "#{MSPEC_HOME}/bin/mspec-ci"]
end
@script.options ["ci", "-j"]
lambda do
@script.run
end.should raise_error(SystemExit)
end
end
describe "The --warnings option" do
before :each do
@options, @config = new_option
MSpecOptions.stub(:new).and_return(@options)
@script = MSpecMain.new
@script.stub(:config).and_return(@config)
end
it "is enabled by #options" do
@options.stub(:on)
@options.should_receive(:on).with("--warnings", an_instance_of(String))
@script.options
end
it "sets flags to -w" do
@config[:flags] = []
@script.options ["--warnings"]
@config[:flags].should include("-w")
end
it "set OUTPUT_WARNINGS = '1' in the environment" do
ENV['OUTPUT_WARNINGS'] = '0'
@script.options ["--warnings"]
ENV['OUTPUT_WARNINGS'].should == '1'
end
end
describe "The -j, --multi option" do
before :each do
@options, @config = new_option
MSpecOptions.stub(:new).and_return(@options)
@script = MSpecMain.new
@script.stub(:config).and_return(@config)
end
it "is enabled by #options" do
@options.stub(:on)
@options.should_receive(:on).with("-j", "--multi", an_instance_of(String))
@script.options
end
it "sets the multiple process option" do
["-j", "--multi"].each do |opt|
@config[:multi] = nil
@script.options [opt]
@config[:multi].should == true
end
end
end
describe "The -h, --help option" do
before :each do
@options, @config = new_option
MSpecOptions.stub(:new).and_return(@options)
@script = MSpecMain.new
@script.stub(:config).and_return(@config)
end
it "is enabled by #options" do
@options.stub(:on)
@options.should_receive(:on).with("-h", "--help", an_instance_of(String))
@script.options
end
it "passes the option to the subscript" do
["-h", "--help"].each do |opt|
@config[:options] = []
@script.options ["ci", opt]
@config[:options].sort.should == ["-h"]
end
end
it "prints help and exits" do
@script.should_receive(:puts).twice
@script.should_receive(:exit).twice
["-h", "--help"].each do |opt|
@script.options [opt]
end
end
end
describe "The -v, --version option" do
before :each do
@options, @config = new_option
MSpecOptions.stub(:new).and_return(@options)
@script = MSpecMain.new
@script.stub(:config).and_return(@config)
end
it "is enabled by #options" do
@options.stub(:on)
@options.should_receive(:on).with("-v", "--version", an_instance_of(String))
@script.options
end
it "passes the option to the subscripts" do
["-v", "--version"].each do |opt|
@config[:options] = []
@script.options ["ci", opt]
@config[:options].sort.should == ["-v"]
end
end
it "prints the version and exits if no subscript is invoked" do
@config[:command] = nil
File.stub(:basename).and_return("mspec")
@script.should_receive(:puts).twice.with("mspec #{MSpec::VERSION}")
@script.should_receive(:exit).twice
["-v", "--version"].each do |opt|
@script.options [opt]
end
end
end
| 27.028846 | 90 | 0.638741 |
18f29856caf611d3185eb8d8320c16ac250a358a | 112 | # frozen_string_literal: true
Rails.application.config.session_store :active_record_store, key: 'current_user'
| 28 | 80 | 0.839286 |
611e58309526981dc9960d4feca675af59caa7d0 | 3,331 | require 'spec_helper'
describe HighVoltage::PagesController do
render_views
context 'using default configuration' do
describe 'on GET to /pages/exists' do
before { get :show, :id => 'exists' }
it 'responds with success and render template' do
expect(response).to be_success
expect(response).to render_template('exists')
end
it 'uses the default layout used by ApplicationController' do
expect(response).to render_template('layouts/application')
end
end
describe 'on GET to /pages/dir/nested' do
before { get :show, :id => 'dir/nested' }
it 'responds with success and render template' do
expect(response).to be_success
expect(response).to render_template('pages/dir/nested')
end
end
it 'raises a routing error for an invalid page' do
expect { get :show, id: 'invalid' }
.to raise_error(ActionController::RoutingError)
end
it 'raises a routing error for a page in another directory' do
expect { get :show, id: '../other/wrong' }
.to raise_error(ActionController::RoutingError)
end
it 'raises a missing template error for valid page with invalid partial' do
expect { get :show, id: 'exists_but_references_nonexistent_partial' }
.to raise_error(ActionView::MissingTemplate)
end
end
context 'using custom layout' do
before(:each) do
HighVoltage.layout = 'alternate'
end
describe 'on GET to /pages/exists' do
before { get :show, :id => 'exists' }
it 'uses the custom configured layout' do
expect(response).not_to render_template('layouts/application')
expect(response).to render_template('layouts/alternate')
end
end
end
context 'using custom content path' do
before(:each) do
HighVoltage.content_path = 'other_pages/'
Rails.application.reload_routes!
end
describe 'on GET to /other_pages/also_exists' do
before { get :show, :id => 'also_exists' }
it 'responds with success and render template' do
expect(response).to be_success
expect(response).to render_template('other_pages/also_exists')
end
end
describe 'on GET to /other_pages/also_dir/nested' do
before { get :show, :id => 'also_dir/also_nested' }
it 'responds with success and render template' do
expect(response).to be_success
expect(response).to render_template('other_pages/also_dir/also_nested')
end
end
it 'raises a routing error for an invalid page' do
expect { get :show, id: 'also_invalid' }
.to raise_error(ActionController::RoutingError)
end
context 'page in another directory' do
it 'raises a routing error' do
expect { get :show, id: '../other_wrong' }
.to raise_error(ActionController::RoutingError)
end
it 'raises a routing error when using a Unicode exploit' do
expect { get :show, id: '/\\../other/wrong' }
.to raise_error(ActionController::RoutingError)
end
end
it 'raises a missing template error for valid page with invalid partial' do
id = 'also_exists_but_references_nonexistent_partial'
expect { get :show, id: id }
.to raise_error(ActionView::MissingTemplate)
end
end
end
| 30.559633 | 79 | 0.663164 |
b90db3125412ddef2cf20f4f2df677cd4390824c | 1,459 | require 'builder'
require 'thread'
require 'fakes3/s3_object'
require 'fakes3/sorted_object_list'
module FakeS3
class Bucket
attr_accessor :name,:creation_date,:objects
def initialize(name,creation_date,objects)
@name = name
@creation_date = creation_date
@objects = SortedObjectList.new
objects.each do |obj|
@objects.add(obj)
end
@mutex = Mutex.new
end
def find(object_name)
@mutex.synchronize do
@objects.find(object_name)
end
end
def add(object)
# Unfortunately we have to synchronize here since our SortedObjectList
# is not thread safe. We could probably get finer granularity if
# performance becomes important.
@mutex.synchronize do
@objects.add(object)
end
end
def remove(object)
@mutex.synchronize do
@objects.remove(object)
end
end
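# Builds a BucketQuery for an S3-style listing, applying the marker/prefix/delimiter/max-keys options and recording whether the match set was truncated.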
def query_for_range(options)
marker = options[:marker]
prefix = options[:prefix]
max_keys = options[:max_keys] || 1000
delimiter = options[:delimiter]
match_set = nil
@mutex.synchronize do
match_set = @objects.list(options)
end
bq = BucketQuery.new
bq.bucket = self
bq.marker = marker
bq.prefix = prefix
bq.max_keys = max_keys
bq.delimiter = delimiter
bq.matches = match_set.matches
bq.is_truncated = match_set.is_truncated
return bq
end
end
end
| 22.446154 | 77 | 0.639479 |
abe522e430543ee14d8a4945fa471b5f1215f99e | 1,827 | ValidationServer::Application.routes.draw do
post "receipts/validate"
# The priority is based upon order of creation:
# first created -> highest priority.
# Sample of regular route:
# match 'products/:id' => 'catalog#view'
# Keep in mind you can assign values other than :controller and :action
# Sample of named route:
# match 'products/:id/purchase' => 'catalog#purchase', :as => :purchase
# This route can be invoked with purchase_url(:id => product.id)
# Sample resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Sample resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Sample resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Sample resource route with more complex sub-resources
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', :on => :collection
# end
# end
# Sample resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
# You can have the root of your site routed with "root"
# just remember to delete public/index.html.
# root :to => 'welcome#index'
# See how all your routes lay out with "rake routes"
# This is a legacy wild controller route that's not recommended for RESTful applications.
# Note: This route will make all actions in every controller accessible via GET requests.
# match ':controller(/:action(/:id))(.:format)'
end
| 29.95082 | 91 | 0.660646 |
18a6fa02a4e0f58d400df807190cbf2b70d1aa86 | 119 | module Xcodeproj
# The version of the xcodeproj gem.
#
VERSION = '0.24.3' unless defined? Xcodeproj::VERSION
end
| 19.833333 | 55 | 0.714286 |
33eb5a37227bcd8ff675eee51ec26c961a32e576 | 235 | describe "Range#end" do
it "end returns the last element of self" do
(-1..1).end.should == 1
(0..1).end.should == 1
("A".."Q").end.should == "Q"
("A"..."Q").end.should == "Q"
(0.5..2.4).end.should == 2.4
end
end | 26.111111 | 46 | 0.514894 |
6250a7ac5687557993a378a17663cb4d025af67d | 2,692 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.nic.af/status_registered.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/record/parser/whois.nic.af.rb'
describe Whois::Record::Parser::WhoisNicAf, "status_registered.expected" do
subject do
file = fixture("responses", "whois.nic.af/status_registered.txt")
part = Whois::Record::Part.new(body: File.read(file))
described_class.new(part)
end
describe "#domain" do
it do
expect(subject.domain).to eq("google.af")
end
end
describe "#domain_id" do
it do
expect(subject.domain_id).to eq("345679-CoCCA")
end
end
describe "#status" do
it do
expect(subject.status).to eq(:registered)
end
end
describe "#available?" do
it do
expect(subject.available?).to eq(false)
end
end
describe "#registered?" do
it do
expect(subject.registered?).to eq(true)
end
end
describe "#created_on" do
it do
expect(subject.created_on).to be_a(Time)
expect(subject.created_on).to eq(Time.parse("2009-10-05 03:51:17 UTC"))
end
end
describe "#updated_on" do
it do
expect(subject.updated_on).to be_a(Time)
expect(subject.updated_on).to eq(Time.parse("2014-01-16 06:50:48 UTC"))
end
end
describe "#expires_on" do
it do
expect(subject.expires_on).to be_a(Time)
expect(subject.expires_on).to eq(Time.parse("2014-05-05 03:51:17 UTC"))
end
end
describe "#registrar" do
it do
expect(subject.registrar).to be_a(Whois::Record::Registrar)
expect(subject.registrar.id).to eq(nil)
expect(subject.registrar.name).to eq("MarkMonitor")
expect(subject.registrar.organization).to eq(nil)
expect(subject.registrar.url).to eq("http://www.markmonitor.com")
end
end
describe "#nameservers" do
it do
expect(subject.nameservers).to be_a(Array)
expect(subject.nameservers).to have(4).items
expect(subject.nameservers[0]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[0].name).to eq("ns1.google.com")
expect(subject.nameservers[1]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[1].name).to eq("ns2.google.com")
expect(subject.nameservers[2]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[2].name).to eq("ns3.google.com")
expect(subject.nameservers[3]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[3].name).to eq("ns4.google.com")
end
end
end
| 29.582418 | 77 | 0.680535 |
6281712df602be523d8255a129a256f8cc133df5 | 22,697 | # frozen_string_literal: true
require "cases/helper"
require "models/post"
require "models/comment"
require "models/developer"
require "models/project"
require "models/computer"
require "models/vehicle"
require "models/cat"
require "concurrent/atomic/cyclic_barrier"
class DefaultScopingTest < ActiveRecord::TestCase
fixtures :developers, :posts, :comments
def test_default_scope
expected = Developer.all.merge!(order: "salary DESC").to_a.collect(&:salary)
received = DeveloperOrderedBySalary.all.collect(&:salary)
assert_equal expected, received
end
def test_default_scope_as_class_method
assert_equal [developers(:david).becomes(ClassMethodDeveloperCalledDavid)], ClassMethodDeveloperCalledDavid.all
end
def test_default_scope_as_class_method_referencing_scope
assert_equal [developers(:david).becomes(ClassMethodReferencingScopeDeveloperCalledDavid)], ClassMethodReferencingScopeDeveloperCalledDavid.all
end
def test_default_scope_as_block_referencing_scope
assert_equal [developers(:david).becomes(LazyBlockReferencingScopeDeveloperCalledDavid)], LazyBlockReferencingScopeDeveloperCalledDavid.all
end
def test_default_scope_with_lambda
assert_equal [developers(:david).becomes(LazyLambdaDeveloperCalledDavid)], LazyLambdaDeveloperCalledDavid.all
end
def test_default_scope_with_block
assert_equal [developers(:david).becomes(LazyBlockDeveloperCalledDavid)], LazyBlockDeveloperCalledDavid.all
end
def test_default_scope_with_callable
assert_equal [developers(:david).becomes(CallableDeveloperCalledDavid)], CallableDeveloperCalledDavid.all
end
def test_default_scope_is_unscoped_on_find
assert_equal 1, DeveloperCalledDavid.count
assert_equal 11, DeveloperCalledDavid.unscoped.count
end
def test_default_scope_is_unscoped_on_create
assert_nil DeveloperCalledJamis.unscoped.create!.name
end
def test_default_scope_with_conditions_string
assert_equal Developer.where(name: "David").map(&:id).sort, DeveloperCalledDavid.all.map(&:id).sort
assert_nil DeveloperCalledDavid.create!.name
end
def test_default_scope_with_conditions_hash
assert_equal Developer.where(name: "Jamis").map(&:id).sort, DeveloperCalledJamis.all.map(&:id).sort
assert_equal "Jamis", DeveloperCalledJamis.create!.name
end
def test_default_scope_with_inheritance
wheres = InheritedPoorDeveloperCalledJamis.all.where_values_hash
assert_equal "Jamis", wheres["name"]
assert_equal 50000, wheres["salary"]
end
def test_default_scope_with_module_includes
wheres = ModuleIncludedPoorDeveloperCalledJamis.all.where_values_hash
assert_equal "Jamis", wheres["name"]
assert_equal 50000, wheres["salary"]
end
def test_default_scope_with_multiple_calls
wheres = MultiplePoorDeveloperCalledJamis.all.where_values_hash
assert_equal "Jamis", wheres["name"]
assert_equal 50000, wheres["salary"]
end
def test_scope_overwrites_default
expected = Developer.all.merge!(order: "salary DESC, name DESC").to_a.collect(&:name)
received = DeveloperOrderedBySalary.by_name.to_a.collect(&:name)
assert_equal expected, received
end
def test_reorder_overrides_default_scope_order
expected = Developer.order("name DESC").collect(&:name)
received = DeveloperOrderedBySalary.reorder("name DESC").collect(&:name)
assert_equal expected, received
end
def test_order_after_reorder_combines_orders
expected = Developer.order("name DESC, id DESC").collect { |dev| [dev.name, dev.id] }
received = Developer.order("name ASC").reorder("name DESC").order("id DESC").collect { |dev| [dev.name, dev.id] }
assert_equal expected, received
end
def test_unscope_overrides_default_scope
expected = Developer.all.collect { |dev| [dev.name, dev.id] }
received = DeveloperCalledJamis.unscope(:where).collect { |dev| [dev.name, dev.id] }
assert_equal expected, received
end
def test_unscope_after_reordering_and_combining
expected = Developer.order("id DESC, name DESC").collect { |dev| [dev.name, dev.id] }
received = DeveloperOrderedBySalary.reorder("name DESC").unscope(:order).order("id DESC, name DESC").collect { |dev| [dev.name, dev.id] }
assert_equal expected, received
expected_2 = Developer.all.collect { |dev| [dev.name, dev.id] }
received_2 = Developer.order("id DESC, name DESC").unscope(:order).collect { |dev| [dev.name, dev.id] }
assert_equal expected_2, received_2
expected_3 = Developer.all.collect { |dev| [dev.name, dev.id] }
received_3 = Developer.reorder("name DESC").unscope(:order).collect { |dev| [dev.name, dev.id] }
assert_equal expected_3, received_3
end
def test_unscope_with_where_attributes
expected = Developer.order("salary DESC").collect(&:name)
received = DeveloperOrderedBySalary.where(name: "David").unscope(where: :name).collect(&:name)
assert_equal expected.sort, received.sort
expected_2 = Developer.order("salary DESC").collect(&:name)
received_2 = DeveloperOrderedBySalary.select("id").where("name" => "Jamis").unscope({ where: :name }, :select).collect(&:name)
assert_equal expected_2.sort, received_2.sort
expected_3 = Developer.order("salary DESC").collect(&:name)
received_3 = DeveloperOrderedBySalary.select("id").where("name" => "Jamis").unscope(:select, :where).collect(&:name)
assert_equal expected_3.sort, received_3.sort
expected_4 = Developer.order("salary DESC").collect(&:name)
received_4 = DeveloperOrderedBySalary.where.not("name" => "Jamis").unscope(where: :name).collect(&:name)
assert_equal expected_4.sort, received_4.sort
expected_5 = Developer.order("salary DESC").collect(&:name)
received_5 = DeveloperOrderedBySalary.where.not("name" => ["Jamis", "David"]).unscope(where: :name).collect(&:name)
assert_equal expected_5.sort, received_5.sort
expected_6 = Developer.order("salary DESC").collect(&:name)
received_6 = DeveloperOrderedBySalary.where(Developer.arel_table["name"].eq("David")).unscope(where: :name).collect(&:name)
assert_equal expected_6.sort, received_6.sort
expected_7 = Developer.order("salary DESC").collect(&:name)
received_7 = DeveloperOrderedBySalary.where(Developer.arel_table[:name].eq("David")).unscope(where: :name).collect(&:name)
assert_equal expected_7.sort, received_7.sort
end
def test_unscope_comparison_where_clauses
# unscoped for WHERE (`developers`.`id` <= 2)
expected = Developer.order("salary DESC").collect(&:name)
received = DeveloperOrderedBySalary.where(id: -Float::INFINITY..2).unscope(where: :id).collect { |dev| dev.name }
assert_equal expected.sort, received.sort
# unscoped for WHERE (`developers`.`id` < 2)
expected = Developer.order("salary DESC").collect(&:name)
received = DeveloperOrderedBySalary.where(id: -Float::INFINITY...2).unscope(where: :id).collect { |dev| dev.name }
assert_equal expected.sort, received.sort
end
def test_unscope_multiple_where_clauses
expected = Developer.order("salary DESC").collect(&:name)
received = DeveloperOrderedBySalary.where(name: "Jamis").where(id: 1).unscope(where: [:name, :id]).collect(&:name)
assert_equal expected.sort, received.sort
end
def test_unscope_string_where_clauses_involved
dev_relation = Developer.order("salary DESC").where("created_at > ?", 1.year.ago)
expected = dev_relation.collect(&:name)
dev_ordered_relation = DeveloperOrderedBySalary.where(name: "Jamis").where("created_at > ?", 1.year.ago)
received = dev_ordered_relation.unscope(where: [:name]).collect(&:name)
assert_equal expected.sort, received.sort
end
def test_unscope_with_grouping_attributes
expected = Developer.order("salary DESC").collect(&:name)
received = DeveloperOrderedBySalary.group(:name).unscope(:group).collect(&:name)
assert_equal expected.sort, received.sort
expected_2 = Developer.order("salary DESC").collect(&:name)
received_2 = DeveloperOrderedBySalary.group("name").unscope(:group).collect(&:name)
assert_equal expected_2.sort, received_2.sort
end
def test_unscope_with_limit_in_query
expected = Developer.order("salary DESC").collect(&:name)
received = DeveloperOrderedBySalary.limit(1).unscope(:limit).collect(&:name)
assert_equal expected.sort, received.sort
end
def test_order_to_unscope_reordering
scope = DeveloperOrderedBySalary.order("salary DESC, name ASC").reverse_order.unscope(:order)
assert_no_match(/order/i, scope.to_sql)
end
def test_unscope_reverse_order
expected = Developer.all.collect(&:name)
received = Developer.order("salary DESC").reverse_order.unscope(:order).collect(&:name)
assert_equal expected, received
end
def test_unscope_select
expected = Developer.order("salary ASC").collect(&:name)
received = Developer.order("salary DESC").reverse_order.select(:name).unscope(:select).collect(&:name)
assert_equal expected, received
expected_2 = Developer.all.collect(&:id)
received_2 = Developer.select(:name).unscope(:select).collect(&:id)
assert_equal expected_2, received_2
end
def test_unscope_offset
expected = Developer.all.collect(&:name)
received = Developer.offset(5).unscope(:offset).collect(&:name)
assert_equal expected, received
end
def test_unscope_joins_and_select_on_developers_projects
expected = Developer.all.collect(&:name)
received = Developer.joins("JOIN developers_projects ON id = developer_id").select(:id).unscope(:joins, :select).collect(&:name)
assert_equal expected, received
end
def test_unscope_left_outer_joins
expected = Developer.all.collect(&:name)
received = Developer.left_outer_joins(:projects).select(:id).unscope(:left_outer_joins, :select).collect(&:name)
assert_equal expected, received
end
def test_unscope_left_joins
expected = Developer.all.collect(&:name)
received = Developer.left_joins(:projects).select(:id).unscope(:left_joins, :select).collect(&:name)
assert_equal expected, received
end
def test_unscope_includes
expected = Developer.all.collect(&:name)
received = Developer.includes(:projects).select(:id).unscope(:includes, :select).collect(&:name)
assert_equal expected, received
end
def test_unscope_having
expected = DeveloperOrderedBySalary.all.collect(&:name)
received = DeveloperOrderedBySalary.having("name IN ('Jamis', 'David')").unscope(:having).collect(&:name)
assert_equal expected, received
end
def test_unscope_and_scope
developer_klass = Class.new(Developer) do
scope :by_name, -> name { unscope(where: :name).where(name: name) }
end
expected = developer_klass.where(name: "Jamis").collect { |dev| [dev.name, dev.id] }
received = developer_klass.where(name: "David").by_name("Jamis").collect { |dev| [dev.name, dev.id] }
assert_equal expected, received
end
def test_unscope_errors_with_invalid_value
assert_raises(ArgumentError) do
Developer.includes(:projects).where(name: "Jamis").unscope(:stupidly_incorrect_value)
end
assert_raises(ArgumentError) do
Developer.all.unscope(:includes, :select, :some_broken_value)
end
assert_raises(ArgumentError) do
Developer.order("name DESC").reverse_order.unscope(:reverse_order)
end
assert_raises(ArgumentError) do
Developer.order("name DESC").where(name: "Jamis").unscope()
end
end
def test_unscope_errors_with_non_where_hash_keys
assert_raises(ArgumentError) do
Developer.where(name: "Jamis").limit(4).unscope(limit: 4)
end
assert_raises(ArgumentError) do
Developer.where(name: "Jamis").unscope("where" => :name)
end
end
def test_unscope_errors_with_non_symbol_or_hash_arguments
assert_raises(ArgumentError) do
Developer.where(name: "Jamis").limit(3).unscope("limit")
end
assert_raises(ArgumentError) do
Developer.select("id").unscope("select")
end
assert_raises(ArgumentError) do
Developer.select("id").unscope(5)
end
end
def test_unscope_merging
merged = Developer.where(name: "Jamis").merge(Developer.unscope(:where))
assert_empty merged.where_clause
assert_not_empty merged.where(name: "Jon").where_clause
end
def test_order_in_default_scope_should_not_prevail
expected = Developer.all.merge!(order: "salary desc").to_a.collect(&:salary)
received = DeveloperOrderedBySalary.all.merge!(order: "salary").to_a.collect(&:salary)
assert_equal expected, received
end
def test_create_attribute_overwrites_default_scoping
assert_equal "David", PoorDeveloperCalledJamis.create!(name: "David").name
assert_equal 200000, PoorDeveloperCalledJamis.create!(name: "David", salary: 200000).salary
end
def test_create_attribute_overwrites_default_values
assert_nil PoorDeveloperCalledJamis.create!(salary: nil).salary
assert_equal 50000, PoorDeveloperCalledJamis.create!(name: "David").salary
end
def test_default_scope_attribute
jamis = PoorDeveloperCalledJamis.new(name: "David")
assert_equal 50000, jamis.salary
end
def test_where_attribute
aaron = PoorDeveloperCalledJamis.where(salary: 20).new(name: "Aaron")
assert_equal 20, aaron.salary
assert_equal "Aaron", aaron.name
end
def test_where_attribute_merge
aaron = PoorDeveloperCalledJamis.where(name: "foo").new(name: "Aaron")
assert_equal "Aaron", aaron.name
end
def test_scope_composed_by_limit_and_then_offset_is_equal_to_scope_composed_by_offset_and_then_limit
posts_limit_offset = Post.limit(3).offset(2)
posts_offset_limit = Post.offset(2).limit(3)
assert_equal posts_limit_offset, posts_offset_limit
end
def test_create_with_merge
aaron = PoorDeveloperCalledJamis.create_with(name: "foo", salary: 20).merge(
PoorDeveloperCalledJamis.create_with(name: "Aaron")).new
assert_equal 20, aaron.salary
assert_equal "Aaron", aaron.name
aaron = PoorDeveloperCalledJamis.create_with(name: "foo", salary: 20).
create_with(name: "Aaron").new
assert_equal 20, aaron.salary
assert_equal "Aaron", aaron.name
end
def test_create_with_using_both_string_and_symbol
jamis = PoorDeveloperCalledJamis.create_with(name: "foo").create_with("name" => "Aaron").new
assert_equal "Aaron", jamis.name
end
def test_create_with_reset
jamis = PoorDeveloperCalledJamis.create_with(name: "Aaron").create_with(nil).new
assert_equal "Jamis", jamis.name
end
def test_create_with_takes_precedence_over_where
developer = Developer.where(name: nil).create_with(name: "Aaron").new
assert_equal "Aaron", developer.name
end
def test_create_with_nested_attributes
assert_difference("Project.count", 1) do
Developer.create_with(
projects_attributes: [{ name: "p1" }]
).scoping do
Developer.create!(name: "Aaron")
end
end
end
# FIXME: I don't know if this is *desired* behavior, but it is *today's*
# behavior.
def test_create_with_empty_hash_will_not_reset
jamis = PoorDeveloperCalledJamis.create_with(name: "Aaron").create_with({}).new
assert_equal "Aaron", jamis.name
end
def test_unscoped_with_named_scope_should_not_have_default_scope
assert_equal [DeveloperCalledJamis.find(developers(:poor_jamis).id)], DeveloperCalledJamis.poor
assert_includes DeveloperCalledJamis.unscoped.poor, developers(:david).becomes(DeveloperCalledJamis)
assert_equal 11, DeveloperCalledJamis.unscoped.length
assert_equal 1, DeveloperCalledJamis.poor.length
assert_equal 10, DeveloperCalledJamis.unscoped.poor.length
assert_equal 10, DeveloperCalledJamis.unscoped { DeveloperCalledJamis.poor }.length
end
def test_default_scope_with_joins
assert_equal Comment.where(post_id: SpecialPostWithDefaultScope.pluck(:id)).count,
Comment.joins(:special_post_with_default_scope).count
assert_equal Comment.where(post_id: Post.pluck(:id)).count,
Comment.joins(:post).count
end
def test_joins_not_affected_by_scope_other_than_default_or_unscoped
without_scope_on_post = Comment.joins(:post).to_a
with_scope_on_post = nil
Post.where(id: [1, 5, 6]).scoping do
with_scope_on_post = Comment.joins(:post).to_a
end
assert_equal with_scope_on_post, without_scope_on_post
end
def test_unscoped_with_joins_should_not_have_default_scope
assert_equal SpecialPostWithDefaultScope.unscoped { Comment.joins(:special_post_with_default_scope).to_a },
Comment.joins(:post).to_a
end
def test_sti_association_with_unscoped_not_affected_by_default_scope
post = posts(:thinking)
comments = [comments(:does_it_hurt)]
post.special_comments.update_all(deleted_at: Time.now)
assert_raises(ActiveRecord::RecordNotFound) { Post.joins(:special_comments).find(post.id) }
assert_equal [], post.special_comments
SpecialComment.unscoped do
assert_equal post, Post.joins(:special_comments).find(post.id)
assert_equal comments, Post.joins(:special_comments).find(post.id).special_comments
assert_equal comments, Post.eager_load(:special_comments).find(post.id).special_comments
assert_equal comments, Post.includes(:special_comments).find(post.id).special_comments
assert_equal comments, Post.preload(:special_comments).find(post.id).special_comments
end
end
def test_default_scope_select_ignored_by_aggregations
assert_equal DeveloperWithSelect.all.to_a.count, DeveloperWithSelect.count
end
def test_default_scope_select_ignored_by_grouped_aggregations
assert_equal Hash[Developer.all.group_by(&:salary).map { |s, d| [s, d.count] }],
DeveloperWithSelect.group(:salary).count
end
def test_default_scope_order_ignored_by_aggregations
assert_equal DeveloperOrderedBySalary.all.count, DeveloperOrderedBySalary.count
end
def test_default_scope_find_last
assert DeveloperOrderedBySalary.count > 1, "need more than one row for test"
lowest_salary_dev = DeveloperOrderedBySalary.find(developers(:poor_jamis).id)
assert_equal lowest_salary_dev, DeveloperOrderedBySalary.last
end
def test_default_scope_include_with_count
d = DeveloperWithIncludes.create!
d.audit_logs.create! message: "foo"
assert_equal 1, DeveloperWithIncludes.where(audit_logs: { message: "foo" }).count
end
def test_default_scope_with_references_works_through_collection_association
post = PostWithCommentWithDefaultScopeReferencesAssociation.create!(title: "Hello World", body: "Here we go.")
comment = post.comment_with_default_scope_references_associations.create!(body: "Great post.", developer_id: Developer.first.id)
assert_equal comment, post.comment_with_default_scope_references_associations.to_a.first
end
def test_default_scope_with_references_works_through_association
post = PostWithCommentWithDefaultScopeReferencesAssociation.create!(title: "Hello World", body: "Here we go.")
comment = post.comment_with_default_scope_references_associations.create!(body: "Great post.", developer_id: Developer.first.id)
assert_equal comment, post.first_comment
end
def test_default_scope_with_references_works_with_find_by
post = PostWithCommentWithDefaultScopeReferencesAssociation.create!(title: "Hello World", body: "Here we go.")
comment = post.comment_with_default_scope_references_associations.create!(body: "Great post.", developer_id: Developer.first.id)
assert_equal comment, CommentWithDefaultScopeReferencesAssociation.find_by(id: comment.id)
end
test "additional conditions are ANDed with the default scope" do
scope = DeveloperCalledJamis.where(name: "David")
assert_equal 2, scope.where_clause.ast.children.length
assert_equal [], scope.to_a
end
test "additional conditions in a scope are ANDed with the default scope" do
scope = DeveloperCalledJamis.david
assert_equal 2, scope.where_clause.ast.children.length
assert_equal [], scope.to_a
end
test "a scope can remove the condition from the default scope" do
scope = DeveloperCalledJamis.david2
assert_equal 1, scope.where_clause.ast.children.length
assert_equal Developer.where(name: "David").map(&:id), scope.map(&:id)
end
def test_with_abstract_class_where_clause_should_not_be_duplicated
scope = Bus.all
assert_equal scope.where_clause.ast.children.length, 1
end
def test_sti_conditions_are_not_carried_in_default_scope
ConditionalStiPost.create! body: ""
SubConditionalStiPost.create! body: ""
SubConditionalStiPost.create! title: "Hello world", body: ""
assert_equal 2, ConditionalStiPost.count
assert_equal 2, ConditionalStiPost.all.to_a.size
assert_equal 3, ConditionalStiPost.unscope(where: :title).to_a.size
assert_equal 1, SubConditionalStiPost.count
assert_equal 1, SubConditionalStiPost.all.to_a.size
assert_equal 2, SubConditionalStiPost.unscope(where: :title).to_a.size
end
def test_with_abstract_class_scope_should_be_executed_in_correct_context
vegetarian_pattern, gender_pattern = if current_adapter?(:Mysql2Adapter)
[/`lions`.`is_vegetarian`/, /`lions`.`gender`/]
elsif current_adapter?(:OracleAdapter)
[/"LIONS"."IS_VEGETARIAN"/, /"LIONS"."GENDER"/]
else
[/"lions"."is_vegetarian"/, /"lions"."gender"/]
end
assert_match vegetarian_pattern, Lion.all.to_sql
assert_match gender_pattern, Lion.female.to_sql
end
end
class DefaultScopingWithThreadTest < ActiveRecord::TestCase
self.use_transactional_tests = false
def test_default_scoping_with_threads
2.times do
Thread.new {
assert_includes DeveloperOrderedBySalary.all.to_sql, "salary DESC"
DeveloperOrderedBySalary.connection.close
}.join
end
end
def test_default_scope_is_threadsafe
2.times { ThreadsafeDeveloper.unscoped.create! }
threads = []
assert_not_equal 1, ThreadsafeDeveloper.unscoped.count
barrier_1 = Concurrent::CyclicBarrier.new(2)
barrier_2 = Concurrent::CyclicBarrier.new(2)
threads << Thread.new do
Thread.current[:default_scope_delay] = -> { barrier_1.wait; barrier_2.wait }
assert_equal 1, ThreadsafeDeveloper.all.to_a.count
ThreadsafeDeveloper.connection.close
end
threads << Thread.new do
Thread.current[:default_scope_delay] = -> { barrier_2.wait }
barrier_1.wait
assert_equal 1, ThreadsafeDeveloper.all.to_a.count
ThreadsafeDeveloper.connection.close
end
threads.each(&:join)
ensure
ThreadsafeDeveloper.unscoped.destroy_all
end
end unless in_memory_db?
| 39.404514 | 147 | 0.756752 |
b9374aeb36a28dc5bf429d8decada69cecc15b0d | 1,392 | require 'pry'
class DatasetGenerator
def self.generate(file_data)
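# file_data layout: line 0 holds the number of datasets; each dataset then contributes
# a "<denoms> <prices>" header line, a line of currency multipliers, and <prices> rows of values.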
datasets_amt = file_data[0].to_i # Indicates the number of datasets (usable for tests)
# Initialize dataset start point
start_index = 1
# Read each dataset's denoms and prices counts; the prices count also tells us how many
# rows of data are in the set, i.e. the index at which the next dataset starts.
datasets_amt.times do
@dataset = Dataset.new
# binding.pry
denoms = file_data[start_index].split(" ").first.to_i
prices = file_data[start_index].split(" ").last.to_i
# binding.pry
currency_multipliers = file_data[start_index + 1].split(" ").map {|e| e.to_i}
row_end_price = 0
i = 0
prices.times do
row = file_data[start_index + 2 + i].split(" ").map {|e| e.to_i}
#convert prices
row.map!.with_index do |currency, index|
if index >= currency_multipliers.length
currency
else
currency * currency_multipliers[index..currency_multipliers.length].reduce(:*).to_i
end
end
@dataset.prices << row
i = i + 1
end
start_index += 2 + prices
end
end
end
| 33.95122 | 107 | 0.541667 |
bb6d8820c2444bd509d2c20497cd862de0e9a92c | 3,149 | #
# Copyright:: Copyright (c) 2015 Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef-dk/command/base"
require "chef-dk/ui"
require "chef-dk/policyfile_services/push_archive"
require "chef-dk/configurable"
module ChefDK
module Command
class PushArchive < Base
include Configurable
banner(<<-E)
Usage: chef push-archive POLICY_GROUP ARCHIVE_FILE [options]
`chef push-archive` publishes a policy archive to a Chef Server. Policy
archives can be created with `chef export -a`. The policy will be applied to
the given POLICY_GROUP, which is a set of nodes that share the same
run_list and cookbooks.
For more information about Policyfiles, see our detailed README:
https://docs.chef.io/policyfile.html
Options:
E
option :config_file,
short: "-c CONFIG_FILE",
long: "--config CONFIG_FILE",
description: "Path to configuration file"
option :debug,
short: "-D",
long: "--debug",
description: "Enable stacktraces and other debug output",
default: false
attr_accessor :ui
attr_reader :policy_group
attr_reader :archive_path
def initialize(*args)
super
@policy_group = nil
@archive_path = nil
@chef_config = nil
@ui = UI.new
end
def run(params)
return 1 unless apply_params!(params)
push_archive_service.run
0
rescue PolicyfileServiceError => e
handle_error(e)
1
end
# @api private
def handle_error(error)
ui.err("Error: #{error.message}")
if error.respond_to?(:reason)
ui.err("Reason: #{error.reason}")
ui.err("")
ui.err(error.extended_error_info) if debug?
ui.err(error.cause.backtrace.join("\n")) if debug?
end
end
# @api private
def push_archive_service
@push_archive_service ||=
ChefDK::PolicyfileServices::PushArchive.new(
archive_file: archive_file,
policy_group: policy_group,
ui: ui,
config: chef_config
)
end
def archive_file
File.expand_path(archive_path)
end
# @api private
def debug?
!!config[:debug]
end
# @api private
def apply_params!(params)
remaining_args = parse_options(params)
if remaining_args.size != 2
ui.err(opt_parser)
return false
end
@policy_group, @archive_path = remaining_args
end
end
end
end
| 24.992063 | 76 | 0.635757 |
b95b0c1894c13a0fdee5660b6cef430781e29693 | 323 | require 'rake'
require 'active_record'
require 'yaml/store'
require 'ostruct'
require 'date'
require 'bundler/setup'
Bundler.require
# put the code to connect to the database here
ActiveRecord::Base.establish_connection(
:adapter => "sqlite3",
:database => "db/artists.sqlite"
)
require_relative "../artist.rb"
| 16.15 | 46 | 0.736842 |
ac7bc4ae816e2f896d9712e572a7489c387b3d92 | 1,176 | require "rails_helper"
module EnjuRoot
RSpec.describe WorksController, type: :routing do
routes {EnjuRoot::Engine.routes}
describe "routing" do
it "routes to #index" do
expect(:get => "/works").to route_to("enju_root/works#index")
end
it "routes to #new" do
expect(:get => "/works/new").to route_to("enju_root/works#new")
end
it "routes to #show" do
expect(:get => "/works/1").to route_to("enju_root/works#show", :id => "1")
end
it "routes to #edit" do
expect(:get => "/works/1/edit").to route_to("enju_root/works#edit", :id => "1")
end
it "routes to #create" do
expect(:post => "/works").to route_to("enju_root/works#create")
end
it "routes to #update via PUT" do
expect(:put => "/works/1").to route_to("enju_root/works#update", :id => "1")
end
it "routes to #update via PATCH" do
expect(:patch => "/works/1").to route_to("enju_root/works#update", :id => "1")
end
it "routes to #destroy" do
expect(:delete => "/works/1").to route_to("enju_root/works#destroy", :id => "1")
end
end
end
end
| 26.727273 | 88 | 0.577381 |
398890e5d664ebbc2b37316e7ec7247d364ca710 | 1,632 | class FileGenerator < RubiGen::Base
default_options :author => nil
attr_reader :name
def initialize(runtime_args, runtime_options = {})
super
usage if args.empty?
@name = args.shift
extract_options
end
def manifest
record do |m|
# Ensure appropriate folder(s) exists
m.directory 'src'
m.directory 'test'
# Create stubs
# m.template "template.rb", "some_file_after_erb.rb"
# m.template_copy_each ["template.rb", "template2.rb"]
# m.file "file", "some_file_copied"
# m.file_copy_each ["path/to/file", "path/to/file2"]
m.template "file.cpp", "src/#{name}.cpp"
m.template "file.h", "src/#{name}.h"
m.template "test.cpp", "test/test_#{name}.cpp"
end
end
protected
def banner
<<-EOS
Creates a ...
USAGE: #{$0} #{spec.name} name
EOS
end
def add_options!(opts)
# opts.separator ''
# opts.separator 'Options:'
# For each option below, place the default
# at the top of the file next to "default_options"
# opts.on("-a", "--author=\"Your Name\"", String,
# "Some comment about this option",
# "Default: none") { |options[:author]| }
# opts.on("-v", "--version", "Show the #{File.basename($0)} version number and quit.")
end
def extract_options
# for each option, extract it into a local variable (and create an "attr_reader :author" at the top)
# Templates can access these value via the attr_reader-generated methods, but not the
# raw instance variable value.
# @author = options[:author]
end
end | 28.631579 | 106 | 0.610907 |
bf3fc54848b6bc0409192d44b3748b7ec9d2cc23 | 1,078 | require 'delegate'
module Restspec
module Stores
# Provides methods for the {SchemaStore} object.
class SchemaStoreDelegator < SimpleDelegator
# Stores a schema. It uses the name of the schema as the hash key.
#
# @param schema [Restspec::Schema::Schema] the schema to store.
# @return [Restspec::Schema::Schema] the schema inserted.
def store(schema)
self[schema.name] = schema
end
# Get the schema. It's just an alias for the Hash#[] method
#
# @param schema_name the name of the schema.
# @return [Restspec::Schema::Schema, nil] the schema found.
def get(schema_name)
self[schema_name]
end
end
# The Schema Store is a Hash extended using {Stores::SchemaStoreDelegator}
# This is where we store the schemas to use.
#
# It's important to note that, because this is a Hash, there can't be
# two schemas with the same name.
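#
# @example Storing and retrieving a schema (a sketch; any object that responds
#   to +name+ works, since {SchemaStoreDelegator#store} keys on it)
#   schema = OpenStruct.new(name: :monkey)
#   Restspec::Stores::SchemaStore.store(schema)
#   Restspec::Stores::SchemaStore.get(:monkey) # => schema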
SchemaStore = SchemaStoreDelegator.new(Hash.new)
end
# (see Stores::SchemaStore)
SchemaStore = Stores::SchemaStore
end
| 30.8 | 78 | 0.664193 |
f76dd368559f181f9590ad9d99a7deabfe514a78 | 9 | puts 1+2
| 4.5 | 8 | 0.666667 |
1114ceace49c083c930302d7623bfc8b1322e08d | 1,071 | # frozen_string_literal: true
RSpec.describe Blacklight::Icon do
subject { described_class.new(:search, classes: 'awesome') }
describe '#svg' do
it 'returns a string' do
expect(subject.svg).to be_an String
end
it 'returns raw svg' do
expect(Capybara.string(subject.svg))
.to have_css 'svg title', text: 'Search'
end
end
describe '#options' do
it 'applies options classes and default class' do
expect(subject.options[:class]).to eq 'blacklight-icons awesome'
end
end
describe '#path' do
it 'prepends blacklight/ and suffixes .svg' do
expect(subject.path).to eq 'blacklight/search.svg'
end
end
describe 'file_source' do
context 'file is not available' do
subject { described_class.new(:yolo) }
it {
expect { subject.file_source }
.to raise_error(Blacklight::Exceptions::IconNotFound)
}
end
context 'file is available' do
it 'returns the filesource' do
expect(subject.file_source).to include '<svg'
end
end
end
end
| 23.8 | 70 | 0.652661 |
79e5ce77230ee02128cf05d50debea2b4b2281cc | 293 | # frozen_string_literal: true
module I18n::Tasks
module StringInterpolation
module_function
def interpolate_soft(s, t = {})
return s unless s
t.each do |k, v|
pat = "%{#{k}}"
s = s.gsub pat, v.to_s if s.include?(pat)
end
s
end
end
end
| 17.235294 | 49 | 0.573379 |
33d967fac4d90220f71913126811f91742cbcca8 | 4,022 | module Spina
module Admin
class PagesController < AdminController
before_action :set_tabs, only: [:new, :create, :edit, :update]
before_action :set_locale
before_action :set_page, only: [:edit, :update, :destroy, :children]
layout 'spina/admin/admin'
def index
add_breadcrumb I18n.t('spina.website.pages'), spina.admin_pages_path
redirect_to admin_pages_path unless current_admin_path.starts_with?('/pages')
@pages = Page.active.sorted.roots.regular_pages
end
def new
@resource = Resource.find_by(id: params[:resource_id])
@page = Page.new(resource: @resource, parent: Page.find_by(id: params[:parent_id]) || @resource&.parent_page)
add_index_breadcrumb
if current_theme.new_page_templates.any? { |template| template[0] == params[:view_template] }
@page.view_template = params[:view_template]
end
add_breadcrumb I18n.t('spina.pages.new')
@page_parts = @page.view_template_page_parts(current_theme).map { |part| @page.part(part) }
render layout: 'spina/admin/admin'
end
def create
@page = Page.new(page_params)
add_breadcrumb I18n.t('spina.pages.new')
if @page.save
@page.navigations << Spina::Navigation.where(auto_add_pages: true)
redirect_to spina.edit_admin_page_url(@page), flash: {success: t('spina.pages.saved')}
else
@page_parts = @page.view_template_page_parts(current_theme).map { |part| @page.part(part) }
render :new, layout: 'spina/admin/admin'
end
end
def edit
add_index_breadcrumb
add_breadcrumb @page.title
@page_parts = @page.view_template_page_parts(current_theme).map { |part| @page.part(part) }
render layout: 'spina/admin/admin'
end
def update
respond_to do |format|
Mobility.locale = @locale
if @page.update(page_params)
@page.touch
format.html { redirect_to spina.edit_admin_page_url(@page, params: {locale: @locale}), flash: {success: t('spina.pages.saved')} }
format.js
else
format.html do
@page_parts = @page.view_template_page_parts(current_theme).map { |part| @page.part(part) }
Mobility.locale = I18n.default_locale
render :edit, layout: 'spina/admin/admin'
end
end
end
end
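# Persists a drag-and-drop ordering. params[:list] is assumed to be a nested
# hash of positions, roughly of this shape (inferred from how it is walked below):
#
#   list: {
#     "0" => { id: "3", children: { "0" => { id: "7" }, "1" => { id: "9" } } },
#     "1" => { id: "5" }
#   }
#
# Top-level nodes become root pages (parent_id nil); each child is re-parented
# to the page whose node it sits under.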
def sort
params[:list].each_pair do |parent_pos, parent_node|
update_child_pages_position(parent_node)
update_page_position(parent_node, parent_pos, nil)
end
head :ok
end
def children
@children = @page.children.active.sorted
render layout: false
end
def destroy
@page.destroy
redirect_to spina.admin_pages_url
end
private
def set_locale
@locale = params[:locale] || I18n.default_locale
end
def add_index_breadcrumb
if @page.resource.present?
add_breadcrumb @page.resource.label, spina.admin_resource_path(@page.resource)
else
add_breadcrumb I18n.t('spina.website.pages'), spina.admin_pages_path
end
end
def set_tabs
@tabs = %w{page_content page_seo advanced}
end
def update_page_position(page, position, parent_id = nil)
Page.update(page[:id], position: position.to_i + 1, parent_id: parent_id )
end
def update_child_pages_position(node)
if node[:children].present?
node[:children].each_pair do |child_pos, child_node|
update_child_pages_position(child_node) if child_node[:children].present?
update_page_position(child_node, child_pos, node[:id])
end
end
end
def page_params
params.require(:page).permit!
end
def set_page
@page = Page.find(params[:id])
end
end
end
end
| 32.699187 | 141 | 0.622576 |
796797279740d5322cee68a522a05ef24e786303 | 623 | require "websocket/driver"
class LocalPusherConnection
def initialize(socket)
@socket = socket
driver.on :connect, -> (event) do
driver.start
write_json(
'event' => 'pusher:connection_established',
'data' => {'socket_id' => 101}.to_json
)
end
end
def close
socket.close
end
def parse
driver.parse(socket.gets)
rescue IOError
end
def write(string)
socket.write(string)
end
def write_json(object)
driver.text(object.to_json)
end
private
attr_reader :socket
def driver
@driver ||= WebSocket::Driver.server(self)
end
end
| 15.195122 | 51 | 0.642055 |
fffe6b12b63f88376aad74b4aa94490cca7d10b8 | 1,250 | #!/usr/bin/env ruby
#
# @author Couchbase <[email protected]>
# @copyright 2013 Couchbase, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'socket'
BIND_PORT = ARGV[0] ? ARGV[0].to_i : 22222
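# Simple threaded echo server: each accepted client gets its own thread and any
# bytes it sends are written straight back. Handy for manual socket testing,
# e.g. start the script and connect with `nc localhost 22222` (the port is the
# optional first argument, defaulting to 22222).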
def run_accept_loop(server_sock)
while true
client = server_sock.accept
puts "got client #{client}"
Thread.new { run_client_loop(client) }
end
end
def run_client_loop(client)
puts "starting client loop: #{client}"
while true
stuff = client.readpartial(16384)
puts "#{client}: got #{stuff.inspect}"
client.write stuff
end
rescue EOFError
client.close rescue nil
puts "ended client: #{client}"
return
rescue Exception => exc
puts "got exception: #{exc}"
end
run_accept_loop(TCPServer.new(BIND_PORT))
| 26.595745 | 74 | 0.7288 |
5dfbd4df427c30fd9fedb84ee1e0630143c97fd6 | 5,468 | class KeyPairsController < ApplicationController
# GET /key_pairs
# GET /key_pairs.json
def index
# Grab the own user's keys only
@key_pairs = KeyPair.where(user_id: @current_user.id)
@key_strings = Array.new
@key_pairs.each do |keypair|
# Read the key
key = File.open(File.join(KEY_STORAGE, keypair.file_name))
@key_strings.push(key.read)
end
@key_pairs = @key_pairs.zip(@key_strings)
respond_to do |format|
format.html # index.html.erb
format.json { render json: { key_pairs: @key_pairs } }
end
end
# GET /key_pairs/1
# GET /key_pairs/1.json
def show
@key_pair = KeyPair.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @key_pair }
end
end
# GET /key_pairs/new
# GET /key_pairs/new.json
def new
@key_pair = KeyPair.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @key_pair }
end
end
# GET /key_pairs/1/edit
def edit
@key_pair = KeyPair.find(params[:id])
end
# Given a File object to upload, save it on the file system with
# association to the user_name given.
# If a String is supplied as the first argument then its content
# is used to create the public key
# Creates the KEY_STORAGE directory if it does not yet exist
def upload_key_file(file_content, time_stamp)
create_key_directory
write_key(file_content, time_stamp)
add_key(File.join(KEY_STORAGE, @current_user.user_name +
"@#{time_stamp}.pub"))
end
def write_key(file_content, time_stamp)
File.open(Rails.root.join(KEY_STORAGE, @current_user.user_name +
"@#{time_stamp}.pub"), 'wb') do |f|
f.write(file_content)
end
end
# Creates the KEY_STORAGE directory if required
def create_key_directory
Dir.mkdir(KEY_STORAGE) unless File.exist?(KEY_STORAGE)
end
# Adds a specific public key to a specific user.
def add_key(_path)
# TODO: Think of a generic mechanism
end
# Deletes a specific public key from a specific user.
def remove_key(_path)
# TODO: Think of a generic mechanism
# Delete key file
if File.exist?(_path)
File.delete(_path)
end
end
# POST /key_pairs
# POST /key_pairs.json
def create
# Used to uniquely identify key
time_stamp = Time.now.to_i.to_s
public_key_content = ''
# If user uploads the public key as a file then that takes precedence over
# the key_string
if !key_pair_params[:file]
# Get key from key_string param
public_key_content = key_pair_params[:key_string]
else
# Get key from file contents
public_key_content = key_pair_params[:file].read
end
# Check to see if the public_key_content is a valid ssh key: an ssh
# key has the format "type blob label" and cannot have a nil type or blob.
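# A well-formed line looks like (hypothetical key material):
#   ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA... alice@laptop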
type, blob, _label = public_key_content.split
if !type.nil? && !blob.nil?
# Upload the file
upload_key_file(public_key_content, time_stamp)
# Save the record
@key_pair = KeyPair.new(user_name: @current_user.user_name,
user_id: @current_user.id,
file_name: @current_user.user_name +
"@#{time_stamp}.pub")
respond_to do |format|
if @key_pair.save
flash_message(:success, t('key_pairs.create.success'))
format.html do
redirect_to key_pairs_path
end
format.json do
render json: @key_pair,
status: :created,
location: @key_pair
end
else
format.html { render action: 'new' }
format.json do
render json: @key_pair.errors,
status: :unprocessable_entity
end
end
end
else # if type and/or blob are nil
flash_message(:error, t('key_pairs.create.invalid_key'))
respond_to do |format|
format.html do
redirect_to :back
end
end
end
end
# PATCH/PUT /key_pairs/1
# PATCH/PUT /key_pairs/1.json
def update
@key_pair = KeyPair.find(params[:id])
respond_to do |format|
if @key_pair.update_attributes(key_pair_params)
flash_message(:success, t('key_pairs.update.success'))
format.html do
redirect_to @key_pair
end
format.json { head :no_content }
else
format.html { render action: 'edit' }
format.json do
render json: @key_pair.errors,
status: :unprocessable_entity
end
end
end
end
# DELETE /key_pairs/1
# DELETE /key_pairs/1.json
def destroy
@key_pair = KeyPair.find(params[:id])
remove_key(File.join(KEY_STORAGE, @key_pair.file_name))
@key_pair.destroy
flash_message(:success, t('key_pairs.delete.success'))
respond_to do |format|
format.html do
redirect_to key_pairs_path
end
format.json { head :no_content }
end
end
private
# Use this method to whitelist the permissible parameters. Example:
# params.require(:person).permit(:name, :age)
# Also, you can specialize this method with per-user checking of
# permissible attributes.
def key_pair_params
params.require(:key_pair).permit(:file, :key_string)
end
end
| 27.34 | 78 | 0.630029 |
7921b02294c29254e736ba368e095fed8e37d867 | 2,181 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2016_07_07
module Models
#
# Paged Quota Counter list representation.
#
class QuotaCounterCollection
include MsRestAzure
# @return [Array<QuotaCounterContract>] Quota counter values.
attr_accessor :value
# @return [Integer] Total records count number.
attr_accessor :count
# @return [String] Next page link if any.
attr_accessor :next_link
#
# Mapper for QuotaCounterCollection class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'QuotaCounterCollection',
type: {
name: 'Composite',
class_name: 'QuotaCounterCollection',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'QuotaCounterContractElementType',
type: {
name: 'Composite',
class_name: 'QuotaCounterContract'
}
}
}
},
count: {
client_side_validation: true,
required: false,
serialized_name: 'count',
type: {
name: 'Number'
}
},
next_link: {
client_side_validation: true,
required: false,
serialized_name: 'nextLink',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 27.961538 | 73 | 0.489225 |
f87b6d0f8169078301fd4649170ee8b70d970356 | 1,026 | class ApplicationController < ActionController::API
before_action :allow_cross_domain_ajax
before_action :require_login
def allow_cross_domain_ajax
headers['Access-Control-Allow-Origin'] = '*'
headers['Access-Control-Request-Method'] = 'POST, OPTIONS'
end
def encode_token(payload)
JWT.encode(payload, 'my_secret')
end
def auth_header
request.headers['Authorization']
end
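# The client is expected to send `Authorization: Bearer <token>` (any
# `<scheme> <token>` pair works, since split(' ')[1] below just takes the second
# part). A typical flow, where `user` is the record that was just authenticated:
#
#   token = encode_token(user_id: user.id)
#   # client then sends: Authorization: Bearer #{token}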
def decoded_token
if auth_header
token = auth_header.split(' ')[1]
begin
JWT.decode(token, 'my_secret', true, algorithm: 'HS256')
rescue JWT::DecodeError
[]
end
end
end
def session_user
decoded_hash = decoded_token
if !decoded_hash.empty?
puts decoded_hash.class
user_id = decoded_hash[0]['user_id']
@user = User.find_by(id: user_id)
else
nil
end
end
def logged_in?
!!session_user
end
def require_login
render json: {message: 'Please Login'}, status: :unauthorized unless logged_in?
end
end | 19.730769 | 83 | 0.667641 |
d55f020c0a435e98cbd8b44504d668b5f11f5eef | 2,003 | # frozen_string_literal: true
module Elastic
module Latest
module GitInstanceProxy
extend ActiveSupport::Concern
class_methods do
def methods_for_all_write_targets
super + [:delete_index_for_commits_and_blobs]
end
end
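# All of a project's documents share one routing value ("project_<id>"), so its
# commits and blobs are colocated on the same shard; the delete_by_query call
# below passes this routing to limit the deletion to that shard.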
def es_parent
"project_#{project_id}"
end
def elastic_search(query, type: 'all', page: 1, per: 20, options: {})
options = repository_specific_options(options)
self.class.elastic_search(query, type: type, page: page, per: per, options: options)
end
# @return [Kaminari::PaginatableArray]
def elastic_search_as_found_blob(query, page: 1, per: 20, options: {})
options = repository_specific_options(options)
self.class.elastic_search_as_found_blob(query, page: page, per: per, options: options)
end
def delete_index_for_commits_and_blobs(wiki: false)
types =
if wiki
%w[wiki_blob]
else
%w[commit blob]
end
client.delete_by_query(
index: index_name,
routing: es_parent,
body: {
query: {
bool: {
filter: [
{
terms: {
type: types
}
},
{
has_parent: {
parent_type: 'project',
query: {
term: {
id: project_id
}
}
}
}
]
}
}
}
)
end
private
def repository_id
raise NotImplementedError
end
def repository_specific_options(options)
if options[:repository_id].nil?
options = options.merge(repository_id: repository_id)
end
options
end
end
end
end
| 23.845238 | 94 | 0.48677 |
5da3ad33a3e2be1b3e70483d6e5ec24414fc72d0 | 4,416 | require "cases/migration/helper"
module ActiveRecord
class Migration
class RenameTableTest < ActiveRecord::TestCase
include ActiveRecord::Migration::TestHelper
self.use_transactional_tests = false
def setup
super
add_column "test_models", :url, :string
remove_column "test_models", :created_at
remove_column "test_models", :updated_at
end
def teardown
rename_table :octopi, :test_models if connection.table_exists? :octopi
super
end
if current_adapter?(:SQLite3Adapter)
def test_rename_table_for_sqlite_should_work_with_reserved_words
renamed = false
add_column :test_models, :url, :string
connection.rename_table :references, :old_references
connection.rename_table :test_models, :references
renamed = true
# Using explicit id in insert for compatibility across all databases
connection.execute "INSERT INTO 'references' (url, created_at, updated_at) VALUES ('http://rubyonrails.com', 0, 0)"
assert_equal "http://rubyonrails.com", connection.select_value("SELECT url FROM 'references' WHERE id=1")
ensure
return unless renamed
connection.rename_table :references, :test_models
connection.rename_table :old_references, :references
end
end
unless current_adapter?(:FbAdapter) # Firebird cannot rename tables
def test_rename_table
rename_table :test_models, :octopi
connection.execute "INSERT INTO octopi (#{connection.quote_column_name('id')}, #{connection.quote_column_name('url')}) VALUES (1, 'http://www.foreverflying.com/octopus-black7.jpg')"
assert_equal "http://www.foreverflying.com/octopus-black7.jpg", connection.select_value("SELECT url FROM octopi WHERE id=1")
end
def test_rename_table_with_an_index
add_index :test_models, :url
rename_table :test_models, :octopi
connection.execute "INSERT INTO octopi (#{connection.quote_column_name('id')}, #{connection.quote_column_name('url')}) VALUES (1, 'http://www.foreverflying.com/octopus-black7.jpg')"
assert_equal "http://www.foreverflying.com/octopus-black7.jpg", connection.select_value("SELECT url FROM octopi WHERE id=1")
index = connection.indexes(:octopi).first
assert_includes index.columns, "url"
assert_equal "index_octopi_on_url", index.name
end
def test_rename_table_does_not_rename_custom_named_index
add_index :test_models, :url, name: "special_url_idx"
rename_table :test_models, :octopi
assert_equal ["special_url_idx"], connection.indexes(:octopi).map(&:name)
end
end
if current_adapter?(:PostgreSQLAdapter)
def test_rename_table_for_postgresql_should_also_rename_default_sequence
rename_table :test_models, :octopi
pk, seq = connection.pk_and_sequence_for("octopi")
assert_equal ConnectionAdapters::PostgreSQL::Name.new("public", "octopi_#{pk}_seq"), seq
end
def test_renaming_table_renames_primary_key
connection.create_table :cats, id: :uuid, default: "uuid_generate_v4()"
rename_table :cats, :felines
assert connection.table_exists? :felines
refute connection.table_exists? :cats
primary_key_name = connection.select_values(<<-SQL.strip_heredoc, "SCHEMA")[0]
SELECT c.relname
FROM pg_class c
JOIN pg_index i
ON c.oid = i.indexrelid
WHERE i.indisprimary
AND i.indrelid = 'felines'::regclass
SQL
assert_equal "felines_pkey", primary_key_name
ensure
connection.drop_table :cats, if_exists: true
connection.drop_table :felines, if_exists: true
end
def test_renaming_table_doesnt_attempt_to_rename_non_existent_sequences
connection.create_table :cats, id: :uuid, default: "uuid_generate_v4()"
assert_nothing_raised { rename_table :cats, :felines }
assert connection.table_exists? :felines
refute connection.table_exists? :cats
ensure
connection.drop_table :cats, if_exists: true
connection.drop_table :felines, if_exists: true
end
end
end
end
end
| 37.74359 | 191 | 0.672101 |
bf39d84928aa07a2e026bcb454bb5e9564baf8b6 | 1,638 | module Fog
module Google
class SQL
##
# Deletes all client certificates and generates a new server SSL certificate for the instance.
# The changes will not take effect until the instance is restarted. Existing instances without
# a server certificate will need to call this once to set a server certificate
#
# @see https://developers.google.com/cloud-sql/docs/admin-api/v1beta3/instances/resetSslConfig
class Real
def reset_instance_ssl_config(instance_id)
api_method = @sql.instances.reset_ssl_config
parameters = {
'project' => @project,
'instance' => instance_id,
}
request(api_method, parameters)
end
end
class Mock
def reset_instance_ssl_config(instance_id)
operation = self.random_operation
self.data[:operations][instance_id] ||= {}
self.data[:operations][instance_id][operation] = {
'kind' => 'sql#instanceOperation',
'instance' => instance_id,
'operation' => operation,
'operationType' => 'UPDATE',
'state' => Fog::Google::SQL::Operation::DONE_STATE,
'userEmailAddress' => '[email protected]',
'enqueuedTime' => Time.now.iso8601,
'startTime' => Time.now.iso8601,
'endTime' => Time.now.iso8601,
}
body = {
'kind' => 'sql#instancesResetSslConfig',
'operation' => operation,
}
build_excon_response(body)
end
end
end
end
end
| 32.76 | 100 | 0.592186 |
bb17753fe68464358068127098b251110cf0c3a2 | 133 | # encoding: utf-8
module DescendantsTracker
# Unreleased gem version
VERSION = '0.0.3'.freeze
end # module DescendantsTracker
| 14.777778 | 31 | 0.744361 |
7a78c79afaadef1d577051234a995aed2395ad03 | 209 | # encoding: utf-8
require 'acceptance_helper'
describe 'buttons WiceGrid', type: :request, js: true do
before :each do
visit '/csv_and_detached_filters'
end
include_examples 'detached_filters'
end
| 19 | 56 | 0.751196 |
03f33160c7be062b89cb491cde1cd807c390a8b5 | 38 | module StICML
VERSION = "0.1.0"
end
| 9.5 | 19 | 0.657895 |
21e1d8bbdd3baf2db9893e189a7ec0b02a8fc6f4 | 6,523 | =begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'time'
require 'date'
module XeroRuby::Accounting
require 'bigdecimal'
class ImportSummaryObject
attr_accessor :import_summary
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'import_summary' => :'ImportSummary'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'import_summary' => :'ImportSummary'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `XeroRuby::Accounting::ImportSummaryObject` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `XeroRuby::Accounting::ImportSummaryObject`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'import_summary')
self.import_summary = attributes[:'import_summary']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
import_summary == o.import_summary
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[import_summary].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(parse_date(value))
when :Date
Date.parse(parse_date(value))
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BigDecimal
BigDecimal(value.to_s)
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
XeroRuby::Accounting.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash(downcase: true)
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
key = downcase ? attr : param
hash[key] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
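# Xero's JSON payloads mostly carry .NET-style timestamps such as
# "/Date(1439434356790+0000)/" (milliseconds since the Unix epoch); the first
# branch below converts those to ISO 8601, while plain date strings from a few
# payroll endpoints fall through to Time.parse.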
def parse_date(datestring)
if datestring.include?('Date')
seconds_since_epoch = datestring.scan(/[0-9]+/)[0].to_i / 1000.0
Time.at(seconds_since_epoch).utc.strftime('%Y-%m-%dT%H:%M:%S%z').to_s
else # handle date 'types' for small subset of payroll API's
Time.parse(datestring).strftime('%Y-%m-%dT%H:%M:%S').to_s
end
end
end
end
| 30.768868 | 219 | 0.632071 |
1c8f0c6cb4aeaaf669a6b338453420b76eeaa758 | 1,105 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'milia/version'
Gem::Specification.new do |spec|
spec.name = "milia"
spec.version = Milia::VERSION
spec.authors = ["daudi amani"]
spec.email = ["[email protected]"]
spec.description = %q{Multi-tenanting gem for hosted Rails/Ruby/devise applications}
spec.summary = %q{Transparent multi-tenanting for hosted rails/ruby/devise web applications}
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency 'rails', '~> 4.0'
spec.add_dependency 'devise', '~> 3.2'
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_development_dependency "sqlite3"
spec.add_development_dependency "shoulda"
spec.add_development_dependency "turn"
end
| 36.833333 | 100 | 0.677828 |
4adf63447fa606ce1f1b775eda4d89c9cf8cf160 | 49 | object false
collection @cities
attributes :name
| 12.25 | 18 | 0.836735 |
5da518d21edcf968ec2a2dbd1e94c04b8087f3f1 | 195 | class CreateContatos < ActiveRecord::Migration
def change
create_table :contatos do |t|
t.string :contato
t.boolean :visitante
t.timestamps null: false
end
end
end
| 17.727273 | 46 | 0.676923 |
b9ac1f26c5733ae551ebd46b77e43b3e3d80caf7 | 2,874 | require 'fluent/config/section'
describe Fluent::Config::Section do
context 'class' do
describe '.name' do
it 'returns its full module name as String' do
expect(Fluent::Config::Section.name).to eql('Fluent::Config::Section')
end
end
end
context 'instance object' do
describe '#initialize' do
it 'creates blank object without argument' do
s = Fluent::Config::Section.new
expect(s.instance_eval{ @params }).to eql({})
end
it 'creates object which contains specified hash object itself' do
hash = {
name: 'tagomoris',
age: 34,
send: 'email',
class: 'normal',
keys: 5,
}
s1 = Fluent::Config::Section.new(hash)
expect(s1.instance_eval{ @params }).to eq(hash)
expect(s1[:name]).to eql("tagomoris")
expect(s1[:age]).to eql(34)
expect(s1[:send]).to eql("email")
expect(s1[:class]).to eql("normal")
expect(s1[:keys]).to eql(5)
expect(s1.name).to eql("tagomoris")
expect(s1.age).to eql(34)
expect(s1.send).to eql("email")
expect(s1.class).to eql("normal")
expect(s1.keys).to eql(5)
expect{ s1.dup }.to raise_error(NoMethodError)
end
end
describe '#to_h' do
it 'returns internal hash itself' do
hash = {
name: 'tagomoris',
age: 34,
send: 'email',
class: 'normal',
keys: 5,
}
s = Fluent::Config::Section.new(hash)
expect(s.to_h).to eq(hash)
expect(s.to_h.class).to eq(Hash)
end
end
describe '#instance_of?' do
it 'can judge whether it is a Section object or not' do
s = Fluent::Config::Section.new
expect(s.instance_of?(Fluent::Config::Section)).to be_true
expect(s.instance_of?(BasicObject)).to be_false
end
end
describe '#is_a?' do
it 'can judge whether it belongs to or not' do
s = Fluent::Config::Section.new
expect(s.is_a?(Fluent::Config::Section)).to be_true
expect(s.kind_of?(Fluent::Config::Section)).to be_true
expect(s.is_a?(BasicObject)).to be_true
end
end
describe '#+' do
it 'can merge 2 sections: argument side is primary, internal hash is newly created' do
h1 = {name: "s1", num: 10, class: "A"}
s1 = Fluent::Config::Section.new(h1)
h2 = {name: "s2", class: "A", num2: "5", num3: "8"}
s2 = Fluent::Config::Section.new(h2)
s = s1 + s2
expect(s.to_h.object_id).not_to eq(h1.object_id)
expect(s.to_h.object_id).not_to eq(h2.object_id)
expect(s.name).to eql("s2")
expect(s.num).to eql(10)
expect(s.class).to eql("A")
expect(s.num2).to eql("5")
expect(s.num3).to eql("8")
end
end
end
end
| 29.326531 | 92 | 0.567154 |
1a640c81ff2b2cb05d0e94aec2edb0e06c7ff008 | 471 | # frozen_string_literal: true
module MaxExchangeApi
class Config
attr_accessor :timeout
attr_accessor :logger
def initialize(data = nil)
data ||= {}
@timeout = data[:timeout]
@logger = data[:logger]
end
def reverse_merge!(other)
@timeout ||= other.timeout
@logger ||= other.logger
end
end
@default_config = Config.new
@default_config.timeout = 3
class << self
attr_reader :default_config
end
end
| 17.444444 | 32 | 0.645435 |
ab541fe027da2d8fe8e06e2b3c165ca1c8a161c8 | 94 | # frozen_string_literal: true
require './init'
require './app'
$stdout.sync = true
run App
| 10.444444 | 29 | 0.702128 |
1c6322ac0bf305e877e044f6b361b78672a756a1 | 1,094 | module WebMock
module Util
class Headers
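# Normalizes a header hash into WebMock's canonical form: names are split on
# "_" or "-" and each segment capitalized; single-element array values are
# unwrapped, longer arrays stringified and sorted, Regexp values passed through,
# and everything else stringified. For example:
#
#   normalize_headers(content_type: 'application/json', 'x-api-key' => :secret)
#   # => { "Content-Type" => "application/json", "X-Api-Key" => "secret" }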
def self.normalize_headers(headers)
return nil unless headers
array = headers.map { |name, value|
[name.to_s.split(/_|-/).map { |segment| segment.capitalize }.join("-"),
case value
when Regexp then value
when Array then (value.size == 1) ? value.first : value.map {|v| v.to_s}.sort
else value.to_s
end
]
}
Hash[*array.inject([]) {|r,x| r + x}]
end
def self.sorted_headers_string(headers)
headers = WebMock::Util::Headers.normalize_headers(headers)
str = '{'
str << headers.map do |k,v|
v = case v
when Regexp then v.inspect
when Array then "["+v.map{|v| "'#{v.to_s}'"}.join(", ")+"]"
else "'#{v.to_s}'"
end
"'#{k}'=>#{v}"
end.sort.join(", ")
str << '}'
end
def self.decode_userinfo_from_header(header)
header.sub(/^Basic /, "").unpack("m").first
end
end
end
end
| 24.863636 | 89 | 0.49543 |
26f54f3cfa3b656cb44bdb1f52f2e37b39614124 | 1,164 | class Ils
class Item < BaseStruct
attribute :id, Types::String
attribute :call_number, Types::String.optional
attribute :status, Ils::ItemStatus
attribute :barcode, Types::String.optional
attribute :library, Ils::Library.optional
attribute :location, Ils::Location.optional
attribute :process_type, Ils::ProcessType.optional
attribute :due_date, Types::Time.optional
attribute :due_date_policy, Types::String.optional
#attribute :signature, Types::String
#attribute :collection_code, Types::String
#attribute :item_status_code, Types::String
#attribute :process_status_code, Types::String.optional
#attribute :process_status, Types::ProcessStatus
#attribute :availability_status, Types::AvailabilityStatus
#attribute :due_date, Types::Date.optional
#attribute :note, Types::String.optional
#attribute :hold_request_count, Types::Integer.default(0)
#attribute :hold_request_allowed, Types::Bool.default(false)
#def expected?
# process_status == :expected && due_date.present?
#end
#def loaned?
# process_status == :loaned && due_date.present?
#end
end
end
| 35.272727 | 64 | 0.727663 |
1d2f10a315b803fadaf953b45f43fc1e3172df95 | 2,335 | # Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'java_buildpack/component'
require 'java_buildpack/util/qualify_path'
module JavaBuildpack
module Component
# An abstraction around the +JAVA_HOME+ path used by the droplet. This implementation is immutable and should be
# passed to any component that is not a jre.
#
# A new instance of this type should be created once for the application.
class ImmutableJavaHome
include JavaBuildpack::Util
# Creates a new instance of the java home abstraction
#
# @param [MutableJavaHome] delegate the instance of +MutableJavaHome+ to use as a delegate for +root+ calls
def initialize(delegate, droplet_root)
@delegate = delegate
@droplet_root = droplet_root
end
# Returns the path of +JAVA_HOME+ as an environment variable formatted as +JAVA_HOME=$PWD/<value>+
#
# @return [String] the path of +JAVA_HOME+ as an environment variable
def as_env_var
"JAVA_HOME=#{qualify_path root}"
end
# Whether or not the version of Java is 8 or later
#
# @return [Boolean] +true+ iff the version is 1.8.0 or later
def java_8_or_later?
@delegate.java_8_or_later?
end
# Whether or not the version of Java is 9 or later
#
# @return [Boolean] +true+ iff the version is 9.0.0 or later
def java_9_or_later?
@delegate.java_9_or_later?
end
# @return [Pathname] the root of the droplet's +JAVA_HOME+
def root
@delegate.root
end
# @return [JavaBuildpack::Util::TokenizedVersion] the droplet's tokenized +VERSION+
def version
@delegate.version
end
end
end
end
| 32.430556 | 117 | 0.688651 |
38715b4fa83eeb412b07b50de0ae11d24eb06b77 | 402 | cask 'navicat-premium' do
version '11.1.17'
sha256 '33c4db7b9628e99c29f097b399ec177dd22a3563196f26e59b5ff257b5c25cf0'
url "http://download.navicat.com/download/navicat#{version.sub(%r{^(\d+)\.(\d+).*},'\1\2')}_premium_en.dmg"
name 'Navicat Premium'
homepage 'http://www.navicat.com/products/navicat-premium'
license :commercial
tags :vendor => 'Navicat'
app 'Navicat Premium.app'
end
| 30.923077 | 109 | 0.728856 |
d555a6cf42d4be3445063fc7c30460eb12beb5c2 | 521 | cask 'picasa' do
version '3.9'
sha256 'df84018272ecf0f56e78e73cb6eb8f7c08911f6e24294fdc811e362a35dace9c'
url "https://dl.google.com/photos/picasamac#{version.no_dots}.dmg"
name 'Picasa'
homepage 'https://picasa.google.com/'
license :gratis
app 'Picasa.app'
zap delete: [
'~/Library/Preferences/com.google.picasa.plist',
'~/Library/Application Support/Google/Picasa3',
'~/Library/Saved Application State/com.google.picasa.savedState',
]
end
| 28.944444 | 81 | 0.664107 |
08552284ecb0ee189eee998718a60a247176c6fe | 372 | class CdTo < Cask
version '2.6.0'
sha256 'a92def521d332a373f655a41338d0ec18dfaa6e24eb9ec2ca6df281398db3d46'
url "https://github.com/jbtule/cdto/releases/download/#{version.gsub('.', '_')}/cdto_#{version.gsub('.', '_').gsub(/_\d$/, '')}.zip"
homepage 'https://github.com/jbtule/cdto'
app "cdto_#{version.gsub('.', '_').gsub(/_\d$/, '')}/terminal/cd to.app"
end
| 37.2 | 134 | 0.672043 |
d59d49573e3495395fb8797abc3dfadaa9a8610a | 5,111 | #
# Be sure to run `pod spec lint WSNetworkModule.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "WSNetworkModule"
s.version = "0.2.1"
s.summary = "A short description of WSNetworkModule."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
.........................................
DESC
s.homepage = "http://EXAMPLE/WSNetworkModule"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT"
s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "sw" => "[email protected]" }
# Or just: s.author = "sw"
# s.authors = { "sw" => "[email protected]" }
# s.social_media_url = "http://twitter.com/sw"
# ――― Platform Specifics ―――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
s.platform = :ios, "8.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/wsv587/WSNetworkModule.git", :tag => "#{s.version}" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "WSNetworkModule", "WSNetworkModule/Classes/WSNetworkModule/*.{h,m}"
# s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ―――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 36.769784 | 102 | 0.588339 |
6119550b9df84c3df231ce432700b7d8a3f7550b | 1,106 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "googleauth"
module Google
module Ads
module GoogleAds
module V9
module Services
module GoogleAdsFieldService
# Credentials for the GoogleAdsFieldService API.
class Credentials < ::Google::Auth::Credentials
self.scope = [
"https://www.googleapis.com/auth/adwords"
]
end
end
end
end
end
end
end
| 28.358974 | 74 | 0.676311 |
79870ed9db33d8a1783cab4fdace7f0950c0fcd7 | 282 | ENV['RACK_ENV'] = 'test'
require 'bundler/setup'
Bundler.require :default, :test
set :root, Dir.pwd
require 'capybara/rspec'
Capybara.app = Sinatra::Application
set :show_exceptions, false
require './app'
Dir[File.dirname(__FILE__) + '/../lib/*.rb'].each { |file| require file }
| 21.692308 | 73 | 0.70922 |
1c9ccd274f989791ca115008726a4d5475353cdb | 879 | Rails.application.routes.draw do
# JSON format alias
json = { format: 'json' }
# Devise routes
devise_for :users, skip: [:sessions, :registrations, :confirmations, :unlocks, :passwords]
devise_scope :user do
post 'users/sign_in' => 'users/sessions#create', defaults: json # Still need to
post 'users/password' => 'users/passwords#create', defaults: json # find a better solution
put 'users/password' => 'users/passwords#update', defaults: json # with these defaults
patch 'users/password' => 'users/passwords#update', defaults: json
post 'users' => 'users/registrations#create', defaults: json
end
# API routes
namespace :api, defaults: json do
namespace :v1 do
# User profile
put '/user' => 'users#update'
get '/user' => 'users#show'
end
end
root 'application#root'
end
| 32.555556 | 105 | 0.650739 |
1ac9e094c43826bc878cb5aa88b80f4ca1940dc4 | 1,295 | require "trailblazer/skill"
require "uber/delegates"
# Dependency ("skill") management for Operation.
# Op::[]
# Op::[]=
# Writing, even with an existing name, will never mutate a container.
# Op#[]
# Op#[]=
# Op.(params, { "constructor" => competences })
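# A rough sketch of how these pieces fit together (class and skill names below
# are illustrative, not part of this file):
#
#   class Create < Trailblazer::Operation
#     self["user.repository"] = DefaultRepository    # compile-time skill via Accessors
#   end
#
#   Create.({}, "user.repository" => FakeRepository) # runtime dependency is looked up
#                                                    # first; the class container stays untouched.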
class Trailblazer::Operation
module Skill
# The class-level skill container: Operation::[], ::[]=.
module Accessors
# :private:
def skills
@skills ||= {}
end
extend Uber::Delegates
delegates :skills, :[], :[]=
end
# Overrides Operation::call, creates the Skill hash and passes it to :call.
module Call
def call(options={}, *dependencies)
super Trailblazer::Skill.new(options, *dependencies, self.skills)
# DISCUSS: should this be: Trailblazer::Skill.new(runtime_options: [options, *dependencies], compiletime_options: [self.skills])
end
alias :_call :call
# It really sucks that Ruby doesn't have method overloading where we could simply have
# two different implementations of ::call.
# FIXME: that shouldn't be here in this namespace.
module Positional
def call(params={}, options={}, *dependencies)
super(options.merge("params" => params), *dependencies)
end
end
end
end
end
| 29.431818 | 136 | 0.641699 |
5d47d62a272bf89f7e68811ec70b75c12fd44ec9 | 1,808 |
Pod::Spec.new do |s|
s.name = "CZUIComponent"
s.version = "0.0.1"
s.summary = "cz self CZUIComponent summary"
s.description = "cz self CZUIComponent description"
s.homepage = "https://github.com/JuYiWei/CZUIComponent"
s.license = "MIT"
s.author = { "juyw" => "[email protected]" }
s.platform = :ios, "9.0"
# Resources
s.source = { :git => "https://github.com/JuYiWei/CZUIComponent.git", :tag => "#{s.version}" }
# s.source_files = "CZCategory/CZCategory/Category/*.{h,m}"
s.subspec 'UIComponent' do |ss1|
ss1.source_files = "CZUIComponent/CZUIComponent/UIComponent/*.{h,m}"
end
# s.subspec 'Foundation' do |ss2|
# ss2.source_files = "CZCategory/CZCategory/Category/Foundation/*.{h,m}"
# end
# Excluded resources
# s.exclude_files = "Classes/Exclude"
# Images
#s.resources = "CZLibrary/CZLibrary/Lbirary/Assets/*.png"
# ui
# s.dependency 'Masonry', '~> 1.0'
# s.dependency 'SDWebImage', '~> 4.0'
# s.dependency 'SVProgressHUD', '~> 2.0'
# s.dependency 'MBProgressHUD', '~> 1.1'
# s.dependency 'MJRefresh', '~> 3.0'
# s.dependency 'DZNEmptyDataSet', '~> 1.8'
# network
# s.dependency 'AFNetworking', '~> 3.0'
# tool
# s.dependency 'MJExtension', '~>3.0'
# s.dependency 'FMDB', '~>2.0'
# s.dependency 'YYCache', '~>1.0'
# s.dependency 'IQKeyboardManager', '~>6.0'
#s.dependency 'CocoaLumberjack', '~>3.4'
# Umeng
# s.dependency 'UMCCommon', '~> 1.5'
# s.dependency 'UMCSecurityPlugins', '~> 1.0'
# s.dependency 'UMCCommonLog', '~> 1.0'
# s.dependency 'UMCAnalytics', '~> 5.5'
# s.dependency 'UMCPush', '~> 3.2'
# s.dependency 'UMCShare/UI', '~> 6.9.4'
# s.dependency 'UMCShare/Social/ReducedWeChat', '~> 6.9.4'
# s.dependency 'UMCShare/Social/ReducedQQ', '~> 6.9.4'
s.requires_arc = true
end
| 27.393939 | 101 | 0.610066 |
87febff56eb8b725f745fe7919f6c88acaf7dc43 | 9,685 | class PostArchive < ApplicationRecord
class RevertError < Exception ; end
extend Memoist
belongs_to :post
belongs_to_updater
user_status_counter :post_update_count, foreign_key: :updater_id
before_validation :fill_version, on: :create
before_validation :fill_changes, on: :create
#establish_connection (ENV["ARCHIVE_DATABASE_URL"] || "archive_#{Rails.env}".to_sym) if enabled?
self.table_name = "post_versions"
def self.check_for_retry(msg)
if msg =~ /can't get socket descriptor/ && msg =~ /post_versions/
connection.reconnect!
end
end
module SearchMethods
def for_user(user_id)
if user_id
where("updater_id = ?", user_id)
else
none
end
end
def for_user_name(name)
user_id = User.name_to_id(name)
for_user(user_id)
end
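# Builds the Elasticsearch query body for post-version searches: every supported
# param (updater, post id, start_id, tag/locked-tag additions and removals,
# reason, description) contributes a term/terms or match clause, all ANDed under
# bool.must. Roughly, { updater_id: "123", tags_added: "cat" } becomes:
#
#   { query: { bool: { must: [ { bool: { should: [{ term: { updater_id: 123 } }] } },
#                              { term: { tags_added: "cat" } } ] } },
#     sort: { id: :desc }, ... }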
def build_query(params)
must = []
def should(*args)
{bool: {should: args}}
end
def split_to_terms(field, input)
input.split(',').map(&:to_i).map {|x| {term: {field => x}}}
end
def tag_list(field, input, target)
if input.present?
target += Tag.scan_tags(input, strip_metatags: true).map {|x| {term: {field => x}}}
end
target
end
if params[:updater_name].present?
user_id = User.name_to_id(params[:updater_name])
must << {term: {updater_id: user_id}} if user_id
end
if params[:updater_id].present?
must << should(*split_to_terms(:updater_id, params[:updater_id]))
end
if params[:post_id].present?
must << should(*split_to_terms(:post_id, params[:post_id]))
end
if params[:start_id].present?
must << {range: {id: {gte: params[:start_id].to_i}}}
end
must = tag_list(:tags, params[:tags], must)
must = tag_list(:tags_removed, params[:tags_removed], must)
must = tag_list(:tags_added, params[:tags_added], must)
must = tag_list(:locked_tags, params[:locked_tags], must)
must = tag_list(:locked_tags_removed, params[:locked_tags_removed], must)
must = tag_list(:locked_tags_added, params[:locked_tags_added], must)
if params[:reason].present?
must << {match: {reason: params[:reason]}}
end
if params[:description].present?
must << {match: {description: params[:description]}}
end
if must.empty?
must.push({match_all: {}})
end
{
query: {bool: {must: must}},
sort: {id: :desc},
_source: false,
timeout: "#{CurrentUser.user.try(:statement_timeout) || 3_000}ms"
}
end
end
extend SearchMethods
include Indexable
include PostVersionIndex
def self.queue(post)
self.create({
post_id: post.id,
rating: post.rating,
parent_id: post.parent_id,
source: post.source,
updater_id: CurrentUser.id,
updater_ip_addr: CurrentUser.ip_addr,
tags: post.tag_string,
locked_tags: post.locked_tags,
description: post.description,
reason: post.edit_reason
})
end
def self.calculate_version(post_id)
1 + where("post_id = ?", post_id).maximum(:version).to_i
end
def fill_version
self.version = PostArchive.calculate_version(self.post_id)
end
def fill_changes
prev = previous
if prev
self.added_tags = tag_array - prev.tag_array
self.removed_tags = prev.tag_array - tag_array
self.added_locked_tags = locked_tag_array - prev.locked_tag_array
self.removed_locked_tags = prev.locked_tag_array - locked_tag_array
else
self.added_tags = tag_array
self.removed_tags = []
self.added_locked_tags = locked_tag_array
self.removed_locked_tags = []
end
self.rating_changed = prev.nil? || rating != prev.try(:rating)
self.parent_changed = prev.nil? || parent_id != prev.try(:parent_id)
self.source_changed = prev.nil? || source != prev.try(:source)
self.description_changed = prev.nil? || description != prev.try(:description)
end
def tag_array
tags.split
end
def locked_tag_array
(locked_tags || "").split
end
def presenter
PostVersionPresenter.new(self)
end
def reload
flush_cache
super
end
def previous
    # HACK: If this is the first version we can avoid a lookup because we know there are no previous versions.
if version <= 1
return nil
end
# HACK: if all the post versions for this post have already been preloaded,
# we can use that to avoid a SQL query.
if association(:post).loaded? && post && post.association(:versions).loaded?
post.versions.sort_by(&:version).reverse.find {|v| v.version < version}
else
PostArchive.where("post_id = ? and version < ?", post_id, version).order("version desc").first
end
end
def visible?
post && post.visible?
end
def diff_sources(version = nil)
new_sources = source&.split("\n") || []
old_sources = version&.source&.split("\n") || []
added_sources = new_sources - old_sources
removed_sources = old_sources - new_sources
return {
:added_sources => added_sources,
:unchanged_sources => new_sources & old_sources,
:removed_sources => removed_sources
}
end
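  # Diffs this version's tags (plus rating:/parent: pseudo-tags) against an
  # arbitrary other version, and flags changes that are obsolete relative to
  # the post's latest tags.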
def diff(version = nil)
if post.nil?
latest_tags = tag_array
else
latest_tags = post.tag_array
latest_tags << "rating:#{post.rating}" if post.rating.present?
latest_tags << "parent:#{post.parent_id}" if post.parent_id.present?
end
new_tags = tag_array
new_tags << "rating:#{rating}" if rating.present?
new_tags << "parent:#{parent_id}" if parent_id.present?
old_tags = version.present? ? version.tag_array : []
if version.present?
old_tags << "rating:#{version.rating}" if version.rating.present?
old_tags << "parent:#{version.parent_id}" if version.parent_id.present?
end
added_tags = new_tags - old_tags
removed_tags = old_tags - new_tags
new_locked = locked_tag_array
old_locked = version.present? ? version.locked_tag_array : []
added_locked = new_locked - old_locked
removed_locked = old_locked - new_locked
return {
added_tags: added_tags,
removed_tags: removed_tags,
obsolete_added_tags: added_tags - latest_tags,
obsolete_removed_tags: removed_tags & latest_tags,
unchanged_tags: new_tags & old_tags,
added_locked_tags: added_locked,
removed_locked_tags: removed_locked,
unchanged_locked_tags: new_locked & old_locked
}
end
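  # Like #diff, but builds the delta from the stored added/removed tag columns
  # and the immediately preceding version, folding rating, parent and source
  # changes in as pseudo-tags.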
def changes
delta = {
:added_tags => added_tags,
:removed_tags => removed_tags,
:obsolete_removed_tags => [],
:obsolete_added_tags => [],
:unchanged_tags => []
}
return delta if post.nil?
latest_tags = post.tag_array
latest_tags << "rating:#{post.rating}" if post.rating.present?
latest_tags << "parent:#{post.parent_id}" if post.parent_id.present?
latest_tags << "source:#{post.source}" if post.source.present?
if parent_changed
if parent_id.present?
delta[:added_tags] << "parent:#{parent_id}"
end
if previous
delta[:removed_tags] << "parent:#{previous.parent_id}"
end
end
if rating_changed
delta[:added_tags] << "rating:#{rating}"
if previous
delta[:removed_tags] << "rating:#{previous.rating}"
end
end
if source_changed
if source.present?
delta[:added_tags] << "source:#{source}"
end
if previous
delta[:removed_tags] << "source:#{previous.source}"
end
end
delta[:obsolete_added_tags] = delta[:added_tags] - latest_tags
delta[:obsolete_removed_tags] = delta[:removed_tags] & latest_tags
if previous
delta[:unchanged_tags] = tag_array & previous.tag_array
else
delta[:unchanged_tags] = []
end
delta
end
def added_tags_with_fields
changes[:added_tags].join(" ")
end
def removed_tags_with_fields
changes[:removed_tags].join(" ")
end
def obsolete_added_tags
changes[:obsolete_added_tags].join(" ")
end
def obsolete_removed_tags
changes[:obsolete_removed_tags].join(" ")
end
def unchanged_tags
changes[:unchanged_tags].join(" ")
end
def truncated_source
source.gsub(/^http:\/\//, "").sub(/\/.+/, "")
end
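  # Reverts the post's tag string, source and parent to undo this version's
  # edit, skipping changes that have already become obsolete.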
def undo
raise RevertError unless post.visible?
added = changes[:added_tags] - changes[:obsolete_added_tags]
removed = changes[:removed_tags] - changes[:obsolete_removed_tags]
added.each do |tag|
if tag =~ /^source:/
post.source = ""
elsif tag =~ /^parent:/
post.parent_id = nil
else
escaped_tag = Regexp.escape(tag)
post.tag_string = post.tag_string.sub(/(?:\A| )#{escaped_tag}(?:\Z| )/, " ").strip
end
end
removed.each do |tag|
if tag =~ /^source:(.+)$/
post.source = $1
else
post.tag_string = "#{post.tag_string} #{tag}".strip
end
end
end
def undo!
undo
post.save!
end
def can_undo?(user)
version > 1 && post&.visible? && user.is_member?
end
def can_revert_to?(user)
post&.visible? && user.is_member?
end
def method_attributes
super + [:obsolete_added_tags, :obsolete_removed_tags, :unchanged_tags, :updater_name]
end
memoize :previous, :tag_array, :changes, :added_tags_with_fields, :removed_tags_with_fields, :obsolete_removed_tags, :obsolete_added_tags, :unchanged_tags
end
| 27.128852 | 156 | 0.634693 |
38cb4ba076830dcacb82e820a015323a032a8849 | 1,009 | class Event < ApplicationRecord
belongs_to :itinerary
validates :itinerary, presence: true
has_attached_file :thumbnail,
styles: { thumb: ["400x200#", :jpg],
original: ['1024x768>', :jpg] },
# convert_options: { thumb: "-quality 100 -strip",
# original: "-quality 100 -strip" },
url: "/event_images/:hash.:extension",
hash_secret: Rails.application.secrets.paperclip_image_hash_secret
validates_attachment :thumbnail,
content_type: { content_type: ["image/jpeg", "image/gif", "image/png"] },
size: { in: 0..5000.kilobytes }
after_create :append_event_to_itinerary
after_destroy :remove_event_from_itinerary
private
def append_event_to_itinerary
self.itinerary.events_id.push(self.id)
self.itinerary.save
end
def remove_event_from_itinerary
self.itinerary.events_id.delete(self.id)
self.itinerary.save
end
end
| 31.53125 | 94 | 0.632309 |
bf590066516c9ee628e55d7b15e675ee3eca6125 | 1,098 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'Redirections', type: :request do
let(:valid_original_url) { 'https://vinicode.xyz' }
let(:valid_attributes) do
{ original: valid_original_url, shortened: '' }
end
let(:link) { create(:link, valid_attributes) }
  describe 'a call to the shortened link' do
before do
get link.shortened
end
    it 'redirects to the API link show path' do
# Removing the default parameter from the api v1 links url
expect(response).to redirect_to("#{api_v1_links_url.gsub('?locale=en', '')}/#{link.id}?locale=en")
end
end
  describe 'a call to the original API link path' do
let!(:old_access_count) { link.access_count }
before do
get "#{api_v1_links_url}/#{link.id}"
end
    it 'redirects to the original URL' do
expect(response).to redirect_to(link.original)
end
it 'increments the access_count' do
link.reload
new_count = old_access_count + 1
expect(new_count).to eq(link.access_count)
end
end
end
| 26.780488 | 104 | 0.684882 |
615504c2caa9925c114339a6726a7b10bb08f8d3 | 8,324 | lock '3.10.1'
# Application #
#####################################################################################
set :application, 'aidstream'
set :branch, ENV["branch"] || "master"
set :user, ENV["user"] || ENV["USER"] || "aidstream"
set :tmp_dir, :tmp_dir
# SCM #
#####################################################################################
set :repo_url, '[email protected]:web-apps/aidstream-new.git'
set :scm, :git
set :repo_base_url, 'http://gitlab.yipl.com.np/'
set :repo_diff_path, 'web-apps/aidstream-new/compare/master...'
# Multistage Deployment #
#####################################################################################
set :stages, %w(dev staging prod)
set :default_stage, "staging"
# Other Options #
#####################################################################################
set :ssh_options, { :forward_agent => true }
set :default_run_options, { :pty => true }
# Permissions #
#####################################################################################
set :use_sudo, false
set :permission_method, :acl
set :use_set_permissions, true
set :webserver_user, "www-data"
set :group, "www-data"
set :keep_releases, 3
# Hipchat Integration #
#####################################################################################
set :hipchat_token, "ZpXA6zeepyBgIm4R3EbImcmm7xCcXMl49NbbEpRg"
set :hipchat_room_name, "1080583"
# Create ver.txt #
#######################################################################################
require 'date'
set :current_time, DateTime.now
namespace :environment do
desc "Set environment variables"
task :set_variables do
on roles(:app) do
puts ("--> Copying environment configuration file")
execute "cp #{release_path}/.env.server #{release_path}/.env"
puts ("--> Setting environment variables")
execute "sed --in-place -f #{fetch(:overlay_path)}/parameters.sed #{release_path}/.env"
end
end
end
namespace :composer do
desc "Running Composer Install"
task :install do
on roles(:app) do
within release_path do
execute :composer, "install --no-dev --quiet"
execute :composer, "dumpautoload -o"
end
end
end
end
namespace :aidstream do
desc "Create shared folders"
task :create_storage_folder do
on roles(:all) do
execute "mkdir -p #{shared_path}/storage"
execute "mkdir #{shared_path}/storage/app"
execute "mkdir #{shared_path}/storage/framework"
execute "mkdir #{shared_path}/storage/framework/cache"
execute "mkdir #{shared_path}/storage/framework/sessions"
execute "mkdir #{shared_path}/storage/framework/views"
execute "mkdir #{shared_path}/storage/logs"
execute :chmod, "-R 777 #{shared_path}/storage"
end
end
task :create_uploads_folder do
on roles(:all) do
execute "mkdir #{shared_path}/uploads"
execute "mkdir #{shared_path}/uploads/files"
execute "mkdir #{shared_path}/uploads/files/organization"
execute "mkdir #{shared_path}/uploads/files/activity"
execute "mkdir #{shared_path}/uploads/temp"
execute :chmod, "-R 777 #{shared_path}/uploads/"
end
end
desc "Symbolic link for shared folders"
task :create_symlink do
on roles(:app) do
within release_path do
execute "rm -rf #{release_path}/storage"
execute "ln -s #{shared_path}/storage/ #{release_path}"
execute "ln -s #{shared_path}/uploads #{release_path}/public"
execute "ln -s #{shared_path}/files #{release_path}/public/files"
end
end
end
desc "Run Laravel Artisan migrate task."
task :migrate do
on roles(:app) do
within release_path do
execute :php, "artisan migrate --force"
end
end
end
desc "Run Laravel Artisan seed task."
task :seed do
on roles(:app) do
within release_path do
execute :php, "artisan db:seed --force"
end
end
end
desc "Optimize Laravel Class Loader"
task :optimize do
on roles(:app) do
within release_path do
execute :php, "artisan clear-compiled"
execute :php, "artisan optimize"
end
end
end
desc "Restart Laravel Queue"
task :queue_restart do
on roles(:app) do
execute "sudo supervisorctl restart activity_csv_importer_queue"
end
end
desc 'Create ver.txt'
task :create_ver_txt do
on roles(:all) do
puts ("--> Copying ver.txt file")
execute "cp #{release_path}/config/deploy/ver.txt.example #{release_path}/public/ver.txt"
execute "sed --in-place 's/%date%/#{fetch(:current_time)}/g
s/%branch%/#{fetch(:branch)}/g
s/%revision%/#{fetch(:current_revision)}/g
s/%deployed_by%/#{fetch(:user)}/g' #{release_path}/public/ver.txt"
execute "find #{release_path}/public -type f -name 'ver.txt' -exec chmod 664 {} \\;"
end
end
desc " Set up project "
task :set_up do
on roles(:all) do
invoke "aidstream:create_storage_folder"
invoke "aidstream:create_uploads_folder"
# invoke "environment:create_variables"
end
end
end
namespace :vendor do
desc 'Copy vendor directory from last release'
task :copy do
on roles(:web) do
puts ("--> Copy vendor folder from previous release")
execute "vendorDir=#{current_path}/vendor; if [ -d $vendorDir ] || [ -h $vendorDir ]; then cp -a $vendorDir #{release_path}/vendor; fi;"
end
end
end
namespace :hipchat do
desc 'Notify Hipchat'
task :notify do
on roles(:all) do
execute "curl -s -H 'Content-Type: application/json' -X POST -d '{\"color\": \"#{fetch(:notify_color)}\", \"message_format\": \"text\", \"message\": \"#{fetch(:notify_message)}\", \"notify\": \"true\" }' https://api.hipchat.com/v2/room/#{fetch(:hipchat_room_name)}/notification?auth_token=#{fetch(:hipchat_token)}"
Rake::Task["hipchat:notify"].reenable
end
end
desc 'Hipchat notification on deployment'
task :start do
on roles(:all) do
message = "#{fetch(:user)} is deploying #{fetch(:application)}/#{fetch(:branch)} to #{fetch(:env)}. diff at: #{fetch(:repo_base_url)}#{fetch(:repo_diff_path)}#{fetch(:branch)}"
set :notify_message, message
set :notify_color, 'gray'
invoke "hipchat:notify"
end
end
task :deployed do
on roles(:all) do
message = "#{fetch(:user)} finished deploying #{fetch(:application)}/#{fetch(:branch)} (revision #{fetch(:current_revision)}) to #{fetch(:env)}."
set :notify_message, message
set :notify_color, 'green'
invoke "hipchat:notify"
end
end
task :notify_deploy_failed do
on roles(:all) do
message = "Error deploying #{fetch(:application)}/#{fetch(:branch)} (revision #{fetch(:current_revision)}) to #{fetch(:env)}, user: #{fetch(:user)} ."
set :notify_message, message
set :notify_color, 'red'
invoke "hipchat:notify"
end
end
end
namespace :nginx do
desc 'Reload nginx server'
task :reload do
on roles(:all) do
execute :sudo, "/etc/init.d/nginx reload"
end
end
end
namespace :deploy do
after :starting, "hipchat:start"
after :updated, "vendor:copy"
after :updated, "composer:install"
after :updated, "environment:set_variables"
after :published, "aidstream:create_symlink"
after :finished, "hipchat:deployed"
after :finished, "aidstream:queue_restart"
after :finished, "aidstream:create_ver_txt"
after :failed, "hipchat:notify_deploy_failed"
end
after "deploy", "nginx:reload"
| 34.828452 | 326 | 0.551658 |
6afd5a7f28e71f075832b5cf5fc259aea1e7d3e0 | 932 | module LicenseFinder
class PossibleLicenseFiles
CANDIDATE_FILE_NAMES = %w(LICENSE License Licence COPYING README Readme ReadMe)
CANDIDATE_PATH_WILDCARD = "*{#{CANDIDATE_FILE_NAMES.join(',')}}*"
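    # Scans install_path for files or directories whose names look like
    # license/readme files and wraps each match in a PossibleLicenseFile.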
def self.find(install_path)
new(install_path).find
end
def initialize(install_path)
@install_path = install_path ? Pathname(install_path) : nil
end
def find
paths_of_candidate_files.map do |path|
file_at_path(path)
end
end
private
attr_reader :install_path
def paths_of_candidate_files
candidate_files_and_dirs.map do |path|
path.directory? ? path.children : path
end.flatten.uniq
end
def candidate_files_and_dirs
return [] if install_path.nil?
Pathname.glob(install_path.join('**', CANDIDATE_PATH_WILDCARD))
end
def file_at_path(path)
PossibleLicenseFile.new(install_path, path)
end
end
end
| 23.3 | 83 | 0.693133 |