hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1) |
---|---|---|---|---|---|
e9c113f5b71b2d6f529bdd38ea8e5ad15d93705a | 1,708 | module ApplicationHelper
def title(title)
content_for :title do
title
end
end
def on_applications
"active" if controller_names.include? %w(dashboard global_applications)
end
def on_servers
"active" if %w(dashboard servers applications ssh_keys env_vars application_databases backups).
include?(controller_name) && !on_backups
end
def on_account
controller_names = %w(accounts)
controller_names.include?(controller_name) ? "active" : nil
end
def on_backups
"active" if controller_name == "backups" && action_name == "overview"
end
def format_log(log)
escape_to_html(log).gsub(/\n/, "<br>").gsub(/\ /, "&nbsp;")
end
def poll_server_url
return unless current_server
if current_app
poll_server_path(app_id: current_app.id)
else
if controller_name == "applications" && action_name == "index"
poll_server_path(include_deleted: true)
else
poll_server_path
end
end
end
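# Translates ANSI terminal color escape codes in the log output into HTML
# <span> tags so colored logs can be rendered in the browser.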
def escape_to_html(data)
{ 1 => :nothing,
2 => :nothing,
4 => :nothing,
5 => :nothing,
7 => :nothing,
30 => :black,
31 => :red,
32 => :green,
33 => :yellow,
34 => :blue,
35 => :magenta,
36 => :cyan,
37 => :white,
40 => :nothing,
41 => :nothing,
43 => :nothing,
44 => :nothing,
45 => :nothing,
46 => :nothing,
47 => :nothing
}.each do |key, value|
if value != :nothing
data = data.gsub(/\e\[#{key}m/, "<span style=\"color:#{value}\">")
else
data = data.gsub(/\e\[#{key}m/, "<span>")
end
end
data = data.gsub(/\e\[0m/, "</span>")
end
end
| 23.081081 | 99 | 0.575527 |
b90ba9798b3ac40c2ac275522d797de9fec4f4f5 | 79 | class CreateRides < ActiveRecord::Migration
# Write your migrations here
end
| 19.75 | 43 | 0.797468 |
aba472da29859bbc335d04f6ff88ed96456f44f9 | 2,835 | class Innotop < Formula
desc "Top clone for MySQL"
homepage "https://github.com/innotop/innotop/"
url "https://github.com/innotop/innotop/archive/v1.13.0.tar.gz"
sha256 "6ec91568e32bda3126661523d9917c7fbbd4b9f85db79224c01b2a740727a65c"
license any_of: ["GPL-2.0-only", "Artistic-1.0-Perl"]
revision 1
head "https://github.com/innotop/innotop.git"
bottle do
sha256 cellar: :any, arm64_big_sur: "cf80f3c8db711fe4481d9f9663fb1c9877c5a078542f9756a0f4f5dcb8658962"
sha256 cellar: :any, big_sur: "e064fd105588d19acfcf7c1a52afb674b140bf8ca66bbdbd85e0c4c45a14c784"
sha256 cellar: :any, catalina: "e8becfe3d8da8c8ed971061e0814e46ae19609cd0ea0ab58260713d8ca2595d0"
sha256 cellar: :any, mojave: "d2430929751627ed8242594b4d0cfc59346e5604232a148a97e879d1700fa05e"
end
depends_on "mysql-client"
depends_on "[email protected]"
uses_from_macos "perl"
resource "Devel::CheckLib" do
url "https://cpan.metacpan.org/authors/id/M/MA/MATTN/Devel-CheckLib-1.14.tar.gz"
sha256 "f21c5e299ad3ce0fdc0cb0f41378dca85a70e8d6c9a7599f0e56a957200ec294"
end
resource "DBI" do
url "https://cpan.metacpan.org/authors/id/T/TI/TIMB/DBI-1.643.tar.gz"
sha256 "8a2b993db560a2c373c174ee976a51027dd780ec766ae17620c20393d2e836fa"
end
resource "DBD::mysql" do
url "https://cpan.metacpan.org/authors/id/D/DV/DVEEDEN/DBD-mysql-4.050.tar.gz"
sha256 "4f48541ff15a0a7405f76adc10f81627c33996fbf56c95c26c094444c0928d78"
end
resource "TermReadKey" do
url "https://cpan.metacpan.org/authors/id/J/JS/JSTOWE/TermReadKey-2.38.tar.gz"
sha256 "5a645878dc570ac33661581fbb090ff24ebce17d43ea53fd22e105a856a47290"
end
def install
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
resources.each do |r|
r.stage do
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
# Work around restriction on 10.15+ where .bundle files cannot be loaded
# from a relative path -- while in the middle of our build we need to
# refer to them by their full path. Workaround adapted from:
# https://github.com/fink/fink-distributions/issues/461#issuecomment-563331868
inreplace "Makefile", "blib/", "$(shell pwd)/blib/" if r.name == "TermReadKey"
system "make", "install"
end
end
# Disable dynamic selection of perl which may cause segfault when an
# incompatible perl is picked up.
inreplace "innotop", "#!/usr/bin/env perl", "#!/usr/bin/perl"
system "perl", "Makefile.PL", "INSTALL_BASE=#{prefix}"
system "make", "install"
share.install prefix/"man"
bin.env_script_all_files(libexec/"bin", PERL5LIB: ENV["PERL5LIB"])
end
test do
# Calling commands throws up an interactive GUI, which is a pain.
assert_match version.to_s, shell_output("#{bin}/innotop --version")
end
end
| 39.929577 | 106 | 0.729453 |
61888d43d306c0e9d664a0b3d7e04d28baf36a06 | 568 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/devtools/artifactregistry/v1beta2/settings.proto
require 'google/protobuf'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/protobuf/field_mask_pb'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/devtools/artifactregistry/v1beta2/settings.proto", :syntax => :proto3) do
end
end
module Google
module Cloud
module ArtifactRegistry
module V1beta2
end
end
end
end
| 24.695652 | 92 | 0.77993 |
61fdd241798c0c3066091a3a64bbd8bd54f17e52 | 1,290 | # frozen_string_literal: true
require 'ox'
require_relative 'message_builder'
require_relative 'find_candidate_message_error'
module MPI
module Messages
module FindProfileMessageHelpers
include MPI::Messages::MessageBuilder
EXTENSION = 'PRPA_IN201305UV02'
def to_xml
super(EXTENSION, build_body)
rescue => e
Rails.logger.error "failed to build find candidate message: #{e.message}"
raise
end
private
def build_body
body = build_control_act_process
body << query_by_parameter
body
end
def query_by_parameter
build_query_by_parameter << build_parameter_list
end
def build_query_by_parameter
el = element('queryByParameter')
el << element('queryId', root: '1.2.840.114350.1.13.28.1.18.5.999', extension: '18204')
el << element('statusCode', code: 'new')
el << element('modifyCode', code: search_type)
el << element('initialQuantity', value: 1)
end
def build_vba_orchestration
el = element('otherIDsScopingOrganization')
el << element('value', extension: 'VBA', root: '2.16.840.1.113883.4.349')
el << element('semanticsText', text!: 'MVI.ORCHESTRATION')
end
end
end
end
| 26.875 | 95 | 0.648837 |
e8052c59ecbfb27b5ce1374d0671f6b1c617b519 | 4,957 | describe Hubspot::Company do
let(:example_company_hash) do
VCR.use_cassette("company_example", record: :none) do
HTTParty.get("https://api.hubapi.com/companies/v2/companies/21827084?hapikey=demo").parsed_response
end
end
before{ Hubspot.configure(hapikey: "demo") }
describe "#initialize" do
subject{ Hubspot::Company.new(example_company_hash) }
it{ should be_an_instance_of Hubspot::Company }
its(["name"]){ should == "HubSpot" }
its(["domain"]){ should == "hubspot.com" }
its(:vid){ should == 21827084 }
end
describe ".create!" do
cassette "company_create"
let(:params){{}}
subject{ Hubspot::Company.create!(name, params) }
context "with a new name" do
let(:name){ "New Company #{Time.now.to_i}" }
it{ should be_an_instance_of Hubspot::Company }
its(:name){ should match /New Company .*/ } # Due to VCR the name may not match exactly
context "and some params" do
cassette "company_create_with_params"
let(:name){ "New Company with Params #{Time.now.to_i}" }
let(:params){ {domain: "new-company-domain-#{Time.now.to_i}"} }
its(["name"]){ should match /New Company with Params/ }
its(["domain"]){ should match /new\-company\-domain/ }
end
end
end
describe ".find_by_id" do
context 'given an uniq id' do
cassette "company_find_by_id"
subject{ Hubspot::Company.find_by_id(vid) }
context "when the company is found" do
let(:vid){ 21827084 }
it{ should be_an_instance_of Hubspot::Company }
its(:name){ should == "HubSpot" }
end
context "when the contact cannot be found" do
it 'raises an error' do
expect { Hubspot::Company.find_by_id(9999999) }.to raise_error(Hubspot::RequestError)
end
end
end
end
describe ".find_by_domain" do
context 'given a domain' do
cassette "company_find_by_domain"
subject{ Hubspot::Company.find_by_domain("hubspot.com") }
context "when a company is found" do
it{ should be_an_instance_of Array }
it{ should_not be_empty }
end
context "when a company cannot be found" do
subject{Hubspot::Company.find_by_domain("asdf1234baddomain.com")}
it{ should be_an_instance_of Array }
it{ should be_empty }
end
end
end
describe '.all' do
context 'all companies' do
cassette 'find_all_companies'
it 'must get the companies list' do
companies = Hubspot::Company.all
expect(companies.size).to eql 20 # default page size
first = companies.first
last = companies.last
expect(first).to be_a Hubspot::Company
expect(first.vid).to eql 42866817
expect(first['name']).to eql 'name'
expect(last).to be_a Hubspot::Company
expect(last.vid).to eql 42861017
expect(last['name']).to eql 'Xge5rbdt2zm'
end
it 'must filter only 2 companies' do
companies = Hubspot::Company.all(count: 2)
expect(companies.size).to eql 2
end
end
context 'recent companies' do
cassette 'find_all_recent_companies'
it 'must get the companies list' do
companies = Hubspot::Company.all(recent: true)
expect(companies.size).to eql 20
first, last = companies.first, companies.last
expect(first).to be_a Hubspot::Company
expect(first.vid).to eql 42866817
expect(last).to be_a Hubspot::Company
expect(last.vid).to eql 42861017
end
end
end
describe "#update!" do
cassette "company_update"
let(:company){ Hubspot::Company.new(example_company_hash) }
let(:params){ {name: "Acme Cogs", domain: "abccogs.com"} }
subject{ company.update!(params) }
it{ should be_an_instance_of Hubspot::Company }
its(["name"]){ should == "Acme Cogs" }
its(["domain"]){ should == "abccogs.com" }
context "when the request is not successful" do
let(:company){ Hubspot::Company.new({"vid" => "invalid", "properties" => {}})}
it "raises an error" do
expect{ subject }.to raise_error Hubspot::RequestError
end
end
end
describe "#destroy!" do
cassette "company_destroy"
let(:company){ Hubspot::Company.create!("newcompany_y_#{Time.now.to_i}@hsgem.com") }
subject{ company.destroy! }
it { should be_true }
it "should be destroyed" do
subject
company.destroyed?.should be_true
end
context "when the request is not successful" do
let(:company){ Hubspot::Company.new({"vid" => "invalid", "properties" => {}})}
it "raises an error" do
expect{ subject }.to raise_error Hubspot::RequestError
company.destroyed?.should be_false
end
end
end
describe "#destroyed?" do
let(:company){ Hubspot::Company.new(example_company_hash) }
subject{ company }
its(:destroyed?){ should be_false }
end
end | 30.98125 | 105 | 0.638491 |
180718b7dbaaea18d2625af44ff4787c5fc2f6eb | 399 | class TaxonNameClassification::Iczn < TaxonNameClassification
NOMEN_URI='http://purl.obolibrary.org/obo/NOMEN_0000107'.freeze
def self.applicable_ranks
ICZN
end
def self.code_applicability_start_year
1758
end
def self.disjoint_taxon_name_classes
ICNP_TAXON_NAME_CLASSIFICATION_NAMES + ICN_TAXON_NAME_CLASSIFICATION_NAMES + ICTV_TAXON_NAME_CLASSIFICATION_NAMES
end
end
| 22.166667 | 117 | 0.819549 |
abc8118835028808e03a206f379e6ee5a4945093 | 7,600 | #
# Author:: Serdar Sutay (<[email protected]>)
# Copyright:: Copyright 2013-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
if Chef::Platform.windows?
require "chef/application/windows_service_manager"
end
#
# ATTENTION:
# This test creates a windows service for testing purposes and runs it
# as Local System (or an otherwise specified user) on windows boxes.
# This test will fail if you run the tests inside a Windows VM by
# sharing the code from your host since Local System account by
# default can't see the mounted partitions.
# Run this test by copying the code to a local VM directory, or set up
# the Local System account so it can see the mounted partitions for the
# shared directories.
#
describe "Chef::Application::WindowsServiceManager", :windows_only, :system_windows_service_gem_only, :appveyor_only do
include_context "using Win32::Service"
context "with invalid service definition" do
it "throws an error when initialized with no service definition" do
expect { Chef::Application::WindowsServiceManager.new(nil) }.to raise_error(ArgumentError)
end
it "throws an error with required missing options" do
%i{service_name service_display_name service_description service_file_path}.each do |key|
service_def = test_service.dup
service_def.delete(key)
expect { Chef::Application::WindowsServiceManager.new(service_def) }.to raise_error(ArgumentError)
end
end
end
context "with valid definition" do
before(:each) do
@service_manager_output = [ ]
# Uncomment below lines to debug this test
# original_puts = $stdout.method(:puts)
allow($stdout).to receive(:puts) do |message|
@service_manager_output << message
# original_puts.call(message)
end
end
after(:each) do
cleanup
end
context "when service doesn't exist" do
it "default => should say service don't exist" do
service_manager.run
expect(@service_manager_output.grep(/doesn't exist on the system/).length).to be > 0
end
it "install => should install the service" do
service_manager.run(["-a", "install"])
expect(test_service_exists?).to be_truthy
end
it "other actions => should say service doesn't exist" do
%w{delete start stop pause resume uninstall}.each do |action|
service_manager.run(["-a", action])
expect(@service_manager_output.grep(/doesn't exist on the system/).length).to be > 0
@service_manager_output = [ ]
end
end
end
context "when service exists" do
before(:each) do
service_manager.run(["-a", "install"])
end
it "should have an own-process, non-interactive type" do
status = ::Win32::Service.status("spec-service")
expect(status[:service_type]).to eq("own process")
expect(status[:interactive]).to be_falsey
end
it "install => should say service already exists" do
service_manager.run(["-a", "install"])
expect(@service_manager_output.grep(/already exists/).length).to be > 0
end
context "and service is stopped" do
%w{delete uninstall}.each do |action|
it "#{action} => should remove the service", :volatile do
service_manager.run(["-a", action])
expect(test_service_exists?).to be_falsey
end
end
it "default, status => should say service is stopped" do
service_manager.run([ ])
expect(@service_manager_output.grep(/stopped/).length).to be > 0
@service_manager_output = [ ]
service_manager.run(["-a", "status"])
expect(@service_manager_output.grep(/stopped/).length).to be > 0
end
it "start should start the service", :volatile do
service_manager.run(["-a", "start"])
expect(test_service_state).to eq("running")
expect(File.exists?(test_service_file)).to be_truthy
end
it "stop should not affect the service" do
service_manager.run(["-a", "stop"])
expect(test_service_state).to eq("stopped")
end
%w{pause resume}.each do |action|
it "#{action} => should raise error" do
expect { service_manager.run(["-a", action]) }.to raise_error(SystemCallError)
end
end
context "and service is started", :volatile do
before(:each) do
service_manager.run(["-a", "start"])
end
%w{delete uninstall}.each do |action|
it "#{action} => should remove the service", :volatile do
service_manager.run(["-a", action])
expect(test_service_exists?).to be_falsey
end
end
it "default, status => should say service is running" do
service_manager.run([ ])
expect(@service_manager_output.grep(/running/).length).to be > 0
@service_manager_output = [ ]
service_manager.run(["-a", "status"])
expect(@service_manager_output.grep(/running/).length).to be > 0
end
it "stop should stop the service" do
service_manager.run(["-a", "stop"])
expect(test_service_state).to eq("stopped")
end
it "pause should pause the service" do
service_manager.run(["-a", "pause"])
expect(test_service_state).to eq("paused")
end
it "resume should have no affect" do
service_manager.run(["-a", "resume"])
expect(test_service_state).to eq("running")
end
end
context "and service is paused", :volatile do
before(:each) do
service_manager.run(["-a", "start"])
service_manager.run(["-a", "pause"])
end
actions = %w{delete uninstall}
actions.each do |action|
it "#{action} => should remove the service" do
service_manager.run(["-a", action])
expect(test_service_exists?).to be_falsey
end
end
it "default, status => should say service is paused" do
service_manager.run([ ])
expect(@service_manager_output.grep(/paused/).length).to be > 0
@service_manager_output = [ ]
service_manager.run(["-a", "status"])
expect(@service_manager_output.grep(/paused/).length).to be > 0
end
it "stop should stop the service" do
service_manager.run(["-a", "stop"])
expect(test_service_state).to eq("stopped")
end
it "pause should not affect the service" do
service_manager.run(["-a", "pause"])
expect(test_service_state).to eq("paused")
end
it "start should raise an error" do
expect { service_manager.run(["-a", "start"]) }.to raise_error(::Win32::Service::Error)
end
end
end
end
end
end
| 34.38914 | 119 | 0.623553 |
e95070f99401a7716a97eec05465cdc04c36484e | 73 | # typed: true
:@@ # error: `@@` is not allowed as a class variable name
| 18.25 | 57 | 0.630137 |
f7783bd7e0ce08dd542d799e3a124850b3e22c3d | 3,615 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elastic
module Transport
module Transport
module HTTP
# The default transport implementation, using the [_Faraday_](https://rubygems.org/gems/faraday)
# library for abstracting the HTTP client.
#
# @see Transport::Base
#
class Faraday
include Base
# Performs the request by invoking {Transport::Base#perform_request} with a block.
#
# @return [Response]
# @see Transport::Base#perform_request
#
def perform_request(method, path, params = {}, body = nil, headers = nil, opts = {})
super do |connection, url|
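# Merge any per-request headers into the connection's default headers;
# if only one of the two is present, use it as-is.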
headers = if connection.connection.headers
if !headers.nil?
connection.connection.headers.merge(headers)
else
connection.connection.headers
end
else
headers
end
body = body ? __convert_to_json(body) : nil
body, headers = compress_request(body, headers)
response = connection.connection.run_request(
method.downcase.to_sym,
url,
body,
headers
)
Response.new response.status, decompress_response(response.body), response.headers
end
end
# Builds and returns a connection
#
# @return [Connections::Connection]
#
def __build_connection(host, options={}, block=nil)
client = ::Faraday.new(__full_url(host), options, &block)
apply_headers(client, options)
Connections::Connection.new(host: host, connection: client)
end
# Returns an array of implementation specific connection errors.
#
# @return [Array]
#
def host_unreachable_exceptions
[
::Faraday::ConnectionFailed,
::Faraday::TimeoutError,
::Faraday.const_defined?(:ServerError) ? ::Faraday::ServerError : nil,
::Faraday::SSLError
].compact
end
private
def user_agent_header(client)
@user_agent ||= begin
meta = ["RUBY_VERSION: #{RUBY_VERSION}"]
if RbConfig::CONFIG && RbConfig::CONFIG['host_os']
meta << "#{RbConfig::CONFIG['host_os'].split('_').first[/[a-z]+/i].downcase} #{RbConfig::CONFIG['target_cpu']}"
end
meta << "#{client.headers[USER_AGENT_STR]}"
"elastic-transport-ruby/#{VERSION} (#{meta.join('; ')})"
end
end
end
end
end
end
end
| 36.15 | 127 | 0.565975 |
797352dd25358b067251e90ab74ad0b57711b1e1 | 1,699 | require 'declarative_authorization/maintenance'
module Alchemy
module Specs
# Helpers for integration specs
#
# This file is included in rspec integration/request tests.
#
module IntegrationHelpers
include ::Authorization::TestHelper
# Shortcut method for:
#
# * create_admin_user
# * login_into_alchemy
#
def authorize_as_admin
create_admin_user
login_into_alchemy
end
# Capybara actions to log into the Alchemy Backend
#
# You should have an admin user before logging in.
#
# See: create_admin_user method
#
def login_into_alchemy
# Ensure that phantomjs always has the same browser language.
if Capybara.current_driver == :poltergeist
page.driver.headers = { 'Accept-Language' => 'en' }
end
visit login_path
fill_in('user_login', :with => 'jdoe')
fill_in('user_password', :with => 's3cr3t')
click_on('Login')
end
# Load additional authorization_rules for specs.
#
# For some strange reason, this isn't done automatically while running the specs
#
def load_authorization_rules
instance = Alchemy::Auth::Engine.get_instance
instance.load(File.join(File.dirname(__FILE__), '../../dummy', 'config/authorization_rules.rb'))
end
# Creates an admin user in a way that works
#
# You should create it once in a before block
#
# === Example:
#
# before do
# create_admin_user
# end
#
def create_admin_user
FactoryGirl.create(:admin_user)
end
end
end
end
| 25.358209 | 104 | 0.612713 |
336aac8b3cfdf4ad13cd81a7b6a7a86c7e46d6e5 | 909 | # frozen_string_literal: true
require "spec_helper"
RSpec.describe Gemsmith::Generators::GitLint do
subject(:git_lint) { described_class.new cli, configuration: configuration }
include_context "with temporary directory"
let(:cli) { instance_spy Gemsmith::CLI, destination_root: temp_dir }
let(:configuration) { {gem: {name: "tester"}, generate: {git_lint: create_git_lint}} }
describe "#run" do
before { git_lint.run }
context "when enabled" do
let(:create_git_lint) { true }
it "does not remove Rakefile lines" do
expect(cli).not_to have_received(:gsub_file)
end
end
context "when disabled" do
let(:create_git_lint) { false }
it "removes Rakefile requirement" do
expect(cli).to have_received(:gsub_file).with(
"tester/Rakefile",
%r(require.+git/lint.+\n),
""
)
end
end
end
end
| 24.567568 | 88 | 0.652365 |
285373de8afa457a93889ec06cd9cee66884430d | 160 | module MakropoulosViewTool
class Renderer
def self.copyright name, msg
"© #{Time.now.year} | <b>#{name}</b> #{msg}".html_safe
end
end
end | 22.857143 | 65 | 0.64375 |
212d8b1525244df78f2cd59472de6dc223c7e979 | 14,596 | # frozen_string_literal: true
require 'spec_helper'
describe Projects::PipelinesController do
include ApiHelpers
set(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:feature) { ProjectFeature::ENABLED }
before do
stub_not_protect_default_branch
project.add_developer(user)
project.project_feature.update(builds_access_level: feature)
sign_in(user)
end
describe 'GET index.json' do
before do
%w(pending running success failed canceled).each_with_index do |status, index|
create_pipeline(status, project.commit("HEAD~#{index}"))
end
end
context 'when using persisted stages', :request_store do
before do
stub_feature_flags(ci_pipeline_persisted_stages: true)
end
it 'returns serialized pipelines', :request_store do
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
queries = ActiveRecord::QueryRecorder.new do
get_pipelines_index_json
end
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('pipeline')
expect(json_response).to include('pipelines')
expect(json_response['pipelines'].count).to eq 5
expect(json_response['count']['all']).to eq '5'
expect(json_response['count']['running']).to eq '1'
expect(json_response['count']['pending']).to eq '1'
expect(json_response['count']['finished']).to eq '3'
json_response.dig('pipelines', 0, 'details', 'stages').tap do |stages|
expect(stages.count).to eq 3
end
expect(queries.count).to be
end
end
context 'when using legacy stages', :request_store do
before do
stub_feature_flags(ci_pipeline_persisted_stages: false)
end
it 'returns JSON with serialized pipelines' do
get_pipelines_index_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('pipeline')
expect(json_response).to include('pipelines')
expect(json_response['pipelines'].count).to eq 5
expect(json_response['count']['all']).to eq '5'
expect(json_response['count']['running']).to eq '1'
expect(json_response['count']['pending']).to eq '1'
expect(json_response['count']['finished']).to eq '3'
json_response.dig('pipelines', 0, 'details', 'stages').tap do |stages|
expect(stages.count).to eq 3
end
end
it 'does not execute N+1 queries' do
queries = ActiveRecord::QueryRecorder.new do
get_pipelines_index_json
end
expect(queries.count).to be <= 36
end
end
it 'does not include coverage data for the pipelines' do
get_pipelines_index_json
expect(json_response['pipelines'][0]).not_to include('coverage')
end
context 'when performing gitaly calls', :request_store do
it 'limits the Gitaly requests' do
# Isolate from test preparation (Repository#exists? is also cached in RequestStore)
RequestStore.end!
RequestStore.clear!
RequestStore.begin!
expect(::Gitlab::GitalyClient).to receive(:allow_ref_name_caching).and_call_original
expect { get_pipelines_index_json }
.to change { Gitlab::GitalyClient.get_request_count }.by(2)
end
end
context 'when the project is private' do
let(:project) { create(:project, :private, :repository) }
it 'returns `not_found` when the user does not have access' do
sign_in(create(:user))
get_pipelines_index_json
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns the pipelines when the user has access' do
get_pipelines_index_json
expect(json_response['pipelines'].size).to eq(5)
end
end
def get_pipelines_index_json
get :index, params: {
namespace_id: project.namespace,
project_id: project
},
format: :json
end
def create_pipeline(status, sha)
pipeline = create(:ci_empty_pipeline, status: status,
project: project,
sha: sha)
create_build(pipeline, 'build', 1, 'build')
create_build(pipeline, 'test', 2, 'test')
create_build(pipeline, 'deploy', 3, 'deploy')
end
def create_build(pipeline, stage, stage_idx, name)
status = %w[created running pending success failed canceled].sample
create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name, status: status)
end
end
describe 'GET show.json' do
let(:pipeline) { create(:ci_pipeline_with_one_job, project: project) }
it 'returns the pipeline' do
get_pipeline_json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to be_an(Array)
expect(json_response['id']).to be(pipeline.id)
expect(json_response['details']).to have_key 'stages'
end
context 'when the pipeline has multiple stages and groups', :request_store do
let(:project) { create(:project, :repository) }
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
user: user,
sha: project.commit.id)
end
before do
create_build('build', 0, 'build')
create_build('test', 1, 'rspec 0')
create_build('deploy', 2, 'production')
create_build('post deploy', 3, 'pages 0')
end
it 'does not perform N + 1 queries' do
# Set up all required variables
get_pipeline_json
control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
first_build = pipeline.builds.first
first_build.tag_list << [:hello, :world]
create(:deployment, deployable: first_build)
second_build = pipeline.builds.second
second_build.tag_list << [:docker, :ruby]
create(:deployment, deployable: second_build)
new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
expect(new_count).to be_within(1).of(control_count)
end
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'users can not see internal pipelines' do
get_pipeline_json
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when pipeline is external' do
let(:pipeline) { create(:ci_pipeline, source: :external, project: project) }
it 'users can see the external pipeline' do
get_pipeline_json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to be(pipeline.id)
end
end
end
def get_pipeline_json
get :show, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :json
end
def create_build(stage, stage_idx, name)
create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name)
end
end
describe 'GET stages.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when accessing existing stage' do
before do
create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
create(:ci_build, pipeline: pipeline, stage: 'build')
end
context 'without retried' do
before do
get_stage('build')
end
it 'returns pipeline jobs without the retried builds' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('pipeline_stage')
expect(json_response['latest_statuses'].length).to eq 1
expect(json_response).not_to have_key('retried')
end
end
context 'with retried' do
before do
get_stage('build', retried: true)
end
it 'returns pipelines jobs with the retried builds' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('pipeline_stage')
expect(json_response['latest_statuses'].length).to eq 1
expect(json_response['retried'].length).to eq 1
end
end
end
context 'when accessing unknown stage' do
before do
get_stage('test')
end
it 'responds with not found' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
def get_stage(name, params = {})
get :stage, params: {
**params.merge(
namespace_id: project.namespace,
project_id: project,
id: pipeline.id,
stage: name,
format: :json)
}
end
end
describe 'GET stages_ajax.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when accessing existing stage' do
before do
create(:ci_build, pipeline: pipeline, stage: 'build')
get_stage_ajax('build')
end
it 'returns html source for stage dropdown' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('projects/pipelines/_stage')
expect(json_response).to include('html')
end
end
context 'when accessing unknown stage' do
before do
get_stage_ajax('test')
end
it 'responds with not found' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
def get_stage_ajax(name)
get :stage_ajax, params: {
namespace_id: project.namespace,
project_id: project,
id: pipeline.id,
stage: name
},
format: :json
end
end
describe 'GET status.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:status) { pipeline.detailed_status(double('user')) }
before do
get :status, params: {
namespace_id: project.namespace,
project_id: project,
id: pipeline.id
},
format: :json
end
it 'returns a detailed pipeline status in json' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['text']).to eq status.text
expect(json_response['label']).to eq status.label
expect(json_response['icon']).to eq status.icon
expect(json_response['favicon']).to match_asset_path("/assets/ci_favicons/#{status.favicon}.png")
end
end
describe 'POST retry.json' do
let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }
before do
post :retry, params: {
namespace_id: project.namespace,
project_id: project,
id: pipeline.id
},
format: :json
end
it 'retries a pipeline without returning any content' do
expect(response).to have_gitlab_http_status(:no_content)
expect(build.reload).to be_retried
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'fails to retry pipeline' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
describe 'POST cancel.json' do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
before do
post :cancel, params: {
namespace_id: project.namespace,
project_id: project,
id: pipeline.id
},
format: :json
end
it 'cancels a pipeline without returning any content' do
expect(response).to have_gitlab_http_status(:no_content)
expect(pipeline.reload).to be_canceled
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'fails to retry pipeline' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
describe 'GET latest' do
let(:branch_main) { project.repository.branches[0] }
let(:branch_secondary) { project.repository.branches[1] }
let!(:pipeline_master) do
create(:ci_pipeline,
ref: branch_main.name,
sha: branch_main.target,
project: project)
end
let!(:pipeline_secondary) do
create(:ci_pipeline,
ref: branch_secondary.name,
sha: branch_secondary.target,
project: project)
end
before do
project.change_head(branch_main.name)
project.reload_default_branch
end
context 'no ref provided' do
it 'shows latest pipeline for the default project branch' do
get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: nil }
expect(response).to have_gitlab_http_status(200)
expect(assigns(:pipeline)).to have_attributes(id: pipeline_master.id)
end
end
context 'ref provided' do
before do
create(:ci_pipeline, ref: 'master', project: project)
end
it 'shows the latest pipeline for the provided ref' do
get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: branch_secondary.name }
expect(response).to have_gitlab_http_status(200)
expect(assigns(:pipeline)).to have_attributes(id: pipeline_secondary.id)
end
context 'newer pipeline exists for older sha' do
before do
create(:ci_pipeline, ref: branch_secondary.name, sha: project.commit(branch_secondary.name).parent, project: project)
end
it 'shows the provided ref with the last sha/pipeline combo' do
get :show, params: { namespace_id: project.namespace, project_id: project, latest: true, ref: branch_secondary.name }
expect(response).to have_gitlab_http_status(200)
expect(assigns(:pipeline)).to have_attributes(id: pipeline_secondary.id)
end
end
end
it 'renders a 404 if no pipeline is found for the ref' do
get :show, params: { namespace_id: project.namespace, project_id: project, ref: 'no-branch' }
expect(response).to have_gitlab_http_status(404)
end
end
end
| 31.321888 | 127 | 0.632022 |
bbbef4df65c50753e4cc2972f187fd8f3659b37a | 371 | class CreateDevices < ActiveRecord::Migration
def change
create_table :devices do |t|
t.string :applianceId
t.string :manufacturerName
t.string :modelName
t.string :version
t.string :friendlyName
t.string :friendlyDescription
t.string :isReachable
t.string :photonId
t.timestamps null: false
end
end
end
| 21.823529 | 45 | 0.668464 |
6a2180140fc9cee6b12f3b2e1f4a566cf28da29b | 2,327 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Storage::Mgmt::V2018_07_01
module Models
#
# Storage REST API operation definition.
#
class Operation
include MsRestAzure
# @return [String] Operation name: {provider}/{resource}/{operation}
attr_accessor :name
# @return [OperationDisplay] Display metadata associated with the
# operation.
attr_accessor :display
# @return [String] The origin of operations.
attr_accessor :origin
# @return [ServiceSpecification] One property of operation, include
# metric specifications.
attr_accessor :service_specification
#
# Mapper for Operation class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Operation',
type: {
name: 'Composite',
class_name: 'Operation',
model_properties: {
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
display: {
client_side_validation: true,
required: false,
serialized_name: 'display',
type: {
name: 'Composite',
class_name: 'OperationDisplay'
}
},
origin: {
client_side_validation: true,
required: false,
serialized_name: 'origin',
type: {
name: 'String'
}
},
service_specification: {
client_side_validation: true,
required: false,
serialized_name: 'properties.serviceSpecification',
type: {
name: 'Composite',
class_name: 'ServiceSpecification'
}
}
}
}
}
end
end
end
end
| 27.702381 | 74 | 0.50838 |
1d519d71b3f218daeb3d36cb76d174e5a50414ef | 732 | module Fog
module Compute
class Google
class Mock
def list_zones
zones = self.data[:zones].values
build_response(:body => {
"kind" => "compute#zoneList",
"selfLink" => "https://www.googleapis.com/compute/#{api_version}/projects/#{@project}/zones",
"id" => "projects/#{@project}/zones",
"items" => zones
})
end
end
class Real
def list_zones
api_method = @compute.zones.list
parameters = {
'project' => @project
}
result = self.build_result(api_method, parameters)
response = self.build_response(result)
end
end
end
end
end
| 24.4 | 105 | 0.521858 |
038aeb5e5505bbfd72364a2e6cb5fccb6029f2ff | 1,133 | #
# Be sure to run `pod lib lint VNBase.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'VNEssential'
s.version = '0.4.62'
s.summary = 'Simple MVVM helper'
s.swift_version = '5.0'
s.description = <<-DESC
TODO: I should add some description later =)
DESC
s.homepage = 'https://github.com/teanet/VNBase'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'teanet' => '[email protected]' }
s.source = { :git => 'https://github.com/teanet/VNBase.git', :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/teanet'
s.ios.deployment_target = '10.0'
s.watchos.deployment_target = '5.0'
s.source_files = 'VNBase/Essential/**/*'
# s.resource_bundles = {
# 'VNBase' => ['VNBase/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
s.ios.frameworks = 'UIKit'
end
| 32.371429 | 97 | 0.598411 |
01b52e4e6476f9f60049da8379f30a315e435947 | 22,257 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.13-SNAPSHOT
=end
require 'date'
module DocuSign_eSign
class TemplateTabs
# Specifies a tag on the document where you want the recipient to approve documents in an envelope without placing a signature or initials on the document. If the recipient clicks the Approve tag during the signing process, the recipient is considered to have signed the document. No information is shown on the document for the approval, but it is recorded as a signature in the envelope history.
attr_accessor :approve_tabs
# Specifies a tag on the document in a location where the recipient can select an option.
attr_accessor :checkbox_tabs
#
attr_accessor :comment_thread_tabs
# Specifies a tag on the document where you want the recipient's company name to appear. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :company_tabs
# Specifies a tab on the document where the date the document was signed will automatically appear.
attr_accessor :date_signed_tabs
# Specifies a tab on the document where you want the recipient to enter a date. Date tabs are single-line fields that allow date information to be entered in any format. The tooltip for this tab recommends entering the date as MM/DD/YYYY, but this is not enforced. The format entered by the signer is retained. If you need a particular date format enforced, DocuSign recommends using a Text tab with a Validation Pattern and Validation Message to enforce the format.
attr_accessor :date_tabs
# Specifies a tag on the document where you want to give the recipient the option of declining an envelope. If the recipient clicks the Decline tag during the signing process, the envelope is voided.
attr_accessor :decline_tabs
#
attr_accessor :draw_tabs
# Specifies a location on the document where you want where you want the recipient's email, as entered in the recipient information, to display.
attr_accessor :email_address_tabs
# Specifies a tag on the document where you want the recipient to enter an email. Email tags are single-line fields that accept any characters. The system checks that a valid email format (i.e. [email protected]) is entered in the tag. It uses the same parameters as a Text tab, with the validation message and pattern set for email information. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :email_tabs
# Specifies a tag on the document where you want the envelope ID for to appear. Recipients cannot enter or change the information in this tab, it is for informational purposes only.
attr_accessor :envelope_id_tabs
# Specifies tag on a document where you want the recipient's first name to appear. This tag takes the recipient's name, as entered in the recipient information, splits it into sections based on spaces and uses the first section as the first name.
attr_accessor :first_name_tabs
# Specifies a tag that is used to add a calculated field to a document. Envelope recipients cannot directly enter information into the tag; the formula tab calculates and displays a new value when changes are made to the reference tag values. The reference tag information and calculation operations are entered in the \"formula\" element. See the [ML:Using the Calculated Fields Feature] quick start guide or [ML:DocuSign Service User Guide] for more information about formulas.
attr_accessor :formula_tabs
# Specifies a tag on the document where you want the recipient's name to appear.
attr_accessor :full_name_tabs
# Specifies a tag location in the document at which a recipient will place their initials. The `optional` parameter specifies whether the initials are required or optional.
attr_accessor :initial_here_tabs
# Specifies a tag on a document where you want the recipient's last name to appear. This tag takes the recipient's name, as entered in the recipient information, splits it into sections based on spaces and uses the last section as the last name.
attr_accessor :last_name_tabs
# Specify this tag to give your recipient a list of options, presented as a drop-down list, from which they can select.
attr_accessor :list_tabs
#
attr_accessor :notarize_tabs
# Specifies a location on the document where you want to place additional information, in the form of a note, for a recipient.
attr_accessor :note_tabs
# Specifies a tag on the document where you want the recipient to enter a number. It uses the same parameters as a Text tab, with the validation message and pattern set for number information. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :number_tabs
#
attr_accessor :poly_line_overlay_tabs
# Specifies a tag on the document in a location where the recipient can select one option from a group of options using a radio button. The radio buttons do not have to be on the same page in a document.
attr_accessor :radio_group_tabs
# Specifies a tag on the document when you want the recipient to add supporting documents to an envelope.
attr_accessor :signer_attachment_tabs
# A complex type the contains information about the tag that specifies where the recipient places their signature in the document. The \"optional\" parameter sets if the signature is required or optional.
attr_accessor :sign_here_tabs
#
attr_accessor :smart_section_tabs
# Specifies a tag on the document where you want the recipient to enter a Social Security Number (SSN). A SSN can be typed with or without dashes. It uses the same parameters as a Text tab, with the validation message and pattern set for SSN information. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :ssn_tabs
#
attr_accessor :tab_groups
# Specifies a that that is an adaptable field that allows the recipient to enter different text information. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :text_tabs
# Specifies a tag on the document where you want the recipient's title to appear. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :title_tabs
#
attr_accessor :view_tabs
# Specifies a tag on the document where you want the recipient to enter a ZIP code. The ZIP code can be a five numbers or the ZIP+4 format with nine numbers. The zip code can be typed with or without dashes. It uses the same parameters as a Text tab, with the validation message and pattern set for ZIP code information. When getting information that includes this tab type, the original value of the tab when the associated envelope was sent is included in the response.
attr_accessor :zip_tabs
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'approve_tabs' => :'approveTabs',
:'checkbox_tabs' => :'checkboxTabs',
:'comment_thread_tabs' => :'commentThreadTabs',
:'company_tabs' => :'companyTabs',
:'date_signed_tabs' => :'dateSignedTabs',
:'date_tabs' => :'dateTabs',
:'decline_tabs' => :'declineTabs',
:'draw_tabs' => :'drawTabs',
:'email_address_tabs' => :'emailAddressTabs',
:'email_tabs' => :'emailTabs',
:'envelope_id_tabs' => :'envelopeIdTabs',
:'first_name_tabs' => :'firstNameTabs',
:'formula_tabs' => :'formulaTabs',
:'full_name_tabs' => :'fullNameTabs',
:'initial_here_tabs' => :'initialHereTabs',
:'last_name_tabs' => :'lastNameTabs',
:'list_tabs' => :'listTabs',
:'notarize_tabs' => :'notarizeTabs',
:'note_tabs' => :'noteTabs',
:'number_tabs' => :'numberTabs',
:'poly_line_overlay_tabs' => :'polyLineOverlayTabs',
:'radio_group_tabs' => :'radioGroupTabs',
:'signer_attachment_tabs' => :'signerAttachmentTabs',
:'sign_here_tabs' => :'signHereTabs',
:'smart_section_tabs' => :'smartSectionTabs',
:'ssn_tabs' => :'ssnTabs',
:'tab_groups' => :'tabGroups',
:'text_tabs' => :'textTabs',
:'title_tabs' => :'titleTabs',
:'view_tabs' => :'viewTabs',
:'zip_tabs' => :'zipTabs'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'approve_tabs' => :'Array<Approve>',
:'checkbox_tabs' => :'Array<Checkbox>',
:'comment_thread_tabs' => :'Array<CommentThread>',
:'company_tabs' => :'Array<Company>',
:'date_signed_tabs' => :'Array<DateSigned>',
:'date_tabs' => :'Array<DocuSign_eSign::Date>',
:'decline_tabs' => :'Array<Decline>',
:'draw_tabs' => :'Array<Draw>',
:'email_address_tabs' => :'Array<EmailAddress>',
:'email_tabs' => :'Array<Email>',
:'envelope_id_tabs' => :'Array<EnvelopeId>',
:'first_name_tabs' => :'Array<FirstName>',
:'formula_tabs' => :'Array<FormulaTab>',
:'full_name_tabs' => :'Array<FullName>',
:'initial_here_tabs' => :'Array<InitialHere>',
:'last_name_tabs' => :'Array<LastName>',
:'list_tabs' => :'Array<Array>',
:'notarize_tabs' => :'Array<Notarize>',
:'note_tabs' => :'Array<Note>',
:'number_tabs' => :'Array<Number>',
:'poly_line_overlay_tabs' => :'Array<PolyLineOverlay>',
:'radio_group_tabs' => :'Array<RadioGroup>',
:'signer_attachment_tabs' => :'Array<SignerAttachment>',
:'sign_here_tabs' => :'Array<SignHere>',
:'smart_section_tabs' => :'Array<SmartSection>',
:'ssn_tabs' => :'Array<Ssn>',
:'tab_groups' => :'Array<TabGroup>',
:'text_tabs' => :'Array<Text>',
:'title_tabs' => :'Array<Title>',
:'view_tabs' => :'Array<View>',
:'zip_tabs' => :'Array<Zip>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'approveTabs')
if (value = attributes[:'approveTabs']).is_a?(Array)
self.approve_tabs = value
end
end
if attributes.has_key?(:'checkboxTabs')
if (value = attributes[:'checkboxTabs']).is_a?(Array)
self.checkbox_tabs = value
end
end
if attributes.has_key?(:'commentThreadTabs')
if (value = attributes[:'commentThreadTabs']).is_a?(Array)
self.comment_thread_tabs = value
end
end
if attributes.has_key?(:'companyTabs')
if (value = attributes[:'companyTabs']).is_a?(Array)
self.company_tabs = value
end
end
if attributes.has_key?(:'dateSignedTabs')
if (value = attributes[:'dateSignedTabs']).is_a?(Array)
self.date_signed_tabs = value
end
end
if attributes.has_key?(:'dateTabs')
if (value = attributes[:'dateTabs']).is_a?(Array)
self.date_tabs = value
end
end
if attributes.has_key?(:'declineTabs')
if (value = attributes[:'declineTabs']).is_a?(Array)
self.decline_tabs = value
end
end
if attributes.has_key?(:'drawTabs')
if (value = attributes[:'drawTabs']).is_a?(Array)
self.draw_tabs = value
end
end
if attributes.has_key?(:'emailAddressTabs')
if (value = attributes[:'emailAddressTabs']).is_a?(Array)
self.email_address_tabs = value
end
end
if attributes.has_key?(:'emailTabs')
if (value = attributes[:'emailTabs']).is_a?(Array)
self.email_tabs = value
end
end
if attributes.has_key?(:'envelopeIdTabs')
if (value = attributes[:'envelopeIdTabs']).is_a?(Array)
self.envelope_id_tabs = value
end
end
if attributes.has_key?(:'firstNameTabs')
if (value = attributes[:'firstNameTabs']).is_a?(Array)
self.first_name_tabs = value
end
end
if attributes.has_key?(:'formulaTabs')
if (value = attributes[:'formulaTabs']).is_a?(Array)
self.formula_tabs = value
end
end
if attributes.has_key?(:'fullNameTabs')
if (value = attributes[:'fullNameTabs']).is_a?(Array)
self.full_name_tabs = value
end
end
if attributes.has_key?(:'initialHereTabs')
if (value = attributes[:'initialHereTabs']).is_a?(Array)
self.initial_here_tabs = value
end
end
if attributes.has_key?(:'lastNameTabs')
if (value = attributes[:'lastNameTabs']).is_a?(Array)
self.last_name_tabs = value
end
end
if attributes.has_key?(:'listTabs')
if (value = attributes[:'listTabs']).is_a?(Array)
self.list_tabs = value
end
end
if attributes.has_key?(:'notarizeTabs')
if (value = attributes[:'notarizeTabs']).is_a?(Array)
self.notarize_tabs = value
end
end
if attributes.has_key?(:'noteTabs')
if (value = attributes[:'noteTabs']).is_a?(Array)
self.note_tabs = value
end
end
if attributes.has_key?(:'numberTabs')
if (value = attributes[:'numberTabs']).is_a?(Array)
self.number_tabs = value
end
end
if attributes.has_key?(:'polyLineOverlayTabs')
if (value = attributes[:'polyLineOverlayTabs']).is_a?(Array)
self.poly_line_overlay_tabs = value
end
end
if attributes.has_key?(:'radioGroupTabs')
if (value = attributes[:'radioGroupTabs']).is_a?(Array)
self.radio_group_tabs = value
end
end
if attributes.has_key?(:'signerAttachmentTabs')
if (value = attributes[:'signerAttachmentTabs']).is_a?(Array)
self.signer_attachment_tabs = value
end
end
if attributes.has_key?(:'signHereTabs')
if (value = attributes[:'signHereTabs']).is_a?(Array)
self.sign_here_tabs = value
end
end
if attributes.has_key?(:'smartSectionTabs')
if (value = attributes[:'smartSectionTabs']).is_a?(Array)
self.smart_section_tabs = value
end
end
if attributes.has_key?(:'ssnTabs')
if (value = attributes[:'ssnTabs']).is_a?(Array)
self.ssn_tabs = value
end
end
if attributes.has_key?(:'tabGroups')
if (value = attributes[:'tabGroups']).is_a?(Array)
self.tab_groups = value
end
end
if attributes.has_key?(:'textTabs')
if (value = attributes[:'textTabs']).is_a?(Array)
self.text_tabs = value
end
end
if attributes.has_key?(:'titleTabs')
if (value = attributes[:'titleTabs']).is_a?(Array)
self.title_tabs = value
end
end
if attributes.has_key?(:'viewTabs')
if (value = attributes[:'viewTabs']).is_a?(Array)
self.view_tabs = value
end
end
if attributes.has_key?(:'zipTabs')
if (value = attributes[:'zipTabs']).is_a?(Array)
self.zip_tabs = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
approve_tabs == o.approve_tabs &&
checkbox_tabs == o.checkbox_tabs &&
comment_thread_tabs == o.comment_thread_tabs &&
company_tabs == o.company_tabs &&
date_signed_tabs == o.date_signed_tabs &&
date_tabs == o.date_tabs &&
decline_tabs == o.decline_tabs &&
draw_tabs == o.draw_tabs &&
email_address_tabs == o.email_address_tabs &&
email_tabs == o.email_tabs &&
envelope_id_tabs == o.envelope_id_tabs &&
first_name_tabs == o.first_name_tabs &&
formula_tabs == o.formula_tabs &&
full_name_tabs == o.full_name_tabs &&
initial_here_tabs == o.initial_here_tabs &&
last_name_tabs == o.last_name_tabs &&
list_tabs == o.list_tabs &&
notarize_tabs == o.notarize_tabs &&
note_tabs == o.note_tabs &&
number_tabs == o.number_tabs &&
poly_line_overlay_tabs == o.poly_line_overlay_tabs &&
radio_group_tabs == o.radio_group_tabs &&
signer_attachment_tabs == o.signer_attachment_tabs &&
sign_here_tabs == o.sign_here_tabs &&
smart_section_tabs == o.smart_section_tabs &&
ssn_tabs == o.ssn_tabs &&
tab_groups == o.tab_groups &&
text_tabs == o.text_tabs &&
title_tabs == o.title_tabs &&
view_tabs == o.view_tabs &&
zip_tabs == o.zip_tabs
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[approve_tabs, checkbox_tabs, comment_thread_tabs, company_tabs, date_signed_tabs, date_tabs, decline_tabs, draw_tabs, email_address_tabs, email_tabs, envelope_id_tabs, first_name_tabs, formula_tabs, full_name_tabs, initial_here_tabs, last_name_tabs, list_tabs, notarize_tabs, note_tabs, number_tabs, poly_line_overlay_tabs, radio_group_tabs, signer_attachment_tabs, sign_here_tabs, smart_section_tabs, ssn_tabs, tab_groups, text_tabs, title_tabs, view_tabs, zip_tabs].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = DocuSign_eSign.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 40.614964 | 495 | 0.656827 |
b92032890a284c0706a3dcbd13b07fd9460f933b | 803 | require 'lnd_client'
class PaymentsController < ApplicationController
before_action :require_account
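  # Pays a BOLT11 invoice (params[:payment_request]) out of the signed-in
  # account's balance.
  #
  # Example request (illustrative only; the route name depends on routes.rb):
  #
  #   POST /payments
  #   payment_request=lnbc20u1p...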
def create
data = LndClient.decode_pay_req(params[:payment_request])
    # LND typically encodes num_satoshis as a string in decoded invoices, so
    # cast before comparing against the numeric balance (assumption about
    # LndClient's return shape; to_i is a no-op if it is already an Integer).
    amount = data['num_satoshis'].to_i
if current_account.balance < amount
return render json: { error: 'balance is not enough' }, status: :unprocessable_entity
end
    res = nil
    ActiveRecord::Base.transaction do
      # TODO: atomic update
      current_account.update!(balance: current_account.balance - amount)
      res = LndClient.pay(params[:payment_request])
      if res.payment_error
        # `return` from inside a transaction block is unreliable across Rails
        # versions; raise ActiveRecord::Rollback instead so the balance
        # deduction is undone when the payment fails.
        raise ActiveRecord::Rollback
      end
    end

    if res && res.payment_error
      render json: { error: res.payment_error }, status: :internal_server_error
    else
      render json: {}, status: :ok
    end
end
end
| 23.617647 | 91 | 0.648817 |
1aec460853e9d17196a77a10660ab09b2b035ebb | 3,538 | RSpec.describe MagicPipe::Config do
subject { described_class.new }
describe "the default configuration values" do
specify "client_name" do
expect(subject.client_name).to be_a String
end
specify "producer_name" do
expect(subject.producer_name).to be_a String
end
specify "logger" do
expect(subject.logger).to be_a Logger
end
specify "metrics_client" do
expect(subject.metrics_client).to_not be_nil
end
describe "https_transport_options" do
specify "when not set, it stays nil" do
expect(subject.https_transport_options).to be_nil
end
describe "when set" do
subject do
described_class.new { |c| c.https_transport_options = conf }
end
context "with missing values" do
let(:conf) { {} }
it "sets all the defaults" do
actual = subject.https_transport_options
expect(actual[:url]).to_not be_nil
expect(actual[:basic_auth]).to_not be_nil
expect(actual[:timeout]).to_not be_nil
expect(actual[:open_timeout]).to_not be_nil
expect(actual[:dynamic_path_builder]).to be_nil
end
end
context "with configured values" do
let(:fn) { -> (x) { x } }
let(:conf) do
{
url: "http://foo.bar",
dynamic_path_builder: fn
}
end
it "sets the defaults, but preserved the configured value" do
actual = subject.https_transport_options
expect(actual[:url]).to eq "http://foo.bar"
expect(actual[:dynamic_path_builder]).to eq fn
expect(actual[:basic_auth]).to_not be_nil
expect(actual[:timeout]).to_not be_nil
expect(actual[:open_timeout]).to_not be_nil
end
end
end
end
describe "sqs_transport_options" do
specify "when not set, it gets pre-populated" do
expect(subject.sqs_transport_options).to be_a Hash
expect(subject.sqs_transport_options).to_not be_empty
end
context "with missing values" do
it "sets all the defaults" do
actual = subject.sqs_transport_options
expect(actual[:queue]).to eq "magic_pipe"
end
end
context "with configured values" do
subject do
described_class.new { |c| c.sqs_transport_options = conf }
end
let(:conf) { { queue: "foo_bar" } }
it "sets the defaults, but preserved the configured value" do
actual = subject.sqs_transport_options
expect(actual[:queue]).to eq "foo_bar"
end
end
end
describe "async_transport_options" do
specify "when not set, it gets pre-populated" do
expect(subject.async_transport_options).to be_a Hash
expect(subject.async_transport_options).to_not be_empty
end
context "with missing values" do
it "sets all the defaults" do
actual = subject.async_transport_options
expect(actual[:queue]).to eq "magic_pipe"
end
end
context "with configured values" do
subject do
described_class.new { |c| c.async_transport_options = conf }
end
let(:conf) { { queue: "foo_bar" } }
it "sets the defaults, but preserved the configured value" do
actual = subject.async_transport_options
expect(actual[:queue]).to eq "foo_bar"
end
end
end
end
end
| 28.532258 | 71 | 0.610514 |
e870428158fc7dc69584418b6f4515010f0d50ae | 180 | class AddSmsAuthCodeToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :sms_auth_code, :string
add_index :users, :sms_auth_code, unique: true
end
end
| 25.714286 | 58 | 0.75 |
6181f155ee9f9880c0ede1ca25a82c75eccdd362 | 385 | class CreateListings < ActiveRecord::Migration[6.0]
def change
create_table :listings do |t|
t.string :title
t.string :description
t.string :rating
t.string :price
t.string :location
t.string :host
t.string :host_rating
t.string :image
t.references :user, null: false, foreign_key: true
t.timestamps
end
end
end
| 21.388889 | 56 | 0.633766 |
ab49cba964c00bc6210eb3b93fb37de60320f742 | 599 | require_relative 'base'
require 'bundler/audit/cli'
module Integration
class BundleAudit < Base
def run_with(config)
args = ['check']
ignored_cves = config.fetch('ignored_cve', [])
if !ignored_cves.empty?
args += ["--ignore"]
ignored_cves.each do |ignored_cve|
args << ignored_cve
end
end
Keepclean.logger.debug "Updating CVE database"
Bundler::Audit::CLI.start(['update', '--quiet'])
Keepclean.logger.debug "Running with args: #{args.inspect}"
Bundler::Audit::CLI.start(args)
true
end
end
end
| 22.185185 | 65 | 0.619366 |
18eb020fcbb76494cae9f6060a1310de49cfadbf | 22 | require "stellar-base" | 22 | 22 | 0.818182 |
0377cd497e514386b676049c7e9eab811ad4810c | 469 | # t.integer "event_id"
# t.integer "price"
# t.string "name"
# t.integer "max_quantity"
# t.datetime "created_at", null: false
# t.datetime "updated_at", null: false
# t.index ["event_id"], name: "index_ticket_types_on_event_id", using: :btree
class TicketType < ActiveRecord::Base
belongs_to :event
validates_presence_of :name
validates :price, :max_quantity, presence: true, numericality: true
end
| 31.266667 | 87 | 0.643923 |
115ef65578a96c1a52b636f7962ac7398c07dd1f | 12,749 | require 'spec_helper'
describe ActiveRecordViews do
describe '.create_view' do
let(:connection) { ActiveRecord::Base.connection }
def create_test_view(sql, options = {})
ActiveRecordViews.create_view connection, 'test', 'Test', sql, options
end
def drop_test_view
ActiveRecordViews.drop_view connection, 'test'
end
def test_view_sql
connection.select_value(<<-SQL.squish).try(&:squish)
SELECT view_definition
FROM information_schema.views
WHERE table_schema = 'public' AND table_name = 'test'
SQL
end
def test_view_populated?
value = connection.select_value(<<~SQL)
SELECT ispopulated
FROM pg_matviews
WHERE schemaname = 'public' AND matviewname = 'test'
SQL
if Rails::VERSION::MAJOR < 5
value = ActiveRecord::ConnectionAdapters::Column::TRUE_VALUES.include?(value)
end
value
end
def test_view_refreshed_at
connection.select_value(<<~SQL)
SELECT refreshed_at
FROM active_record_views
WHERE name = 'test'
SQL
end
def test_materialized_view_sql
connection.select_value(<<-SQL.squish).try(&:squish)
SELECT definition
FROM pg_matviews
WHERE schemaname = 'public' AND matviewname = 'test'
SQL
end
it 'creates database view' do
expect(test_view_sql).to be_nil
create_test_view 'select 1 as id'
expect(test_view_sql).to eq 'SELECT 1 AS id;'
end
it 'records checksum, class name, and options' do
create_test_view 'select 1 as id', materialized: true
expect(connection.select_all('select * from active_record_views').to_a).to eq [
{
'name' => 'test',
'class_name' => 'Test',
'checksum' => Digest::SHA1.hexdigest('select 1 as id'),
'options' => '{"materialized":true,"dependencies":[]}',
'refreshed_at' => nil,
}
]
end
it 'persists views if transaction rolls back' do
expect(test_view_sql).to be_nil
connection.transaction :requires_new => true do
create_test_view 'select 1 as id'
raise ActiveRecord::Rollback
end
expect(test_view_sql).to eq 'SELECT 1 AS id;'
end
it 'raises descriptive error if view SQL is invalid' do
expect {
create_test_view 'select blah'
}.to raise_error ActiveRecord::StatementInvalid, /column "blah" does not exist/
end
context 'with existing view' do
before do
create_test_view 'select 1 as id'
expect(test_view_sql).to eq 'SELECT 1 AS id;'
end
it 'updates view with compatible change' do
create_test_view 'select 2 as id'
expect(test_view_sql).to eq 'SELECT 2 AS id;'
end
it 'recreates view with incompatible change' do
create_test_view "select 'foo'::text as name"
expect(test_view_sql).to eq "SELECT 'foo'::text AS name;"
end
context 'having dependant views' do
before do
without_dependency_checks do
ActiveRecordViews.create_view connection, 'dependant1', 'Dependant1', 'SELECT id FROM test;'
ActiveRecordViews.create_view connection, 'dependant2a', 'Dependant2a', 'SELECT id, id * 2 AS id2 FROM dependant1;'
ActiveRecordViews.create_view connection, 'dependant2b', 'Dependant2b', 'SELECT id, id * 4 AS id4 FROM dependant1;'
ActiveRecordViews.create_view connection, 'dependant3', 'Dependant3', 'SELECT * FROM dependant2b;'
ActiveRecordViews.create_view connection, 'dependant4', 'Dependant4', 'SELECT id FROM dependant1 UNION ALL SELECT id FROM dependant3;'
end
end
it 'updates view with compatible change' do
create_test_view 'select 2 as id'
expect(test_view_sql).to eq 'SELECT 2 AS id;'
expect(Integer(connection.select_value('SELECT id2 FROM dependant2a'))).to eq 4
end
describe 'changes incompatible with CREATE OR REPLACE' do
it 'updates view with new column added before existing' do
create_test_view "select 'foo'::text as name, 3 as id"
expect(test_view_sql).to eq "SELECT 'foo'::text AS name, 3 AS id;"
expect(Integer(connection.select_value('SELECT id2 FROM dependant2a'))).to eq 6
end
it 'fails to update view if column used by dependant view is removed' do
expect {
create_test_view "select 'foo'::text as name"
}.to raise_error ActiveRecord::StatementInvalid, /column test.id does not exist/
expect(test_view_sql).to eq 'SELECT 1 AS id;'
expect(Integer(connection.select_value('SELECT id2 FROM dependant2a'))).to eq 2
end
end
describe '.drop_all_views' do
it 'can drop all managed views' do
connection.execute 'CREATE VIEW unmanaged AS SELECT 2 AS id;'
expect(view_names).to match_array %w[test dependant1 dependant2a dependant2b dependant3 dependant4 unmanaged]
ActiveRecordViews.drop_all_views connection
expect(view_names).to match_array %w[unmanaged]
end
          it 'supports being run inside a transaction' do
expect(ActiveRecordViews).to receive(:without_transaction).at_least(:once).and_wrap_original do |original, *args, &block|
original.call(*args) do |new_connection|
new_connection.execute 'SET statement_timeout = 1000'
block.call(new_connection)
end
end
connection.transaction requires_new: true do
expect {
ActiveRecordViews.drop_all_views connection
}.to change { view_names }
end
end
it 'errors if an unmanaged view depends on a managed view' do
connection.execute 'CREATE VIEW unmanaged AS SELECT * from dependant2a'
expect {
ActiveRecordViews.drop_all_views connection
}.to raise_error ActiveRecord::StatementInvalid, /view unmanaged depends on view dependant2a/
end
it 'can drop materialized views' do
without_dependency_checks do
ActiveRecordViews.create_view connection, 'materialized', 'Materialized', 'SELECT id FROM test;', materialized: true
end
ActiveRecordViews.drop_all_views connection
expect(view_names).to match_array %w[]
end
end
end
describe 'with unmanaged dependant view' do
before do
connection.execute 'CREATE VIEW dependant AS SELECT id FROM test'
end
after do
connection.execute 'DROP VIEW dependant;'
end
it 'updates view with compatible change' do
create_test_view 'select 2 as id'
expect(test_view_sql).to eq 'SELECT 2 AS id;'
end
it 'fails to update view with incompatible change' do
expect {
create_test_view "SELECT 'foo'::text as name, 4 as id"
}.to raise_error ActiveRecord::StatementInvalid, /view dependant depends on view test/
expect(test_view_sql).to eq 'SELECT 1 AS id;'
end
end
end
it 'creates and drops materialized views' do
create_test_view 'select 123 as id', materialized: true
expect(test_view_sql).to eq nil
expect(test_materialized_view_sql).to eq 'SELECT 123 AS id;'
drop_test_view
expect(test_view_sql).to eq nil
expect(test_materialized_view_sql).to eq nil
end
it 'replaces a normal view with a materialized view' do
create_test_view 'select 11 as id'
create_test_view 'select 22 as id', materialized: true
expect(test_view_sql).to eq nil
expect(test_materialized_view_sql).to eq 'SELECT 22 AS id;'
end
it 'replaces a materialized view with a normal view' do
create_test_view 'select 22 as id', materialized: true
create_test_view 'select 11 as id'
expect(test_view_sql).to eq 'SELECT 11 AS id;'
expect(test_materialized_view_sql).to eq nil
end
it 'can test if materialized views can be refreshed concurrently' do
expect(ActiveRecordViews.supports_concurrent_refresh?(connection)).to be true
end
it 'preserves materialized view if dropping/recreating' do
without_dependency_checks do
ActiveRecordViews.create_view connection, 'test1', 'Test1', 'SELECT 1 AS foo'
ActiveRecordViews.create_view connection, 'test2', 'Test2', 'SELECT * FROM test1', materialized: true
ActiveRecordViews.create_view connection, 'test1', 'Test1', 'SELECT 2 AS bar, 1 AS foo'
end
expect(materialized_view_names).to eq %w[test2]
expect(view_names).to eq %w[test1]
end
it 'supports creating unique indexes on materialized views' do
create_test_view 'select 1 as foo, 2 as bar, 3 as baz', materialized: true, unique_columns: [:foo, 'bar']
index_sql = connection.select_value("SELECT indexdef FROM pg_indexes WHERE schemaname = 'public' AND indexname = 'test_pkey';")
expect(index_sql).to eq 'CREATE UNIQUE INDEX test_pkey ON public.test USING btree (foo, bar)'
end
it 'errors if trying to create unique index on non-materialized view' do
expect {
create_test_view 'select 1 as foo, 2 as bar, 3 as baz', materialized: false, unique_columns: [:foo, 'bar']
}.to raise_error ArgumentError, 'unique_columns option requires view to be materialized'
end
it 'supports resetting all materialised views' do
class ResetMaterializeViewTestModel < ActiveRecord::Base
self.table_name = 'test'
is_view 'select 123 as id', materialized: true
end
ResetMaterializeViewTestModel.refresh_view!
expect {
ActiveRecordViews.reset_materialized_views
}.to change { test_view_populated? }.to(false)
.and change { test_view_refreshed_at }.to(nil)
end
end
describe '.drop_all_views' do
let(:connection) { ActiveRecord::Base.connection }
it 'does nothing when no views have been defined' do
ActiveRecordViews.drop_all_views connection
expect(view_names).to match_array %w[]
end
end
describe '.without_transaction' do
let(:original_connection) { ActiveRecord::Base.connection }
it 'yields original connection if no active transaction' do
ActiveRecordViews.without_transaction original_connection do |new_connection|
expect(new_connection).to eq original_connection
end
end
it 'yields a new connection if inside a transaction' do
original_connection.transaction do
ActiveRecordViews.without_transaction original_connection do |new_connection|
expect(new_connection).to_not eq original_connection
end
end
end
it 'yields original connection if called recursively' do
ActiveRecordViews.without_transaction original_connection do |new_connection_1|
expect(new_connection_1).to eq original_connection
new_connection_1.transaction do
ActiveRecordViews.without_transaction new_connection_1 do |new_connection_2|
expect(new_connection_2).to eq new_connection_1
end
end
end
end
it 'yields same isolated connection if called recursively on original connection inside transaction' do
original_connection.transaction do
ActiveRecordViews.without_transaction original_connection do |new_connection_1|
expect(new_connection_1).to_not eq original_connection
ActiveRecordViews.without_transaction original_connection do |new_connection_2|
expect(new_connection_2).to eq new_connection_1
end
end
end
end
    it 'yields different isolated connection if called recursively on different connections inside transaction' do
begin
original_connection_2 = original_connection.pool.checkout
original_connection.transaction do
ActiveRecordViews.without_transaction original_connection do |new_connection_1|
expect(new_connection_1).to_not eq original_connection
original_connection_2.transaction do
ActiveRecordViews.without_transaction original_connection_2 do |new_connection_2|
expect(new_connection_2).to_not eq original_connection
expect(new_connection_2).to_not eq original_connection_2
expect(new_connection_2).to_not eq new_connection_1
end
end
end
end
ensure
original_connection.pool.checkin original_connection_2
end
end
end
end
| 37.718935 | 146 | 0.669543 |
264ffecce941c349d7e800e60f593d60422425ac | 781 | # frozen_string_literal: true
module UffizziCore::Concerns::Models::ActivityItem
extend ActiveSupport::Concern
included do
include UffizziCore::ActivityItemRepo
self.table_name = UffizziCore.table_names[:activity_items]
belongs_to :deployment
belongs_to :container
belongs_to :build, optional: true
has_many :events, dependent: :destroy
scope :docker, -> {
where(type: UffizziCore::ActivityItem::Docker.name)
}
scope :github, -> {
where(type: UffizziCore::ActivityItem::Github.name)
}
def docker?
type == UffizziCore::ActivityItem::Docker.name
end
def image
[namespace, name].compact.join('/')
end
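    # Full image reference including the tag for docker items,
    # e.g. "library/redis:6.2" (illustrative values).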
def full_image
return "#{image}:#{tag}" if docker?
''
end
end
end
| 19.525 | 62 | 0.663252 |
e8eeecd68c523f7a60330e3c62000006dcfd80cf | 1,667 | # -*- encoding: utf-8 -*-
# stub: retriable 3.1.2 ruby lib
Gem::Specification.new do |s|
s.name = "retriable".freeze
s.version = "3.1.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Jack Chu".freeze]
s.date = "2018-06-11"
s.description = "Retriable is a simple DSL to retry failed code blocks with randomized exponential backoff. This is especially useful when interacting external api/services or file system calls.".freeze
s.email = ["[email protected]".freeze]
s.homepage = "http://github.com/kamui/retriable".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
s.rubygems_version = "3.0.4".freeze
s.summary = "Retriable is a simple DSL to retry failed code blocks with randomized exponential backoff".freeze
s.installed_by_version = "3.0.4" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 3"])
s.add_development_dependency(%q<listen>.freeze, ["~> 3.1"])
else
s.add_dependency(%q<bundler>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, ["~> 3"])
s.add_dependency(%q<listen>.freeze, ["~> 3.1"])
end
else
s.add_dependency(%q<bundler>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, ["~> 3"])
s.add_dependency(%q<listen>.freeze, ["~> 3.1"])
end
end
| 41.675 | 204 | 0.677864 |
ac2c329b7025af159e7cf0700e066d5125a32fa6 | 8,855 | Faraday::Response.register_middleware format_whm: Lumberg::FormatWhm
module Lumberg
module Whm
class Server < Base
# Server
attr_accessor :host
# Remote access hash
attr_accessor :hash
# Base URL to the WHM API
attr_accessor :base_url
# Enable Basic Authentication with API - default false
attr_accessor :basic_auth
# API username - default: root
attr_accessor :user
# WHM parsed response
attr_reader :response
# HTTP Params used for API requests
attr_accessor :params
# WHM API function name
attr_reader :function
# Use ssl?
attr_accessor :ssl
# HTTP SSL verify mode
attr_accessor :ssl_verify
      # Returned params to transform to booleans
attr_accessor :boolean_params
# Force response type...ARG!
attr_accessor :force_response_type
# HTTP read/open timeout
attr_accessor :timeout
# Whostmgr
attr_accessor :whostmgr
# To use cpanel uapi api
attr_accessor :uapi
#
      # ==== Required
      # * <tt>:host</tt> - hostname or IP address of the WHM server
      # * <tt>:hash</tt> - WHM remote access hash used to authenticate
      #
      # ==== Optional
      # * <tt>:user</tt> - API username (default: root)
      # * <tt>:ssl</tt> - connect over SSL (default: true)
      # * <tt>:basic_auth</tt> - use HTTP basic authentication instead of the WHM auth header (default: false)
      # * <tt>:port</tt> - API port (default: 2087 with SSL, 2086 without)
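      #
      # ==== Example
      #
      #   # Illustrative only; host and hash below are placeholders.
      #   server = Lumberg::Whm::Server.new(host: 'whm.example.com', hash: 'remote-access-hash')
      #   server.version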
def initialize(options)
@ssl_verify ||= false
@ssl = options.delete(:ssl)
@host = options.delete(:host)
validate_server_host
@hash = format_hash(options.delete(:hash))
@user = (options.has_key?(:user) ? options.delete(:user) : 'root')
@basic_auth = options.delete(:basic_auth)
@timeout = options.delete(:timeout)
@whostmgr = options.delete(:whostmgr)
@port = options.delete(:port)
@uapi = options.delete(:uapi)
@base_url = format_url(options)
end
def perform_request(function, options = {})
        # WHM sometimes uses different keys for the result hash
@response_key = options.delete(:response_key) || 'result'
@whostmgr = options.delete(:whostmgr)
@base_url = format_url(options) if @whostmgr || @uapi
@function = function
@params = format_query(options)
yield self if block_given?
do_request(@base_url, function, @params)
end
def get_hostname
perform_request('gethostname', {response_key: 'hostname'})
end
def version
perform_request('version', {response_key: 'version'})
end
def load_average
@force_response_type = :query
result = perform_request('loadavg')
result[:success] = result[:params].has_key?(:one)
result
end
def system_load_average(options = {})
perform_request('systemloadavg', options.merge(response_key: 'data'))
end
def languages
perform_request('getlanglist', {response_key: 'lang'})
end
def themes
perform_request('getlanglist', {response_key: 'themes'})
end
def list_ips
perform_request('listips', {response_key: 'result'})
end
def get_tweaksetting(options = {})
request = perform_request('get_tweaksetting',
options.merge(
response_key: 'data',
:'api.version' => 1
)
)
request[:success] = !request[:params].empty?
request
end
def set_tweaksetting(options = {})
request = perform_request('set_tweaksetting',
options.merge(
response_key: 'metadata',
:'api.version' => 1
)
)
request[:success] = (request[:params][:reason] == 'OK')
request
end
def add_ip(options = {})
perform_request('addip', options.merge(response_key: 'addip'))
end
def delete_ip(options = {})
perform_request('delip', options.merge(response_key: 'delip'))
end
def set_hostname(options = {})
perform_request('sethostname', options.merge(response_key: 'sethostname'))
end
def set_resolvers(options = {})
perform_request('setresolvers', options.merge(response_key: 'setresolvers'))
end
def show_bandwidth(options = {})
perform_request('showbw', options.merge(response_key: 'bandwidth'))
end
def set_nv_var(options = {})
perform_request('nvset', options.merge(response_key: 'nvset'))
end
def get_nv_var(options = {})
perform_request('nvget', options.merge(response_key: 'nvget'))
end
def reboot
perform_request('reboot', {response_key: "reboot"})
end
def list_hooks
request = perform_request('list_hooks',
response_key: 'data',
:'api.version' => 1)
request[:success] = request.has_key?(:params)
request
end
def edit_hook(options = {})
request = perform_request('edit_hook',
options.merge(:'api.version' => 1,
response_key: 'metadata'))
request[:success] = request[:params][:reason] == 'OK'
request
end
def disk_usage
request = perform_request('getdiskusage',
response_key: 'data',
:'api.version' => 1)
request[:success] = request.has_key?(:params)
request
end
# Public: Gets IP address for a given hostname.
#
# options - Hash of options (default: {})
# :host - String host name
#
# Returns a Hash API Response
def lookup_nameserver_ip(options = {})
request = perform_request('lookupnsip',
options.merge(:'api.version' => 1,
response_key: 'data'))
request[:success] = request[:params].has_key?(:ip)
request
end
def account
@account ||= Account.new(server: self)
end
def dns
@dns ||= Dns.new(server: self)
end
def reseller
@reseller ||= Reseller.new(server: self)
end
def cert
@cert ||= Cert.new(server: self)
end
def transfer_tool
@transfer_tool ||= TransferTool.new(server: self)
end
private
def do_request(uri, function, params)
@response = Faraday.new(url: uri, ssl: ssl_options) do |c|
if basic_auth
c.basic_auth @user, @hash
else
c.headers['Authorization'] = "WHM #{@user}:#{@hash}"
end
c.headers['Accept-Encoding'] = 'deflate'
c.params = params
c.request :url_encoded
c.response :format_whm, @force_response_type, @response_key, @boolean_params
c.response :logger, create_logger_instance
c.adapter :net_http
c.options[:timeout] = timeout if timeout
end.get(function).body
@force_response_type = nil
@response
rescue Faraday::Error::ConnectionFailed, Faraday::TimeoutError
raise Lumberg::WhmConnectionError.new(
"#{@host} is either unavailable or is not currently accepting requests. Please try again in a few minutes."
)
end
def format_query(hash)
hash.inject({}) do |params, (key, value)|
value = 1 if value === true
value = 0 if value === false
params[key] = value
params
end
end
def create_logger_instance
Logger.new(Lumberg.configuration[:debug].is_a?(TrueClass) ? $stderr : Lumberg.configuration[:debug])
end
def ssl_options
if @ssl_verify
{
verify_mode: OpenSSL::SSL::VERIFY_PEER,
ca_file: File.join(Lumberg::base_path, "cacert.pem")
}
else
{
verify_mode: OpenSSL::SSL::VERIFY_NONE
}
end
end
def format_url(options = {})
@ssl = true if @ssl.nil?
port = @port || (@ssl ? 2087 : 2086)
proto = (@ssl ? 'https' : 'http')
api = if @uapi
"execute/#{options.delete(:api_module)}"
else
@whostmgr ? "scripts2" : "json-api"
end
"#{proto}://#{@host}:#{port}/#{api}/"
end
def format_hash(hash)
raise Lumberg::WhmArgumentError.new("Missing WHM hash for #{@host}") unless hash.is_a?(String)
hash.gsub(/\n|\s/, '')
end
def validate_server_host
Resolv.getaddress(@host)
rescue Resolv::ResolvError
raise Lumberg::WhmArgumentError.new(
"Unable to resolve #{@host}"
)
end
end
end
end
| 27.079511 | 117 | 0.555618 |
7a31b095cfcfd6581f2b4e98164690e697abdeb3 | 8,434 | # frozen_string_literal: true
class Label < ApplicationRecord
include CacheMarkdownField
include Referable
include Subscribable
include Gitlab::SQL::Pattern
include OptionallySearch
include Sortable
include FromUnion
include Presentable
cache_markdown_field :description, pipeline: :single_line
DEFAULT_COLOR = '#6699cc'
default_value_for :color, DEFAULT_COLOR
has_many :lists, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :priorities, class_name: 'LabelPriority'
has_many :label_links, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :issues, through: :label_links, source: :target, source_type: 'Issue'
has_many :merge_requests, through: :label_links, source: :target, source_type: 'MergeRequest'
before_validation :strip_whitespace_from_title_and_color
validates :color, color: true, allow_blank: false
# Don't allow ',' for label titles
validates :title, presence: true, format: { with: /\A[^,]+\z/ }
validates :title, uniqueness: { scope: [:group_id, :project_id] }
validates :title, length: { maximum: 255 }
default_scope { order(title: :asc) } # rubocop:disable Cop/DefaultScope
scope :templates, -> { where(template: true, type: [Label.name, nil]) }
scope :with_title, ->(title) { where(title: title) }
scope :with_lists_and_board, -> { joins(lists: :board).merge(List.movable) }
scope :on_project_boards, ->(project_id) { with_lists_and_board.where(boards: { project_id: project_id }) }
scope :on_board, ->(board_id) { with_lists_and_board.where(boards: { id: board_id }) }
scope :order_name_asc, -> { reorder(title: :asc) }
scope :order_name_desc, -> { reorder(title: :desc) }
scope :subscribed_by, ->(user_id) { joins(:subscriptions).where(subscriptions: { user_id: user_id, subscribed: true }) }
scope :top_labels_by_target, -> (target_relation) {
label_id_column = arel_table[:id]
# Window aggregation to count labels
count_by_id = Arel::Nodes::Over.new(
Arel::Nodes::NamedFunction.new('count', [label_id_column]),
Arel::Nodes::Window.new.partition(label_id_column)
).as('count_by_id')
select(arel_table[Arel.star], count_by_id)
.joins(:label_links)
.merge(LabelLink.where(target: target_relation))
.reorder(count_by_id: :desc)
.distinct
}
def self.prioritized(project)
joins(:priorities)
.where(label_priorities: { project_id: project })
.reorder('label_priorities.priority ASC, labels.title ASC')
end
def self.unprioritized(project)
labels = Label.arel_table
priorities = LabelPriority.arel_table
label_priorities = labels.join(priorities, Arel::Nodes::OuterJoin)
.on(labels[:id].eq(priorities[:label_id]).and(priorities[:project_id].eq(project.id)))
.join_sources
joins(label_priorities).where(priorities[:priority].eq(nil))
end
def self.left_join_priorities
labels = Label.arel_table
priorities = LabelPriority.arel_table
label_priorities = labels.join(priorities, Arel::Nodes::OuterJoin)
.on(labels[:id].eq(priorities[:label_id]))
.join_sources
joins(label_priorities)
end
def self.optionally_subscribed_by(user_id)
if user_id
subscribed_by(user_id)
else
all
end
end
alias_attribute :name, :title
def self.reference_prefix
'~'
end
##
# Pattern used to extract label references from text
#
# This pattern supports cross-project references.
#
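  # Example references this pattern is expected to match (illustrative):
  #
  #   ~2               # by label ID
  #   ~bug             # single-word label name
  #   ~"needs review"  # quoted multi-word label name
  #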
def self.reference_pattern
# NOTE: The id pattern only matches when all characters on the expression
# are digits, so it will match ~2 but not ~2fa because that's probably a
# label name and we want it to be matched as such.
@reference_pattern ||= %r{
(#{Project.reference_pattern})?
#{Regexp.escape(reference_prefix)}
(?:
(?<label_id>\d+(?!\S\w)\b)
| # Integer-based label ID, or
(?<label_name>
# String-based single-word label title, or
[A-Za-z0-9_\-\?\.&]+
(?<!\.|\?)
|
# String-based multi-word label surrounded in quotes
".+?"
)
)
}x
end
def self.link_reference_pattern
nil
end
# Searches for labels with a matching title or description.
#
# This method uses ILIKE on PostgreSQL.
#
# query - The search query as a String.
#
# Returns an ActiveRecord::Relation.
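  #
  # Example (illustrative):
  #
  #   Label.search('bug') # => relation of labels matching "bug" in title or description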
def self.search(query, **options)
fuzzy_search(query, [:title, :description])
end
# Override Gitlab::SQL::Pattern.min_chars_for_partial_matching as
# label queries are never global, and so will not use a trigram
# index. That means we can have just one character in the LIKE.
def self.min_chars_for_partial_matching
1
end
def self.on_project_board?(project_id, label_id)
return false if label_id.blank?
on_project_boards(project_id).where(id: label_id).exists?
end
# Generate a hex color based on hex-encoded value
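  # Example (illustrative; "acbd18" is the start of MD5("foo")):
  #
  #   Label.color_for('foo') # => "#acbd18"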
def self.color_for(value)
"##{Digest::MD5.hexdigest(value)[0..5]}"
end
def open_issues_count(user = nil)
issues_count(user, state: 'opened')
end
def closed_issues_count(user = nil)
issues_count(user, state: 'closed')
end
def open_merge_requests_count(user = nil)
params = {
subject_foreign_key => subject.id,
label_name: title,
scope: 'all',
state: 'opened'
}
MergeRequestsFinder.new(user, params.with_indifferent_access).execute.count
end
def prioritize!(project, value)
label_priority = priorities.find_or_initialize_by(project_id: project.id)
label_priority.priority = value
label_priority.save!
end
def unprioritize!(project)
priorities.where(project: project).delete_all
end
def priority(project)
priority = if priorities.loaded?
                 # Array#first ignores a block, so use find to match the project
                 priorities.find { |p| p.project == project }
else
priorities.find_by(project: project)
end
priority.try(:priority)
end
def priority?
priorities.present?
end
def color
super || DEFAULT_COLOR
end
def text_color
LabelsHelper.text_color_for_bg(self.color)
end
def title=(value)
write_attribute(:title, sanitize_value(value)) if value.present?
end
def description=(value)
write_attribute(:description, sanitize_value(value)) if value.present?
end
##
# Returns the String necessary to reference this Label in Markdown
#
# format - Symbol format to use (default: :id, optional: :name)
#
# Examples:
#
# Label.first.to_reference # => "~1"
# Label.first.to_reference(format: :name) # => "~\"bug\""
# Label.first.to_reference(project, target_project: same_namespace_project) # => "gitlab-foss~1"
# Label.first.to_reference(project, target_project: another_namespace_project) # => "gitlab-org/gitlab-foss~1"
#
# Returns a String
#
def to_reference(from = nil, target_project: nil, format: :id, full: false)
format_reference = label_format_reference(format)
reference = "#{self.class.reference_prefix}#{format_reference}"
if from
"#{from.to_reference_base(target_project, full: full)}#{reference}"
else
reference
end
end
def as_json(options = {})
super(options).tap do |json|
json[:type] = self.try(:type)
json[:priority] = priority(options[:project]) if options.key?(:project)
json[:textColor] = text_color
end
end
def hook_attrs
attributes
end
def present(attributes)
super(**attributes.merge(presenter_class: ::LabelPresenter))
end
private
def issues_count(user, params = {})
params.merge!(subject_foreign_key => subject.id, label_name: title, scope: 'all')
IssuesFinder.new(user, params.with_indifferent_access).execute.count
end
def label_format_reference(format = :id)
raise StandardError, 'Unknown format' unless [:id, :name].include?(format)
if format == :name && !name.include?('"')
%("#{name}")
else
id
end
end
def sanitize_value(value)
CGI.unescapeHTML(Sanitize.clean(value.to_s))
end
def strip_whitespace_from_title_and_color
%w(color title).each { |attr| self[attr] = self[attr]&.strip }
end
end
Label.prepend_if_ee('EE::Label')
| 29.082759 | 122 | 0.671805 |
aca1f08080a2be57b96b562f342f214df1edcee3 | 4,829 | # frozen_string_literal: true
require 'active_support/all'
require 'active_model_serializers'
require 'state_machines-activerecord'
require 'validate_url'
require 'responders'
require 'explicit-parameters'
require 'attr_encrypted'
require 'sass-rails'
require 'coffee-rails'
require 'jquery-rails'
require 'rails-timeago'
require 'lodash-rails'
require 'ansi_stream'
require 'autoprefixer-rails'
require 'rails_autolink'
require 'gemoji'
require 'omniauth-github'
require 'pubsubstub'
require 'safe_yaml/load'
require 'securecompare'
require 'redis-objects'
require 'redis-namespace'
require 'octokit'
require 'faraday-http-cache'
require 'shipit/version'
require 'shipit/octokit_check_runs'
require 'shipit/flock'
require 'shipit/github_app'
require 'shipit/paginator'
require 'shipit/null_serializer'
require 'shipit/csv_serializer'
require 'shipit/octokit_iterator'
require 'shipit/first_parent_commits_iterator'
require 'shipit/simple_message_verifier'
require 'shipit/command'
require 'shipit/commands'
require 'shipit/stack_commands'
require 'shipit/review_stack_commands'
require 'shipit/task_commands'
require 'shipit/deploy_commands'
require 'shipit/rollback_commands'
require 'shipit/environment_variables'
require 'shipit/stat'
require 'shipit/github_http_cache_middleware'
require 'shipit/same_site_cookie_middleware'
require 'shipit/cast_value'
require 'shipit/line_buffer'
SafeYAML::OPTIONS[:default_mode] = :safe
SafeYAML::OPTIONS[:deserialize_symbols] = false
module Shipit
extend self
delegate :table_name_prefix, to: :secrets
attr_accessor :disable_api_authentication, :timeout_exit_codes
attr_writer(
:internal_hook_receivers,
:preferred_org_emails,
:task_execution_strategy,
:task_logger,
)
def task_execution_strategy
@task_execution_strategy ||= Shipit::TaskExecutionStrategy::Default
end
self.timeout_exit_codes = [].freeze
def authentication_disabled?
ENV['SHIPIT_DISABLE_AUTH'].present?
end
def enable_samesite_middleware?
ENV['SHIPIT_ENABLE_SAMESITE_NONE'].present?
end
def app_name
@app_name ||= secrets.app_name || Rails.application.class.name.split(':').first || 'Shipit'
end
def redis_url
secrets.redis_url.present? ? URI(secrets.redis_url) : nil
end
def redis(namespace = nil)
@redis ||= Redis.new(
url: redis_url.to_s.presence,
logger: Rails.logger,
reconnect_attempts: 3,
reconnect_delay: 0.5,
reconnect_delay_max: 1,
)
return @redis unless namespace
Redis::Namespace.new(namespace, redis: @redis)
end
def github
@github ||= GitHubApp.new(secrets.github)
end
def legacy_github_api
if secrets&.github_api.present?
@legacy_github_api ||= github.new_client(access_token: secrets.github_api['access_token'])
end
end
def user
if github.bot_login
User.find_or_create_by_login!(github.bot_login)
else
AnonymousUser.new
end
end
def api_clients_secret
secrets.api_clients_secret.presence || secrets.secret_key_base
end
def user_access_tokens_key
(secrets.user_access_tokens_key.presence || secrets.secret_key_base).byteslice(0, 32)
end
def host
secrets.host.presence
end
def default_merge_method
secrets.default_merge_method || 'merge'
end
def update_latest_deployed_ref
secrets.update_latest_deployed_ref
end
def enforce_publish_config
secrets.enforce_publish_config.presence
end
def npm_org_scope
secrets.npm_org_scope.presence
end
def private_npm_registry
secrets.private_npm_registry.presence
end
def github_teams
@github_teams ||= github.oauth_teams.map { |t| Team.find_or_create_by_handle(t) }
end
def all_settings_present?
@all_settings_present ||= [
secrets.github, # TODO: handle GitHub settings
redis_url,
host,
].all?(&:present?)
end
def env
{ 'SHIPIT' => '1' }.merge(secrets.env || {})
end
def shell_paths
[Shipit::Engine.root.join('lib', 'snippets').to_s]
end
def revision
@revision ||= begin
if revision_file.exist?
revision_file.read
else
%x(git rev-parse HEAD)
end.strip
end
end
def default_inactivity_timeout
secrets.commands_inactivity_timeout || 5.minutes.to_i
end
def committer_name
secrets.committer_name.presence || app_name
end
def committer_email
secrets.committer_email.presence || "#{app_name.underscore.dasherize}@#{host}"
end
def internal_hook_receivers
@internal_hook_receivers ||= []
end
def preferred_org_emails
@preferred_org_emails ||= []
end
def task_logger
@task_logger ||= Logger.new(nil)
end
protected
def revision_file
Rails.root.join('REVISION')
end
def secrets
Rails.application.secrets
end
end
require 'shipit/engine'
| 21.654709 | 96 | 0.743632 |
5d04013bd6e4d0eff824d4d5aa80e89fc7ae4e4e | 1,049 | ###########################################################
# tc_each_key.rb
#
# Test suite for the Hash#each_key instance method.
###########################################################
require "test/unit"
class TC_Hash_EachKey_Instance < Test::Unit::TestCase
def setup
@hash = {"ant", 1, "bat", 2, "cat", 3, "dog", 4}
end
def test_each_basic
assert_respond_to(@hash, :each_key)
assert_nothing_raised{ @hash.each_key{} }
end
def test_each_iterate
i = 0
@hash.each_key{ |key|
assert_equal(true, ["ant","bat","cat","dog"].include?(key))
i += 1
}
assert_equal(4, i)
end
def test_each_noop_on_empty
i = 0
{}.each_key{ i += 1 }
assert_equal(0, i)
assert_equal(@hash, @hash.each_key{})
end
def test_each_expected_errors
assert_raises(ArgumentError){ @hash.each_key(1){} }
# No longer a valid test in 1.8.7
=begin
assert_raises(LocalJumpError){ @hash.each_key }
=end
end
def teardown
@hash = nil
end
end
| 22.804348 | 68 | 0.542421 |
acae3553f1bc27ce36047730469bf21f90adf2f2 | 906 | module Api
module V1
class DebfilesController < Api::V1::ApiController
skip_before_filter :authenticate_user!, :upload
def upload
current_user = User.find_by_token(params[:token]) or raise Errors::AccessDenied
log = Logger.new Rails.root.join('log', 'uploads.log')
io = params[:filedata]
log.info "Receiving #{io.original_filename} from #{current_user.email}"
filename = Rails.root.join('public', 'uploads', io.original_filename)
File.open(filename, 'wb') do |file|
file.write(io.read)
end
log.info "Saved #{io.original_filename} to uploads folder"
svc = Services::UploadedFileImporter.new(filename, log)
if svc.process(Library.new)
render nothing: true, status: 200
else
render nothing: true, status: 400
end
end
end
end
end | 28.3125 | 87 | 0.620309 |
1aca3b0a06af9c641e8193806258c3973d56cafb | 752 | # See https://github.com/markdownlint/markdownlint/blob/master/docs/creating_styles.md for doc
# on creating and modifying this style file
# rules are named by their aliases here for clarity, not their code.
# But for instance, ul-indent = MD007
all
rule 'no-trailing-punctuation', :punctuation=>'.,;:!'
rule 'ul-indent', :indent=> 4
exclude_rule 'no-bare-urls'
exclude_rule 'code-block-style'
exclude_rule 'line-length'
# The rules below are excluded because kramdown has a bug.
# See https://github.com/markdownlint/markdownlint/issues/294#issuecomment-600600407
# They are reported in info style for now,
# and will be put back in error style once the bug is fixed.
exclude_rule 'single-h1'
exclude_rule 'no-space-in-code'
exclude_rule 'no-duplicate-header'
exclude_rule 'first-line-h1'
| 34.181818 | 94 | 0.768617 |
875ab34b44b2e08b26fbe28e222606722d53a200 | 397 | class CreateSpree<%= class_name.pluralize %>Translations < ActiveRecord::Migration
def up
Spree::<%= class_name %>.create_translation_table!({
<% attributes.each do |attribute| -%>
<% next unless options[:i18n].include? attribute.name -%>
<%= attribute.name %>: :<%= attribute.type %>,
<% end -%>
})
end
def down
Spree::<%= class_name %>.drop_translation_table!
end
end
| 26.466667 | 82 | 0.654912 |
e800b42032539eb932504414ee6097ad08aa5642 | 12,662 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/documentai/v1beta3/document_processor_service.proto
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/cloud/documentai/v1beta3/document_pb'
require 'google/cloud/documentai/v1beta3/document_io_pb'
require 'google/cloud/documentai/v1beta3/operation_metadata_pb'
require 'google/cloud/documentai/v1beta3/processor_pb'
require 'google/cloud/documentai/v1beta3/processor_type_pb'
require 'google/longrunning/operations_pb'
require 'google/protobuf/field_mask_pb'
require 'google/protobuf/timestamp_pb'
require 'google/rpc/status_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/documentai/v1beta3/document_processor_service.proto", :syntax => :proto3) do
add_message "google.cloud.documentai.v1beta3.ProcessRequest" do
optional :name, :string, 1
optional :document, :message, 2, "google.cloud.documentai.v1beta3.Document"
optional :skip_human_review, :bool, 3
oneof :source do
optional :inline_document, :message, 4, "google.cloud.documentai.v1beta3.Document"
optional :raw_document, :message, 5, "google.cloud.documentai.v1beta3.RawDocument"
end
end
add_message "google.cloud.documentai.v1beta3.HumanReviewStatus" do
optional :state, :enum, 1, "google.cloud.documentai.v1beta3.HumanReviewStatus.State"
optional :state_message, :string, 2
optional :human_review_operation, :string, 3
end
add_enum "google.cloud.documentai.v1beta3.HumanReviewStatus.State" do
value :STATE_UNSPECIFIED, 0
value :SKIPPED, 1
value :VALIDATION_PASSED, 2
value :IN_PROGRESS, 3
value :ERROR, 4
end
add_message "google.cloud.documentai.v1beta3.ProcessResponse" do
optional :document, :message, 1, "google.cloud.documentai.v1beta3.Document"
optional :human_review_operation, :string, 2
optional :human_review_status, :message, 3, "google.cloud.documentai.v1beta3.HumanReviewStatus"
end
add_message "google.cloud.documentai.v1beta3.BatchProcessRequest" do
optional :name, :string, 1
repeated :input_configs, :message, 2, "google.cloud.documentai.v1beta3.BatchProcessRequest.BatchInputConfig"
optional :output_config, :message, 3, "google.cloud.documentai.v1beta3.BatchProcessRequest.BatchOutputConfig"
optional :input_documents, :message, 5, "google.cloud.documentai.v1beta3.BatchDocumentsInputConfig"
optional :document_output_config, :message, 6, "google.cloud.documentai.v1beta3.DocumentOutputConfig"
optional :skip_human_review, :bool, 4
end
add_message "google.cloud.documentai.v1beta3.BatchProcessRequest.BatchInputConfig" do
optional :gcs_source, :string, 1
optional :mime_type, :string, 2
end
add_message "google.cloud.documentai.v1beta3.BatchProcessRequest.BatchOutputConfig" do
optional :gcs_destination, :string, 1
end
add_message "google.cloud.documentai.v1beta3.BatchProcessResponse" do
end
add_message "google.cloud.documentai.v1beta3.BatchProcessMetadata" do
optional :state, :enum, 1, "google.cloud.documentai.v1beta3.BatchProcessMetadata.State"
optional :state_message, :string, 2
optional :create_time, :message, 3, "google.protobuf.Timestamp"
optional :update_time, :message, 4, "google.protobuf.Timestamp"
repeated :individual_process_statuses, :message, 5, "google.cloud.documentai.v1beta3.BatchProcessMetadata.IndividualProcessStatus"
end
add_message "google.cloud.documentai.v1beta3.BatchProcessMetadata.IndividualProcessStatus" do
optional :input_gcs_source, :string, 1
optional :status, :message, 2, "google.rpc.Status"
optional :output_gcs_destination, :string, 3
optional :human_review_operation, :string, 4
optional :human_review_status, :message, 5, "google.cloud.documentai.v1beta3.HumanReviewStatus"
end
add_enum "google.cloud.documentai.v1beta3.BatchProcessMetadata.State" do
value :STATE_UNSPECIFIED, 0
value :WAITING, 1
value :RUNNING, 2
value :SUCCEEDED, 3
value :CANCELLING, 4
value :CANCELLED, 5
value :FAILED, 6
end
add_message "google.cloud.documentai.v1beta3.FetchProcessorTypesRequest" do
optional :parent, :string, 1
end
add_message "google.cloud.documentai.v1beta3.FetchProcessorTypesResponse" do
repeated :processor_types, :message, 1, "google.cloud.documentai.v1beta3.ProcessorType"
end
add_message "google.cloud.documentai.v1beta3.ListProcessorsRequest" do
optional :parent, :string, 1
optional :page_size, :int32, 2
optional :page_token, :string, 3
end
add_message "google.cloud.documentai.v1beta3.ListProcessorsResponse" do
repeated :processors, :message, 1, "google.cloud.documentai.v1beta3.Processor"
optional :next_page_token, :string, 2
end
add_message "google.cloud.documentai.v1beta3.CreateProcessorRequest" do
optional :parent, :string, 1
optional :processor, :message, 2, "google.cloud.documentai.v1beta3.Processor"
end
add_message "google.cloud.documentai.v1beta3.DeleteProcessorRequest" do
optional :name, :string, 1
end
add_message "google.cloud.documentai.v1beta3.DeleteProcessorMetadata" do
optional :common_metadata, :message, 5, "google.cloud.documentai.v1beta3.CommonOperationMetadata"
end
add_message "google.cloud.documentai.v1beta3.EnableProcessorRequest" do
optional :name, :string, 1
end
add_message "google.cloud.documentai.v1beta3.EnableProcessorResponse" do
end
add_message "google.cloud.documentai.v1beta3.EnableProcessorMetadata" do
optional :common_metadata, :message, 5, "google.cloud.documentai.v1beta3.CommonOperationMetadata"
end
add_message "google.cloud.documentai.v1beta3.DisableProcessorRequest" do
optional :name, :string, 1
end
add_message "google.cloud.documentai.v1beta3.DisableProcessorResponse" do
end
add_message "google.cloud.documentai.v1beta3.DisableProcessorMetadata" do
optional :common_metadata, :message, 5, "google.cloud.documentai.v1beta3.CommonOperationMetadata"
end
add_message "google.cloud.documentai.v1beta3.ReviewDocumentRequest" do
optional :human_review_config, :string, 1
optional :document, :message, 2, "google.cloud.documentai.v1beta3.Document"
optional :enable_schema_validation, :bool, 3
optional :priority, :enum, 5, "google.cloud.documentai.v1beta3.ReviewDocumentRequest.Priority"
oneof :source do
optional :inline_document, :message, 4, "google.cloud.documentai.v1beta3.Document"
end
end
add_enum "google.cloud.documentai.v1beta3.ReviewDocumentRequest.Priority" do
value :DEFAULT, 0
value :URGENT, 1
end
add_message "google.cloud.documentai.v1beta3.ReviewDocumentResponse" do
optional :gcs_destination, :string, 1
end
add_message "google.cloud.documentai.v1beta3.ReviewDocumentOperationMetadata" do
optional :state, :enum, 1, "google.cloud.documentai.v1beta3.ReviewDocumentOperationMetadata.State"
optional :state_message, :string, 2
optional :create_time, :message, 3, "google.protobuf.Timestamp"
optional :update_time, :message, 4, "google.protobuf.Timestamp"
optional :common_metadata, :message, 5, "google.cloud.documentai.v1beta3.CommonOperationMetadata"
end
add_enum "google.cloud.documentai.v1beta3.ReviewDocumentOperationMetadata.State" do
value :STATE_UNSPECIFIED, 0
value :RUNNING, 1
value :CANCELLING, 2
value :SUCCEEDED, 3
value :FAILED, 4
value :CANCELLED, 5
end
end
end
module Google
module Cloud
module DocumentAI
module V1beta3
ProcessRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ProcessRequest").msgclass
HumanReviewStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.HumanReviewStatus").msgclass
HumanReviewStatus::State = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.HumanReviewStatus.State").enummodule
ProcessResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ProcessResponse").msgclass
BatchProcessRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessRequest").msgclass
BatchProcessRequest::BatchInputConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessRequest.BatchInputConfig").msgclass
BatchProcessRequest::BatchOutputConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessRequest.BatchOutputConfig").msgclass
BatchProcessResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessResponse").msgclass
BatchProcessMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessMetadata").msgclass
BatchProcessMetadata::IndividualProcessStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessMetadata.IndividualProcessStatus").msgclass
BatchProcessMetadata::State = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.BatchProcessMetadata.State").enummodule
FetchProcessorTypesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.FetchProcessorTypesRequest").msgclass
FetchProcessorTypesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.FetchProcessorTypesResponse").msgclass
ListProcessorsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ListProcessorsRequest").msgclass
ListProcessorsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ListProcessorsResponse").msgclass
CreateProcessorRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.CreateProcessorRequest").msgclass
DeleteProcessorRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.DeleteProcessorRequest").msgclass
DeleteProcessorMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.DeleteProcessorMetadata").msgclass
EnableProcessorRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.EnableProcessorRequest").msgclass
EnableProcessorResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.EnableProcessorResponse").msgclass
EnableProcessorMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.EnableProcessorMetadata").msgclass
DisableProcessorRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.DisableProcessorRequest").msgclass
DisableProcessorResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.DisableProcessorResponse").msgclass
DisableProcessorMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.DisableProcessorMetadata").msgclass
ReviewDocumentRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ReviewDocumentRequest").msgclass
ReviewDocumentRequest::Priority = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ReviewDocumentRequest.Priority").enummodule
ReviewDocumentResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ReviewDocumentResponse").msgclass
ReviewDocumentOperationMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ReviewDocumentOperationMetadata").msgclass
ReviewDocumentOperationMetadata::State = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.documentai.v1beta3.ReviewDocumentOperationMetadata.State").enummodule
end
end
end
end
| 63.628141 | 201 | 0.77468 |
28f71ff24a7299eb49847648d8bd20aefe632f90 | 362 | Sequel.migration do
change do
create_table :reg_1400 do
column :id, Integer, primary_key: true
column :id_pai, Integer, index: true, null: false
column :cod_item_ipm, String, size: 60
column :mun, String, size: 7
column :valor, BigDecimal, size: [18, 2]
column :cnpj_pai, String, size: 14, index: true
end
end
end
| 27.846154 | 55 | 0.651934 |
03f085c65a660b32d77a5dc15abfbebf5b3181cb | 601 | # # encoding: utf-8
# Inspec test for recipe hdp-chef::zookeeper_user
# The Inspec reference, with examples and extensive documentation, can be
# found at https://docs.chef.io/inspec_reference.html
control 'hdp-chef::zookeeper_user' do
title 'Testing zookeeper user'
describe group('hadoop') do
it { should exist }
its('gid') { should eq 10010 }
end
describe user('zookeeper') do
it { should exist }
its('uid') { should eq 15025 }
its('home') { should eq '/home/zookeeper' }
its('shell') { should eq '/bin/bash' }
its('group') { should eq 'hadoop' }
end
end
| 25.041667 | 73 | 0.663894 |
1a25e563ec637559dc1668ac41b8fb6d3a6b910c | 1,228 | # frozen_string_literal: true
describe 'Block controls' do
let(:exhibit) { FactoryBot.create(:exhibit) }
let(:exhibit_curator) { FactoryBot.create(:exhibit_curator, exhibit: exhibit) }
before { login_as exhibit_curator }
it 'is split into separate sections', js: true do
# create page
visit spotlight.exhibit_dashboard_path(exhibit)
click_link 'Feature pages'
add_new_via_button('My New Feature Page')
expect(page).to have_css('h3', text: 'My New Feature Page')
expect(page).to have_content('The feature page was created.')
within('li.dd-item') do
click_link 'Edit'
end
# fill in title
fill_in 'feature_page_title', with: 'Exhibit Title'
# click to add widget
click_add_widget
within('.spotlight-block-controls') do
expect(page).to have_css('.st-controls-group', count: 2)
within(first('.st-controls-group')) do
expect(page).to have_content 'Standard widgets'
expect(page).to have_css('.st-block-controls__button')
end
within(all('.st-controls-group').last) do
expect(page).to have_content 'Exhibit item widgets'
expect(page).to have_css('.st-block-controls__button')
end
end
end
end
| 28.55814 | 81 | 0.681596 |
f7a403648668eefcd5d37865ef0a9536d20d0824 | 205 | RSpec.describe FindMyRepresentative do
it "has a version number" do
expect(FindMyRepresentative::VERSION).not_to be nil
end
it "does something useful" do
expect(false).to eq(true)
end
end
| 20.5 | 55 | 0.736585 |
ff0d49474b94d320c4ee2851c3dbbeb659c356fc | 5,329 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# A collection of HostScanTargetSummary objects
class VulnerabilityScanning::Models::HostScanTargetSummaryCollection
# **[Required]** The HostScanTargetSummary objects in the collection
# @return [Array<OCI::VulnerabilityScanning::Models::HostScanTargetSummary>]
attr_accessor :items
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'items': :'items'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'items': :'Array<OCI::VulnerabilityScanning::Models::HostScanTargetSummary>'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [Array<OCI::VulnerabilityScanning::Models::HostScanTargetSummary>] :items The value to assign to the {#items} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.items = attributes[:'items'] if attributes[:'items']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
items == other.items
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[items].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 35.291391 | 245 | 0.676112 |
18a71239d24514fc5774ad61cc8ff4d94468d679 | 935 |
class SCREEN_NAME < Roda
plugin :middleware
route do |r|
r.get '@:raw_name' do |raw_name|
HTML.to_html( :screen_name => raw_name )
end # === on get
end # === route
HTML = Megauni::WWW_App.new {
use ::MUE
style {
body {
padding 0
margin 0
}
div.^(:block) {
max_width '500px'
width 'auto'
border 0
}
} # === style
title '{{{html.screen_name}}}'
h1 {
background_color black
color white
margin_top '0'
margin_bottom '0'
padding '0.5em'
'{{{html.screen_name}}}'
}
use ::NAV_BAR
div.^(:block) {
div.^(:item) {
h3 'Secret Compliment'
div.^(:item_content) {
div "I think ... and ... and ...."
}
}
} # === div.block
} # === WWW_App.new
end # === class SCREEN_NAME
use SCREEN_NAME
| 14.84127 | 46 | 0.464171 |
185921d8fbee0ccd070d458a11d01e7e3215d8ff | 775 | # Provides an ActiveRecord-like interface to a model whose data is not persisted to a database.
module StaticModel
extend ActiveSupport::Concern
module ClassMethods
# Used by ActiveRecord's polymorphic association to set object_id
def primary_key
'id'
end
# Used by ActiveRecord's polymorphic association to set object_type
def base_class
self
end
end
# Used by AR for fetching attributes
#
# Pass it along if we respond to it.
def [](key)
send(key) if respond_to?(key)
end
def to_param
id
end
def new_record?
false
end
def persisted?
false
end
def destroyed?
false
end
def ==(other)
if other.is_a? ::StaticModel
id == other.id
else
super
end
end
end
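# Usage sketch (not part of the original file): a plain Ruby class can mix in
# StaticModel so that ActiveRecord polymorphic associations may point at it even
# though its records live in code rather than in a database table. Assumes
# ActiveSupport is already loaded (as the module above requires); the class name
# and sample data below are illustrative assumptions only.
class StaticCountry
  include StaticModel

  attr_reader :id, :name

  def initialize(id, name)
    @id = id
    @name = name
  end

  # In-memory "records" instead of database rows
  def self.all
    @all ||= [new(1, 'Iceland'), new(2, 'Norway')]
  end

  def self.find(id)
    all.find { |country| country.id == id }
  end
end
# StaticCountry.new(1, 'anything') == StaticCountry.all.first #=> true (compared by id via StaticModel#==)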
| 16.145833 | 95 | 0.660645 |
bfb2f6fbe06d34699982be1bd7f66c2774a50e07 | 2,902 | # frozen_string_literal: true
module SharedRoleComponents
def self.make_list_embed(embed)
embed.fields = [
{ name: 'Missing a class?', value: 'If we are missing a class, let us know and we will add a channel!' },
{ name: 'General roles:', value: "`#{ROLES['general'].keys.join('` `')}`" },
{ name: 'Class roles:', value: "`#{ROLES['classes'].keys.map { |k| k.ljust 7 }.join('` `')}`" },
{ name: 'Slash commands:',
value: <<~USAGE,
`/role list`
`/role add`
`/role remove`
USAGE
inline: true },
{ name: 'Classic commands:',
value: <<~USAGE,
`!role list`
`!role add foo [bar baz ...]`
`!role remove foo [bar baz ...]`
USAGE
inline: true }
]
embed.color = CONFIG['colors']['error']
end
def self.add_selects(embed, view)
embed.fields = [
{ name: 'Role Selection', value: "Select roles in the dropdowns below:\n(sorted by class number)" }
]
embed.color = CONFIG['colors']['info']
# since max of 25 choices per dropdown, break up by level
view.row do |row|
# general roles (not class)
r = ROLES['general']
row.select_menu(custom_id: 'role_add_general', placeholder: 'General roles', max_values: r.size) do |s|
r.each do |_slug, data|
s.option(label: data['title'], value: data['id'].to_s)
end
end
end
view.row do |row|
# 100/200 level
r = ROLES['classes'].filter { |n, _| n.match?(/[12]\d\d/) }
row.select_menu(custom_id: 'role_add_100/200', placeholder: '100/200-level classes', max_values: r.size) do |s|
r.each do |slug, data|
s.option(label: "#{slug.upcase}: #{data['title']}", value: data['id'].to_s)
end
end
end
view.row do |row|
# 300 level
r = ROLES['classes'].filter { |n, _| n.match?(/3\d\d/) }
row.select_menu(custom_id: 'role_add_300', placeholder: '300-level classes', max_values: r.size) do |s|
r.each do |slug, data|
s.option(label: "#{slug.upcase}: #{data['title']}", value: data['id'].to_s)
end
end
end
view.row do |row|
# 400 level
r = ROLES['classes'].filter { |n, _| n.match?(/4\d\d/) }
row.select_menu(custom_id: 'role_add_400', placeholder: '400-level classes', max_values: r.size) do |s|
r.each do |slug, data|
s.option(label: "#{slug.upcase}: #{data['title']}", value: data['id'].to_s)
end
end
end
view.row do |row|
# 500 level
r = ROLES['classes'].filter { |n, _| n.match?(/5\d\d/) }
row.select_menu(custom_id: 'role_add_500', placeholder: '500-level classes', max_values: r.size) do |s|
r.each do |slug, data|
s.option(label: "#{slug.upcase}: #{data['title']}", value: data['id'].to_s)
end
end
end
end
end
| 34.141176 | 117 | 0.555824 |
1c7f8e709f6e12654f057bbfa5f5dca1a5a68cf3 | 13,343 | require 'rubygems'
require 'rubygems/package'
require 'time'
begin
gem 'builder'
require 'builder/xchar'
rescue LoadError
end
##
# Top level class for building the gem repository index.
class Gem::Indexer
include Gem::UserInteraction
##
# Build indexes for RubyGems 1.2.0 and newer when true
attr_accessor :build_modern
##
# Index install location
attr_reader :dest_directory
##
# Specs index install location
attr_reader :dest_specs_index
##
# Latest specs index install location
attr_reader :dest_latest_specs_index
##
# Prerelease specs index install location
attr_reader :dest_prerelease_specs_index
##
# Index build directory
attr_reader :directory
##
# Create an indexer that will index the gems in +directory+.
def initialize(directory, options = {})
require 'fileutils'
require 'tmpdir'
require 'zlib'
unless defined?(Builder::XChar) then
raise "Gem::Indexer requires that the XML Builder library be installed:" +
"\n\tgem install builder"
end
options = { :build_modern => true }.merge options
@build_modern = options[:build_modern]
@dest_directory = directory
@directory = File.join(Dir.tmpdir, "gem_generate_index_#{$$}")
marshal_name = "Marshal.#{Gem.marshal_version}"
@master_index = File.join @directory, 'yaml'
@marshal_index = File.join @directory, marshal_name
@quick_dir = File.join @directory, 'quick'
@quick_marshal_dir = File.join @quick_dir, marshal_name
@quick_marshal_dir_base = File.join "quick", marshal_name # FIX: UGH
@quick_index = File.join @quick_dir, 'index'
@latest_index = File.join @quick_dir, 'latest_index'
@specs_index = File.join @directory, "specs.#{Gem.marshal_version}"
@latest_specs_index =
File.join(@directory, "latest_specs.#{Gem.marshal_version}")
@prerelease_specs_index =
File.join(@directory, "prerelease_specs.#{Gem.marshal_version}")
@dest_specs_index =
File.join(@dest_directory, "specs.#{Gem.marshal_version}")
@dest_latest_specs_index =
File.join(@dest_directory, "latest_specs.#{Gem.marshal_version}")
@dest_prerelease_specs_index =
File.join(@dest_directory, "prerelease_specs.#{Gem.marshal_version}")
@files = []
end
##
# Abbreviate the spec for downloading. Abbreviated specs are only used for
# searching, downloading and related activities and do not need deployment
# specific information (e.g. list of files). So we abbreviate the spec,
# making it much smaller for quicker downloads.
#--
# TODO move to Gem::Specification
def abbreviate(spec)
spec.files = []
spec.test_files = []
spec.rdoc_options = []
spec.extra_rdoc_files = []
spec.cert_chain = []
spec
end
##
# Build various indicies
def build_indicies
Gem::Specification.dirs = []
Gem::Specification.add_specs(*map_gems_to_specs(gem_file_list))
build_marshal_gemspecs
build_modern_indicies if @build_modern
compress_indicies
end
##
# Builds Marshal quick index gemspecs.
def build_marshal_gemspecs
count = Gem::Specification.count { |s| not s.default_gem? }
progress = ui.progress_reporter count,
"Generating Marshal quick index gemspecs for #{count} gems",
"Complete"
files = []
Gem.time 'Generated Marshal quick index gemspecs' do
Gem::Specification.each do |spec|
next if spec.default_gem?
spec_file_name = "#{spec.original_name}.gemspec.rz"
marshal_name = File.join @quick_marshal_dir, spec_file_name
marshal_zipped = Gem.deflate Marshal.dump(spec)
open marshal_name, 'wb' do |io| io.write marshal_zipped end
files << marshal_name
progress.updated spec.original_name
end
progress.done
end
@files << @quick_marshal_dir
files
end
##
# Build a single index for RubyGems 1.2 and newer
def build_modern_index(index, file, name)
say "Generating #{name} index"
Gem.time "Generated #{name} index" do
open(file, 'wb') do |io|
specs = index.map do |*spec|
# We have to splat here because latest_specs is an array, while the
# others are hashes.
spec = spec.flatten.last
platform = spec.original_platform
# win32-api-1.0.4-x86-mswin32-60
unless String === platform then
alert_warning "Skipping invalid platform in gem: #{spec.full_name}"
next
end
platform = Gem::Platform::RUBY if platform.nil? or platform.empty?
[spec.name, spec.version, platform]
end
specs = compact_specs(specs)
Marshal.dump(specs, io)
end
end
end
##
# Builds indicies for RubyGems 1.2 and newer. Handles full, latest, prerelease
def build_modern_indicies
specs = Gem::Specification.reject { |s| s.default_gem? }
prerelease, released = specs.partition { |s|
s.version.prerelease?
}
latest_specs =
Gem::Specification.latest_specs.reject { |s| s.default_gem? }
build_modern_index(released.sort, @specs_index, 'specs')
build_modern_index(latest_specs.sort, @latest_specs_index, 'latest specs')
build_modern_index(prerelease.sort, @prerelease_specs_index,
'prerelease specs')
@files += [@specs_index,
"#{@specs_index}.gz",
@latest_specs_index,
"#{@latest_specs_index}.gz",
@prerelease_specs_index,
"#{@prerelease_specs_index}.gz"]
end
def map_gems_to_specs gems
gems.map { |gemfile|
if File.size(gemfile) == 0 then
alert_warning "Skipping zero-length gem: #{gemfile}"
next
end
begin
spec = Gem::Package.new(gemfile).spec
spec.loaded_from = gemfile
# HACK: fuck this shit - borks all tests that use pl1
# if File.basename(gemfile, ".gem") != spec.original_name then
# exp = spec.full_name
# exp << " (#{spec.original_name})" if
# spec.original_name != spec.full_name
# msg = "Skipping misnamed gem: #{gemfile} should be named #{exp}"
# alert_warning msg
# next
# end
abbreviate spec
sanitize spec
spec
rescue SignalException => e
alert_error "Received signal, exiting"
raise
rescue Exception => e
msg = ["Unable to process #{gemfile}",
"#{e.message} (#{e.class})",
"\t#{e.backtrace.join "\n\t"}"].join("\n")
alert_error msg
end
}.compact
end
##
# Compresses indicies on disk
#--
# All future files should be compressed using gzip, not deflate
def compress_indicies
say "Compressing indicies"
Gem.time 'Compressed indicies' do
if @build_modern then
gzip @specs_index
gzip @latest_specs_index
gzip @prerelease_specs_index
end
end
end
##
# Compacts Marshal output for the specs index data source by using identical
# objects as much as possible.
def compact_specs(specs)
names = {}
versions = {}
platforms = {}
specs.map do |(name, version, platform)|
names[name] = name unless names.include? name
versions[version] = version unless versions.include? version
platforms[platform] = platform unless platforms.include? platform
[names[name], versions[version], platforms[platform]]
end
end
##
# Compress +filename+ with +extension+.
def compress(filename, extension)
data = Gem.read_binary filename
zipped = Gem.deflate data
open "#{filename}.#{extension}", 'wb' do |io|
io.write zipped
end
end
##
# List of gem file names to index.
def gem_file_list
Dir[File.join(@dest_directory, "gems", '*.gem')]
end
##
# Builds and installs indicies.
def generate_index
make_temp_directories
build_indicies
install_indicies
rescue SignalException
ensure
FileUtils.rm_rf @directory
end
##
# Zlib::GzipWriter wrapper that gzips +filename+ on disk.
def gzip(filename)
Zlib::GzipWriter.open "#{filename}.gz" do |io|
io.write Gem.read_binary(filename)
end
end
##
# Install generated indicies into the destination directory.
def install_indicies
verbose = Gem.configuration.really_verbose
say "Moving index into production dir #{@dest_directory}" if verbose
files = @files
files.delete @quick_marshal_dir if files.include? @quick_dir
if files.include? @quick_marshal_dir and not files.include? @quick_dir then
files.delete @quick_marshal_dir
dst_name = File.join(@dest_directory, @quick_marshal_dir_base)
FileUtils.mkdir_p File.dirname(dst_name), :verbose => verbose
FileUtils.rm_rf dst_name, :verbose => verbose
FileUtils.mv(@quick_marshal_dir, dst_name,
:verbose => verbose, :force => true)
end
files = files.map do |path|
path.sub(/^#{Regexp.escape @directory}\/?/, '') # HACK?
end
files.each do |file|
src_name = File.join @directory, file
dst_name = File.join @dest_directory, file
FileUtils.rm_rf dst_name, :verbose => verbose
FileUtils.mv(src_name, @dest_directory,
:verbose => verbose, :force => true)
end
end
##
# Make directories for index generation
def make_temp_directories
FileUtils.rm_rf @directory
FileUtils.mkdir_p @directory, :mode => 0700
FileUtils.mkdir_p @quick_marshal_dir
end
##
# Ensure +path+ and path with +extension+ are identical.
def paranoid(path, extension)
data = Gem.read_binary path
compressed_data = Gem.read_binary "#{path}.#{extension}"
unless data == Gem.inflate(compressed_data) then
      raise "Compressed file #{path}.#{extension} does not match uncompressed file #{path}"
end
end
##
# Sanitize the descriptive fields in the spec. Sometimes non-ASCII
# characters will garble the site index. Non-ASCII characters will
# be replaced by their XML entity equivalent.
def sanitize(spec)
spec.summary = sanitize_string(spec.summary)
spec.description = sanitize_string(spec.description)
spec.post_install_message = sanitize_string(spec.post_install_message)
spec.authors = spec.authors.collect { |a| sanitize_string(a) }
spec
end
##
# Sanitize a single string.
def sanitize_string(string)
return string unless string
# HACK the #to_s is in here because RSpec has an Array of Arrays of
# Strings for authors. Need a way to disallow bad values on gemspec
# generation. (Probably won't happen.)
string = string.to_s
begin
Builder::XChar.encode string
rescue NameError, NoMethodError
string.to_xs
end
end
##
# Perform an in-place update of the repository from newly added gems.
def update_index
make_temp_directories
specs_mtime = File.stat(@dest_specs_index).mtime
newest_mtime = Time.at 0
updated_gems = gem_file_list.select do |gem|
gem_mtime = File.stat(gem).mtime
newest_mtime = gem_mtime if gem_mtime > newest_mtime
gem_mtime >= specs_mtime
end
if updated_gems.empty? then
say 'No new gems'
terminate_interaction 0
end
specs = map_gems_to_specs updated_gems
prerelease, released = specs.partition { |s| s.version.prerelease? }
Gem::Specification.dirs = []
Gem::Specification.add_specs(*specs)
files = build_marshal_gemspecs
Gem.time 'Updated indexes' do
update_specs_index released, @dest_specs_index, @specs_index
update_specs_index released, @dest_latest_specs_index, @latest_specs_index
update_specs_index(prerelease,
@dest_prerelease_specs_index,
@prerelease_specs_index)
end
compress_indicies
verbose = Gem.configuration.really_verbose
say "Updating production dir #{@dest_directory}" if verbose
files << @specs_index
files << "#{@specs_index}.gz"
files << @latest_specs_index
files << "#{@latest_specs_index}.gz"
files << @prerelease_specs_index
files << "#{@prerelease_specs_index}.gz"
files = files.map do |path|
path.sub(/^#{Regexp.escape @directory}\/?/, '') # HACK?
end
files.each do |file|
src_name = File.join @directory, file
dst_name = File.join @dest_directory, file # REFACTOR: duped above
FileUtils.mv src_name, dst_name, :verbose => verbose,
:force => true
File.utime newest_mtime, newest_mtime, dst_name
end
end
##
# Combines specs in +index+ and +source+ then writes out a new copy to
# +dest+. For a latest index, does not ensure the new file is minimal.
def update_specs_index(index, source, dest)
specs_index = Marshal.load Gem.read_binary(source)
index.each do |spec|
platform = spec.original_platform
platform = Gem::Platform::RUBY if platform.nil? or platform.empty?
specs_index << [spec.name, spec.version, platform]
end
specs_index = compact_specs specs_index.uniq.sort
open dest, 'wb' do |io|
Marshal.dump specs_index, io
end
end
end
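# Usage sketch (not part of the original file; the repository path and option
# values are illustrative assumptions only):
#
#   indexer = Gem::Indexer.new '/var/www/gems', :build_modern => true
#   indexer.generate_index   # full rebuild of the index files
#   indexer.update_index     # incremental update for newly added gems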
| 26.739479 | 96 | 0.658098 |
1ce599bbab27d68cbd8cce7f922b95eefd776956 | 1,534 | =begin
#Molecule API Documentation
#The Hydrogen Molecule API
OpenAPI spec version: 1.3.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.14
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for MoleculeApi::Document
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'Document' do
before do
# run before each test
@instance = MoleculeApi::Document.new
end
after do
# run after each test
end
describe 'test an instance of Document' do
it 'should create an instance of Document' do
expect(@instance).to be_instance_of(MoleculeApi::Document)
end
end
describe 'test attribute "electron_document_id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "version"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "doc_file_hash"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "wallet_id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 25.566667 | 102 | 0.726206 |
018c305c16282662a850fdc8d74151f0fc33bce7 | 135 | class AddIsAssessedFlagToProduct < ActiveRecord::Migration[5.0]
def change
add_column :products, :isAssessed, :boolean
end
end
| 22.5 | 63 | 0.77037 |
abf804fb84e58c7e6158a9afeef3d66b18e630ab | 121 | # frozen_string_literal: true
require "rom/commands/create"
require "rom/commands/update"
require "rom/commands/delete"
| 20.166667 | 29 | 0.801653 |
d57ca346e6abd46ab6779fd79aac4f00c7621f06 | 1,998 | =begin
#Kubernetes
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.2.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Kubernetes::V1alpha1PriorityClass
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'V1alpha1PriorityClass' do
before do
# run before each test
@instance = Kubernetes::V1alpha1PriorityClass.new
end
after do
# run after each test
end
describe 'test an instance of V1alpha1PriorityClass' do
it 'should create an instance of V1alpha1PriorityClass' do
expect(@instance).to be_instance_of(Kubernetes::V1alpha1PriorityClass)
end
end
describe 'test attribute "api_version"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "description"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "global_default"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "kind"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "metadata"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "value"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 27.369863 | 103 | 0.726226 |
5d28caa8de2d54e8c5c040093afe7dc7b8c3e77b | 1,863 | require "lru_cache_hashes_only"
require "pry"
require "complexity_assert"
describe "LruCache" do
cache = LruCacheHashesOnly.new(2)
context "when put(key, value) is passed a key and value" do
it "should set the value when key is present" do
cache.put(2,2)
cache.put(2,3)
expect(cache.get(2)).to eq(3)
end
it "should insert new key and value when key is not present" do
cache.put(3,3)
expect(cache.get(3)).to eq(3)
end
end
context "when get() is passed a key parameter" do
it "returns the corresponding value if the key exists" do
cache.put(1,1)
cache.put(2,2)
expect(cache.get(1)).to eq(1)
end
it "returns -1 if the key does not exist" do
expect(cache.get(3)).to eq(-1)
end
end
context "when the cache has reached capacity" do
it "should invalidate least recently used item" do
cache = LruCacheHashesOnly.new(2)
cache.put(1,1)
cache.put(2,2)
cache.get(1)
cache.put(3,3) # evicts key 2
expect(cache.get(2)).to eq(-1)
cache.get(2)
cache.put(4,4) # evicts key 1
expect(cache.get(1)).to eq(-1)
end
end
end
class ConstantSearch
def generate_args(size)
# binding.pry
[Array.new(size) {{rand(1..size) => rand(1..100)}}, rand(1..size)]
end
def run(array, searched)
# binding.pry
cache = LruCacheHashesOnly.new(10)
cache.get(1)
cache.put(5,5)
cache.put(1,1)
cache.put(2,2)
cache.get(2)
cache.put(3,3)
end
end
describe "Constant-time search" do
  it "performs in constant time" do
expect(ConstantSearch.new).to be_constant()
end
end
| 23 | 74 | 0.552335 |
ab5dc77eb63e073d50929c36b4d3c41212449407 | 1,333 | #=========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#=========================================================================
require 'simpleitk'
if ARGV.length != 3 then
puts "Usage: SimpleGaussian <input> <sigma> <output>";
exit( 1 )
end
reader = Simpleitk::ImageFileReader.new
reader.set_file_name( ARGV[0] )
image = reader.execute
inputPixelType = image.get_pixel_idvalue
gaussian = Simpleitk::SmoothingRecursiveGaussianImageFilter.new
gaussian.set_sigma ARGV[1].to_f
image = gaussian.execute image;
caster = Simpleitk::CastImageFilter.new
caster.set_output_pixel_type inputPixelType
image = caster.execute image
writer = Simpleitk::ImageFileWriter.new
writer.set_file_name ARGV[2]
writer.execute image
| 31 | 75 | 0.68042 |
1899cb6aa9f1a8ddfc6ef2f5b0b3963e5237fedc | 72 | module Tandem
require 'spec_helper'
describe Content do
end
end
| 9 | 23 | 0.736111 |
33c70a39256a902def40255030f5cfa37ba38b87 | 437 | module Spree
class Gateway::PayuPolskaGateway < Gateway
preference :client_id, :string
preference :merchant_pos_id, :string
preference :client_secret, :password
preference :notify_url, :string
def provider_class
ActiveMerchant::Billing::PayuPolskaGateway
end
def payment_source_class
Check
end
def method_type
'payu'
end
def auto_capture?
false
end
end
end
| 17.48 | 48 | 0.684211 |
6255e3428d7d7ad12b1520cc59542597c3107af2 | 8,437 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
describe Google::Cloud::Spanner::Client, :transaction, :rollback, :mock_spanner do
let(:instance_id) { "my-instance-id" }
let(:database_id) { "my-database-id" }
let(:session_id) { "session123" }
let(:session_grpc) { Google::Spanner::V1::Session.new name: session_path(instance_id, database_id, session_id) }
let(:session) { Google::Cloud::Spanner::Session.from_grpc session_grpc, spanner.service }
let(:transaction_id) { "tx789" }
let(:transaction_grpc) { Google::Spanner::V1::Transaction.new id: transaction_id }
let(:transaction) { Google::Cloud::Spanner::Transaction.from_grpc transaction_grpc, session }
let(:tx_selector) { Google::Spanner::V1::TransactionSelector.new id: transaction_id }
let(:default_options) { Google::Gax::CallOptions.new kwargs: { "google-cloud-resource-prefix" => database_path(instance_id, database_id) } }
let :results_hash do
{
metadata: {
rowType: {
fields: [
{ name: "id", type: { code: "INT64" } },
{ name: "name", type: { code: "STRING" } },
{ name: "active", type: { code: "BOOL" } },
{ name: "age", type: { code: "INT64" } },
{ name: "score", type: { code: "FLOAT64" } },
{ name: "updated_at", type: { code: "TIMESTAMP" } },
{ name: "birthday", type: { code: "DATE"} },
{ name: "avatar", type: { code: "BYTES" } },
{ name: "project_ids", type: { code: "ARRAY",
arrayElementType: { code: "INT64" } } }
]
}
},
values: [
{ stringValue: "1" },
{ stringValue: "Charlie" },
{ boolValue: true},
{ stringValue: "29" },
{ numberValue: 0.9 },
{ stringValue: "2017-01-02T03:04:05.060000000Z" },
{ stringValue: "1950-01-01" },
{ stringValue: "aW1hZ2U=" },
{ listValue: { values: [ { stringValue: "1"},
{ stringValue: "2"},
{ stringValue: "3"} ]}}
]
}
end
let(:results_json) { results_hash.to_json }
let(:results_grpc) { Google::Spanner::V1::PartialResultSet.decode_json results_json }
let(:results_enum) { Array(results_grpc).to_enum }
let(:client) { spanner.client instance_id, database_id, pool: { min: 0 } }
let(:tx_opts) { Google::Spanner::V1::TransactionOptions.new(read_write: Google::Spanner::V1::TransactionOptions::ReadWrite.new) }
it "will rollback and not pass on the error when using Rollback" do
mock = Minitest::Mock.new
mock.expect :create_session, session_grpc, [database_path(instance_id, database_id), session: nil, options: default_options]
mock.expect :begin_transaction, transaction_grpc, [session_grpc.name, tx_opts, options: default_options]
mock.expect :execute_streaming_sql, results_enum, [session_grpc.name, "SELECT * FROM users", transaction: tx_selector, params: nil, param_types: nil, resume_token: nil, partition_token: nil, seqno: 1, options: default_options]
mock.expect :rollback, nil, [session_grpc.name, transaction_id, options: default_options]
# transaction checkin
mock.expect :begin_transaction, transaction_grpc, [session_grpc.name, tx_opts, options: default_options]
spanner.service.mocked_service = mock
results = nil
timestamp = client.transaction do |tx|
tx.must_be_kind_of Google::Cloud::Spanner::Transaction
results = tx.execute_query "SELECT * FROM users"
# This mutation will never be committed, so no mocks for it.
tx.update "users", [{ id: 1, name: "Charlie", active: false }]
# Cause an error
raise Google::Cloud::Spanner::Rollback
end
timestamp.must_be :nil?
shutdown_client! client
mock.verify
assert_results results
end
it "will rollback and pass on the error" do
mock = Minitest::Mock.new
mock.expect :create_session, session_grpc, [database_path(instance_id, database_id), session: nil, options: default_options]
mock.expect :begin_transaction, transaction_grpc, [session_grpc.name, tx_opts, options: default_options]
mock.expect :execute_streaming_sql, results_enum, [session_grpc.name, "SELECT * FROM users", transaction: tx_selector, params: nil, param_types: nil, resume_token: nil, partition_token: nil, seqno: 1, options: default_options]
mock.expect :rollback, nil, [session_grpc.name, transaction_id, options: default_options]
# transaction checkin
mock.expect :begin_transaction, transaction_grpc, [session_grpc.name, tx_opts, options: default_options]
spanner.service.mocked_service = mock
results = nil
assert_raises ZeroDivisionError do
client.transaction do |tx|
tx.must_be_kind_of Google::Cloud::Spanner::Transaction
results = tx.execute_query "SELECT * FROM users"
# This mutation will never be committed, so no mocks for it.
tx.update "users", [{ id: 1, name: "Charlie", active: false }]
# Cause an error
1/0
end
end
shutdown_client! client
mock.verify
assert_results results
end
it "does not allow nested transactions" do
mock = Minitest::Mock.new
mock.expect :create_session, session_grpc, [database_path(instance_id, database_id), session: nil, options: default_options]
mock.expect :begin_transaction, transaction_grpc, [session_grpc.name, tx_opts, options: default_options]
mock.expect :rollback, nil, [session_grpc.name, transaction_id, options: default_options]
# transaction checkin
mock.expect :begin_transaction, transaction_grpc, [session_grpc.name, tx_opts, options: default_options]
spanner.service.mocked_service = mock
nested_error = assert_raises RuntimeError do
client.transaction do |tx|
tx.update "users", [{ id: 1, name: "Charlie", active: false }]
tx.insert "users", [{ id: 2, name: "Harvey", active: true }]
tx.upsert "users", [{ id: 3, name: "Marley", active: false }]
tx.replace "users", [{ id: 4, name: "Henry", active: true }]
tx.delete "users", [1, 2, 3, 4, 5]
# A nested transaction is not allowed
client.transaction do |tx2|
tx2.insert "users", [{ id: 6, name: "Barney", active: true }]
end
end
end
nested_error.message.must_equal "Nested transactions are not allowed"
shutdown_client! client
mock.verify
end
def assert_results results
results.must_be_kind_of Google::Cloud::Spanner::Results
results.fields.wont_be :nil?
results.fields.must_be_kind_of Google::Cloud::Spanner::Fields
results.fields.keys.count.must_equal 9
results.fields[:id].must_equal :INT64
results.fields[:name].must_equal :STRING
results.fields[:active].must_equal :BOOL
results.fields[:age].must_equal :INT64
results.fields[:score].must_equal :FLOAT64
results.fields[:updated_at].must_equal :TIMESTAMP
results.fields[:birthday].must_equal :DATE
results.fields[:avatar].must_equal :BYTES
results.fields[:project_ids].must_equal [:INT64]
rows = results.rows.to_a # grab them all from the enumerator
rows.count.must_equal 1
row = rows.first
row.must_be_kind_of Google::Cloud::Spanner::Data
row.keys.must_equal [:id, :name, :active, :age, :score, :updated_at, :birthday, :avatar, :project_ids]
row[:id].must_equal 1
row[:name].must_equal "Charlie"
row[:active].must_equal true
row[:age].must_equal 29
row[:score].must_equal 0.9
row[:updated_at].must_equal Time.parse("2017-01-02T03:04:05.060000000Z")
row[:birthday].must_equal Date.parse("1950-01-01")
row[:avatar].must_be_kind_of StringIO
row[:avatar].read.must_equal "image"
row[:project_ids].must_equal [1, 2, 3]
end
end
| 45.117647 | 230 | 0.666232 |
ffe699e6a21e7d4a61245332ccf7d47e0e7391e6 | 1,185 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'bukovina/version'
Gem::Specification.new do |spec|
spec.name = "bukovina"
spec.version = Bukovina::VERSION
spec.authors = ["Malo Skrylevo"]
spec.email = ["[email protected]"]
spec.description = %q{Bukovina is the Orthodox Christian God-service library}
spec.summary = %q{Bukovina is the Orthodox Christian God-service library}
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency 'i18n'
spec.add_dependency 'rdoba', '>= 0.9.2'
spec.add_dependency 'validate_url'
spec.add_dependency 'activerecord', '>= 4.2'
spec.add_dependency 'excon'
spec.add_dependency 'when_easter'
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "pry"
spec.add_development_dependency "pry_debug"
spec.add_development_dependency "rake"
end
| 35.909091 | 81 | 0.683544 |
03c3dca1d62bf60934e08da63803e98c6e8b9aa6 | 60 | $check_warnings_load_count += 1
$checked_verbose = $VERBOSE
| 20 | 31 | 0.8 |
18f7325492ea2db12b1ed3a74cf2afda34acd312 | 343 | require "film_on/version"
require "film_on/services/channels"
require "film_on/services/groups"
require "film_on/services/video_on_demand"
require "film_on/base"
require "film_on/models/channel"
require "film_on/models/group"
require "film_on/models/programme"
require "film_on/models/movie"
require "film_on/models/genre"
module FilmOn
end
| 22.866667 | 42 | 0.816327 |
62751154d044f65d307463e50dedffd660718eac | 1,399 | # frozen_string_literal: true
# Load default formatter gem
require "simplecov-html"
require "pathname"
require "simplecov/profiles/root_filter"
require "simplecov/profiles/test_frameworks"
require "simplecov/profiles/bundler_filter"
require "simplecov/profiles/hidden_filter"
require "simplecov/profiles/rails"
# Default configuration
SimpleCov.configure do
formatter SimpleCov::Formatter::HTMLFormatter
load_profile "bundler_filter"
load_profile "hidden_filter"
# Exclude files outside of SimpleCov.root
load_profile "root_filter"
end
# Gotta stash this a-s-a-p, see the CommandGuesser class and i.e. #110 for further info
SimpleCov::CommandGuesser.original_run_command = "#{$PROGRAM_NAME} #{ARGV.join(' ')}"
at_exit do
next if SimpleCov.external_at_exit?
SimpleCov.at_exit_behavior
end
# Autoload config from ~/.simplecov if present
require "simplecov/load_global_config"
# Autoload config from .simplecov if present
# Recurse upwards until we find .simplecov or reach the root directory
config_path = Pathname.new(SimpleCov.root)
loop do
filename = config_path.join(".simplecov")
if filename.exist?
begin
load filename
rescue LoadError, StandardError
warn "Warning: Error occurred while trying to load #{filename}. " \
"Error message: #{$!.message}"
end
break
end
config_path, = config_path.split
break if config_path.root?
end
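# Example of a project-level .simplecov file that the lookup loop above would
# load (illustrative only; the filter and group names are assumptions):
#
#   SimpleCov.configure do
#     add_filter "/spec/"
#     add_group "Models", "app/models"
#     minimum_coverage 90
#   end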
| 27.431373 | 87 | 0.768406 |
62156b1abd514618c43bd8a8505ce75a4fbc696c | 8,132 | #
# httpresponse.rb -- HTTPResponse Class
#
# Author: IPR -- Internet Programming with Ruby -- writers
# Copyright (c) 2000, 2001 TAKAHASHI Masayoshi, GOTOU Yuuzou
# Copyright (c) 2002 Internet Programming with Ruby writers. All rights
# reserved.
#
# $IPR: httpresponse.rb,v 1.45 2003/07/11 11:02:25 gotoyuzo Exp $
require 'time'
require 'webrick/httpversion'
require 'webrick/htmlutils'
require 'webrick/httputils'
require 'webrick/httpstatus'
module WEBrick
class HTTPResponse
BUFSIZE = 1024*4
attr_reader :http_version, :status, :header
attr_reader :cookies
attr_accessor :reason_phrase
attr_accessor :body
attr_accessor :request_method, :request_uri, :request_http_version
attr_accessor :filename
attr_accessor :keep_alive
attr_reader :config, :sent_size
def initialize(config)
@config = config
@logger = config[:Logger]
@header = Hash.new
@status = HTTPStatus::RC_OK
@reason_phrase = nil
@http_version = HTTPVersion::convert(@config[:HTTPVersion])
@body = ''
@keep_alive = true
@cookies = []
@request_method = nil
@request_uri = nil
@request_http_version = @http_version # temporary
@chunked = false
@filename = nil
@sent_size = 0
end
def status_line
"HTTP/#@http_version #@status #@reason_phrase #{CRLF}"
end
def status=(status)
@status = status
@reason_phrase = HTTPStatus::reason_phrase(status)
end
def [](field)
@header[field.downcase]
end
def []=(field, value)
@header[field.downcase] = value.to_s
end
def content_length
if len = self['content-length']
return Integer(len)
end
end
def content_length=(len)
self['content-length'] = len.to_s
end
def content_type
self['content-type']
end
def content_type=(type)
self['content-type'] = type
end
def each
@header.each{|k, v| yield(k, v) }
end
def chunked?
@chunked
end
def chunked=(val)
@chunked = val ? true : false
end
def keep_alive?
@keep_alive
end
def send_response(socket)
begin
setup_header()
send_header(socket)
send_body(socket)
rescue Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN => ex
@logger.debug(ex)
@keep_alive = false
rescue Exception => ex
@logger.error(ex)
@keep_alive = false
end
end
def setup_header()
@reason_phrase ||= HTTPStatus::reason_phrase(@status)
@header['server'] ||= @config[:ServerSoftware]
@header['date'] ||= Time.now.httpdate
# HTTP/0.9 features
if @request_http_version < "1.0"
@http_version = HTTPVersion.new("0.9")
@keep_alive = false
end
# HTTP/1.0 features
if @request_http_version < "1.1"
if chunked?
@chunked = false
ver = @request_http_version.to_s
msg = "chunked is set for an HTTP/#{ver} request. (ignored)"
@logger.warn(msg)
end
end
# Determine the message length (RFC2616 -- 4.4 Message Length)
if @status == 304 || @status == 204 || HTTPStatus::info?(@status)
@header.delete('content-length')
@body = ""
elsif chunked?
@header["transfer-encoding"] = "chunked"
@header.delete('content-length')
elsif %r{^multipart/byteranges} =~ @header['content-type']
@header.delete('content-length')
elsif @header['content-length'].nil?
unless @body.is_a?(IO)
@header['content-length'] = @body ? @body.size : 0
end
end
# Keep-Alive connection.
if @header['connection'] == "close"
@keep_alive = false
elsif keep_alive?
if chunked? || @header['content-length']
@header['connection'] = "Keep-Alive"
end
else
@header['connection'] = "close"
end
# Location is a single absoluteURI.
if location = @header['location']
if @request_uri
@header['location'] = @request_uri.merge(location)
end
end
end
def send_header(socket)
if @http_version.major > 0
data = status_line()
@header.each{|key, value|
tmp = key.gsub(/\bwww|^te$|\b\w/){|s| s.upcase }
data << "#{tmp}: #{value}" << CRLF
}
@cookies.each{|cookie|
data << "Set-Cookie: " << cookie.to_s << CRLF
}
data << CRLF
_write_data(socket, data)
end
end
def send_body(socket)
case @body
when IO then send_body_io(socket)
else send_body_string(socket)
end
end
def to_s
ret = ""
send_response(ret)
ret
end
def set_redirect(status, url)
@body = "<HTML><A HREF=\"#{url.to_s}\">#{url.to_s}</A>.</HTML>\n"
@header['location'] = url.to_s
raise status
end
def set_error(ex, backtrace=false)
case ex
when HTTPStatus::Status
@keep_alive = false if HTTPStatus::error?(ex.code)
self.status = ex.code
else
@keep_alive = false
self.status = HTTPStatus::RC_INTERNAL_SERVER_ERROR
end
@header['content-type'] = "text/html"
if respond_to?(:create_error_page)
create_error_page()
return
end
if @request_uri
host, port = @request_uri.host, @request_uri.port
else
host, port = @config[:ServerName], @config[:Port]
end
@body = ''
@body << <<-_end_of_html_
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN">
<HTML>
<HEAD><TITLE>#{HTMLUtils::escape(@reason_phrase)}</TITLE></HEAD>
<BODY>
<H1>#{HTMLUtils::escape(@reason_phrase)}</H1>
#{HTMLUtils::escape(ex.message)}
<HR>
_end_of_html_
if backtrace && $DEBUG
@body << "backtrace of `#{HTMLUtils::escape(ex.class.to_s)}' "
@body << "#{HTMLUtils::escape(ex.message)}"
@body << "<PRE>"
ex.backtrace.each{|line| @body << "\t#{line}\n"}
@body << "</PRE><HR>"
end
@body << <<-_end_of_html_
<ADDRESS>
#{HTMLUtils::escape(@config[:ServerSoftware])} at
#{host}:#{port}
</ADDRESS>
</BODY>
</HTML>
_end_of_html_
end
private
def send_body_io(socket)
begin
if @request_method == "HEAD"
# do nothing
elsif chunked?
while buf = @body.read(BUFSIZE)
next if buf.empty?
data = ""
data << format("%x", buf.size) << CRLF
data << buf << CRLF
_write_data(socket, data)
@sent_size += buf.size
end
_write_data(socket, "0#{CRLF}#{CRLF}")
else
size = @header['content-length'].to_i
_send_file(socket, @body, 0, size)
@sent_size = size
end
ensure
@body.close
end
end
def send_body_string(socket)
if @request_method == "HEAD"
# do nothing
elsif chunked?
remain = body ? @body.size : 0
while buf = @body[@sent_size, BUFSIZE]
break if buf.empty?
data = ""
data << format("%x", buf.size) << CRLF
data << buf << CRLF
_write_data(socket, data)
@sent_size += buf.size
end
_write_data(socket, "0#{CRLF}#{CRLF}")
else
if @body && @body.size > 0
_write_data(socket, @body)
@sent_size = @body.size
end
end
end
def _send_file(output, input, offset, size)
while offset > 0
sz = BUFSIZE < offset ? BUFSIZE : offset
buf = input.read(sz)
offset -= buf.size
end
if size == 0
while buf = input.read(BUFSIZE)
_write_data(output, buf)
end
else
while size > 0
sz = BUFSIZE < size ? BUFSIZE : size
buf = input.read(sz)
_write_data(output, buf)
size -= buf.size
end
end
end
def _write_data(socket, data)
socket << data
end
end
end
| 24.792683 | 71 | 0.562223 |
bf67aa236ac535508c3914ba9b5cfa9c3d13e7c7 | 294 | require 'leaflet-draw-rails/shape'
module Leaflet::Draw
module Rails::Testing
class Circle < Shape
def draw!
@browser.move_to(*@points.first)
@browser.click_and_hold
@browser.move_to(*@points.second)
@browser.release
end
end
end
end
| 15.473684 | 41 | 0.62585 |
6a5e08068c83e0284199958412592e41cc7eb9ab | 664 | # frozen_string_literal: true
module AMA
module Chef
module User
      # Action represents a single action required to converge the system
      # into the target state. Actions are created during the planning phase
      # and then run by feeding in the current resource they are called from within
class Action
attr_accessor :class_name
# rubocop:disable Lint/UnusedMethodArgument
def apply(resource)
raise 'Abstract method left behind'
end
# rubocop:enable Lint/UnusedMethodArgument
protected
def noop
::Chef::Log.debug('Noop action')
end
end
end
end
end
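# Usage sketch (not part of the original file): a concrete action subclasses
# Action and implements #apply. The class name and behaviour below are
# illustrative assumptions.
module AMA
  module Chef
    module User
      class NoopAction < Action
        # Converge step that intentionally does nothing beyond debug logging
        def apply(_resource)
          noop
        end
      end
    end
  end
end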
| 24.592593 | 79 | 0.65512 |
f79be5ace421578262e8477ef1d0a72446aaf43c | 936 | #
# Copyright (c) 2006-2019 Wade Alcorn - [email protected]
# Browser Exploitation Framework (BeEF) - http://beefproject.com
# See the file 'doc/COPYING' for copying permission
#
# This module is written by Zaur Molotnikov, 2017
# Only for the use for test purposes!
# Inspired by the coinhive miner integration (copied and modified).
#
class Cryptoloot_miner < BeEF::Core::Command
def self.options
[{ 'name' => 'public_token',
'description' => 'Public Token',
'ui_label' => 'Public Token',
'value' => 'ae5c906cfd37610626e86e25786866d6d2ff1c258d5f',
'type' => 'text'
},
{ 'name' => 'report_interval',
'description' => 'Report Interval (in seconds)',
'ui_label' => 'Report Interval (s)',
'value' => '30',
'type' => 'text'
}]
end
def post_execute
save({'result' => @datastore['result']})
end
end
| 31.2 | 72 | 0.597222 |
3989b69ccfa60870361de6ac2f24520b0c516f96 | 2,775 | # frozen_string_literal: true
require 'test_helper'
module ShopifyCli
class ProjectTest < MiniTest::Test
def setup
@context = TestHelpers::FakeContext.new(root: Dir.mktmpdir)
FileUtils.cd(@context.root)
end
def test_directory_recurses
Dir.mktmpdir do |dir|
Dir.stubs(:pwd).returns("#{dir}/a/b/c/d")
FileUtils.mkdir_p("#{dir}/a/b/c/d")
FileUtils.touch("#{dir}/.shopify-cli.yml")
assert_equal(dir, Project.current.directory)
end
end
def test_current_fails_if_no_config
Dir.mktmpdir do |dir|
Dir.stubs(:pwd).returns("#{dir}/a/b/c/d")
assert_raises ShopifyCli::Abort do
FileUtils.mkdir_p("#{dir}/a/b/c/d")
Project.current
end
end
end
def test_write_writes_yaml
Dir.stubs(:pwd).returns(@context.root)
FileUtils.touch(".shopify-cli.yml")
ShopifyCli::Project.write(@context, project_type: :node, organization_id: 42)
assert_equal :node, Project.current.config['project_type']
assert_equal 42, Project.current.config['organization_id']
end
def test_write_includes_identifiers
Dir.stubs(:pwd).returns(@context.root)
FileUtils.touch(".shopify-cli.yml")
ShopifyCli::Project.write(
@context,
project_type: :node,
organization_id: 42,
other_option: true,
)
assert Project.current.config['other_option']
end
def test_project_name_returns_name
Dir.mktmpdir do |dir|
FileUtils.mkdir_p("#{dir}/myapp")
FileUtils.touch("#{dir}/myapp/.shopify-cli.yml")
FileUtils.cd("#{dir}/myapp")
project_name = Project.project_name
assert_equal "myapp", project_name
end
end
def test_project_name_returns_name_even_if_called_from_subdirectory
Dir.mktmpdir do |dir|
FileUtils.mkdir_p("#{dir}/myapp/lib")
FileUtils.touch("#{dir}/myapp/.shopify-cli.yml")
FileUtils.cd("#{dir}/myapp/lib")
project_name = Project.project_name
assert_equal "myapp", project_name
end
end
def test_project_env_returns_nil_if_doesnt_exist
Dir.mktmpdir do |dir|
Dir.stubs(:pwd).returns(dir)
FileUtils.touch("#{dir}/.shopify-cli.yml")
assert_nil(Project.current.env)
end
end
def test_project_env_returns_env_file_if_it_exists
Dir.mktmpdir do |dir|
Dir.stubs(:pwd).returns(dir)
FileUtils.touch("#{dir}/.shopify-cli.yml")
content = <<~CONTENT
SHOPIFY_API_KEY=foo
SHOPIFY_API_SECRET=bar
HOST=baz
AWSKEY=awskey
CONTENT
File.write(File.join(dir, '.env'), content)
refute_nil(Project.current.env)
end
end
end
end
| 29.521277 | 83 | 0.637838 |
7a23226ec332bc57fd2b18cf08cceee62c7536df | 1,972 |
# 1. Arrays
names = ["张三", "李四", "王五", "赵二麻子"]
print "第一个名字:", names[0], "\n"
print "第二个名字:", names[1], "\n"
print "第三个名字:", names[2], "\n"
print "第四个名字:", names[3], "\n"
names[1] = "aaa"
print "第二个名字:", names[1], "\n"
print names, "\n"
puts names.size
print(names.size,"\n")
print names.size, "\n"
print names.length, "\n"
# ? Why does print names.size "\n" raise an error? `length': wrong number of arguments (given 1, expected 0) (ArgumentError)
# Because the separator "," is missing
# Requirement: print every element of names; call the ooo method on elements that match a condition, and the xxx method on those that do not
# each method: each takes the array elements out one by one and assigns them to the given |variable|, so the code inside the block can access that variable and iterate over the whole array
# Syntax
# array.each do |variable|
#   processing to repeat for each element
# end
names.each do |n|
puts n
if n == "王五"
print n, "执行ooo方法", "\n"
else
print n, "执行xxx方法", "\n"
end
end
# In Ruby, a hash is a key-value data structure; strings or symbols are usually used as keys to store the corresponding objects
# 2. Hashes
# Symbols: symbols are very similar to strings and can be thought of as lightweight strings; they are generally used as name labels, for example to represent the names of methods and other objects
# To create a symbol, put a colon in front of the identifier
sym = :foo
print "sym-->", sym, "\n"
sym1 = :foo
print "sym1-->", sym1, "\n"
sym.to_s
print "sym.to_s->", sym.to_s, "\n"
sym1.to_s
print "sym1.to_s->", sym1.to_s, "\n"
"foo".to_sym
print "foo.to_s->", "foo".to_sym, "\n"
# Creating a hash: wrap the contents in {}, and use => to define the key used to look up an object and the value that key maps to
# address = {:name => "张三", :id=> "1001", :tel => "15954389999"}
# When symbols are used as keys, the hash can also be written like this
# address = {name: "张三", id: "1001", tel: "15954389999"}
# hash_name[key] = object to store
address = {name: "张三", id: "1001", tel: "15954389999"}
# Print the hash
print address, "\n"
# Read data
print "取散列对应key数据: ", address[:name], "\n"
# Iterating over a hash: the each method walks through every element of the hash, yielding each key and value in turn. Iterating over an array visits elements in index order; iterating over a hash visits key-value pairs
# hash.each do |key, value|
#   processing to repeat for each pair
# end
address.each do |key, value|
puts "#{key},#{value}"
end
# 3. Regular expressions
# Regular expressions are used frequently when Ruby works with strings; with them you can:
# 1. Match a string against a pattern
# 2. Split a string using a pattern
# Matching a regular expression against a string
# /pattern/ =~ string to match
# If the match succeeds, the position of the matched part is returned; if it fails, nil is returned
/ruby/ =~ "ssssssrubyss"
puts /ruby/ =~ "ssssssrubyss"
# Adding i after the closing / of the regular expression makes the match case-insensitive: /pattern/i
puts /rubY/i =~ "ssRUBY"
names = ["小王", "隔壁老王", "犀利哥"]
names.each do |name|
if /王/ =~ name
puts name
end
end
| 20.122449 | 108 | 0.647566 |
bf3964c0a75f5b760bc3533b5afc7e0ba038c5ff | 630 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
class Php55Yac < AbstractPhp55Extension
init
desc "A fast shared memory user data cache for PHP"
homepage "https://github.com/laruence/yac"
url "https://github.com/laruence/yac.git", :branch => "master"
head "https://github.com/laruence/yac.git"
version "latest"
def install
ENV.universal_binary if build.universal?
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig
system "make"
prefix.install %w[modules/yac.so]
write_config_file if build.with? "config-file"
end
end
| 28.636364 | 75 | 0.68254 |
ac61bc63170e81d4df320ffad2b0e8426228a183 | 22,314 | class PanoramicCliAT151b3 < Formula
include Language::Python::Virtualenv
desc "Panoramic Command Line Interface"
homepage "https://github.com/panoramichq/panoramic-cli"
url "https://files.pythonhosted.org/packages/9d/69/124ecc25a0075367d90915f022af556fd4237fb8f97b701616ecf177a920/panoramic-cli-1.5.1b3.tar.gz"
sha256 "3344a205567eb1f645b86f5c7c75aa11783a28ddbc2b05c040405f0501997903"
bottle do
root_url "https://a1.panocdn.com/bottles"
sha256 "de71296105d2074120f4f2cc8c1b139810df51c3a900e00472787250bb419a18" => :catalina
sha256 "67354f19208795cc79bac4eb7e35f99015ff0de3c01ec76f2ed6b31b896ed015" => :big_sur
end
depends_on "libffi"
depends_on "[email protected]"
depends_on "postgresql"
depends_on "[email protected]"
resource "agate" do
url "https://files.pythonhosted.org/packages/d4/1c/99fb34c81c68012c71e8d35a1f16a6b25952322e23c911c81327c8464be8/agate-1.6.1.tar.gz"
sha256 "c93aaa500b439d71e4a5cf088d0006d2ce2c76f1950960c8843114e5f361dfd3"
end
resource "analytics-python" do
url "https://files.pythonhosted.org/packages/6d/ae/affa8190ad884f9654483201f6fe71465bd59263b3365c0e3b544cd36203/analytics-python-1.2.9.tar.gz"
sha256 "f3d1ca27cb277da67c10d71a5c9c593d2a9ec99109e31409ab771b44821a86bf"
end
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/6b/b4/42f0e52ac2184a8abb31f0a6f98111ceee1aac0b473cee063882436e0e09/asn1crypto-1.4.0.tar.gz"
sha256 "f4f6e119474e58e04a2b1af817eb585b4fd72bdd89b998624712b5c99be7641c"
end
resource "attrs" do
url "https://files.pythonhosted.org/packages/f0/cb/80a4a274df7da7b8baf083249b0890a0579374c3d74b5ac0ee9291f912dc/attrs-20.3.0.tar.gz"
sha256 "832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
end
resource "azure-common" do
url "https://files.pythonhosted.org/packages/af/63/bbdc87fd69c7582130f61523cd9e30b7194eae7609d0d168041edc85479e/azure-common-1.1.26.zip"
sha256 "b2866238aea5d7492cfb0282fc8b8d5f6d06fb433872345864d45753c10b6e4f"
end
resource "azure-core" do
url "https://files.pythonhosted.org/packages/d3/d9/90d70fb4d4fb8be96913efcdefcfb5f838bbef9b3a27eef40d0d066b8060/azure-core-1.9.0.zip"
sha256 "ef8ae93a2ce8b595f231395579be11aadc1838168cbc2582e2d0bbd8b15c461f"
end
resource "azure-storage-blob" do
url "https://files.pythonhosted.org/packages/36/21/17828253012587b3396917349380f68591a760214d2ce1b30ae3933d448e/azure-storage-blob-12.6.0.zip"
sha256 "dc7832d48ae3f5b31a0b24191084ce6ef7d8dfbf73e553dfe34eaddcb6813be3"
end
resource "Babel" do
url "https://files.pythonhosted.org/packages/41/1b/5ed6e564b9ca54318df20ebe5d642ab25da4118df3c178247b8c4b26fa13/Babel-2.9.0.tar.gz"
sha256 "da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"
end
resource "boto3" do
url "https://files.pythonhosted.org/packages/01/fd/b65e271fcf58b898d3288e217c400a9bebed6017dd5bd469140cef4d1ba9/boto3-1.11.17.tar.gz"
sha256 "3f02c5ec585fe0c7c843026f0f3db3a7bb98a830072b0eb151456ed07ba8e46d"
end
resource "botocore" do
url "https://files.pythonhosted.org/packages/09/e8/b32eeab0260a881bc73194550975c76a62b2aab01427cf0e0b1a22058030/botocore-1.14.17.tar.gz"
sha256 "75c759fcd89c4b2c717b40c2bd43915716bf15cfb7fb5bfccdc9bd9f697ac75f"
end
resource "cachetools" do
url "https://files.pythonhosted.org/packages/fc/c8/0b52cf3132b4b85c9e83faa3e4d375575afeb3a1710c40b2b2cd2a3e5635/cachetools-4.1.1.tar.gz"
sha256 "bbaa39c3dede00175df2dc2b03d0cf18dd2d32a7de7beb68072d13043c9edb20"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/e6/de/879cf857ae6f890dfa23c3d6239814c5471936b618c8fb0c8732ad5da885/certifi-2020.11.8.tar.gz"
sha256 "f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/66/6a/98e023b3d11537a5521902ac6b50db470c826c682be6a8c661549cb7717a/cffi-1.14.4.tar.gz"
sha256 "1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "click" do
url "https://files.pythonhosted.org/packages/27/6f/be940c8b1f1d69daceeb0032fee6c34d7bd70e3e649ccac0951500b4720e/click-7.1.2.tar.gz"
sha256 "d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"
end
resource "colorama" do
url "https://files.pythonhosted.org/packages/82/75/f2a4c0c94c85e2693c229142eb448840fba0f9230111faa889d1f541d12d/colorama-0.4.3.tar.gz"
sha256 "e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/56/3b/78c6816918fdf2405d62c98e48589112669f36711e50158a0c15d804c30d/cryptography-2.9.2.tar.gz"
sha256 "a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229"
end
resource "dataclasses" do
url "https://files.pythonhosted.org/packages/59/e4/2f921edfdf1493bdc07b914cbea43bc334996df4841a34523baf73d1fb4f/dataclasses-0.6.tar.gz"
sha256 "6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"
end
resource "dbt" do
url "https://files.pythonhosted.org/packages/c8/01/e59647a4d42aa65378e4d048f17e8c3f78077acfe6d85e91e1a1283c795a/dbt-0.18.1.tar.gz"
sha256 "bf4103d6370f0f5e91853d7969cc009060b722e633a7c0c3d8a85c2f19f94e0a"
end
resource "dbt-bigquery" do
url "https://files.pythonhosted.org/packages/38/98/70b0d8176da22ac9817bd0113cbea94b278d6be5560b0eaea1826f221af4/dbt-bigquery-0.18.1.tar.gz"
sha256 "a7208ef39d4e2216a0d4bbda74d991680ae5cff70ece4b156d9ed88e07cbc7b1"
end
resource "dbt-core" do
url "https://files.pythonhosted.org/packages/92/98/83910402ef751af502a1b836da22ea6a7868bb865b28bc78c2cf46a3bed1/dbt-core-0.18.1.tar.gz"
sha256 "043b0b2637bf98ae9151c7f1509979047acdbe627f497d44695a5dcf2b156f08"
end
resource "dbt-postgres" do
url "https://files.pythonhosted.org/packages/95/62/4457466ed1603864aeac6a8d4257500e024519b2e0145fdf5629dfd5e06b/dbt-postgres-0.18.1.tar.gz"
sha256 "0b464bfa1ac90c285cfad0e59e45cebcfd7adb5a56a37654c30f79a16a28ae1d"
end
resource "dbt-redshift" do
url "https://files.pythonhosted.org/packages/e5/81/54b9a47831bb48dcc082b0fa842a9c04b335df8ae4e604dd7b0caa2ebee5/dbt-redshift-0.18.1.tar.gz"
sha256 "8e21dab8ad50ac896fac06812eea27027732bd4c9cbbe361c80bd3c2edc56700"
end
resource "dbt-snowflake" do
url "https://files.pythonhosted.org/packages/9e/d1/256509691dc94d811483f5d724b007d726bf3d128114b2ccbf0b158c8036/dbt-snowflake-0.18.1.tar.gz"
sha256 "04464c5475d4b9d86362a27b04244f60d5a8c0d4b33b6cfc4d6a1895089bc262"
end
resource "decorator" do
url "https://files.pythonhosted.org/packages/da/93/84fa12f2dc341f8cf5f022ee09e109961055749df2d0c75c5f98746cfe6c/decorator-4.4.2.tar.gz"
sha256 "e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"
end
resource "docutils" do
url "https://files.pythonhosted.org/packages/93/22/953e071b589b0b1fee420ab06a0d15e5aa0c7470eb9966d60393ce58ad61/docutils-0.15.2.tar.gz"
sha256 "a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
end
resource "google-api-core" do
url "https://files.pythonhosted.org/packages/b9/c6/b9483b94e85e4088198bc99c807a6a458800d278ae49f79a0dee0cfdc171/google-api-core-1.16.0.tar.gz"
sha256 "92e962a087f1c4b8d1c5c88ade1c1dfd550047dcffb320c57ef6a534a20403e2"
end
resource "google-auth" do
url "https://files.pythonhosted.org/packages/65/1c/eee2dbfefe37855300e4f89245c698bb1713de215d191a5127317b418576/google-auth-1.23.0.tar.gz"
sha256 "5176db85f1e7e837a646cd9cede72c3c404ccf2e3373d9ee14b2db88febad440"
end
resource "google-cloud-bigquery" do
url "https://files.pythonhosted.org/packages/eb/20/8cb50efd1a90175aff85f0852e65b74a4ce186a410bcea75f608d364bc09/google-cloud-bigquery-1.25.0.tar.gz"
sha256 "be035d9cbcce907bee971861567848384748a88977d1ad608e7818da283e6c14"
end
resource "google-cloud-core" do
url "https://files.pythonhosted.org/packages/a8/b8/34847b0833ce80c078258e982510cf65a336fd3ec0d560e4ae546791fa9e/google-cloud-core-1.3.0.tar.gz"
sha256 "878f9ad080a40cdcec85b92242c4b5819eeb8f120ebc5c9f640935e24fc129d8"
end
resource "google-resumable-media" do
url "https://files.pythonhosted.org/packages/79/70/8d2afddae61b0a0189dbefcdcd024a4030c9c696ca3ea410e43498520ed9/google-resumable-media-0.5.1.tar.gz"
sha256 "97155236971970382b738921f978a6f86a7b5a0b0311703d991e065d3cb55773"
end
resource "googleapis-common-protos" do
url "https://files.pythonhosted.org/packages/eb/ee/e59e74ecac678a14d6abefb9054f0bbcb318a6452a30df3776f133886d7d/googleapis-common-protos-1.6.0.tar.gz"
sha256 "e61b8ed5e36b976b487c6e7b15f31bb10c7a0ca7bd5c0e837f4afab64b53a0c6"
end
resource "hologram" do
url "https://files.pythonhosted.org/packages/bd/3f/79e44c96727f1bfe79c1efcf58d631308572396d8ff6983c4961a97387eb/hologram-0.0.10.tar.gz"
sha256 "d898059ea675bf5159361fd3a61d878c0e5cd66cec98e0dd57ba316af8c8f9e7"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/cb/19/57503b5de719ee45e83472f339f617b0c01ad75cba44aba1e4c97c2b0abd/idna-2.9.tar.gz"
sha256 "7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"
end
resource "importlib-metadata" do
url "https://files.pythonhosted.org/packages/3f/a8/16dc098b0addd1c20719c18a86e985be851b3ec1e103e703297169bb22cc/importlib_metadata-3.1.1.tar.gz"
sha256 "b0c2d3b226157ae4517d9625decf63591461c66b3a808c2666d538946519d170"
end
resource "importlib-resources" do
url "https://files.pythonhosted.org/packages/d1/34/f2aedc50b3a32eefd249159ea7497ece70022e35f6531bd4718fd9688cb1/importlib_resources-3.3.0.tar.gz"
sha256 "7b51f0106c8ec564b1bef3d9c588bc694ce2b92125bbb6278f4f2f5b54ec3592"
end
resource "isodate" do
url "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz"
sha256 "2e364a3d5759479cdb2d37cce6b9376ea504db2ff90252a2e5b7cc89cc9ff2d8"
end
resource "jeepney" do
url "https://files.pythonhosted.org/packages/bb/4f/06017fbbe94eeaf1e7852c2dd7a065ca7d813e17b4500f4e842531d72593/jeepney-0.6.0.tar.gz"
sha256 "7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657"
end
resource "Jinja2" do
url "https://files.pythonhosted.org/packages/64/a7/45e11eebf2f15bf987c3bc11d37dcc838d9dc81250e67e4c5968f6008b6c/Jinja2-2.11.2.tar.gz"
sha256 "89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"
end
resource "jmespath" do
url "https://files.pythonhosted.org/packages/3c/56/3f325b1eef9791759784aa5046a8f6a1aff8f7c898a2e34506771d3b99d8/jmespath-0.10.0.tar.gz"
sha256 "b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"
end
resource "json-rpc" do
url "https://files.pythonhosted.org/packages/43/5a/7c2ea59e622682fff34d5aa3b301aa9a10bb0dbf0120f85cd391e4badad8/json-rpc-1.13.0.tar.gz"
sha256 "def0dbcf5b7084fc31d677f2f5990d988d06497f2f47f13024274cfb2d5d7589"
end
resource "jsonschema" do
url "https://files.pythonhosted.org/packages/43/52/0a4dabd8d42efe6bb039d61731cb20a73d5425e29be16a7a2003b923e542/jsonschema-3.1.1.tar.gz"
sha256 "2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f"
end
resource "keyring" do
url "https://files.pythonhosted.org/packages/c9/5a/c7aefc112d75872ea2099c494b84ba6e108b20584264929e614c60939368/keyring-21.5.0.tar.gz"
sha256 "207bd66f2a9881c835dad653da04e196c678bf104f8252141d2d3c4f31051579"
end
resource "leather" do
url "https://files.pythonhosted.org/packages/a0/44/1acad8bfe958874c66825a4bdddbd277a549580b88c5daf3a4c128c521b0/leather-0.3.3.tar.gz"
sha256 "076d1603b5281488285718ce1a5ce78cf1027fe1e76adf9c548caf83c519b988"
end
resource "Logbook" do
url "https://files.pythonhosted.org/packages/2f/d9/16ac346f7c0102835814cc9e5b684aaadea101560bb932a2403bd26b2320/Logbook-1.5.3.tar.gz"
sha256 "66f454ada0f56eae43066f604a222b09893f98c1adc18df169710761b8f32fe8"
end
resource "MarkupSafe" do
url "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"
sha256 "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
end
resource "minimal-snowplow-tracker" do
url "https://files.pythonhosted.org/packages/e4/9f/004f810169a48ed5c520279d98327e7793b6491f09d42cb2c5636c994f34/minimal-snowplow-tracker-0.0.2.tar.gz"
sha256 "acabf7572db0e7f5cbf6983d495eef54081f71be392330eb3aadb9ccb39daaa4"
end
resource "msrest" do
url "https://files.pythonhosted.org/packages/6f/ad/fc4dc6c53ec8db010e9acbb1cb6c2626bed9a6646fc5a3383d171affb375/msrest-0.6.19.tar.gz"
sha256 "55f8c3940bc5dc609f8cf9fcd639444716cc212a943606756272e0d0017bbb5b"
end
resource "networkx" do
url "https://files.pythonhosted.org/packages/ef/d0/f706a9e5814a42c544fa1b2876fc33e5d17e1f2c92a5361776632c4f41ab/networkx-2.5.tar.gz"
sha256 "7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602"
end
resource "oauthlib" do
url "https://files.pythonhosted.org/packages/fc/c7/829c73c64d3749da7811c06319458e47f3461944da9d98bb4df1cb1598c2/oauthlib-3.1.0.tar.gz"
sha256 "bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"
end
resource "oscrypto" do
url "https://files.pythonhosted.org/packages/9f/54/1581ecd00c74bce2eadb08603003ffa96b6321703055551aa89bbdf77359/oscrypto-1.2.1.tar.gz"
sha256 "7d2cca6235d89d1af6eb9cfcd4d2c0cb405849868157b2f7b278beb644d48694"
end
resource "packaging" do
url "https://files.pythonhosted.org/packages/16/7c/33ae3aa02eb10ca726b21aa88d338e3f619c674e4fb8544eb352330d880a/packaging-20.7.tar.gz"
sha256 "05af3bb85d320377db281cf254ab050e1a7ebcbf5410685a9a407e18a1f81236"
end
resource "panoramic-auth" do
url "https://files.pythonhosted.org/packages/58/95/d5bec86d364228fe8a110828326d4bcad27655c2d43836d9edc4756c6fc3/panoramic-auth-1.0.0.tar.gz"
sha256 "9104d087aa57c7b9d6e6a839546f2646d0e77ffc6540fc036ff370296acccb8d"
end
resource "parsedatetime" do
url "https://files.pythonhosted.org/packages/a8/20/cb587f6672dbe585d101f590c3871d16e7aec5a576a1694997a3777312ac/parsedatetime-2.6.tar.gz"
sha256 "4cb368fbb18a0b7231f4d76119165451c8d2e35951455dfee97c62a87b04d455"
end
resource "protobuf" do
url "https://files.pythonhosted.org/packages/c9/d5/e6e789e50e478463a84bd1cdb45aa408d49a2e1aaffc45da43d10722c007/protobuf-3.11.3.tar.gz"
sha256 "c77c974d1dadf246d789f6dad1c24426137c9091e930dbf50e0a29c1fcf00b1f"
end
resource "psycopg2-binary" do
url "https://files.pythonhosted.org/packages/fc/51/0f2c6aec5c59e5640f507b59567f63b9d73a9317898810b4db311da32dfc/psycopg2-binary-2.8.6.tar.gz"
sha256 "11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"
end
resource "pyasn1" do
url "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz"
sha256 "aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
end
resource "pyasn1-modules" do
url "https://files.pythonhosted.org/packages/88/87/72eb9ccf8a58021c542de2588a867dbefc7556e14b2866d1e40e9e2b587e/pyasn1-modules-0.2.8.tar.gz"
sha256 "905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz"
sha256 "2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"
end
resource "pycryptodomex" do
url "https://files.pythonhosted.org/packages/14/90/f4a934bffae029e16fb33f3bd87014a0a18b4bec591249c4fc01a18d3ab6/pycryptodomex-3.9.9.tar.gz"
sha256 "7b5b7c5896f8172ea0beb283f7f9428e0ab88ec248ce0a5b8c98d73e26267d51"
end
resource "PyJWT" do
url "https://files.pythonhosted.org/packages/2f/38/ff37a24c0243c5f45f5798bd120c0f873eeed073994133c084e1cf13b95c/PyJWT-1.7.1.tar.gz"
sha256 "8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"
end
resource "pyOpenSSL" do
url "https://files.pythonhosted.org/packages/0d/1d/6cc4bd4e79f78be6640fab268555a11af48474fac9df187c3361a1d1d2f0/pyOpenSSL-19.1.0.tar.gz"
sha256 "9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz"
sha256 "c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"
end
resource "pyrsistent" do
url "https://files.pythonhosted.org/packages/4d/70/fd441df751ba8b620e03fd2d2d9ca902103119616f0f6cc42e6405035062/pyrsistent-0.17.3.tar.gz"
sha256 "2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"
sha256 "73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"
end
resource "python-dotenv" do
url "https://files.pythonhosted.org/packages/53/04/1a8126516c8febfeb2015844edee977c9b783bdff9b3bcd89b1cc2e1f372/python-dotenv-0.15.0.tar.gz"
sha256 "587825ed60b1711daea4832cf37524dfd404325b7db5e25ebe88c495c9f807a0"
end
resource "python-slugify" do
url "https://files.pythonhosted.org/packages/9f/42/e336f96a8b6007428df772d0d159b8eee9b2f1811593a4931150660402c0/python-slugify-4.0.1.tar.gz"
sha256 "69a517766e00c1268e5bbfc0d010a0a8508de0b18d30ad5a1ff357f8ae724270"
end
resource "pytimeparse" do
url "https://files.pythonhosted.org/packages/37/5d/231f5f33c81e09682708fb323f9e4041408d8223e2f0fb9742843328778f/pytimeparse-1.1.8.tar.gz"
sha256 "e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/09/07/448a8887c7195450604dfc0305d80d74324c36ee18ed997664051d4bffe3/pytz-2020.4.tar.gz"
sha256 "3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268"
end
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"
sha256 "b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/f5/4f/280162d4bd4d8aad241a21aecff7a6e46891b905a4341e7ab549ebaf7915/requests-2.23.0.tar.gz"
sha256 "b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
end
resource "requests-oauthlib" do
url "https://files.pythonhosted.org/packages/23/eb/68fc8fa86e0f5789832f275c8289257d8dc44dbe93fce7ff819112b9df8f/requests-oauthlib-1.3.0.tar.gz"
sha256 "b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"
end
resource "rsa" do
url "https://files.pythonhosted.org/packages/a2/d5/04b8a9719149583fec76efdff2e7a81c6e3cc34909ee818d3fbf115edc2e/rsa-4.6.tar.gz"
sha256 "109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa"
end
resource "s3transfer" do
url "https://files.pythonhosted.org/packages/50/de/2b688c062107942486c81a739383b1432a72717d9a85a6a1a692f003c70c/s3transfer-0.3.3.tar.gz"
sha256 "921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db"
end
resource "SecretStorage" do
url "https://files.pythonhosted.org/packages/ec/6e/2b7f0a6d85e20c918cce50ea89e5f72081d56088c98c4fa71e483c3b2826/SecretStorage-3.3.0.tar.gz"
sha256 "30cfdef28829dad64d6ea1ed08f8eff6aa115a77068926bcc9f5225d5a3246aa"
end
resource "six" do
url "https://files.pythonhosted.org/packages/6b/34/415834bfdafca3c5f451532e8a8d9ba89a21c9743a0c59fbd0205c7f9426/six-1.15.0.tar.gz"
sha256 "30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"
end
resource "snowflake-connector-python" do
url "https://files.pythonhosted.org/packages/ef/ac/f5912b4d739084f77a3a904cd1fa360d0e4e99196a0775d81230563add9e/snowflake-connector-python-2.2.10.tar.gz"
sha256 "0beba8eb9c1dec2782d52491d058256e1f5d9e010114a80ff3b8e3905be655fd"
end
resource "sqlparse" do
url "https://files.pythonhosted.org/packages/67/4b/253b6902c1526885af6d361ca8c6b1400292e649f0e9c95ee0d2e8ec8681/sqlparse-0.3.1.tar.gz"
sha256 "e162203737712307dfe78860cc56c8da8a852ab2ee33750e33aeadf38d12c548"
end
resource "text-unidecode" do
url "https://files.pythonhosted.org/packages/ab/e2/e9a00f0ccb71718418230718b3d900e71a5d16e701a3dae079a21e9cd8f8/text-unidecode-1.3.tar.gz"
sha256 "bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"
end
resource "tqdm" do
url "https://files.pythonhosted.org/packages/3a/76/467422c5a0157c92a8b8e1ffe14411443682e2951e6f6dde3748e47b31ba/tqdm-4.54.0.tar.gz"
sha256 "5c0d04e06ccc0da1bd3fa5ae4550effcce42fcad947b4a6cafa77bdc9b09ff22"
end
resource "typing-extensions" do
url "https://files.pythonhosted.org/packages/16/06/0f7367eafb692f73158e5c5cbca1aec798cdf78be5167f6415dd4205fa32/typing_extensions-3.7.4.3.tar.gz"
sha256 "99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/76/d9/bbbafc76b18da706451fa91bc2ebe21c0daf8868ef3c30b869ac7cb7f01d/urllib3-1.25.11.tar.gz"
sha256 "8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2"
end
resource "Werkzeug" do
url "https://files.pythonhosted.org/packages/c3/1d/1c0761d9365d166dc9d882a48c437111d22b0df564d6d5768045d9a51fd0/Werkzeug-0.16.1.tar.gz"
sha256 "b353856d37dec59d6511359f97f6a4b2468442e454bd1c98298ddce53cac1f04"
end
resource "zipp" do
url "https://files.pythonhosted.org/packages/ce/b0/757db659e8b91cb3ea47d90350d7735817fe1df36086afc77c1c4610d559/zipp-3.4.0.tar.gz"
sha256 "ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"
end
def install
venv = virtualenv_create(libexec, "python3")
venv.pip_install resources
venv.pip_install buildpath
(bin/"pano").write_env_script "#{libexec}/bin/pano", :RUNNING_UNDER_HOMEBREW => "1"
end
test do
    # Minimal smoke-test sketch in place of the bare `false` placeholder;
    # `--help` is an assumed flag and may need adjusting to the real CLI.
    system "#{bin}/pano", "--help"
end
end
| 49.476718 | 157 | 0.82939 |
3320c63904949c26ea31d8c4e5674f3059d0a2a9 | 2,989 | # GETTING STARTED
# -----------------
# This documentation is intended to show you how to get started with a
# simple Appium & appium_lib test. This example is written without a specific
# testing framework in mind; you can use appium_lib with any framework you like.
#
# INSTALLING RVM
# --------------
# If you don't have rvm installed, run the following terminal command
#
# \curl -L https://get.rvm.io | bash -s stable --ruby
#
# INSTALLING GEMS
# ---------------
# Then, change to the example directory:
# cd appium-location/sample-code/examples/ruby
#
# and install the required gems with bundler by doing:
# bundle install
#
# RUNNING THE TESTS
# -----------------
# To run the tests, make sure appium is running in another terminal
# window, then from the same window you used for the above commands, type
#
# bundle exec ruby simple_test.rb
#
# It will take a while, but once it's done you should get nothing but a line
# telling you "Tests Succeeded!"; you'll also see the iOS Simulator cranking
# away, performing actions, while the tests run.
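#
# GEMFILE SKETCH
# --------------
# If the example directory does not already ship a Gemfile, a minimal one for
# the `bundle install` step above might look like this (a sketch; nothing
# beyond the appium_lib gem is actually required by this file):
#
#     source 'https://rubygems.org'
#
#     gem 'appium_lib'
#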
require 'rubygems'
require 'appium_lib'
APP_PATH = '../../apps/TestApp/build/release-iphonesimulator/TestApp.app'
desired_caps = {
caps: {
platformName: 'iOS',
versionNumber: '7.1',
app: APP_PATH,
},
appium_lib: {
sauce_username: nil, # don't run on Sauce
sauce_access_key: nil
}
}
# Start the driver
Appium::Driver.new(desired_caps).start_driver
module Calculator
module IOS
    # Add all the Appium library methods to Calculator to make
# calling them look nicer.
Appium.promote_singleton_appium_methods Calculator
# Add two numbers
values = [rand(10), rand(10)]
expected_sum = values.reduce(&:+)
# Find every textfield.
elements = textfields
elements.each_with_index do |element, index|
element.type values[index]
end
# Click the first button
button(1).click
# Get the first static text field, then get its text
actual_sum = first_text.text
raise unless actual_sum == (expected_sum.to_s)
# Alerts are visible
button('show alert').click
find_element :class_name, 'UIAAlert' # Elements can be found by :class_name
# wait for alert to show
wait { text 'this alert is so cool' }
# Or by find
find('Cancel').click
# Waits until alert doesn't exist
wait_true { !exists { tag('UIAAlert') } }
# Alerts can be switched into
button('show alert').click # Get a button by its text
alert = driver.switch_to.alert # Get the text of the current alert, using
# the Selenium::WebDriver directly
alerting_text = alert.text
raise Exception unless alerting_text.include? 'Cool title'
alert_accept # Accept the current alert
# Window Size is easy to get
sizes = window_size
raise Exception unless sizes.height == 568
raise Exception unless sizes.width == 320
# Quit when you're done!
driver_quit
puts 'Tests Succeeded!'
end
end | 28.466667 | 85 | 0.678488 |
f70954c876eaab320f0bd6aa348b068564b9d3ec | 102 | FactoryBot.define do
factory :oembed_link do
url { "MyString" }
code { "MyText" }
end
end
| 14.571429 | 25 | 0.637255 |
4ae58f3c626444971a5ef30638343833f814f996 | 3,667 | class Prestodb < Formula
desc "Distributed SQL query engine for big data"
homepage "https://prestodb.io"
url "https://search.maven.org/remotecontent?filepath=com/facebook/presto/presto-server/0.243/presto-server-0.243.tar.gz"
sha256 "f6db5678760a89b13200aeae9a67444afbee472b0f379e58ed3246be4d6746d0"
license "Apache-2.0"
# The source of the Presto download page at https://prestodb.io/download.html
# contains old version information. The current version information is loaded
# from the JavaScript file below, so we check that instead. We don't check
# Maven because sometimes the directory listing page contains a newer version
# that hasn't been released yet and we probably don't want to upgrade until
# it's official on the first-party website, etc.
livecheck do
url "https://prestodb.io/static/js/version.js"
regex(/latest_presto_version.*?(\d+(?:\.\d+)+)/i)
end
bottle :unneeded
depends_on "openjdk"
conflicts_with "prestosql", because: "both install `presto` and `presto-server` binaries"
resource "presto-cli" do
url "https://search.maven.org/remotecontent?filepath=com/facebook/presto/presto-cli/0.243/presto-cli-0.243-executable.jar"
sha256 "8237ba228fad54776cd913e91537f1d62afd3fc598bfebfc47b4d890c7856423"
end
def install
libexec.install Dir["*"]
(libexec/"etc/node.properties").write <<~EOS
node.environment=production
node.id=ffffffff-ffff-ffff-ffff-ffffffffffff
node.data-dir=#{var}/presto/data
EOS
(libexec/"etc/jvm.config").write <<~EOS
-server
-Xmx16G
-XX:+UseG1GC
-XX:G1HeapRegionSize=32M
-XX:+UseGCOverheadLimit
-XX:+ExplicitGCInvokesConcurrent
-XX:+HeapDumpOnOutOfMemoryError
-XX:+ExitOnOutOfMemoryError
-Djdk.attach.allowAttachSelf=true
EOS
(libexec/"etc/config.properties").write <<~EOS
coordinator=true
node-scheduler.include-coordinator=true
http-server.http.port=8080
query.max-memory=5GB
query.max-memory-per-node=1GB
discovery-server.enabled=true
discovery.uri=http://localhost:8080
EOS
(libexec/"etc/log.properties").write "com.facebook.presto=INFO"
(libexec/"etc/catalog/jmx.properties").write "connector.name=jmx"
(bin/"presto-server").write_env_script libexec/"bin/launcher", Language::Java.overridable_java_home_env
resource("presto-cli").stage do
libexec.install "presto-cli-#{version}-executable.jar"
bin.write_jar_script libexec/"presto-cli-#{version}-executable.jar", "presto"
end
end
def post_install
(var/"presto/data").mkpath
end
def caveats
<<~EOS
Add connectors to #{opt_libexec}/etc/catalog/. See:
https://prestodb.io/docs/current/connector.html
EOS
end
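  # Illustration only: a catalog file dropped into etc/catalog/ follows the
  # connector documentation linked above. A hypothetical postgresql.properties
  # (the values below are placeholders; nothing like this is installed by the
  # formula) could look like:
  #
  #   connector.name=postgresql
  #   connection-url=jdbc:postgresql://localhost:5432/example_db
  #   connection-user=presto
  #   connection-password=secret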
plist_options manual: "presto-server run"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>RunAtLoad</key>
<true/>
<key>AbandonProcessGroup</key>
<true/>
<key>WorkingDirectory</key>
<string>#{opt_libexec}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/presto-server</string>
<string>run</string>
</array>
</dict>
</plist>
EOS
end
test do
system bin/"presto-server", "run", "--help"
assert_match "Presto CLI #{version}", shell_output("#{bin}/presto --version").chomp
end
end
| 31.612069 | 126 | 0.672212 |
4acee7f1b75381c9273206d18dfb0ce6b0d4ee7c | 1,798 | class Wxmaxima < Formula
desc "Cross platform GUI for Maxima"
homepage "https://wxmaxima-developers.github.io/wxmaxima/"
url "https://github.com/wxMaxima-developers/wxmaxima/archive/Version-20.09.0.tar.gz"
sha256 "a2ba6797642c7efa96c5dbb6249134a0ace246ebd390e42f7c227fa94609ef27"
license "GPL-2.0-or-later"
head "https://github.com/wxMaxima-developers/wxmaxima.git"
bottle do
sha256 "b93bf5c0c94a2636dbefb94fc94ed53018b6de08de5bf4381681fb478ddc75f4" => :catalina
sha256 "670ccdceddf8e124d4d402048a949616a4559eb8380f437d26a6f63d37467d2c" => :mojave
sha256 "cc37eed806d9ad260c98959a9a19da3fe5abab4159b52f0e1275d5a546e1652a" => :high_sierra
end
depends_on "cmake" => :build
depends_on "gettext" => :build
depends_on "ninja" => :build
depends_on "maxima"
depends_on "wxmac"
def install
# en_US.UTF8 is not a valid locale for macOS
# https://github.com/wxMaxima-developers/wxmaxima/issues/1402
inreplace "src/StreamUtils.cpp", "en_US.UTF8", "en_US.UTF-8"
mkdir "build-wxm" do
system "cmake", "..", "-GNinja", *std_cmake_args
system "ninja"
system "ninja", "install"
prefix.install "src/wxMaxima.app"
end
bash_completion.install "data/wxmaxima"
bin.write_exec_script "#{prefix}/wxMaxima.app/Contents/MacOS/wxmaxima"
end
def caveats
<<~EOS
When you start wxMaxima the first time, set the path to Maxima
(e.g. #{HOMEBREW_PREFIX}/bin/maxima) in the Preferences.
Enable gnuplot functionality by setting the following variables
in ~/.maxima/maxima-init.mac:
gnuplot_command:"#{HOMEBREW_PREFIX}/bin/gnuplot"$
draw_command:"#{HOMEBREW_PREFIX}/bin/gnuplot"$
EOS
end
test do
assert_match "algebra", shell_output("#{bin}/wxmaxima --help 2>&1")
end
end
| 33.924528 | 93 | 0.723582 |
91102f23b9b1ecbf065cecf1b8e4e9194148ef98 | 25,383 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# require "google/ads/google_ads/error"
require "google/ads/google_ads/v9/services/campaign_service_pb"
module Google
module Ads
module GoogleAds
module V9
module Services
module CampaignService
##
# Client for the CampaignService service.
#
# Service to manage campaigns.
#
class Client
include Paths
# @private
attr_reader :campaign_service_stub
##
# Configure the CampaignService Client class.
#
# See {::Google::Ads::GoogleAds::V9::Services::CampaignService::Client::Configuration}
# for a description of the configuration fields.
#
# @example
#
# # Modify the configuration for all CampaignService clients
# ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
@configure ||= begin
default_config = Client::Configuration.new
default_config.timeout = 3600.0
default_config.retry_policy = {
initial_delay: 5.0, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config
end
yield @configure if block_given?
@configure
end
##
# Configure the CampaignService Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Ads::GoogleAds::V9::Services::CampaignService::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new CampaignService client object.
#
# @example
#
# # Create a client using the default configuration
# client = ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.new
#
# # Create a client using a custom configuration
# client = ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the CampaignService client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/ads/google_ads/v9/services/campaign_service_services_pb"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
# Use self-signed JWT if the endpoint is unchanged from default,
# but only if the default endpoint does not have a region prefix.
enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
[email protected](".").first.include?("-")
credentials ||= Credentials.default scope: @config.scope,
enable_self_signed_jwt: enable_self_signed_jwt
if credentials.is_a?(::String) || credentials.is_a?(::Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@campaign_service_stub = ::Gapic::ServiceStub.new(
::Google::Ads::GoogleAds::V9::Services::CampaignService::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
# Service calls
##
# Returns the requested campaign in full detail.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
#
# @overload get_campaign(request, options = nil)
# Pass arguments to `get_campaign` via a request object, either of type
# {::Google::Ads::GoogleAds::V9::Services::GetCampaignRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V9::Services::GetCampaignRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
#
# @overload get_campaign(resource_name: nil)
# Pass arguments to `get_campaign` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource_name [::String]
# Required. The resource name of the campaign to fetch.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V9::Resources::Campaign]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V9::Resources::Campaign]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
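              # @example Minimal usage sketch (illustrative only; the resource name is a
              #   made-up placeholder following the
              #   customers/{customer_id}/campaigns/{campaign_id} pattern):
              #
              #   client = ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.new
              #   request = ::Google::Ads::GoogleAds::V9::Services::GetCampaignRequest.new(
              #     resource_name: "customers/1234567890/campaigns/9876543210"
              #   )
              #   campaign = client.get_campaign request
              #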
def get_campaign request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request,
to: ::Google::Ads::GoogleAds::V9::Services::GetCampaignRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.get_campaign.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Ads::GoogleAds::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.resource_name
header_params["resource_name"] = request.resource_name
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.get_campaign.timeout,
metadata: metadata,
retry_policy: @config.rpcs.get_campaign.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@campaign_service_stub.call_rpc :get_campaign, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
# rescue GRPC::BadStatus => grpc_error
# raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Creates, updates, or removes campaigns. Operation statuses are returned.
#
# List of thrown errors:
# [AdxError]()
# [AuthenticationError]()
# [AuthorizationError]()
# [BiddingError]()
# [BiddingStrategyError]()
# [CampaignBudgetError]()
# [CampaignError]()
# [ContextError]()
# [DatabaseError]()
# [DateError]()
# [DateRangeError]()
# [DistinctError]()
# [FieldError]()
# [FieldMaskError]()
# [HeaderError]()
# [IdError]()
# [InternalError]()
# [ListOperationError]()
# [MutateError]()
# [NewResourceCreationError]()
# [NotAllowlistedError]()
# [NotEmptyError]()
# [NullError]()
# [OperationAccessDeniedError]()
# [OperatorError]()
# [QuotaError]()
# [RangeError]()
# [RegionCodeError]()
# [RequestError]()
# [ResourceCountLimitExceededError]()
# [SettingError]()
# [SizeLimitError]()
# [StringFormatError]()
# [StringLengthError]()
# [UrlFieldError]()
#
# @overload mutate_campaigns(request, options = nil)
# Pass arguments to `mutate_campaigns` via a request object, either of type
# {::Google::Ads::GoogleAds::V9::Services::MutateCampaignsRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V9::Services::MutateCampaignsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload mutate_campaigns(customer_id: nil, operations: nil, partial_failure: nil, validate_only: nil, response_content_type: nil)
# Pass arguments to `mutate_campaigns` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param customer_id [::String]
# Required. The ID of the customer whose campaigns are being modified.
# @param operations [::Array<::Google::Ads::GoogleAds::V9::Services::CampaignOperation, ::Hash>]
# Required. The list of operations to perform on individual campaigns.
# @param partial_failure [::Boolean]
# If true, successful operations will be carried out and invalid
# operations will return errors. If false, all operations will be carried
# out in one transaction if and only if they are all valid.
# Default is false.
# @param validate_only [::Boolean]
# If true, the request is validated but not executed. Only errors are
# returned, not results.
# @param response_content_type [::Google::Ads::GoogleAds::V9::Enums::ResponseContentTypeEnum::ResponseContentType]
# The response content type setting. Determines whether the mutable resource
# or just the resource name should be returned post mutation.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V9::Services::MutateCampaignsResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V9::Services::MutateCampaignsResponse]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
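              # @example Minimal usage sketch (illustrative only; the customer ID and
              #   resource name are made-up placeholders, and removing a campaign is just
              #   one of the operation shapes a CampaignOperation can carry):
              #
              #   client = ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.new
              #   operation = ::Google::Ads::GoogleAds::V9::Services::CampaignOperation.new(
              #     remove: "customers/1234567890/campaigns/9876543210"
              #   )
              #   response = client.mutate_campaigns customer_id: "1234567890",
              #                                      operations: [operation]
              #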
def mutate_campaigns request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request,
to: ::Google::Ads::GoogleAds::V9::Services::MutateCampaignsRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.mutate_campaigns.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Ads::GoogleAds::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.customer_id
header_params["customer_id"] = request.customer_id
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.mutate_campaigns.timeout,
metadata: metadata,
retry_policy: @config.rpcs.mutate_campaigns.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@campaign_service_stub.call_rpc :mutate_campaigns, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
# rescue GRPC::BadStatus => grpc_error
# raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Configuration class for the CampaignService API.
#
# This class represents the configuration for CampaignService,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Ads::GoogleAds::V9::Services::CampaignService::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# @example
#
# # Modify the global config, setting the timeout for
# # get_campaign to 20 seconds,
# # and all remaining timeouts to 10 seconds.
# ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.get_campaign.timeout = 20.0
# end
#
# # Apply the above configuration only to a new client.
# client = ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.get_campaign.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"googleads.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
              #    *  (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
#
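              # @example Overriding the retry policy on a single client (a sketch; the
              #   numbers are arbitrary placeholders, and 14/4 are the gRPC UNAVAILABLE
              #   and DEADLINE_EXCEEDED codes already used by the service defaults):
              #
              #   client = ::Google::Ads::GoogleAds::V9::Services::CampaignService::Client.new do |config|
              #     config.retry_policy = {
              #       initial_delay: 1.0, max_delay: 30.0, multiplier: 1.5, retry_codes: [14, 4]
              #     }
              #   end
              #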
class Configuration
extend ::Gapic::Config
config_attr :endpoint, "googleads.googleapis.com", ::String
config_attr :credentials, nil do |value|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client,
nil]
allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
config_attr :interceptors, nil, ::Array, nil
config_attr :timeout, nil, ::Numeric, nil
config_attr :metadata, nil, ::Hash, nil
config_attr :retry_policy, nil, ::Hash, ::Proc, nil
config_attr :quota_project, nil, ::String, nil
# @private
def initialize parent_config = nil
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
##
# Configurations for individual RPCs
# @return [Rpcs]
#
def rpcs
@rpcs ||= begin
parent_rpcs = nil
parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
Rpcs.new parent_rpcs
end
end
##
# Configuration RPC class for the CampaignService API.
#
# Includes fields providing the configuration for each RPC in this service.
# Each configuration object is of type `Gapic::Config::Method` and includes
# the following configuration fields:
#
# * `timeout` (*type:* `Numeric`) - The call timeout in seconds
# * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
# * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
# include the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
#
class Rpcs
##
# RPC-specific configuration for `get_campaign`
# @return [::Gapic::Config::Method]
#
attr_reader :get_campaign
##
# RPC-specific configuration for `mutate_campaigns`
# @return [::Gapic::Config::Method]
#
attr_reader :mutate_campaigns
# @private
def initialize parent_rpcs = nil
get_campaign_config = parent_rpcs.get_campaign if parent_rpcs.respond_to? :get_campaign
@get_campaign = ::Gapic::Config::Method.new get_campaign_config
mutate_campaigns_config = parent_rpcs.mutate_campaigns if parent_rpcs.respond_to? :mutate_campaigns
@mutate_campaigns = ::Gapic::Config::Method.new mutate_campaigns_config
yield self if block_given?
end
end
end
end
end
end
end
end
end
end
| 49.67319 | 147 | 0.531182 |
184fd82713bf3c15fa371c0b6eb1bdb7b14fe428 | 4,153 | require "test_helper"
require "minitest/mock"
class Pay::Braintree::Billable::Test < ActiveSupport::TestCase
setup do
@billable = User.new email: "[email protected]"
@billable.processor = "braintree"
end
test "getting a customer" do
customer = @billable.customer
assert customer.id.present?
assert_equal "[email protected]", customer.email
end
test "can store card" do
@billable.card_token = "fake-valid-visa-nonce"
@billable.customer
assert_equal "Visa", @billable.card_type
assert_nil @billable.card_token
end
test "fails with invalid cards" do
# This requires Card Verification to be enabled in the Braintree account
@billable.card_token = "fake-processor-declined-visa-nonce"
err = assert_raises(Pay::Braintree::Error) { @billable.customer }
assert_equal "Do Not Honor", err.message
end
test "can update card" do
@billable.update_card("fake-valid-discover-nonce")
assert_equal "Discover", @billable.card_type
assert_nil @billable.card_token
end
test "can charge card with credit card" do
@billable.card_token = "fake-valid-visa-nonce"
charge = @billable.charge(29_00)
# Make sure it saved to the database correctly
assert_equal 29_00, charge.amount
assert_equal "Visa", charge.card_type
end
test "can charge card with venmo" do
@billable.card_token = "fake-venmo-account-nonce"
charge = @billable.charge(29_00)
# Make sure it saved to the database correctly
assert_equal 29_00, charge.amount
assert_equal "Venmo", charge.card_type
end
# Invalid amount will cause the transaction to fail
# https://developers.braintreepayments.com/reference/general/testing/ruby#amount-200000-300099
test "handles charge failures" do
@billable.card_token = "fake-valid-visa-nonce"
@billable.customer
assert_raises(Pay::Braintree::Error) { @billable.charge(2000_00) }
end
test "fails with paypal processor declined" do
    @billable.card_token = "fake-paypal-billing-agreement-nonce"
@billable.customer
assert_raises(Pay::Braintree::Error) { @billable.charge(5001_01) }
end
test "can create a braintree subscription" do
@billable.card_token = "fake-valid-visa-nonce"
@billable.subscribe
assert @billable.subscribed?
end
test "email changed" do
# Must already have a processor ID
@billable.update(processor_id: "fake")
Pay::EmailSyncJob.expects(:perform_later).with(@billable.id, @billable.class.name)
@billable.update(email: "[email protected]")
end
test "braintree trial period options" do
travel_to(VCR.current_cassette.originally_recorded_at || Time.current) do
@billable.card_token = "fake-valid-visa-nonce"
subscription = @billable.subscribe(trial_period_days: 15)
# Braintree subscriptions don't use trialing status for simplicity
assert_equal "active", subscription.status
assert_not_nil subscription.trial_ends_at
# Time.zone may not match the timezone in your Braintree account, so we'll be lenient on this assertion
assert subscription.trial_ends_at > 14.days.from_now
end
end
test "fails charges with invalid cards" do
# This requires Card Verification to be enabled in the Braintree account
@billable.card_token = "fake-processor-declined-visa-nonce"
err = assert_raises(Pay::Braintree::Error) { @billable.charge(10_00) }
assert_equal "Do Not Honor", err.message
end
test "fails subscribing with invalid cards" do
# This requires Card Verification to be enabled in the Braintree account
@billable.card_token = "fake-processor-declined-visa-nonce"
err = assert_raises(Pay::Braintree::Error) { @billable.subscribe }
assert_equal "Do Not Honor", err.message
assert_equal Braintree::ErrorResult, err.cause.class
end
test "handles invalid parameters" do
err = assert_raises(Pay::Braintree::AuthorizationError) { @billable.charge(10_00, metadata: {}) }
assert_equal "Either the data you submitted is malformed and does not match the API or the API key you used may not be authorized to perform this action.", err.message
end
end
| 36.113043 | 171 | 0.732001 |
1a7bce62887049bd0a987cc6ac3b071fe0e3e6a0 | 3,020 | require_relative "schema"
module SchemaRD
module MigrationContext
class Loader
class TableDefinition
[
:bigint,
:binary,
:boolean,
:date,
:datetime,
:decimal,
:float,
:integer,
:string,
:text,
:time,
:timestamp,
:virtual,
].each do |column_type|
module_eval <<-CODE, __FILE__, __LINE__ + 1
def #{column_type}(*args, **options)
args.each { |name| column(name, :#{column_type}, options) }
end
CODE
end
alias_method :numeric, :decimal
# "with_comment" must be assigned, but ruby2.0 needs default value.
def initialize(table, with_comment: false)
@table = table
@parse_db_comment = with_comment
end
def method_missing(name, *args)
self.column(args[0], "unknown", args[1])
end
def column(name, type, options = {})
if options[:comment] && @parse_db_comment
options[:parsed_db_comment] = options.delete(:comment)
end
@table.columns << SchemaRD::TableColumn.new(options.merge({ name: name, type: type }))
end
def timestamps
column("created_at", :timestamp, null: false)
column("updated_at", :timestamp, null: false)
end
def index(column_name, options = {})
column_name = [ column_name ] unless column_name.is_a?(Array)
@table.indexes << SchemaRD::TableIndex.new(options.merge({ columns: column_name }))
end
end
    # "with_comment" should always be passed explicitly, but Ruby 2.0 requires keyword arguments to have a default value.
def initialize(schema, with_comment: false)
@schema = schema
@parse_db_comment = with_comment
end
def create_table(table_name, options = {})
if options[:comment] && @parse_db_comment
options[:parsed_db_comment] = options.delete(:comment)
end
table = SchemaRD::Table.new(options.merge(name: table_name))
@schema.add_table(table_name, table)
yield TableDefinition.new(table, with_comment: @parse_db_comment)
end
def add_index(table_name, column_name, options = {})
column_name = [ column_name ] unless column_name.is_a?(Array)
index = SchemaRD::TableIndex.new(options.merge({ columns: column_name }))
@schema.table(table_name).indexes << index
end
def enable_extension(*args); end
module ActiveRecord
class Schema
def self.define(*args)
yield
end
end
end
end
end
class SchemaParser
def initialize(filename)
@filename = filename
end
def parse(with_comment: false)
Schema.new.tap do |schema|
File.open(@filename) do |file|
MigrationContext::Loader.new(schema, with_comment: with_comment).instance_eval(file.read, @filename)
end
end
end
end
end
| 31.789474 | 110 | 0.586093 |
21572ddedc19b95fee9397d56fb3cfad220b45e3 | 1,626 | # encoding: UTF-8
version = File.read(File.expand_path("../../SPREE_VERSION", __FILE__)).strip
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'spree_core'
s.version = version
s.summary = 'The bare bones necessary for Spree.'
s.description = 'The bare bones necessary for Spree.'
s.required_ruby_version = '>= 1.9.3'
s.author = 'Sean Schofield'
s.email = '[email protected]'
s.homepage = 'http://spreecommerce.com'
s.license = %q{BSD-3}
s.files = Dir['LICENSE', 'README.md', 'app/**/*', 'config/**/*', 'lib/**/*', 'db/**/*', 'vendor/**/*']
s.require_path = 'lib'
s.add_dependency 'activemerchant', '~> 1.43.1'
s.add_dependency 'acts_as_list', '= 0.3.0'
s.add_dependency 'awesome_nested_set', '~> 3.0.0.rc.3'
s.add_dependency 'cancancan', '~> 1.8.4'
s.add_dependency 'deface', '~> 1.0.0'
s.add_dependency 'ffaker', '~> 1.16'
s.add_dependency 'font-awesome-rails', '~> 4.0'
s.add_dependency 'friendly_id', '~> 5.0.4'
s.add_dependency 'highline', '~> 1.6.18' # Necessary for the install generator
s.add_dependency 'httparty', '~> 0.11' # For checking alerts.
s.add_dependency 'json', '~> 1.7'
s.add_dependency 'kaminari', '~> 0.15.0'
s.add_dependency 'monetize'
s.add_dependency 'paperclip', '~> 4.1.1'
s.add_dependency 'paranoia', '~> 2.0'
s.add_dependency 'rails', '~> 4.1.4'
s.add_dependency 'ransack', '~> 1.2.2'
s.add_dependency 'state_machine', '1.2.0'
s.add_dependency 'stringex', '~> 1.5.1'
s.add_dependency 'truncate_html', '0.9.2'
s.add_dependency 'twitter_cldr', '~> 3.0'
end
| 38.714286 | 111 | 0.630996 |
117b6660bd67a001e5396a78cd36e4aa7161cfe5 | 645 | #!/usr/bin/env ruby
# encoding: utf-8
require "rubygems"
require "bunni"
puts "=> Demonstrating consumer cancellation notification"
puts
conn = Bunni.new
conn.start
ch = conn.create_channel
module Bunni
module Examples
class ExampleConsumer < Bunni::Consumer
def cancelled?
@cancelled
end
def handle_cancellation(basic_cancel)
puts "#{@consumer_tag} was cancelled"
@cancelled = true
end
end
end
end
q = ch.queue("", :exclusive => true)
c = Bunni::Examples::ExampleConsumer.new(ch, q)
q.subscribe_with(c)
sleep 0.1
q.delete
sleep 0.1
puts "Disconnecting..."
conn.close
| 16.125 | 58 | 0.672868 |
ed06205bd2348c240061932849c53d49ec3a1404 | 1,123 | # Internationalization methods
class Internationalization
class << self
def full_path(file)
path(file)
end
def file_url(file)
path(file).file_url
end
def path(file, add_resources_path=true)
ident = NSLocale.currentLocale.localeIdentifier
lang = ident.split("_").first
if file.resource_path.file_exists?
# Regular file path
file.resource_path
elsif "#{lang}.lproj/#{file}".resource_path.file_exists?
# Base language file
"#{lang}.lproj/#{file}".resource_path
elsif "#{lang}.lproj/#{Version.version}/#{file}".resource_path.file_exists?
# Language version file
"#{lang}.lproj/#{Version.version}/#{file}".resource_path
elsif "en.lproj/#{Version.version}/#{file}".resource_path.file_exists?
        # en version fallback
"en.lproj/#{Version.version}/#{file}".resource_path
elsif "#{lang}.lproj/2008/#{file}".resource_path.file_exists?
# Language 2008 fallback
"#{lang}.lproj/2008/#{file}".resource_path
elsif "en.lproj/2008/#{file}".resource_path.file_exists?
# en 2008 fallback
"en.lproj/2008/#{file}".resource_path
end
end
end
end
| 28.075 | 78 | 0.694568 |
f7b6c8fe8b2471caa593a7795408eb323be58eb8 | 378 | class Api::V1::EvidenceItemSerializer < Api::V1::BaseSerializer
attributes :id, :title, :description, :votes_score, :current_vote
has_one :submitter, serializer: Api::V1::ProfileSerializer
def votes_score
object.cached_votes_score || 0
end
def current_vote
if current_user
return current_user.voted_as_when_voted_for(object)
end
nil
end
end
| 22.235294 | 67 | 0.740741 |
0827f3512c7d83dba02fe73c25cc45cf8749459c | 144 | module ArenaApi
class Item < Resource
has_many :bom_items, class_name: BOMItem
has_many :files, class_name: FileAssociation
end
end
| 20.571429 | 48 | 0.756944 |
e9be8853d2760ce0675b08bbb56ba3e8f47ec9ea | 1,407 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Configuration::SaveAutoFixService do
describe '#execute' do
let_it_be_with_reload(:project) { create(:project) }
let(:service) { described_class.new(project, feature) }
subject(:response) { service.execute(enabled: false) }
context 'with supported scanner type' do
let(:feature) { 'dependency_scanning' }
it 'returns success status' do
expect(response).to be_success
expect(response.payload).to eq({ container_scanning: true, dependency_scanning: false })
end
it 'changes setting' do
response
expect(project.security_setting.auto_fix_dependency_scanning).to be_falsey
end
end
context 'with all scanners' do
let(:feature) { 'all' }
it 'returns success status' do
expect(response).to be_success
end
it 'changes setting' do
response
expect(project.security_setting.auto_fix_dependency_scanning).to be_falsey
expect(project.security_setting.auto_fix_container_scanning).to be_falsey
end
end
context 'with not supported scanner type' do
let(:feature) { :dep_scan }
it 'does not change setting' do
expect(response).to be_error
expect(response.message).to eq('Auto fix is not available for dep_scan feature')
end
end
end
end
| 26.54717 | 96 | 0.681592 |
1aa65691dd1d2af607f838445ca7dc396f89fcf3 | 1,579 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
$:.push File.dirname(__FILE__) + '/../gen-rb'
$:.push File.join(File.dirname(__FILE__), '../../../lib/rb/lib')
require 'thrift'
require 'ThriftTest'
class SimpleHandler
[:testString, :testByte, :testI32, :testI64, :testDouble,
:testStruct, :testMap, :testSet, :testList, :testNest,
:testEnum, :testTypedef].each do |meth|
define_method(meth) do |thing|
thing
end
end
def testInsanity(thing)
num, uid = thing.userMap.find { true }
return {uid => {num => thing}}
end
def testMapMap(thing)
return {thing => {thing => thing}}
end
def testEnum(thing)
return thing
end
def testTypedef(thing)
return thing
end
def testException(thing)
raise Thrift::Test::Xception, :message => 'error'
end
end
@handler = SimpleHandler.new
@processor = Thrift::Test::ThriftTest::Processor.new(@handler)
@transport = Thrift::ServerSocket.new(9090)
@server = Thrift::ThreadedServer.new(@processor, @transport)
@server.serve
| 26.316667 | 74 | 0.702977 |
91f46dc304b06389b0f4236387faa20dd8cd6b24 | 4,755 | require 'fileutils'
require 'securerandom'
require 'rugged'
require 'spec_helper'
$:.unshift(File.expand_path('../proto', __dir__))
require 'gitaly'
if ENV.key?('GITALY_TESTING_GIT_BINARY')
GIT_BINARY_PATH = ENV['GITALY_TESTING_GIT_BINARY']
elsif ENV.key?('GITALY_TESTING_BUNDLED_GIT_PATH')
GIT_BINARY_PATH = File.join(ENV['GITALY_TESTING_BUNDLED_GIT_PATH'], 'gitaly-git')
GIT_EXEC_PATH = File.join(TMP_DIR, 'git-exec-path')
# We execute git-clone(1) to set up the test repo, and this requires Git to
# find git-upload-pack(1). We thus symlink it into a temporary Git exec path
# and make it known to Git where it lives.
Dir.mkdir(GIT_EXEC_PATH)
File.symlink(GIT_BINARY_PATH, File.join(GIT_EXEC_PATH, 'git-upload-pack'))
else
GIT_BINARY_PATH = 'git'.freeze
end
Gitlab.config.git.test_global_ivar_override(:bin_path, GIT_BINARY_PATH)
Gitlab.config.git.test_global_ivar_override(:hooks_directory, File.join(GITALY_RUBY_DIR, "hooks"))
Gitlab.config.gitaly.test_global_ivar_override(:bin_dir, __dir__)
DEFAULT_STORAGE_DIR = File.join(TMP_DIR, 'repositories', __dir__)
DEFAULT_STORAGE_NAME = 'default'.freeze
TEST_REPO_PATH = File.join(DEFAULT_STORAGE_DIR, 'gitlab-test.git')
TEST_REPO_ORIGIN = '../_build/testrepos/gitlab-test.git'.freeze
GIT_TEST_REPO_PATH = File.join(DEFAULT_STORAGE_DIR, 'gitlab-git-test.git')
GIT_TEST_REPO_ORIGIN = '../_build/testrepos/gitlab-git-test.git'.freeze
module TestRepo
def self.prepare_test_repository
FileUtils.rm_rf(Dir["#{DEFAULT_STORAGE_DIR}/mutable-*"])
FileUtils.mkdir_p(DEFAULT_STORAGE_DIR)
{
TEST_REPO_ORIGIN => TEST_REPO_PATH,
GIT_TEST_REPO_ORIGIN => GIT_TEST_REPO_PATH
}.each do |origin, path|
next if File.directory?(path)
clone_new_repo!(origin, path)
end
end
def git_test_repo_read_only
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: File.basename(GIT_TEST_REPO_PATH))
end
def test_repo_read_only
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: File.basename(TEST_REPO_PATH))
end
def new_mutable_test_repo
relative_path = random_repository_relative_path(:mutable)
TestRepo.clone_new_repo!(TEST_REPO_ORIGIN, File.join(DEFAULT_STORAGE_DIR, relative_path))
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: relative_path)
end
def new_mutable_git_test_repo
relative_path = random_repository_relative_path(:mutable)
TestRepo.clone_new_repo!(GIT_TEST_REPO_ORIGIN, File.join(DEFAULT_STORAGE_DIR, relative_path))
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: relative_path)
end
def new_broken_test_repo
relative_path = random_repository_relative_path(:broken)
repo_path = File.join(DEFAULT_STORAGE_DIR, relative_path)
TestRepo.clone_new_repo!(TEST_REPO_ORIGIN, repo_path)
refs_path = File.join(repo_path, 'refs')
FileUtils.rm_r(refs_path)
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: relative_path)
end
def new_empty_test_repo
relative_path = random_repository_relative_path(:mutable)
TestRepo.init_new_repo!(File.join(DEFAULT_STORAGE_DIR, relative_path))
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: relative_path)
end
def rugged_from_gitaly(gitaly_repo)
Rugged::Repository.new(repo_path_from_gitaly(gitaly_repo))
end
def repo_path_from_gitaly(gitaly_repo)
storage_name = gitaly_repo.storage_name
raise "this helper does not know storage #{storage_name.inspect}" unless storage_name == DEFAULT_STORAGE_NAME
File.join(DEFAULT_STORAGE_DIR, gitaly_repo.relative_path)
end
def gitlab_git_from_gitaly(gitaly_repo)
Gitlab::Git::Repository.new(
gitaly_repo,
repo_path_from_gitaly(gitaly_repo),
'project-123',
''
)
end
def repository_from_relative_path(relative_path)
gitlab_git_from_gitaly(
Gitaly::Repository.new(storage_name: DEFAULT_STORAGE_NAME, relative_path: relative_path)
)
end
def self.clone_new_repo!(origin, destination)
env = {}
env['GIT_EXEC_PATH'] = GIT_EXEC_PATH if defined?(GIT_EXEC_PATH)
return if system(env, Gitlab.config.git.bin_path, "-c", "init.templateDir=", "clone", "--quiet", "--bare", origin.to_s, destination.to_s)
abort "Failed to clone test repo. Try running 'make prepare-tests' and try again."
end
def self.init_new_repo!(destination)
return if system(Gitlab.config.git.bin_path, "-c", "init.templateDir=", "init", "--quiet", "--bare", destination.to_s)
abort "Failed to init test repo."
end
private
def random_repository_relative_path(prefix)
"#{prefix}-#{SecureRandom.hex(6)}.git"
end
end
TestRepo.prepare_test_repository
| 34.708029 | 141 | 0.768244 |
1cfde80e5581c71075f383f1daccd76b26dc7952 | 143 | # frozen_string_literal: true
module Fields::Validations
class DateField < FieldOptions
prepend Fields::Validations::Presence
end
end
| 17.875 | 41 | 0.783217 |
6a028acc43e84d2cae0a55b8a9ebd37d41252983 | 320 | # == Schema Information
#
# Table name: shares
#
# id :integer not null, primary key
# account_id :integer
# created_at :datetime not null
# updated_at :datetime not null
#
class Share < ApplicationRecord
belongs_to :account
has_one :seed
has_one :project, through: :seed
end
| 20 | 53 | 0.65 |
6adb832421de8e7ac8b69946d196fdfc341c2d84 | 125 | module Contentful
class SiteDocumentation < ContentfulModel::Base
self.content_type_id = 'siteDocumentation'
end
end
| 20.833333 | 49 | 0.792 |
618efe59b21fdff91f4a3629099b116970023534 | 749 | module Cborb::Decoding::Types
# To represent part of major type: 7
#
# @see https://tools.ietf.org/html/rfc7049#section-2.3
# @see https://tools.ietf.org/html/rfc7049#appendix-D
class HalfPrecisionFloatingPoint < Type
class << self
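# Decodes a big-endian 16-bit half-precision float as described in
# RFC 7049 Appendix D: the sign/exponent/mantissa bits are widened into
# single-precision bit positions, then either rescaled by 2**112 (the
# exponent bias difference, 127 - 15) or, when the half exponent is all
# ones, given an all-ones single exponent so Infinity and NaN survive.
# For example, 0x3C00 decodes to 1.0 and 0x7C00 to Float::INFINITY.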
def decode(state, additional_info)
half = state.consume(2).unpack("n".freeze).first
bits = (half & 0x7FFF) << 13 | (half & 0x8000) << 16
fp =
if (half & 0x7C00) != 0x7C00
Math.ldexp(to_single(bits), 112)
else
to_single(bits | 0x7F800000)
end
state.accept_value(self, fp)
end
private
def to_single(bits)
[bits].pack("N".freeze).unpack("g".freeze).first
end
end
end
end
| 25.827586 | 60 | 0.582109 |
ff2e199226302291b4b0a828e8e96e846973a024 | 884 | require_relative '../lib/typograph'
require_relative 'data/test_data'
describe Typograph::Client do
context "Testing with valid input to get type desinged text" do
typograph = Typograph::MdashTypograph.new
it "Should type design the passed text if mdash.ru's API is available" do
formatted_text = typograph.text(Test_data::TEST_TEXT)
expect(formatted_text).to eql "<p>Текст — зафиксированная на носителе человеческая мысль.</p>"
end
end
context "Testing with invalid input to get type desinged text" do
it "Should raise ArgumentError for passing nil to the #text method" do
nil_text = nil
typograph = Typograph::MdashTypograph.new
expect{
formatted_text = typograph.text(nil_text)
}.to raise_error(ArgumentError, "Incorrect argument given - #{nil_text.class}. Must be String!")
end
end
end
| 38.434783 | 116 | 0.725113 |
79a458d0b56f825d73d0f8f1f8a56d1aa9402e15 | 174 | class AddLtiLaunchToken < ActiveRecord::Migration[5.0]
def change
add_column :lti_launches, :token, :string
add_index :lti_launches, :token, unique: true
end
end
| 24.857143 | 54 | 0.741379 |
26a11a6972c741e47226a7049a66cb71653ebf75 | 1,219 | # encoding: UTF-8
require 'spec_helper'
describe Haystack::ZippedPayload do
describe "#initialize" do
it "should initialize a new `Haystack::ZippedPayload` and zip the body" do
payload = Haystack::ZippedPayload.new({'the' => 'payload'})
expect( payload.body ).to eql(Zlib::Deflate.deflate(
"{\"the\":\"payload\"}",
Zlib::BEST_SPEED
))
end
end
describe ".json_generate" do
subject { Haystack::ZippedPayload.send(:json_generate, body) }
context "with a valid body" do
let(:body) { {'the' => 'payload'} }
it { should == "{\"the\":\"payload\"}" }
end
context "with a body that contains strings with invalid utf-8 content" do
let(:string_with_invalid_utf8) { [0x61, 0x61, 0x85].pack('c*') }
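# 0x61 is "a"; the lone 0x85 byte is not valid UTF-8, so it is expected to be
# replaced with U+FFFD (�) in the generated JSON below.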
let(:body) { {
'field_one' => [0x61, 0x61].pack('c*'),
'field_two' => string_with_invalid_utf8,
'field_three' => [
'one', string_with_invalid_utf8
],
'field_four' => {
'one' => string_with_invalid_utf8
}
} }
it { should == "{\"field_one\":\"aa\",\"field_two\":\"aa�\",\"field_three\":[\"one\",\"aa�\"],\"field_four\":{\"one\":\"aa�\"}}" }
end
end
end
| 28.348837 | 136 | 0.568499 |
1862f427ec5ffe9c3dc1b17264ffe942f2556f70 | 1,632 | # frozen_string_literal: true
require "dry/monads/result"
module Dry
module Validation
# Hints extension
#
# @example
# Dry::Validation.load_extensions(:hints)
#
# contract = Dry::Validation::Contract.build do
# schema do
# required(:name).filled(:string, min_size?: 2..4)
# end
# end
#
# contract.call(name: "fo").hints
# # {:name=>["size must be within 2 - 4"]}
#
# contract.call(name: "").messages
# # {:name=>["must be filled", "size must be within 2 - 4"]}
#
# @api public
module Hints
# Hints extensions for Result
#
# @api public
module ResultExtensions
# Return error messages excluding hints
#
# @macro errors-options
# @return [MessageSet]
#
# @api public
def errors(new_options = EMPTY_HASH)
opts = new_options.merge(hints: false)
@errors.with(schema_errors(opts), opts)
end
# Return errors and hints
#
# @macro errors-options
#
# @return [MessageSet]
#
# @api public
def messages(new_options = EMPTY_HASH)
errors.with(hints.to_a, options.merge(**new_options))
end
# Return hint messages
#
# @macro errors-options
#
# @return [MessageSet]
#
# @api public
def hints(new_options = EMPTY_HASH)
schema_result.hints(new_options)
end
end
Dry::Schema.load_extensions(:hints)
Result.prepend(ResultExtensions)
end
end
end
| 23.314286 | 66 | 0.545343 |
5dd1d0e532de764a8a67f83c4e415c2b90facbe2 | 400 | cask :v1 => 'scrup' do
version '1.3.3'
sha256 '5004222db9a6ddd4e6cb525d00e95f8a38e9fb623bc1397e5258b2ef2c4bd3b0'
url "http://data.hunch.se/scrup/Scrup-#{version}-bd23160.zip"
appcast 'https://s.rsms.me/scrup/appcast.xml',
:sha256 => '140f4487d00bb157286f261bfddb8f7a8c29a4fc2e53a63119bdbe1c828a6d00'
homepage 'https://github.com/rsms/scrup'
license :oss
app 'Scrup.app'
end
| 30.769231 | 87 | 0.7425 |
4a336abbb0cc9a54257820c49a0bd6b627c1b70f | 1,847 | class Binutils < Formula
desc "GNU binary tools for native development"
homepage "https://www.gnu.org/software/binutils/binutils.html"
url "https://ftp.gnu.org/gnu/binutils/binutils-2.32.tar.gz"
mirror "https://ftpmirror.gnu.org/binutils/binutils-2.32.tar.gz"
sha256 "9b0d97b3d30df184d302bced12f976aa1e5fbf4b0be696cdebc6cca30411a46e"
# binutils is portable.
bottle do
cellar :any
sha256 "ef7ed4e1f676ba42a72928a6925febf3a95d73424986f8ebb8d4b458923004b1" => :x86_64_linux
end
if OS.mac?
keg_only :provided_by_macos,
"because Apple provides the same tools and binutils is poorly supported on macOS"
end
uses_from_macos "zlib"
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
("--with-sysroot=/" unless OS.mac?),
"--enable-deterministic-archives",
"--prefix=#{prefix}",
"--infodir=#{info}",
"--mandir=#{man}",
"--disable-werror",
"--enable-interwork",
"--enable-multilib",
"--enable-64-bit-bfd",
("--enable-gold" unless OS.mac?),
("--enable-plugins" unless OS.mac?),
"--enable-targets=all"
system "make"
system "make", "install"
bin.install_symlink "ld.gold" => "gold" unless OS.mac?
if OS.mac?
Dir["#{bin}/*"].each do |f|
bin.install_symlink f => "g" + File.basename(f)
end
end
# Reduce the size of the bottle.
system "strip", *Dir[bin/"*", lib/"*.a"] unless OS.mac?
end
test do
assert_match "Usage:", shell_output("#{bin}/strings #{bin}/strings")
end
end
| 34.203704 | 94 | 0.551164 |
28103242c3f65ab55490799f224390594b572509 | 2,487 | #
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
require 'thrift'
require 'serde_types'
SERIALIZATION_LIB = %q"serialization.lib"
SERIALIZATION_CLASS = %q"serialization.class"
SERIALIZATION_FORMAT = %q"serialization.format"
SERIALIZATION_DDL = %q"serialization.ddl"
SERIALIZATION_NULL_FORMAT = %q"serialization.null.format"
SERIALIZATION_ESCAPE_CRLF = %q"serialization.escape.crlf"
SERIALIZATION_LAST_COLUMN_TAKES_REST = %q"serialization.last.column.takes.rest"
SERIALIZATION_SORT_ORDER = %q"serialization.sort.order"
SERIALIZATION_NULL_SORT_ORDER = %q"serialization.sort.order.null"
SERIALIZATION_USE_JSON_OBJECTS = %q"serialization.use.json.object"
SERIALIZATION_ENCODING = %q"serialization.encoding"
FIELD_DELIM = %q"field.delim"
COLLECTION_DELIM = %q"collection.delim"
LINE_DELIM = %q"line.delim"
MAPKEY_DELIM = %q"mapkey.delim"
QUOTE_CHAR = %q"quote.delim"
ESCAPE_CHAR = %q"escape.delim"
HEADER_COUNT = %q"skip.header.line.count"
FOOTER_COUNT = %q"skip.footer.line.count"
VOID_TYPE_NAME = %q"void"
BOOLEAN_TYPE_NAME = %q"boolean"
TINYINT_TYPE_NAME = %q"tinyint"
SMALLINT_TYPE_NAME = %q"smallint"
INT_TYPE_NAME = %q"int"
BIGINT_TYPE_NAME = %q"bigint"
FLOAT_TYPE_NAME = %q"float"
DOUBLE_TYPE_NAME = %q"double"
STRING_TYPE_NAME = %q"string"
CHAR_TYPE_NAME = %q"char"
VARCHAR_TYPE_NAME = %q"varchar"
DATE_TYPE_NAME = %q"date"
DATETIME_TYPE_NAME = %q"datetime"
TIMESTAMP_TYPE_NAME = %q"timestamp"
DECIMAL_TYPE_NAME = %q"decimal"
BINARY_TYPE_NAME = %q"binary"
INTERVAL_YEAR_MONTH_TYPE_NAME = %q"interval_year_month"
INTERVAL_DAY_TIME_TYPE_NAME = %q"interval_day_time"
TIMESTAMPTZ_TYPE_NAME = %q"timestamp with time zone"
LIST_TYPE_NAME = %q"array"
MAP_TYPE_NAME = %q"map"
STRUCT_TYPE_NAME = %q"struct"
UNION_TYPE_NAME = %q"uniontype"
LIST_COLUMNS = %q"columns"
LIST_COLUMN_TYPES = %q"columns.types"
TIMESTAMP_FORMATS = %q"timestamp.formats"
COLUMN_NAME_DELIMITER = %q"column.name.delimiter"
PrimitiveTypes = Set.new([
%q"void",
%q"boolean",
%q"tinyint",
%q"smallint",
%q"int",
%q"bigint",
%q"float",
%q"double",
%q"string",
%q"varchar",
%q"char",
%q"date",
%q"datetime",
%q"timestamp",
%q"interval_year_month",
%q"interval_day_time",
%q"decimal",
%q"binary",
%q"timestamp with time zone",
])
CollectionTypes = Set.new([
%q"array",
%q"map",
])
IntegralTypes = Set.new([
%q"tinyint",
%q"smallint",
%q"int",
%q"bigint",
])
| 18.286765 | 79 | 0.733816 |
f86883d967f33f5b848407f9f933759a3377bd87 | 1,540 | set :stage, :production_es
set :deploy_to, '/var/www/pyx4.com/translate_es'
# Simple Role Syntax
# ==================
# Supports bulk-adding hosts to roles, the primary
# server in each group is considered to be the first
# unless any hosts have the primary property set.
role :app, %w{[email protected]}
role :web, %w{[email protected]}
role :db, %w{[email protected]}
# Extended Server Syntax
# ======================
# This can be used to drop a more detailed server
# definition into the server list. The second argument
# something that quacks like a hash can be used to set
# extended properties on the server.
server 'prod1.pyx4.com', user: 'deployer', roles: %w{web app}
# you can set custom ssh options
# it's possible to pass any option but you need to keep in mind that net/ssh understand limited list of options
# you can see them in [net/ssh documentation](http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start)
# set it globally
# set :ssh_options, {
# keys: %w(/home/rlisowski/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(password)
# }
# and/or per server
# server 'example.com',
# user: 'user_name',
# roles: %w{web app},
# ssh_options: {
# user: 'user_name', # overrides user setting above
# keys: %w(/home/user_name/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(publickey password)
# # password: 'please use keys'
# }
# setting per server overrides global ssh_options
# fetch(:default_env).merge!(rails_env: :production)
| 34.222222 | 115 | 0.692857 |
793434b8fda25d570606507b583ee24dc16d15a5 | 36 | module Iphy
VERSION = "0.1.0"
end
| 9 | 19 | 0.638889 |
38da0c1f440c1edc5d685856227c9ea1328065ef | 243 | ## $:.unshift(File.dirname(__FILE__))
## minitest setup
require 'minitest/autorun'
## our own code
require 'csvreader'
## add test_data_dir helper
class CsvReader
def self.test_data_dir
"#{root}/test/data"
end
end
| 14.294118 | 38 | 0.662551 |
e247e60cc19ac60d4a8955cb2f8d90b374f7c2d7 | 1,994 | class TerraformDocs < Formula
desc "Tool to generate documentation from Terraform modules"
homepage "https://github.com/terraform-docs/terraform-docs"
url "https://github.com/terraform-docs/terraform-docs/archive/v0.10.1.tar.gz"
sha256 "f3cc429d8edd129c73ca18feafd17bf1aacb0397b3653b7f65aa3978c4d6c337"
license "MIT"
bottle do
cellar :any_skip_relocation
sha256 "b4e60a1c75bdaf1ede49d26eab11db036a22282a1580ec0d34c2eb323b74dd6d" => :big_sur
sha256 "9af166d0e3af3d696770e94b819739e97efdedb623a07f2bc1fb3f0698760b44" => :catalina
sha256 "219f33fc44d5c4d032997fae412694f510ab62a33eb9816d387e1c24d8717605" => :mojave
sha256 "2544851eb87efa344c48392c85404f82c3ef9c5a868555f743cf12f45c860b48" => :high_sierra
sha256 "2277a1fcbce8925e2d1f0616db401645054aec01460bfb42291d36b40bb55ace" => :x86_64_linux
end
depends_on "go" => :build
def install
system "make", "build"
bin.install "bin/#{OS.mac? ? "darwin" : "linux"}-amd64/terraform-docs"
prefix.install_metafiles
end
test do
(testpath/"main.tf").write <<~EOS
/**
* Module usage:
*
* module "foo" {
* source = "github.com/foo/baz"
* subnet_ids = "${join(",", subnet.*.id)}"
* }
*/
variable "subnet_ids" {
description = "a comma-separated list of subnet IDs"
}
variable "security_group_ids" {
default = "sg-a, sg-b"
}
variable "amis" {
default = {
"us-east-1" = "ami-8f7687e2"
"us-west-1" = "ami-bb473cdb"
"us-west-2" = "ami-84b44de4"
"eu-west-1" = "ami-4e6ffe3d"
"eu-central-1" = "ami-b0cc23df"
"ap-northeast-1" = "ami-095dbf68"
"ap-southeast-1" = "ami-cf03d2ac"
"ap-southeast-2" = "ami-697a540a"
}
}
// The VPC ID.
output "vpc_id" {
value = "vpc-5c1f55fd"
}
EOS
system "#{bin}/terraform-docs", "json", testpath
end
end
| 30.676923 | 94 | 0.630893 |
e87412959ae6764a8185999ae130555e354d2b8f | 4,311 | # frozen_string_literal: true
class DonationService
include ActiveModel::Validations
attr_reader :user
attr_accessor :address_city,
:address_country_code,
:address_line1,
:address_zip,
:amount,
:customer_ip,
:email,
:fund_id,
:name,
:phone_number,
:stripe_phone_number,
:stripe_token
validates :name, presence: true
validates :email, presence: true, 'valid_email_2/email': true
validates :phone_number, presence: true
validates :amount, presence: true, numericality: {only_integer: true, greater_than_or_equal_to: 5}
validates :stripe_token, presence: true
validates :fund_id, presence: true
validates :address_line1, presence: true
validates :address_city, presence: true
validates :address_zip, presence: true
validates :address_country_code,
presence: true,
inclusion: {in: ISO3166::Country.all.map(&:alpha2)}
validates :customer_ip, presence: true
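# Typical use (hypothetical caller, e.g. a donations controller):
#   donation, errors = DonationService.new(donation_params, current_user).execute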
def initialize(params, user = nil)
params.each { |k, v| send("#{k}=", v) }
# amount needs to be int
self.amount = params[:amount].to_i
@user = user
end
def execute
return Donation.new, errors unless valid?
# Stripe max length for the phone field is 20
self.stripe_phone_number = phone_number.truncate(20, omission: "")
!!user ? save_donation_with_user : save_donation_without_user
end
def save_donation_with_user
stripe_customer = user.find_or_create_stripe_customer
# amount needs to be in cents for Stripe
amount_in_cents = amount * 100
stripe_charge =
Stripe::Charge.create(
amount: amount_in_cents,
currency: "usd",
source: stripe_token,
description: "One-time contribution."
)
if stripe_charge
donation =
Donation.new(
amount: amount,
charge_data: JSON.parse(stripe_charge.to_json),
charge_id: stripe_charge.id,
charge_provider: "stripe",
customer_ip: customer_ip,
customer_stripe_id: stripe_customer.id,
donation_type: Donation::DONATION_TYPES[:one_off],
fund_id: fund_id,
status: stripe_charge.status,
user_id: user.id,
user_data: {
address_city: address_city,
address_country: ISO3166::Country[address_country_code].name,
address_country_code: address_country_code,
address_line1: address_line1,
address_zip: address_zip,
email: email,
name: name,
phone_number: phone_number,
customer_ip: customer_ip
}
)
donation.save
[donation, errors]
end
rescue Stripe::StripeError => e
Raven.capture_exception(e)
errors.add(:base, e.message)
[Donation.new, errors]
end
def save_donation_without_user
customer =
Stripe::Customer.create(
name: name,
email: email,
phone: stripe_phone_number,
source: stripe_token
)
# amount needs to be in cents for Stripe
amount_in_cents = amount * 100
stripe_charge =
Stripe::Charge.create(
customer: customer.id,
amount: amount_in_cents,
description: "One time contribution",
currency: "usd"
)
if stripe_charge
donation =
Donation.new(
amount: amount,
charge_data: JSON.parse(stripe_charge.to_json),
charge_id: stripe_charge.id,
charge_provider: "stripe",
customer_ip: customer_ip,
customer_stripe_id: customer.id,
donation_type: Donation::DONATION_TYPES[:one_off],
fund_id: fund_id,
status: stripe_charge.status,
user_data: {
address_city: address_city,
address_country: ISO3166::Country[address_country_code].name,
address_country_code: address_country_code,
address_line1: address_line1,
address_zip: address_zip,
email: email,
name: name,
phone_number: phone_number,
customer_ip: customer_ip
}
)
donation.save
[donation, errors]
end
rescue Stripe::StripeError => e
Raven.capture_exception(e)
errors.add(:base, e.message)
[Donation.new, errors]
end
end
| 26.776398 | 100 | 0.639295 |
f83a7b3e5792b9084a7c25c07566f8c6af6ce3b7 | 521 | require('spec_helper')
describe(Project) do
describe("#employees") do
it("tells which employees are in it") do
test_project = Project.create({:title => "project"})
test_employee1 = Employee.create({:name => "employee1", :division_id => nil, :project_id => test_project.id})
test_employee2 = Employee.create({:name => "employee2", :division_id => nil, :project_id => test_project.id})
expect(test_project.employees()).to(eq([test_employee1, test_employee2]))
end
end
end #Project class
| 40.076923 | 115 | 0.68906 |
ff23282bdab0a21aa6a8c176ba0233a3eab2b6d1 | 195 | RSpec.describe New::Music::Cli do
it "has a version number" do
expect(New::Music::Cli::VERSION).not_to be nil
end
it "does something useful" do
expect(false).to eq(true)
end
end
| 19.5 | 50 | 0.682051 |