hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
1aa00216a01f2d03eb81c64ffb0cb081884d517f | 2,063 | # Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for varnish-rb, a bridge between Ruby and Varnish 3.
# NOTE(review): jeweler-generated (see file header) — edit Jeweler::Tasks in
# the Rakefile and run `rake gemspec` instead of hand-editing this file.
Gem::Specification.new do |s|
  s.name = "varnish-rb"
  s.version = "0.2.0"

  # Guarded with respond_to? so very old RubyGems without this writer still load the spec.
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Andrea Campi"]
  s.date = "2013-03-29"
  s.description = "varnish-rb provides a bridge between Ruby and [Varnish 3](http://varnish-cache.org/)"
  s.email = "[email protected]"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.md"
  ]
  # File list is frozen at generation time (jeweler snapshots the tree).
  s.files = [
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.md",
    "Rakefile",
    "VERSION",
    "examples/log.rb",
    "examples/log_tail.rb",
    "examples/request_stream.rb",
    "examples/request_tail.rb",
    "lib/em/buffered_channel.rb",
    "lib/em/varnish_log/connection.rb",
    "lib/varnish.rb",
    "lib/varnish/utils.rb",
    "lib/varnish/utils/timer.rb",
    "lib/varnish/vsl.rb",
    "lib/varnish/vsl/enum.rb",
    "lib/varnish/vsm.rb",
    "varnish-rb.gemspec"
  ]
  s.homepage = "http://github.com/andreacampi/varnish-rb"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.25"
  s.summary = "A bridge between Ruby and Varnish 3"

  # Dependency declarations branch on the RubyGems API available at load
  # time: modern gems distinguish runtime vs development dependencies,
  # pre-1.2.0 (and pre-specification_version) gems only know add_dependency.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<ffi>, [">= 0"])
      s.add_development_dependency(%q<bundler>, [">= 0"])
      s.add_development_dependency(%q<jeweler>, [">= 0"])
      s.add_development_dependency(%q<rake>, ["~> 0.9.2"])
    else
      s.add_dependency(%q<ffi>, [">= 0"])
      s.add_dependency(%q<bundler>, [">= 0"])
      s.add_dependency(%q<jeweler>, [">= 0"])
      s.add_dependency(%q<rake>, ["~> 0.9.2"])
    end
  else
    s.add_dependency(%q<ffi>, [">= 0"])
    s.add_dependency(%q<bundler>, [">= 0"])
    s.add_dependency(%q<jeweler>, [">= 0"])
    s.add_dependency(%q<rake>, ["~> 0.9.2"])
  end
end
| 30.338235 | 105 | 0.619971 |
bf2dac1597c16d225a9e9101b937aa456b6f20ec | 1,026 | require_relative './answer_formatter.rb'
module BaseballNews
  # Answers chat-style queries about Japanese pro baseball scores.
  class Answerer
    # Team-alias patterns, one per NPB team; the index of the first matching
    # pattern is the team number handed to AnswerFormatter. Hoisted to a
    # frozen constant so the 12 regexps are compiled once instead of being
    # rebuilt (and stored in an instance variable) on every #fetcher call.
    # NOTE: some alternatives intentionally repeat in full-width and
    # half-width forms (e.g. G/G) — keep them byte-identical.
    TEAM_PATTERNS = [
      /^(巨|ジャイアンツ|読売|G|G|兎)/i,
      /^(東京ヤクルト|ヤ|スワローズ|S|S|燕)/i,
      /^(横浜|De|De|DB|ベイスターズ|DB|星)/i,
      /^(中|ドラゴンズ|D|D|竜)/i,
      /^(タイガース|虎|神|T|阪|T)/i,
      /^(広|東洋|カープ|C|C|鯉)/i,
      /^(埼玉|西|ライオンズ|L|L|猫)/i,
      /^(日|ハム|ファイターズ|F|F|公)/i,
      /^(千葉|ロ|マリーンズ|M|M|鴎)/i,
      /^(オリックス|オ|バファローズ|B|B|檻)/i,
      /^(ソ|ホークス|H|H|福岡|SB|SB|鷹)/i,
      /^(楽|東北|E|イーグルス|鷲|E)/i
    ].freeze

    # Entry point. arg[0] is the incoming message text.
    # Returns nil unless the message starts with 野球 / 野球速報; otherwise
    # delegates to AnswerFormatter for the whole day or a specific team.
    def answer(*arg)
      msg = arg[0]
      return unless msg =~ /\A野球(|速報)/
      msg_a = msg.split
      return AnswerFormatter.day_all_game if msg_a[1].nil?
      teamnum = fetcher msg_a.last
      date = AnswerFormatter.make_date teamnum, msg_a[1]
      return unless date
      return AnswerFormatter.day_all_game date if teamnum == -1
      AnswerFormatter.team_game teamnum, date
    end

    # Returns the index (0-11) of the first team pattern matching +text+,
    # or -1 when no team is recognised.
    def fetcher(text)
      TEAM_PATTERNS.index { |w| text =~ w } || -1
    end
  end
end
| 27.72973 | 63 | 0.546784 |
111f3855835c0efe487509ba56206012b0f11d12 | 3,782 | # frozen_string_literal: true
require "spec_helper"
# RSpec coverage for Innodb::Page::Index, parsed from a fixture tablespace:
# spec/data/t_empty.ibd, page 3 — the root index page of an empty table, so
# all record counts are 0 and only the infimum/supremum system records exist.
# Uses legacy RSpec "should" expectation syntax throughout.
describe Innodb::Page::Index do
  before :all do
    @space = Innodb::Space.new("spec/data/t_empty.ibd")
    @page = @space.page(3)
  end

  describe "class" do
    it "registers itself as a specialized page type" do
      Innodb::Page.specialization_for?(:INDEX).should be_truthy
    end
  end

  describe "#new" do
    it "returns an Innodb::Page::Index" do
      @page.should be_an_instance_of Innodb::Page::Index
    end

    it "is an Innodb::Page" do
      @page.should be_a Innodb::Page
    end
  end

  describe "#page_header" do
    it "is a Innodb::Page::Index::PageHeader" do
      @page.page_header.should be_an_instance_of Innodb::Page::Index::PageHeader
    end

    # Expected values are fixed properties of the empty-table fixture page.
    it "has the right keys and values" do
      @page.page_header.n_dir_slots.should eql 2
      @page.page_header.heap_top.should eql 120
      @page.page_header.garbage_offset.should eql 0
      @page.page_header.garbage_size.should eql 0
      @page.page_header.last_insert_offset.should eql 0
      @page.page_header.direction.should eql :no_direction
      @page.page_header.n_direction.should eql 0
      @page.page_header.n_recs.should eql 0
      @page.page_header.max_trx_id.should eql 0
      @page.page_header.level.should eql 0
      @page.page_header.index_id.should eql 16
      @page.page_header.n_heap.should eql 2
      @page.page_header.format.should eql :compact
    end

    # The helpers mirror page-header fields and root detection.
    it "has helper functions" do
      @page.level.should eql @page.page_header[:level]
      @page.records.should eql @page.page_header[:n_recs]
      @page.directory_slots.should eql @page.page_header[:n_dir_slots]
      @page.root?.should eql true
    end
  end

  describe "#free_space" do
    it "returns the amount of free space" do
      @page.free_space.should eql 16_252
    end
  end

  describe "#used_space" do
    it "returns the amount of used space" do
      @page.used_space.should eql 132
    end
  end

  describe "#record_space" do
    it "returns the amount of record space" do
      @page.record_space.should eql 0
    end
  end

  describe "#fseg_header" do
    it "is a Innodb::Page::Index::FsegHeader" do
      @page.fseg_header.should be_an_instance_of Innodb::Page::Index::FsegHeader
    end

    it "has the right keys and values" do
      @page.fseg_header[:leaf].should be_an_instance_of Innodb::Inode
      @page.fseg_header[:internal].should be_an_instance_of Innodb::Inode
    end
  end

  describe "#record_header" do
    before :all do
      # Parse the header of the infimum system record via a raw cursor.
      @header = @page.record_header(@page.cursor(@page.pos_infimum))
    end

    it "is a Innodb::Page::Index::RecordHeader" do
      @header.should be_an_instance_of Innodb::Page::Index::RecordHeader
    end

    it "has the right keys and values" do
      @header.type.should eql :infimum
      @header.next.should eql 112
      @header.heap_number.should eql 0
      @header.n_owned.should eql 1
      @header.min_rec?.should eql false
      @header.deleted?.should eql false
    end
  end

  describe "#system_record" do
    it "can read infimum" do
      rec = @page.infimum
      rec.should be_an_instance_of Innodb::Record
      rec.record[:data].should eql "infimum\x00"
      rec.header.should be_an_instance_of Innodb::Page::Index::RecordHeader
      rec.header[:type].should eql :infimum
    end

    it "can read supremum" do
      rec = @page.supremum
      rec.should be_an_instance_of Innodb::Record
      rec.record[:data].should eql "supremum"
      rec.header.should be_an_instance_of Innodb::Page::Index::RecordHeader
      rec.header[:type].should eql :supremum
    end
  end

  describe "#record_cursor" do
    it "returns an Innodb::Page::Index::RecordCursor" do
      @page.record_cursor.should be_an_instance_of Innodb::Page::Index::RecordCursor
    end
  end
end
| 29.546875 | 84 | 0.690904 |
f8050b874b4d96c533e0b9d5ac5f68896223ed9c | 362 | # frozen_string_literal: true
class NewestWorksController < ApplicationV6Controller
  include WorkListable

  # Lists the most recently registered works, newest first, paginated.
  def index
    set_page_category PageCategory::NEWEST_WORK_LIST

    scope = Work.only_kept.preload(:work_image).order(id: :desc)
    @works = scope.page(params[:page]).per(display_works_count)

    set_resource_data(@works)
  end
end
| 19.052632 | 53 | 0.707182 |
5d7b0b5ce1422028d282bd2cd16db47a38057e55 | 623 | $LOAD_PATH << File.expand_path("../lib", __FILE__)
require 'slice_rename'
# Gem specification for slice_rename: slices an image and gives each slice a
# specific name. SliceRename::VERSION comes from the require above.
Gem::Specification.new do |s|
  s.name        = 'slice_rename'
  s.version     = SliceRename::VERSION
  s.executables << 'slice_rename'
  s.authors     = ['Zoee Silcock']
  s.email       = %q{[email protected]}
  s.homepage    = %q{https://github.com/rocketpants/slice_rename}
  s.description = %q{Slice an image and give each slice a specific name.}
  s.summary     = %q{Slice an image and give each slice a specific name.}
  s.licenses    = ['MIT']
  # File lists shell out to git at load time, so the gem must be built from
  # inside a git checkout (common pattern; fails silently elsewhere).
  s.files       = `git ls-files`.split("\n")
  s.test_files  = `git ls-files -- {spec}/*`.split("\n")
  s.require_paths = ['lib']
end
| 31.15 | 73 | 0.672552 |
918b03dbff5dfa955126c4105a56a51083e9db39 | 3,221 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_06_01
  module Models
    #
    # Private dns zone group resource.
    #
    # NOTE(review): AutoRest-generated model (see file header) — regenerate
    # rather than hand-editing. The :id property mapped below is inherited
    # from SubResource.
    class PrivateDnsZoneGroup < SubResource

      include MsRestAzure

      # @return [String] Name of the resource that is unique within a resource
      # group. This name can be used to access the resource.
      attr_accessor :name

      # @return [String] A unique read-only string that changes whenever the
      # resource is updated.
      attr_accessor :etag

      # @return [ProvisioningState] The provisioning state of the private dns
      # zone group resource. Possible values include: 'Succeeded', 'Updating',
      # 'Deleting', 'Failed'
      attr_accessor :provisioning_state

      # @return [Array<PrivateDnsZoneConfig>] A collection of private dns zone
      # configurations of the private dns zone group.
      attr_accessor :private_dns_zone_configs


      #
      # Mapper for PrivateDnsZoneGroup class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # read_only fields (etag, provisioning_state) are populated by the
      # service and never sent; dotted serialized_names flatten the wire
      # "properties" envelope onto this model.
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'PrivateDnsZoneGroup',
          type: {
            name: 'Composite',
            class_name: 'PrivateDnsZoneGroup',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              etag: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'etag',
                type: {
                  name: 'String'
                }
              },
              provisioning_state: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'properties.provisioningState',
                type: {
                  name: 'String'
                }
              },
              private_dns_zone_configs: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.privateDnsZoneConfigs',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'PrivateDnsZoneConfigElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'PrivateDnsZoneConfig'
                    }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 30.971154 | 78 | 0.502639 |
# Simple buy-low/sell-high trading bot for a single Poloniex pair.
# External collaborators (defined elsewhere): AFTER_FEE (fee multiplier,
# presumably slightly below 1 — confirm), CONFIG (app config hash),
# Morpheus.logger, Numeric#traffic_light, and Object#try (ActiveSupport).
class NewHermes
  attr_reader :sell_price, :notifier, :pair, :poloniex_clazz, :redis
  attr_accessor :last_buy_usd, :profit_sum

  # pair           - market pair string, e.g. "USDT_BTC" ("<quote>_<base>")
  # poloniex_clazz - class/module exposing buy/sell (and balances in production)
  # redis          - Redis client used to persist the last buy price
  def initialize(pair, poloniex_clazz, redis)
    @pair = pair
    @profit_sum = 0.0
    @poloniex_clazz = poloniex_clazz
    @redis = redis
  end

  # Last buy price, lazily loaded from redis; nil when never set.
  def last_buy_price
    @last_buy_price ||= redis.get("hermes:last_buy_price").try(:to_f)
  end

  # Persists the last buy price to redis and caches it locally.
  def last_buy_price=(x)
    redis.set("hermes:last_buy_price", x)
    @last_buy_price = x
  end

  # Memoized balances; see #balances! for the production/simulation split.
  def balances
    @balances ||= balances!
  end

  # Live balances in production; a fake in-memory wallet otherwise.
  def balances!
    if CONFIG[:mode].to_sym == :production
      Balances.new(poloniex_clazz).balances!
    else
      @fake_balances ||= {"USDT" => 100.0, "BTC" => 0.0}
    end
  end

  # The balance writers only mutate the fake wallet (simulation mode);
  # in production the exchange is the source of truth.
  def balance_usd=(usd)
    if CONFIG[:mode].to_sym != :production
      balances!["USDT"] = usd
    end
  end

  def balance_btc=(btc)
    if CONFIG[:mode].to_sym != :production
      balances!["BTC"] = btc
    end
  end

  # Drops the memoized balances so the next read refetches.
  def clear_balances!
    @balances = nil
  end

  # Quote-currency balance (first half of the pair, e.g. "USDT").
  def balance_usd
    balances[pair.split("_").first]
  end

  # Base-currency balance (second half of the pair, e.g. "BTC").
  def balance_btc
    balances[pair.split("_").last]
  end

  # Thin wrapper; exists so callers can route work through the bot.
  def handle!
    yield
  end

  # Unrealized profit if we sold the whole BTC balance at sell_price,
  # net of fees; 0 when there is no open position.
  def profit(sell_price)
    return 0 unless last_buy_usd
    AFTER_FEE*(balance_btc * sell_price) - last_buy_usd
  end

  # Spends the entire USD balance at sell_price. No-op when USD is 0.
  def buy!(sell_price)
    return unless balance_usd > 0
    self.last_buy_usd = balance_usd
    res = poloniex_clazz.buy(pair, sell_price, (balance_usd / sell_price))
    self.last_buy_price = sell_price
    self.balance_btc = AFTER_FEE*(balance_usd / sell_price)
    tmp = AFTER_FEE*(balance_usd / sell_price)
    Morpheus.logger.info("Buy: got #{"%0.5f" % tmp} for #{sell_price/AFTER_FEE} | -#{"%0.2f" % balance_usd} USD")
    self.balance_usd = 0.0
    res
  end

  # Sells the entire BTC balance at sell_price and books the profit.
  # No-op when BTC is 0. Statement order matters: the fake USD balance is
  # updated before the BTC balance is zeroed.
  def sell!(sell_price)
    return unless balance_btc > 0
    self.balance_usd = AFTER_FEE*(balance_btc * sell_price)
    self.profit_sum += profit(sell_price)
    res = poloniex_clazz.sell(pair, sell_price, (balance_btc))
    Morpheus.logger.info("Sell: got #{"%0.1f" % balance_usd} USD for #{"%0.1f" % (sell_price*AFTER_FEE)} | -#{"%0.5f" % balance_btc} BTC | Profit=#{profit(sell_price).round(1).traffic_light(0.0)} | Sum=#{profit_sum.round(1).traffic_light(0.0)}")
    self.balance_btc = 0.0
    self.last_buy_usd = 0.0
    self.last_buy_price = 0.0
    res
  end

  # Portfolio value in USD at the last close price, whichever side we hold.
  def final_usd(close_prices)
    if balance_usd == 0.0
      (balance_btc / AFTER_FEE) * close_prices.last
    else
      balance_usd
    end
  end

  # Break-even sell price for the last buy, accounting for fees.
  def profit_point
    # BUG FIX: the original called `hermes.last_buy_price`, but no `hermes`
    # method or variable exists in this class, so every call raised
    # NoMethodError. The intent is this instance's own last_buy_price.
    last_buy_price / AFTER_FEE
  end
end
# Thin wrapper around TheCocktailDB REST API for tequila-based drinks.
class API
  # Fetches all tequila drinks and wraps each result hash in a TequilaDrink,
  # stripping the thumbnail URL first.
  def self.tequila_drinks
    response = HTTParty.get("https://www.thecocktaildb.com/api/json/v1/1/filter.php?i=tequila")
    response["drinks"].each do |drink|
      # BUG FIX: the original `reject! { |attr| attr == "strDrinkThumb" }`
      # never removed anything — Hash#reject! yields [key, value] pairs, and
      # an Array never == a String. Delete the key directly instead.
      drink.delete("strDrinkThumb")
      TequilaDrink.new(drink)
    end
  end

  # Looks up full drink details by id and copies them onto tequila_obj via
  # its set_attributes hook.
  def self.tequila_by_id(tequila_obj)
    tequila_id = tequila_obj.idDrink
    url = "https://www.thecocktaildb.com/api/json/v1/1/lookup.php?i="
    # BUG FIX: the original built "#{url} #{tequila_id}" with a stray space
    # embedded in the URL, producing a malformed request path.
    response = HTTParty.get("#{url}#{tequila_id}")
    api_hash = response["drinks"][0]
    tequila_obj.set_attributes(api_hash)
  end

  # NOTE(review): @@all is never assigned anywhere in this class, so calling
  # API.all raises NameError (uninitialized class variable). Presumably a
  # leftover from a memoization pattern — confirm intent before relying on it.
  def self.all
    @@all
  end
end
bfbb195690938402abad95a960c9e7b0577c0a2a | 39,331 | # Copyright 2014-2015 Taxamo, Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at [apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "uri"
require "swagger"
require "taxamo/version"
require "require_all"
require_rel "taxamo/models/*"
module Taxamo
class <<self
# Percent-escapes +string+ (after to_s) for safe interpolation into URL
# path segments.
#
# BUG FIX: URI.encode (alias of URI.escape) was deprecated in Ruby 2.7 and
# removed in 3.0. URI::DEFAULT_PARSER.escape applies the same default
# unsafe-character set, so this is a drop-in, behavior-identical
# replacement that works on modern Rubies.
def escapeString(string)
  URI::DEFAULT_PARSER.escape(string.to_s)
end
# Creates a refund against an existing transaction.
# POST /api/v1/transactions/{key}/refunds
#
# key  - transaction key (required); URL-escaped into the path.
# body - refund input object (required); serialized via #to_body when the
#        object (or each element, for arrays) responds to it.
# opts - extra options; :configuration is forwarded to the Swagger client.
#
# Returns a CreateRefundOut built from the API response body.
# NOTE(review): generated Swagger-client boilerplate — the same pattern
# repeats for every endpoint method in this module.
def create_refund (key,body,opts={})
  # No query parameters for this endpoint, so queryopts below is always {}.
  query_param_keys = []

  # verify existence of params
  raise "key is required" if key.nil?
  raise "body is required" if body.nil?

  # set default values and merge with input
  options = {
    :key => key,
    :body => body}.merge(opts)

  #resource path
  path = "/api/v1/transactions/{key}/refunds".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))

  # pull querystring keys from options
  # (the block param `key` deliberately shadows the method param here —
  # generated code; it only inspects option keys)
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end

  headers = nil
  post_body = nil
  # Serialize the body: arrays element-by-element, otherwise the object
  # itself; #to_body is used when available, raw value otherwise.
  if body != nil
    if body.is_a?(Array)
      array = Array.new
      body.each do |item|
        if item.respond_to?("to_body".to_sym)
          array.push item.to_body
        else
          array.push item
        end
      end
      post_body = array
    else
      if body.respond_to?("to_body".to_sym)
        post_body = body.to_body
      else
        post_body = body
      end
    end
  end
  response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  CreateRefundOut.new(response)
end
def list_refunds (key,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
# set default values and merge with input
options = {
:key => key}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/refunds".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
ListRefundsOut.new(response)
end
def create_payment (key,body,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:key => key,
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/payments".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CreatePaymentOut.new(response)
end
def list_payments (limit,offset,key,opts={})
query_param_keys = [:limit,:offset]
# verify existence of params
raise "key is required" if key.nil?
# set default values and merge with input
options = {
:limit => limit,
:offset => offset,
:key => key}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/payments".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
ListPaymentsOut.new(response)
end
def capture_payment (key,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
# set default values and merge with input
options = {
:key => key}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/payments/capture".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CapturePaymentOut.new(response)
end
def email_invoice (key,body,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:key => key,
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/invoice/send_email".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
EmailInvoiceOut.new(response)
end
def email_refund (key,refund_note_number,body,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
raise "refund_note_number is required" if refund_note_number.nil?
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:key => key,
:refund_note_number => refund_note_number,
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/invoice/refunds/{refund_note_number}/send_email".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key)).sub('{' + 'refund_note_number' + '}', escapeString(refund_note_number))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
EmailRefundOut.new(response)
end
def create_transaction (body,opts={})
query_param_keys = []
# verify existence of params
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions".sub('{format}','json')
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CreateTransactionOut.new(response)
end
def get_transaction (key,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
# set default values and merge with input
options = {
:key => key}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
GetTransactionOut.new(response)
end
def update_transaction (key,body,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:key => key,
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:PUT, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
UpdateTransactionOut.new(response)
end
def confirm_transaction (key,body,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:key => key,
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/confirm".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
ConfirmTransactionOut.new(response)
end
def cancel_transaction (key,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
# set default values and merge with input
options = {
:key => key}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:DELETE, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CancelTransactionOut.new(response)
end
def unconfirm_transaction (key,body,opts={})
query_param_keys = []
# verify existence of params
raise "key is required" if key.nil?
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:key => key,
:body => body}.merge(opts)
#resource path
path = "/api/v1/transactions/{key}/unconfirm".sub('{format}','json').sub('{' + 'key' + '}', escapeString(key))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
UnconfirmTransactionOut.new(response)
end
def list_transactions (filter_text,offset,key_or_custom_id,currency_code,order_date_to,sort_reverse,limit,invoice_number,statuses,order_date_from,total_amount_greater_than,format,total_amount_less_than,tax_country_code,original_transaction_key,has_note,tax_country_codes, opts={})
query_param_keys = [:filter_text,:offset,:key_or_custom_id,:currency_code,:order_date_to,:sort_reverse,:limit,:invoice_number,:statuses,:original_transaction_key,:order_date_from,:total_amount_greater_than,:format,:total_amount_less_than,:tax_country_code,:has_note, :tax_country_codes]
# set default values and merge with input
options = {
:filter_text => filter_text,
:offset => offset,
:has_note => has_note,
:key_or_custom_id => key_or_custom_id,
:currency_code => currency_code,
:order_date_to => order_date_to,
:sort_reverse => sort_reverse,
:limit => limit,
:invoice_number => invoice_number,
:tax_country_codes => tax_country_codes,
:statuses => statuses,
:original_transaction_key => original_transaction_key,
:order_date_from => order_date_from,
:total_amount_greater_than => total_amount_greater_than,
:format => format,
:total_amount_less_than => total_amount_less_than,
:tax_country_code => tax_country_code}.merge(opts)
#resource path
path = "/api/v1/transactions".sub('{format}','json')
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
ListTransactionsOut.new(response)
end
def calculate_tax (body,opts={})
query_param_keys = []
# verify existence of params
raise "body is required" if body.nil?
# set default values and merge with input
options = {
:body => body}.merge(opts)
#resource path
path = "/api/v1/tax/calculate".sub('{format}','json')
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
if body != nil
if body.is_a?(Array)
array = Array.new
body.each do |item|
if item.respond_to?("to_body".to_sym)
array.push item.to_body
else
array.push item
end
end
post_body = array
else
if body.respond_to?("to_body".to_sym)
post_body = body.to_body
else
post_body = body
end
end
end
response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CalculateTaxOut.new(response)
end
def calculate_simple_tax (product_type,invoice_address_city,buyer_credit_card_prefix,currency_code,invoice_address_region,unit_price,quantity,buyer_tax_number,force_country_code,order_date,amount,billing_country_code,invoice_address_postal_code,total_amount,tax_deducted,b2b_number_service_on_error=nil,b2b_number_service_timeoutms=nil,b2b_number_service_cache_expiry_days=nil,opts={})
query_param_keys = [:product_type,:invoice_address_city,:buyer_credit_card_prefix,:currency_code,:invoice_address_region,:unit_price,:quantity,:buyer_tax_number,:force_country_code,:order_date,:amount,:billing_country_code,:invoice_address_postal_code,:total_amount,:tax_deducted,:b2b_number_service_on_error,:b2b_number_service_timeoutms,:b2b_number_service_cache_expiry_days]
# verify existence of params
raise "currency_code is required" if currency_code.nil?
# set default values and merge with input
options = {
:product_type => product_type,
:invoice_address_city => invoice_address_city,
:buyer_credit_card_prefix => buyer_credit_card_prefix,
:currency_code => currency_code,
:invoice_address_region => invoice_address_region,
:unit_price => unit_price,
:quantity => quantity,
:buyer_tax_number => buyer_tax_number,
:force_country_code => force_country_code,
:order_date => order_date,
:amount => amount,
:billing_country_code => billing_country_code,
:invoice_address_postal_code => invoice_address_postal_code,
:total_amount => total_amount,
:tax_deducted => tax_deducted,
:b2b_number_service_on_error => b2b_number_service_on_error,
:b2b_number_service_timeoutms => b2b_number_service_timeoutms,
:b2b_number_service_cache_expiry_days => b2b_number_service_cache_expiry_days}.merge(opts)
#resource path
path = "/api/v1/tax/calculate".sub('{format}','json')
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CalculateSimpleTaxOut.new(response)
end
def validate_tax_number (country_code,tax_number,opts={})
query_param_keys = [:country_code]
# verify existence of params
raise "tax_number is required" if tax_number.nil?
# set default values and merge with input
options = {
:country_code => country_code,
:tax_number => tax_number}.merge(opts)
#resource path
path = "/api/v1/tax/vat_numbers/{tax_number}/validate".sub('{format}','json').sub('{' + 'tax_number' + '}', escapeString(tax_number))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
ValidateTaxNumberOut.new(response)
end
def calculate_tax_location (billing_country_code,buyer_credit_card_prefix,opts={})
query_param_keys = [:billing_country_code,:buyer_credit_card_prefix]
# set default values and merge with input
options = {
:billing_country_code => billing_country_code,
:buyer_credit_card_prefix => buyer_credit_card_prefix}.merge(opts)
#resource path
path = "/api/v1/tax/location/calculate".sub('{format}','json')
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
CalculateTaxLocationOut.new(response)
end
def locate_my_i_p (opts={})
query_param_keys = []
# set default values and merge with input
options = {
}.merge(opts)
#resource path
path = "/api/v1/geoip".sub('{format}','json')
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
LocateMyIPOut.new(response)
end
def locate_given_i_p (ip,opts={})
query_param_keys = []
# verify existence of params
raise "ip is required" if ip.nil?
# set default values and merge with input
options = {
:ip => ip}.merge(opts)
#resource path
path = "/api/v1/geoip/{ip}".sub('{format}','json').sub('{' + 'ip' + '}', escapeString(ip))
# pull querystring keys from options
queryopts = options.select do |key,value|
query_param_keys.include? key
end
headers = nil
post_body = nil
response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
LocateGivenIPOut.new(response)
end
# GET /api/v1/stats/transactions/by_country — transaction statistics grouped by
# country. Raises when date_from/date_to are nil; global_currency_code is
# passed through unchecked (presumably optional — TODO confirm against API).
def get_transactions_stats_by_country (global_currency_code,date_from,date_to,opts={})
  query_param_keys = [:global_currency_code,:date_from,:date_to]
  # verify existence of params
  raise "date_from is required" if date_from.nil?
  raise "date_to is required" if date_to.nil?
  # set default values and merge with input
  options = {
    :global_currency_code => global_currency_code,
    :date_from => date_from,
    :date_to => date_to}.merge(opts)
  #resource path
  path = "/api/v1/stats/transactions/by_country".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetTransactionsStatsByCountryOut.new(response)
end
# GET /api/v1/stats/transactions — transaction statistics over a date range,
# bucketed by +interval+. Raises when date_from/date_to are nil; +interval+ is
# not nil-checked here (TODO confirm whether the API requires it).
def get_transactions_stats (date_from,date_to,interval,opts={})
  query_param_keys = [:date_from,:date_to,:interval]
  # verify existence of params
  raise "date_from is required" if date_from.nil?
  raise "date_to is required" if date_to.nil?
  # set default values and merge with input
  options = {
    :date_from => date_from,
    :date_to => date_to,
    :interval => interval}.merge(opts)
  #resource path
  path = "/api/v1/stats/transactions".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetTransactionsStatsOut.new(response)
end
# GET /api/v1/stats/settlement/by_country — settlement statistics grouped by
# country for the given date range (both bounds required).
def get_settlement_stats_by_country (date_from,date_to,opts={})
  query_param_keys = [:date_from,:date_to]
  # verify existence of params
  raise "date_from is required" if date_from.nil?
  raise "date_to is required" if date_to.nil?
  # set default values and merge with input
  options = {
    :date_from => date_from,
    :date_to => date_to}.merge(opts)
  #resource path
  path = "/api/v1/stats/settlement/by_country".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetSettlementStatsByCountryOut.new(response)
end
# GET /api/v1/stats/settlement/by_taxation_type — settlement statistics grouped
# by taxation type for the given date range (both bounds required).
def get_settlement_stats_by_taxation_type (date_from,date_to,opts={})
  query_param_keys = [:date_from,:date_to]
  # verify existence of params
  raise "date_from is required" if date_from.nil?
  raise "date_to is required" if date_to.nil?
  # set default values and merge with input
  options = {
    :date_from => date_from,
    :date_to => date_to}.merge(opts)
  #resource path
  path = "/api/v1/stats/settlement/by_taxation_type".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetSettlementStatsByTaxationTypeOut.new(response)
end
# GET /api/v1/stats/settlement/daily — daily settlement statistics. All three
# of interval/date_from/date_to are required (nil raises).
def get_daily_settlement_stats (interval,date_from,date_to,opts={})
  query_param_keys = [:interval,:date_from,:date_to]
  # verify existence of params
  raise "interval is required" if interval.nil?
  raise "date_from is required" if date_from.nil?
  raise "date_to is required" if date_to.nil?
  # set default values and merge with input
  options = {
    :interval => interval,
    :date_from => date_from,
    :date_to => date_to}.merge(opts)
  #resource path
  path = "/api/v1/stats/settlement/daily".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetDailySettlementStatsOut.new(response)
end
# GET /api/v1/reports/eu/vies — builds an EU VIES report. All ten arguments are
# forwarded as query params; only end_month, start_month and eu_country_code
# are nil-checked. Note: :format here is a query param, distinct from the
# '{format}' path placeholder (always substituted with 'json').
def get_eu_vies_report (period_length,lff_sequence_number,transformation,currency_code,end_month,tax_id,start_month,eu_country_code,fx_date_type,format,opts={})
  query_param_keys = [:period_length,:lff_sequence_number,:transformation,:currency_code,:end_month,:tax_id,:start_month,:eu_country_code,:fx_date_type,:format]
  # verify existence of params
  raise "end_month is required" if end_month.nil?
  raise "start_month is required" if start_month.nil?
  raise "eu_country_code is required" if eu_country_code.nil?
  # set default values and merge with input
  options = {
    :period_length => period_length,
    :lff_sequence_number => lff_sequence_number,
    :transformation => transformation,
    :currency_code => currency_code,
    :end_month => end_month,
    :tax_id => tax_id,
    :start_month => start_month,
    :eu_country_code => eu_country_code,
    :fx_date_type => fx_date_type,
    :format => format}.merge(opts)
  #resource path
  path = "/api/v1/reports/eu/vies".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetEuViesReportOut.new(response)
end
# GET /api/v1/reports/domestic/summary — domestic summary report.
# country_code, start_month and end_month are required (nil raises).
def get_domestic_summary_report (format,country_code,currency_code,start_month,end_month,fx_date_type,opts={})
  query_param_keys = [:format,:country_code,:currency_code,:start_month,:end_month,:fx_date_type]
  # verify existence of params
  raise "country_code is required" if country_code.nil?
  raise "start_month is required" if start_month.nil?
  raise "end_month is required" if end_month.nil?
  # set default values and merge with input
  options = {
    :format => format,
    :country_code => country_code,
    :currency_code => currency_code,
    :start_month => start_month,
    :end_month => end_month,
    :fx_date_type => fx_date_type}.merge(opts)
  #resource path
  path = "/api/v1/reports/domestic/summary".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetDomesticSummaryReportOut.new(response)
end
# GET /api/v1/settlement/detailed_refunds — paged list of detailed refunds.
# All arguments (including limit/offset paging) are optional query params;
# nothing is nil-checked.
def get_detailed_refunds (format,country_codes,date_from,date_to,limit,offset,opts={})
  query_param_keys = [:format,:country_codes,:date_from,:date_to,:limit,:offset]
  # set default values and merge with input
  options = {
    :format => format,
    :country_codes => country_codes,
    :date_from => date_from,
    :date_to => date_to,
    :limit => limit,
    :offset => offset}.merge(opts)
  #resource path
  path = "/api/v1/settlement/detailed_refunds".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetDetailedRefundsOut.new(response)
end
# GET /api/v1/settlement/refunds — refunds from +date_from+ onward (required);
# the remaining arguments are optional query-string filters.
def get_refunds (format,moss_country_code,tax_region,date_from,opts={})
  query_param_keys = [:format,:moss_country_code,:tax_region,:date_from]
  # verify existence of params
  raise "date_from is required" if date_from.nil?
  # set default values and merge with input
  options = {
    :format => format,
    :moss_country_code => moss_country_code,
    :tax_region => tax_region,
    :date_from => date_from}.merge(opts)
  #resource path
  path = "/api/v1/settlement/refunds".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetRefundsOut.new(response)
end
# GET /api/v1/settlement/{quarter} — settlement for the given quarter.
# +quarter+ is required and is a path parameter (deliberately absent from
# query_param_keys, so it is never duplicated into the query string).
def get_settlement (moss_tax_id,currency_code,end_month,tax_id,refund_date_kind_override,start_month,moss_country_code,format,tax_country_code,quarter,opts={})
  query_param_keys = [:moss_tax_id,:currency_code,:end_month,:tax_id,:refund_date_kind_override,:start_month,:moss_country_code,:format,:tax_country_code]
  # verify existence of params
  raise "quarter is required" if quarter.nil?
  # set default values and merge with input
  options = {
    :moss_tax_id => moss_tax_id,
    :currency_code => currency_code,
    :end_month => end_month,
    :tax_id => tax_id,
    :refund_date_kind_override => refund_date_kind_override,
    :start_month => start_month,
    :moss_country_code => moss_country_code,
    :format => format,
    :tax_country_code => tax_country_code,
    :quarter => quarter}.merge(opts)
  #resource path
  path = "/api/v1/settlement/{quarter}".sub('{format}','json').sub('{' + 'quarter' + '}', escapeString(quarter))
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetSettlementOut.new(response)
end
# GET /api/v1/settlement/summary/{quarter} — settlement summary for a quarter.
# +quarter+ is a required path parameter; the rest are optional query params.
def get_settlement_summary (moss_country_code,tax_region,start_month,end_month,quarter,opts={})
  query_param_keys = [:moss_country_code,:tax_region,:start_month,:end_month]
  # verify existence of params
  raise "quarter is required" if quarter.nil?
  # set default values and merge with input
  options = {
    :moss_country_code => moss_country_code,
    :tax_region => tax_region,
    :start_month => start_month,
    :end_month => end_month,
    :quarter => quarter}.merge(opts)
  #resource path
  path = "/api/v1/settlement/summary/{quarter}".sub('{format}','json').sub('{' + 'quarter' + '}', escapeString(quarter))
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetSettlementSummaryOut.new(response)
end
# POST /api/v1/verification/sms — creates an SMS verification token.
# +body+ is required; each element (or the object itself) is serialized via
# #to_body when it responds to it, otherwise sent as-is.
def create_s_m_s_token (body,opts={})
  query_param_keys = []
  # verify existence of params
  raise "body is required" if body.nil?
  # set default values and merge with input
  options = {
    :body => body}.merge(opts)
  #resource path
  path = "/api/v1/verification/sms".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  # Serialize the payload: arrays element-wise, single objects directly.
  if body != nil
    if body.is_a?(Array)
      array = Array.new
      body.each do |item|
        if item.respond_to?("to_body".to_sym)
          array.push item.to_body
        else
          array.push item
        end
      end
      post_body = array
    else
      if body.respond_to?("to_body".to_sym)
        post_body = body.to_body
      else
        post_body = body
      end
    end
  end
  response = Swagger::Request.new(:POST, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  CreateSMSTokenOut.new(response)
end
# GET /api/v1/verification/sms/{token} — verifies a previously issued SMS
# token. +token+ is required and is URL-escaped into the path.
def verify_s_m_s_token (token,opts={})
  query_param_keys = []
  # verify existence of params
  raise "token is required" if token.nil?
  # set default values and merge with input
  options = {
    :token => token}.merge(opts)
  #resource path
  path = "/api/v1/verification/sms/{token}".sub('{format}','json').sub('{' + 'token' + '}', escapeString(token))
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  VerifySMSTokenOut.new(response)
end
# GET /api/v1/dictionaries/currencies — currency dictionary (no parameters).
def get_currencies_dict (opts={})
  query_param_keys = []
  # set default values and merge with input
  options = {
    }.merge(opts)
  #resource path
  path = "/api/v1/dictionaries/currencies".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetCurrenciesDictOut.new(response)
end
# GET /api/v1/dictionaries/product_types — product-type dictionary
# (no parameters).
def get_product_types_dict (opts={})
  query_param_keys = []
  # set default values and merge with input
  options = {
    }.merge(opts)
  #resource path
  path = "/api/v1/dictionaries/product_types".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetProductTypesDictOut.new(response)
end
# GET /api/v1/dictionaries/countries — country dictionary, optionally filtered
# by +tax_supported+.
def get_countries_dict (tax_supported,opts={})
  query_param_keys = [:tax_supported]
  # set default values and merge with input
  options = {
    :tax_supported => tax_supported}.merge(opts)
  #resource path
  path = "/api/v1/dictionaries/countries".sub('{format}','json')
  # pull querystring keys from options
  queryopts = options.select do |key,value|
    query_param_keys.include? key
  end
  headers = nil
  post_body = nil
  response = Swagger::Request.new(:GET, path, {:params=>queryopts,:headers=>headers, :body=>post_body }, opts[:configuration]).make.body
  GetCountriesDictOut.new(response)
end
end
end
| 32.264971 | 389 | 0.626452 |
0366ae87ff46296aa145d7efb776a935f59896cb | 122 | # frozen_string_literal: true
# Join record associating a Channel with a WebHook.
class ChannelsWebHook < ApplicationRecord
  belongs_to :channel
  belongs_to :web_hook
end
| 17.428571 | 41 | 0.819672 |
030e5e6c9c767f911394a2045fc55aee2e85b878 | 4,538 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2019_12_01
  module Models
    #
    # Describes a virtual machine scale set network profile's network
    # configurations.
    #
    # NOTE: generated by AutoRest — prefer regenerating over hand-editing.
    # The mapper below drives MsRest serialization/deserialization; the
    # 'properties.*' serialized names flatten the ARM "properties" envelope.
    class VirtualMachineScaleSetUpdateNetworkConfiguration < SubResource

      include MsRestAzure

      # @return [String] The network configuration name.
      attr_accessor :name

      # @return [Boolean] Whether this is a primary NIC on a virtual machine.
      attr_accessor :primary

      # @return [Boolean] Specifies whether the network interface is
      # accelerated networking-enabled.
      attr_accessor :enable_accelerated_networking

      # @return [SubResource] The network security group.
      attr_accessor :network_security_group

      # @return [VirtualMachineScaleSetNetworkConfigurationDnsSettings] The dns
      # settings to be applied on the network interfaces.
      attr_accessor :dns_settings

      # @return [Array<VirtualMachineScaleSetUpdateIPConfiguration>] The
      # virtual machine scale set IP Configuration.
      attr_accessor :ip_configurations

      # @return [Boolean] Whether IP forwarding enabled on this NIC.
      attr_accessor :enable_ipforwarding

      #
      # Mapper for VirtualMachineScaleSetUpdateNetworkConfiguration class as
      # Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'VirtualMachineScaleSetUpdateNetworkConfiguration',
          type: {
            name: 'Composite',
            class_name: 'VirtualMachineScaleSetUpdateNetworkConfiguration',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              primary: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.primary',
                type: {
                  name: 'Boolean'
                }
              },
              enable_accelerated_networking: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.enableAcceleratedNetworking',
                type: {
                  name: 'Boolean'
                }
              },
              network_security_group: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.networkSecurityGroup',
                type: {
                  name: 'Composite',
                  class_name: 'SubResource'
                }
              },
              dns_settings: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.dnsSettings',
                type: {
                  name: 'Composite',
                  class_name: 'VirtualMachineScaleSetNetworkConfigurationDnsSettings'
                }
              },
              ip_configurations: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.ipConfigurations',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'VirtualMachineScaleSetUpdateIPConfigurationElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'VirtualMachineScaleSetUpdateIPConfiguration'
                      }
                  }
                }
              },
              enable_ipforwarding: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.enableIPForwarding',
                type: {
                  name: 'Boolean'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 33.367647 | 96 | 0.529749 |
6a370a59e1937149231770a6017a7d83a39189a9 | 112 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'rake-multifile'   # library under test (lib/ is prepended above)
require 'minitest/autorun' # runs Minitest tests automatically at exit
| 22.4 | 58 | 0.75 |
33136b6c9d4e4dfb8fc12d16eba71d2b2b906e66 | 861 | module YeetDba
class VerifyData
attr_accessor :column
def initialize(column:)
@column = column
end
def orphaned_rows?
orphaned_rows.first
end
def orphaned_rows_count
orphaned_rows.count
end
def query
orphaned_rows.to_sql
end
def orphaned_rows
association = column.association
column_name = column.db_column.name
table_name = column.table_name
association_table = column.association_table_name
model = column.model
# Check to see there could be rows with bad data
if model
model.joins("left join #{association_table} as association_table on association_table.id = #{table_name}.#{column_name}")
.where.not(column_name => nil)
.where('association_table.id is null')
else
[]
end
end
end
end
| 21.525 | 129 | 0.650407 |
e202f5a1632e70ff3729f0cca6a887ad595ca3fb | 275 | cask 'auto-updates' do
version '2.57'
sha256 'e44ffa103fbf83f55c8d0b1bea309a43b2880798dae8620b1ee8da5e1095ec68'
url "file://#{TEST_FIXTURE_DIR}/cask/transmission-2.61.dmg"
homepage 'http://example.com/auto-updates'
auto_updates true
app 'Transmission.app'
end
| 22.916667 | 75 | 0.770909 |
382e625c0378d0549b9465eafde6cc1ea2fb4b66 | 2,803 | # frozen_string_literal: true
#
# Copyright:: 2019, Chef Software, Inc.
# Author:: Tim Smith (<[email protected]>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module RuboCop
  module Cop
    module Chef
      module ChefModernize
        # Instead of using the execute or powershell_script resources to run the `tzutil` command, use
        # Chef Infra Client's built-in timezone resource which is available in Chef Infra Client 14.6 and later.
        #
        #   # bad
        #   execute 'set tz' do
        #     command 'tzutil.exe /s UTC'
        #   end
        #
        #   execute 'tzutil /s UTC'
        #
        #   powershell_script 'set windows timezone' do
        #     code "tzutil.exe /s UTC"
        #     not_if { shell_out('tzutil.exe /g').stdout.include?('UTC') }
        #   end
        #
        #   # good
        #   timezone 'UTC'
        #
        class ExecuteTzUtil < Base
          include RuboCop::Chef::CookbookHelpers
          extend TargetChefVersion

          minimum_target_chef_version '14.6'

          MSG = 'Use the timezone resource included in Chef Infra Client 14.6+ instead of shelling out to tzutil'
          RESTRICT_ON_SEND = [:execute].freeze

          # Matches the shorthand form: execute 'some command string'.
          def_node_matcher :execute_resource?, <<-PATTERN
            (send nil? :execute $str)
          PATTERN

          # Flags `execute 'tzutil ...'` one-liners.
          def on_send(node)
            execute_resource?(node) do
              return unless node.arguments.first.value.match?(/^tzutil/i)
              add_offense(node, message: MSG, severity: :refactor)
            end
          end

          # Flags block-form execute/powershell_script resources whose
          # command/code property invokes tzutil.
          def on_block(node)
            match_property_in_resource?(:execute, 'command', node) do |code_property|
              next unless calls_tzutil?(code_property)
              add_offense(node, message: MSG, severity: :refactor)
            end

            match_property_in_resource?(:powershell_script, 'code', node) do |code_property|
              next unless calls_tzutil?(code_property)
              add_offense(node, message: MSG, severity: :refactor)
            end
          end

          private

          # True when the property's string value starts with "tzutil"
          # (case-insensitive); nil otherwise.
          def calls_tzutil?(ast_obj)
            property_data = method_arg_ast_to_string(ast_obj)
            return true if property_data && property_data.match?(/^tzutil/i)
          end
        end
      end
    end
  end
end
| 33.771084 | 113 | 0.617196 |
086fbb9deebead3c9b97273a44041bfab3be113a | 540 | # frozen_string_literal: true
module GraphQL8
  module Types
    module Relay
      # This can be used for Relay's `Node` interface,
      # or you can take it as inspiration for your own implementation
      # of the `Node` interface.
      #
      # Declares a single non-null `id: ID` field on including types.
      module Node
        include Types::Relay::BaseInterface
        default_relay(true)
        description "An object with an ID."
        field(:id, ID, null: false, description: "ID of the object.")
        # TODO Should I implement `id` here to call the schema's hook?
      end
    end
  end
end
| 28.421053 | 70 | 0.646296 |
616e89a0d895055a0b751add5aef69c8093c5798 | 292 | class CreateLicenses < ActiveRecord::Migration
def self.up
create_table :licenses do |t|
t.column :name, :string
t.column :short_name, :string
t.column :url, :string
t.column :identifier, :string
end
end
def self.down
drop_table :licenses
end
end
| 19.466667 | 46 | 0.657534 |
ed397735586bbc22baeb56b55625a816f3eb395b | 6,387 | #
# Copyright 2011-2013, Dell
# Copyright 2013-2014, SUSE LINUX Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "uri"
# Filters added to this controller apply to all controllers in the application.
# Likewise, all the methods added will be available for all controllers.
# Base controller shared by every controller in the application.
#
# Responsibilities:
# * map common errors (missing params, not-found, Chef offline) to
#   consistent HTML/JSON responses via rescue_from;
# * gate requests behind the installer, an in-progress upgrade, and the
#   cached sanity checks;
# * provide the legacy reflection-based help/API discovery used by the CLI.
class ApplicationController < ActionController::Base
  rescue_from ActionController::ParameterMissing, with: :render_param_missing
  rescue_from ActiveRecord::RecordNotFound, with: :render_not_found
  rescue_from Crowbar::Error::NotFound, with: :render_not_found
  rescue_from Crowbar::Error::ChefOffline, with: :chef_is_offline

  # Warm the sanity-check cache once so the sanity_checks filter below can
  # read it cheaply on every request.
  before_action do
    Crowbar::Sanity.cache! unless Rails.cache.exist?(:sanity_check_errors)
  end

  # Unified on before_action: the file already used before_action above, and
  # before_filter is a deprecated alias removed in Rails 5.1.
  before_action :enforce_installer, unless: proc {
    Crowbar::Installer.successful? || Rails.env.test?
  }

  before_action :upgrade, if: proc {
    File.exist?("/var/lib/crowbar/upgrade/7-to-8-upgrade-running")
  }

  before_action :sanity_checks, unless: proc {
    Rails.env.test? || Rails.cache.fetch(:sanity_check_errors).empty?
  }

  # Basis for the reflection/help system.
  # First, a place to stash the help contents.
  # Using a class_attribute ensures that these contents are inherited by
  # children, but can be overridden or appended to by child classes without
  # messing up the contents we are building here.
  class_attribute :help_contents
  self.help_contents = []

  # Class method for adding method-specific help/API information for each
  # method exposed to the CLI. Being a class method, it runs once at class
  # definition time, so the overhead per request is zero.
  #
  # @param method      [Symbol] controller action being documented
  # @param args        [Array]  parameter names the action accepts
  # @param http_method [Array<Symbol>] HTTP verbs; one entry is created per
  #   verb. If the verbs take different parameters, call add_help once per
  #   parameter set instead.
  def self.add_help(method, args = [], http_method = [:get])
    http_method.each do |m|
      self.help_contents = help_contents.push(
        method => {
          "args" => args,
          "http_method" => m
        }
      )
    end
  end

  helper :all

  protect_from_forgery with: :exception
  # TODO: Disable it only for API calls
  skip_before_action :verify_authenticity_token

  # Renders +template+ as the layout for normal requests and no layout for
  # XHR requests.
  def self.set_layout(template = "application")
    layout proc { |controller|
      controller.is_ajax? ? nil : template
    }
  end

  def is_ajax?
    request.xhr?
  end

  add_help(:help)
  # Renders this controller's collected help entries as JSON, resolving each
  # documented action to a URL with "(arg)" placeholders.
  def help
    render json: { self.controller_name => self.help_contents.collect { |m|
      res = {}
      m.each { |k,v|
        # url_for can only be resolved at runtime, not at class definition
        # time, so the URLs are built here on demand.
        # NOTE(review): URI::unescape is obsolete and removed in Ruby 3.0;
        # kept for compatibility with the Ruby this app targets.
        url=URI::unescape(url_for({ action: k,
                                    controller: self.controller_name
                                  }.merge(v["args"].inject({}) {|acc,x|
                                    acc.merge({x.to_s => "(#{x.to_s})"})
                                  }
                                  )
                                 ))
        res.merge!({ k.to_s => v.merge({"url" => url})})
      }
      res
    }
    }
  end

  set_layout

  #########################
  # private stuff below.

  private

  # Stores the exception message in the flash and logs the backtrace.
  def flash_and_log_exception(e)
    flash[:alert] = e.message
    log_exception(e)
  end

  def log_exception(e)
    lines = [e.message] + e.backtrace
    Rails.logger.warn lines.join("\n")
  end

  # 406 for requests missing a required parameter.
  def render_param_missing(exception)
    Rails.logger.warn exception.message
    respond_to do |format|
      format.html do
        render "errors/param_missing", status: :not_acceptable
      end
      format.json do
        render json: { error: I18n.t("error.param_missing") }, status: :not_acceptable
      end
      format.any do
        render plain: I18n.t("error.param_missing"), status: :not_acceptable
      end
    end
  end

  # 404 for missing records/resources.
  def render_not_found
    respond_to do |format|
      format.html do
        render "errors/not_found", status: :not_found
      end
      format.json do
        render json: { error: I18n.t("error.not_found") }, status: :not_found
      end
      format.any do
        render plain: I18n.t("error.not_found"), status: :not_found
      end
    end
  end

  # 500 when the Chef server cannot be reached.
  def chef_is_offline
    respond_to do |format|
      format.html do
        render "errors/chef_offline", status: :internal_server_error
      end
      format.json do
        render json: { error: I18n.t("error.chef_server_down") }, status: :internal_server_error
      end
      format.any do
        render plain: I18n.t("error.chef_server_down"), status: :internal_server_error
      end
    end
  end

  # Until installation succeeded: redirect browsers to the installer, reject
  # API calls with 422.
  def enforce_installer
    respond_to do |format|
      format.html do
        redirect_to installer_root_path
      end
      format.json do
        render json: { error: I18n.t("error.before_install") }, status: :unprocessable_entity
      end
    end
  end

  # Surfaces cached sanity-check failures (HTML redirect or JSON 422).
  def sanity_checks
    respond_to do |format|
      format.html do
        redirect_to sanity_path
      end
      format.json do
        render json: {
          error: Rails.cache.fetch(:sanity_check_errors)
        }, status: :unprocessable_entity
      end
    end
  end

  # While an upgrade is running: JSON POSTs get 503, JSON GETs pass through,
  # HTML always renders the upgrade notice with 503.
  def upgrade
    respond_to do |format|
      format.json do
        if request.post?
          render json: { error: I18n.t("error.during_upgrade") }, status: :service_unavailable
        else
          return
        end
      end
      format.html do
        render "errors/during_upgrade", status: :service_unavailable
      end
    end
  end
end
| 29.164384 | 96 | 0.655394 |
bf2e2cee51ffdfa18f1aba9ad01e6610381c38f7 | 778 | Pod::Spec.new do |s|
s.name = "WJNetworkContextService"
s.version = "1.0.1"
s.summary = "network context service."
s.description = <<-DESC
网络环境服务,网络连接状态、通知等
DESC
s.homepage = "https://github.com/yunhaiwu"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "吴云海" => "[email protected]" }
s.platform = :ios, "6.0"
s.source = { :git => "https://github.com/yunhaiwu/ios-wj-network-context-service.git", :tag => "#{s.version}" }
s.source_files = "Classes/*.{h,m}"
s.exclude_files = "Example"
s.public_header_files = "Classes/*.h"
s.frameworks = "Foundation", "UIKit"
s.requires_arc = true
s.dependency "WJLoggingAPI"
s.dependency "WJAppContext", '>=2.0'
s.dependency "AFNetworking/Reachability", '~> 3.1'
end
| 21.027027 | 117 | 0.620823 |
e2781c80c948b8fa795080885ea076bd347fd74c | 5,555 | =begin
#Square Connect API
OpenAPI spec version: 2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'date'
module SquareConnect
  # Request body for the BatchDeleteCatalogObjects endpoint.
  # NOTE: swagger-codegen generated model — prefer regenerating over editing.
  class BatchDeleteCatalogObjectsRequest
    # The IDs of the [CatalogObject](#type-catalogobject)s to be deleted. When an object is deleted, other objects in the graph that depend on that object will be deleted as well (for example, deleting a [CatalogItem](#type-catalogitem) will delete its [CatalogItemVariation](#type-catalogitemvariation)s).
    attr_accessor :object_ids

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'object_ids' => :'object_ids'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'object_ids' => :'Array<String>'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}

      if attributes.has_key?(:'object_ids')
        if (value = attributes[:'object_ids']).is_a?(Array)
          self.object_ids = value
        end
      end

    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properies with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          object_ids == o.object_ids
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [object_ids].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = SquareConnect.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map{ |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end

  end
end
| 29.391534 | 306 | 0.626643 |
ff678477368f9480ab5760dabae7bb105376c9a3 | 2,125 | require 'uri'
module RHC
module Rest
class Base
include Attributes
extend AttributesClass
define_attr :messages
URI_ESCAPE_REGEX = Regexp.new("[^#{URI::PATTERN::UNRESERVED}]")
def initialize(attrs=nil, client=nil)
@attributes = (attrs || {}).stringify_keys!
@attributes['messages'] ||= []
@client = client
end
def add_message(msg)
messages << msg
end
def rest_method(link_name, payload={}, options={})
link = link(link_name)
raise "No link defined for #{link_name}" unless link
url = link['href']
url = url.gsub(/:\w+/) { |s| URI.escape(options[:params][s], URI_ESCAPE_REGEX) || s } if options[:params]
method = options[:method] || link['method']
result = client.request(options.merge({
:url => url,
:method => method,
:payload => payload,
}))
if result.is_a?(Hash) && (result['messages'] || result['errors'])
attributes['messages'] = Array(result['messages'])
result = self
end
result
end
def links
attributes['links'] || {}
end
def supports?(sym)
!!link(sym)
end
def has_param?(sym, name)
if l = link(sym)
(l['required_params'] || []).any?{ |p| p['name'] == name} or (l['optional_params'] || []).any?{ |p| p['name'] == name}
end
end
def link_href(sym, params=nil, resource=nil, &block)
if (l = link(sym)) && (h = l['href'])
h = h.gsub(/:\w+/){ |s| params[s].nil? ? s : URI.escape(params[s], URI_ESCAPE_REGEX) } if params
h = "#{h}/#{resource}" if resource
return h
end
yield if block_given?
end
protected
attr_reader :client
def link(sym)
(links[sym.to_s] || links[sym.to_s.upcase])
end
def debug(msg, obj=nil)
client.debug("#{msg}#{obj ? " #{obj}" : ''}") if client && client.debug?
end
def debug?
client && client.debug?
end
end
end
end | 26.234568 | 128 | 0.519529 |
792e38e2aead129a69936c53c75bf09b8a80e3bd | 932 | # frozen_string_literal: true
require "spec_helper"
require "net/http"
describe "health server" do
  before(:all) do
    # Minimal stand-in for an AnyCable server: only #running? is needed.
    @server = Object.new.tap do |obj|
      obj.define_singleton_method(:running?) { true }
    end
    @health_server = AnyCable::HealthServer.new(
      @server,
      port: 54_321
    )
    @health_server.start
  end
  after(:all) { @health_server.stop }
  context "when server is running" do
    before { allow(@server).to receive(:running?).and_return(true) }
    it "responds with 200" do
      res = Net::HTTP.get_response(URI("http://localhost:54321/health"))
      expect(res.code).to eq("200"), res.body
    end
  end
  context "when server is not running" do
    before { allow(@server).to receive(:running?).and_return(false) }
    # Fixed description: this example asserts a 503, not a 200.
    it "responds with 503" do
      res = Net::HTTP.get_response(URI("http://localhost:54321/health"))
      expect(res.code).to eq("503"), res.body
    end
  end
end
| 23.897436 | 72 | 0.654506 |
bffb70a564fe4e88de8d69a9d2e7a5b4a8aece1e | 175 | FactoryBot.define do
factory :project_transition do
association :project
most_recent { true }
to_state { 'online' }
sort_key { generate(:serial) }
end
end
| 19.444444 | 34 | 0.685714 |
18c603c3d4d733dc1fd1b961ebdd903c6cc3b7f9 | 494 | class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
helper :all
def authenticate_active_admin_user!
authenticate_user!
unless current_user
flash[:alert] = "Unauthorized Access!"
redirect_to root_path
end
end
rescue_from CanCan::AccessDenied do |exception|
render(:template => 'articles/missing')
end
end
| 23.52381 | 56 | 0.736842 |
392d77fd7a1ba0264f1bbb332190bb48200797fc | 3,257 | module HappyMapper
module SupportedTypes
extend self
#
# All of the registerd supported types that can be parsed.
#
# All types defined here are set through #register.
#
def types
@types ||= []
end
#
# Add a new converter to the list of supported types. A converter
# is an object that adheres to the protocol which is defined with two
# methods #apply?(value,convert_to_type) and #apply(value).
#
# @example Defining a class that would process `nil` or values that have
# already been converted.
#
# class NilOrAlreadyConverted
# def apply?(value,convert_to_type)
# value.kind_of?(convert_to_type) || value.nil?
# end
#
# def apply(value)
# value
# end
# end
#
#
def register(type_converter)
types.push type_converter
end
#
# An additional shortcut registration method that assumes that you want
# to perform a conversion on a specific type. A block is provided which
# is the operation to perform when #apply(value) has been called.
#
# @example Registering a DateTime parser
#
# HappyMapper::SupportedTypes.register_type DateTime do |value|
# DateTime.parse(value,to_s)
# end
#
def register_type(type,&block)
register CastWhenType.new(type,&block)
end
#
# Many of the conversions are based on type. When the type specified
# matches then perform the action specified in the specified block.
# If no block is provided the value is simply returned.
#
class CastWhenType
attr_reader :type
def initialize(type,&block)
@type = type
@apply_block = block || no_operation
end
def no_operation
lambda {|value| value }
end
def apply?(value,convert_to_type)
convert_to_type == type
end
def apply(value)
@apply_block.call(value)
end
end
#
# For the cases when the value is nil or is already the
# intended type then no work needs to be done and the
# value simply can be returned.
#
class NilOrAlreadyConverted
def type
NilClass
end
def apply?(value,convert_to_type)
value.kind_of?(convert_to_type) || value.nil?
end
def apply(value)
value
end
end
register NilOrAlreadyConverted.new
register_type String do |value|
value.to_s
end
register_type Float do |value|
value.to_f
end
register_type Time do |value|
Time.parse(value.to_s) rescue Time.at(value.to_i)
end
register_type Date do |value|
Date.parse(value.to_s)
end
register_type DateTime do |value|
DateTime.parse(value.to_s)
end
register_type Boolean do |value|
['true', 't', '1'].include?(value.to_s.downcase)
end
register_type Integer do |value|
value_to_i = value.to_i
if value_to_i == 0 && value != '0'
value_to_s = value.to_s
begin
Integer(value_to_s =~ /^(\d+)/ ? $1 : value_to_s)
rescue ArgumentError
nil
end
else
value_to_i
end
end
end
end | 23.264286 | 76 | 0.610685 |
1d4f2a8d9436603e5a2aa1379b8f9226a59307b2 | 942 | require 'perigren_github_webhooks'
RSpec.describe PerigrenGithubWebhooks::CreateEventService, type: :service do
  # Canned GitHub "create" webhook payload shared by every example.
  let(:test_data) { JSON.parse(File.read('spec/test_data/webhooks/event-create.json')) }
  describe '#perform' do
    event = nil
    before do
      event = described_class.new(test_data).perform
    end
    it 'creates a create event' do
      expect(event.ref).to eq 'simple-tag'
      expect(event.master_branch).to eq 'master'
      expect(event.pusher_type).to eq 'user'
    end
    it 'creates the repository' do
      repo = PerigrenGithubWebhooks::Repository.find(test_data['repository']['id'])
      expect(repo.node_id).to eq test_data['repository']['node_id']
      expect(repo.name).to eq 'Hello-World'
    end
    it 'creates the sender' do
      user = PerigrenGithubWebhooks::GithubUser.find(21031067)
      expect(user.login).to eq 'Codertocat'
      expect(user.site_admin).to be_falsey
    end
  end
end
| 30.387097 | 88 | 0.694268 |
5d048610b2bc7d2dc49ff015728638cde30f10ce | 1,703 | require 'rails_admin/config/fields/association'
module RailsAdmin
  module Config
    module Fields
      module Types
        # Field type rendering a belongs_to association in RailsAdmin.
        class BelongsToAssociation < RailsAdmin::Config::Fields::Association
          RailsAdmin::Config::Fields::Types.register(self)
          # Display the associated record via its configured label method.
          register_instance_option :formatted_value do
            (o = value) && o.send(associated_model_config.object_label_method)
          end
          # Sort on the label column when the adapter supports joins and the
          # label method is a real column; otherwise sort by the foreign key.
          register_instance_option :sortable do
            @sortable ||= abstract_model.adapter_supports_joins? && associated_model_config.abstract_model.properties.collect { |p| p.name }.include?(associated_model_config.object_label_method) ? associated_model_config.object_label_method : {abstract_model.table_name => method_name}
          end
          # Same column-vs-foreign-key decision, for search.
          register_instance_option :searchable do
            @searchable ||= associated_model_config.abstract_model.properties.collect { |p| p.name }.include?(associated_model_config.object_label_method) ? [associated_model_config.object_label_method, {abstract_model.model => method_name}] : {abstract_model.model => method_name}
          end
          register_instance_option :partial do
            nested_form ? :form_nested_one : :form_filtering_select
          end
          register_instance_option :inline_add do
            true
          end
          register_instance_option :inline_edit do
            true
          end
          # Currently selected foreign-key value on the edited object.
          def selected_id
            bindings[:object].send(foreign_key)
          end
          def method_name
            nested_form ? "#{name}_attributes".to_sym : association.foreign_key
          end
          # A belongs_to points at a single record.
          def multiple?
            false
          end
        end
      end
    end
  end
end
| 34.06 | 285 | 0.665884 |
39c88b349b852bed630eef2561a8de223361dc76 | 82 | class ApplicationController < ActionController::Base
include LogQueryParams
end
| 20.5 | 52 | 0.853659 |
792640fb0275d28363aec1b4299b2a8a7d0c8ff2 | 361 | class AddUsaMembershipConfigToEventConfigurations < ActiveRecord::Migration[4.2]
def up
add_column :event_configurations, :usa_membership_config, :boolean, default: false
execute "UPDATE event_configurations SET usa_membership_config = true WHERE usa = true"
end
def down
remove_column :event_configurations, :usa_membership_config
end
end
| 32.818182 | 91 | 0.800554 |
e8a2a1a0d593c9b12287a37c0ce863689ac7b9bc | 4,310 | require File.expand_path(File.dirname(__FILE__) + '/neo')
# Project: Create a Proxy Class
#
# In this assignment, create a proxy class (one is started for you
# below). You should be able to initialize the proxy object with any
# object. Any messages sent to the proxy object should be forwarded
# to the target object. As each message is sent, the proxy should
# record the name of the method sent.
#
# The proxy class is started for you. You will need to add a method
# missing handler and any other supporting methods. The specification
# of the Proxy class is given in the AboutProxyObjectProject koan.
# Forwarding proxy that records every message it relays to the target.
#
# Only methods the target defines beyond Object's own interface are
# forwarded; anything else raises NoMethodError as usual.
class Proxy
  # @param target_object [Object] object to wrap and forward calls to
  def initialize(target_object)
    @object = target_object
    # Hash.new(0) lets us count calls without checking for missing keys.
    @object_method_calls = Hash.new(0)
    @methods_names_to_respond = target_object.methods - Object.methods
  end

  def method_missing(method_name, *args, &block)
    if @methods_names_to_respond.include?(method_name)
      forward_call(method_name, *args)
    else
      super
    end
  end

  # Properly advertise the forwarded methods so the inherited respond_to?
  # works (replaces the previous no-op stub plus respond_to? override).
  def respond_to_missing?(method_name, include_all = false)
    @methods_names_to_respond.include?(method_name) || super
  end

  # Relays the call to the wrapped object, tallying the method name.
  def forward_call(method_name, *args)
    name = method_name.to_sym
    @object_method_calls[name] += 1
    # send with a splat handles the zero-argument case too; no branch needed.
    @object.send(name, *args)
  end

  # All method names forwarded so far, in first-call order.
  def messages
    @object_method_calls.keys
  end

  # True if +method_name+ has been forwarded at least once.
  def called?(method_name)
    @object_method_calls.key?(method_name)
  end

  # Number of times +method_name+ has been forwarded (0 if never).
  def number_of_times_called(method_name)
    @object_method_calls[method_name]
  end
end
# The proxy object should pass the following Koan:
#
# Koans exercising the Proxy class defined above: wrapping, forwarding,
# message recording, call counting, and NoMethodError passthrough.
class AboutProxyObjectProject < Neo::Koan
  def test_proxy_method_returns_wrapped_object
    # NOTE: The Television class is defined below
    tv = Proxy.new(Television.new)
    # HINT: Proxy class is defined above, may need tweaking...
    assert tv.instance_of?(Proxy)
  end
  def test_tv_methods_still_perform_their_function
    tv = Proxy.new(Television.new)
    tv.channel = 10
    tv.power
    assert_equal 10, tv.channel
    assert tv.on?
  end
  def test_proxy_records_messages_sent_to_tv
    tv = Proxy.new(Television.new)
    tv.power
    tv.channel = 10
    assert_equal [:power, :channel=], tv.messages
  end
  def test_proxy_handles_invalid_messages
    tv = Proxy.new(Television.new)
    assert_raise(NoMethodError) do
      tv.no_such_method
    end
  end
  def test_proxy_reports_methods_have_been_called
    tv = Proxy.new(Television.new)
    tv.power
    tv.power
    assert tv.called?(:power)
    assert !tv.called?(:channel)
  end
  def test_proxy_counts_method_calls
    tv = Proxy.new(Television.new)
    tv.power
    tv.channel = 48
    tv.power
    assert_equal 2, tv.number_of_times_called(:power)
    assert_equal 1, tv.number_of_times_called(:channel=)
    assert_equal 0, tv.number_of_times_called(:on?)
  end
  def test_proxy_can_record_more_than_just_tv_objects
    proxy = Proxy.new("Code Mash 2009")
    proxy.upcase!
    result = proxy.split
    assert_equal ["CODE", "MASH", "2009"], result
    assert_equal [:upcase!, :split], proxy.messages
  end
end
# ====================================================================
# The following code is to support the testing of the Proxy class. No
# changes should be necessary to anything below this comment.
# Example class using in the proxy testing above.
# Simple two-state appliance used as the proxy target in the koans above.
class Television
  attr_accessor :channel

  # Toggles the power state between :on and :off.
  def power
    @power = (@power == :on ? :off : :on)
  end

  # True while the set is powered on.
  def on?
    @power == :on
  end
end
# Tests for the Television class. All of theses tests should pass.
# Sanity tests for Television itself (power toggling and channel setting).
class TelevisionTest < Neo::Koan
  def test_it_turns_on
    tv = Television.new
    tv.power
    assert tv.on?
  end
  def test_it_also_turns_off
    tv = Television.new
    tv.power
    tv.power
    assert !tv.on?
  end
  def test_edge_case_on_off
    tv = Television.new
    tv.power
    tv.power
    tv.power
    assert tv.on?
    tv.power
    assert !tv.on?
  end
  def test_can_set_the_channel
    tv = Television.new
    tv.channel = 11
    assert_equal 11, tv.channel
  end
end
| 21.767677 | 91 | 0.697912 |
ffba30bb93a954f52b0a4be9b18bb8fe7ef40da1 | 444 | module Lolita
module Configuration
class FieldSet
@@last_fieldset=0
attr_reader :parent
attr_accessor :name
def initialize parent,name=nil
@parent=parent
self.name=name || "fieldset_#{next_fieldset}"
end
def fields
self.parent.fields.reject{|f| f.field_set!=self}
end
private
def next_fieldset
@@last_fieldset+=1
end
end
end
end | 17.076923 | 56 | 0.596847 |
219120587cb3f925731a585af05d2f476fe81e64 | 319 | module CompareFiles
class DiffTools
# pass in arrays of filtered items to compare against one another
def self.new_right(left, right)
left ||= []
right ||= []
right - left
end
def self.new_left(left, right)
left ||= []
right ||= []
left - right
end
end
end
| 17.722222 | 69 | 0.570533 |
ab93b49c840789240b6fddbd36fc7727f59e2e54 | 312 | cask :v1 => 'air-video-server' do
version '2.4.6-beta3u2'
sha256 '479af913987a4cc8414969a8d4a4c164a4bd0a22d156829a983b4c58e9dd3f6e'
url "https://s3.amazonaws.com/AirVideo/Air+Video+Server+#{version}.dmg"
homepage 'http://www.inmethod.com/air-video/'
license :unknown
app 'Air Video Server.app'
end
| 28.363636 | 75 | 0.75 |
1c542f5767a74a17cd7d191707cd5245605df8d4 | 2,176 | ActiveRecord::Migration.suppress_messages do
%w{gates multiple_gates readers writers transients simples no_scopes multiple_no_scopes no_direct_assignments multiple_no_direct_assignments thieves multiple_thieves localizer_test_models persisted_states provided_and_persisted_states with_enums with_enum_without_columns multiple_with_enum_without_columns with_true_enums with_false_enums false_states multiple_with_enums multiple_with_true_enums multiple_with_false_enums multiple_false_states readme_jobs}.each do |table_name|
ActiveRecord::Migration.create_table table_name, :force => true do |t|
t.string "aasm_state"
end
end
ActiveRecord::Migration.create_table "simple_new_dsls", :force => true do |t|
t.string "status"
end
ActiveRecord::Migration.create_table "multiple_simple_new_dsls", :force => true do |t|
t.string "status"
end
ActiveRecord::Migration.create_table "complex_active_record_examples", :force => true do |t|
t.string "left"
t.string "right"
end
%w(validators multiple_validators).each do |table_name|
ActiveRecord::Migration.create_table table_name, :force => true do |t|
t.string "name"
t.string "status"
end
end
%w(transactors no_lock_transactors lock_transactors lock_no_wait_transactors multiple_transactors).each do |table_name|
ActiveRecord::Migration.create_table table_name, :force => true do |t|
t.string "name"
t.string "status"
t.integer "worker_id"
end
end
ActiveRecord::Migration.create_table "workers", :force => true do |t|
t.string "name"
t.string "status"
end
ActiveRecord::Migration.create_table "invalid_persistors", :force => true do |t|
t.string "name"
t.string "status"
end
ActiveRecord::Migration.create_table "multiple_invalid_persistors", :force => true do |t|
t.string "name"
t.string "status"
end
ActiveRecord::Migration.create_table "fathers", :force => true do |t|
t.string "aasm_state"
t.string "type"
end
ActiveRecord::Migration.create_table "basic_active_record_two_state_machines_examples", :force => true do |t|
t.string "search"
t.string "sync"
end
end
| 36.881356 | 481 | 0.753217 |
61af8354f1ba50b6b18be41b7941093d485b57ea | 147 | module EShipper
class Status < OpenStruct
POSSIBLE_FIELDS = [ :name, :date, :assigned_by, :comments ]
REQUIRED_FIELDS = []
end
end | 21 | 63 | 0.666667 |
e9b5dfaeacb8093ada353b9ce009f74fe3eb39ce | 12,845 | LightweightStandalone::Application.routes.draw do
resources :approved_scripts
resources :projects do
member do
get :about
get :help
get :contact_us
end
end
resources :themes
root :to => 'home#home'
resources :question_trackers do
member do
post 'add_embeddable'
post 'replace_master'
end
end
namespace :embeddable do
resources :image_question_answers
end
namespace :embeddable do
resources :image_questions
end
resources :sequences, :constraints => { :id => /\d+/ } do
member do
post :add_activity
post :remove_activity
post :remote_duplicate
get :reorder_activities
get :print_blank
get :publish
get :duplicate
get :export
get :export_for_portal
get :show_status
# TODO: dpeprecate this Dashboard route
get :dashboard_toc, to: redirect(path: "/api/v1/dashboard_toc/sequences/%{id}")
end
resources :activities, :controller => 'lightweight_activities', :constraints => { :id => /\d+/, :sequence_id => /\d+/ }, :only => [:show, :summary]
end
namespace :embeddable do
resources :open_response_answers
resources :multiple_choice_answers
end
namespace :c_rater do
resources :item_settings, :only => [:edit, :update]
post "/argumentation_blocks/:page_id/create_embeddables" => 'argumentation_blocks#create_embeddables', :as => 'arg_block_create_embeddables'
post "/argumentation_blocks/:page_id/remove_embeddables" => 'argumentation_blocks#remove_embeddables', :as => 'arg_block_remove_embeddables'
post "/argumentation_blocks/:page_id/save_feedback/:run_key" => 'argumentation_blocks#save_feedback', :as => 'arg_block_save_feedback', :constraints => { :run_key => /[-\w]{36}/ }
post "/argumentation_blocks/feedback_on_feedback" => 'argumentation_blocks#feedback_on_feedback', :as => 'arg_block_feedback_on_feedback'
resources :score_mappings
post "/argumentation_blocks/report" => 'argumentation_blocks#report'
end
namespace :admin do
resources :users
end
devise_for :users, :controllers => { :omniauth_callbacks => "users/omniauth_callbacks" }
resources :activities, :controller => 'lightweight_activities', :constraints => { :id => /\d+/ } do
member do
get 'reorder_pages'
get 'single_page'
get 'print_blank'
get 'summary'
get 'resubmit_answers'
get 'publish'
get 'duplicate'
post 'remote_duplicate'
get 'preview'
get 'export'
get 'export_for_portal'
get 'show_status'
post 'add_plugin'
# TODO: dpeprecate this Dashboard route
get :dashboard_toc, to: redirect(path: "/api/v1/dashboard_toc/activities/%{id}")
end
resources :pages, :controller => 'interactive_pages', :constraints => { :id => /\d+/ } do
member do
get 'reorder_embeddables'
post 'add_embeddable'
get 'add_tracked'
get 'move_up', :controller => 'lightweight_activities'
get 'move_down', :controller => 'lightweight_activities'
get 'preview'
end
end
resources :runs, :only => [:index, :show ], :constraints => { :id => /[-\w]{36}/, :activity_id => /\d+/ }
end
resources :runs, :only => [:index, :show ], :constraints => { :id => /[-\w]{36}/ } do
resource :global_interactive_state, :only => [:create]
collection do
post 'unauthorized_feedback'
end
end
# These don't need index or show pages - though there might be something to be said for an
# index .xml file as a feed for select menus - but they need create-update-delete.
resources :mw_interactives, :controller => 'mw_interactives', :constraints => { :id => /\d+/ }, :except => :show
resources :image_interactives, :constraints => { :id => /\d+/ }, :except => :show
resources :video_interactives, :constraints => { :id => /\d+/ }, :except => :show do
member do
post :add_source
end
end
resources :pages, :controller => 'interactive_pages', :constraints => { :id => /\d+/ }, :except => :create do
resources :mw_interactives, :controller => 'mw_interactives', :constraints => { :id => /\d+/ }, :except => :show do
member do
post 'toggle_visibility'
end
end
resources :image_interactives, :constraints => { :id => /\d+/ }, :except => :show do
member do
post 'toggle_visibility'
end
end
resources :video_interactives, :constraints => { :id => /\d+/ }, :except => :show do
member do
post 'toggle_visibility'
end
end
member do
get 'preview'
end
end
# the in-place editor needed interactive_page_path
resources :pages, :as => 'interactive_pages', :controller => 'interactive_pages', :constraints => { :id => /\d+/ }, :except => [:new, :create]
resources :plugins
namespace :embeddable do
# When new embeddables are supported, they should be added here.
resources :multiple_choices do
member do
post :add_choice
end
end
resources :xhtmls
resources :external_scripts
resources :embeddable_plugins
resources :open_responses
resources :labbooks
resources :labbook_answers, :only => [:update]
end
namespace :api do
namespace :v1 do
# For UW style tracked question reports (longitudinal reports)
resources :question_trackers, only: [:index] do
match 'report' => "question_trackers#report", via: ['get','post', 'put'], defaults: { format: 'json' }
end
resources :activities, :controller => 'lightweight_activities', only: [:destroy]
resources :sequences, only: [:destroy]
match 'import' => 'import#import', :via => 'post'
match 'question_trackers/find_by_activity/:activity_id' => "question_trackers#find_by_activity", via: ['get'], defaults: { format: 'json' }
match 'question_trackers/find_by_sequence/:sequence_id' => "question_trackers#find_by_sequence", via: ['get'], defaults: { format: 'json' }
# For HASBOT C-Rater reports aka HAS Dashboard
match 'dashboard_runs' => "dashboard#runs", defaults: { format: 'json' }
match 'dashboard_runs_all' => "dashboard#runs_all", defaults: { format: 'json' }
match 'dashboard_toc/:runnable_type/:runnable_id' => "dashboard#toc", defaults: { format: 'json' }
match "interactive_run_states/:key" => 'interactive_run_states#show', :as => 'show_interactive_run_state', :via => 'get'
match "interactive_run_states/:key" => 'interactive_run_states#update', :as => 'update_interactive_run_state', :via => 'put'
match "user_check" => 'user_check#index', defaults: { format: 'json' }
match 'get_firebase_jwt(/:run_id)' => 'jwt#get_firebase_jwt', :as => 'get_firebase_jwt', :via => 'post'
match 'plugin_learner_states/:plugin_id/:run_id' =>
'plugin_learner_states#load', as: 'show_plugin_learner_state', via: 'get'
match 'plugin_plugin_learner_state/:plugin_id/:run_id' =>
'plugin_learner_states#save', as: 'update_plugin_learner_state', via: 'put'
match 'plugins/:plugin_id/author_data' => 'plugins#load_author_data', as: 'show_plugin_author_data', via: 'get'
match 'plugins/:plugin_id/author_data' => 'plugins#save_author_data', as: 'update_plugin_author_data', via: 'put'
end
end
match "/publications/show_status/:publishable_type/:publishable_id"=> 'publications#show_status', :as => 'publication_show_status'
match "/publications/autopublishing_status/:publishable_type/:publishable_id"=> 'publications#autopublishing_status', :as => 'publication_autopublishing_status'
match "/publications/add/:publishable_type/:publishable_id"=> 'publications#add_portal', :as => 'publication_add_portal'
match "/publications/publish/:publishable_type/:publishable_id"=> 'publications#publish', :as => 'publication_publish'
match "/publications/publish_to_other_portals/:publishable_type/:publishable_id"=> 'publications#publish_to_other_portals', :as => 'publication_publish_to_other_portals'
match "/import" => 'import#import_status', :as => 'import_status', :via => 'get'
match "/import" => 'import#import', :as => 'import', :via => 'post'
match "/import/import_portal_activity" => 'import#import_portal_activity', :as => 'import_portal_activity', :via => 'post', :defaults => { format: 'json' }
# These routes didn't work as nested resources
delete "/embeddable/multiple_choice/:id/remove_choice/:choice_id" => 'embeddable/multiple_choices#remove_choice', :as => 'remove_choice_embeddable_multiple_choice', :constraints => { :id => /\d+/, :choice_id => /\d+/ }
delete "/video_interactives/:id/remove_source/:source_id" => "video_interactives#remove_source", :as => 'remove_source_video_interactive', :constraints => { :id => /\d+/, :source_id => /\d+/ }
post "/pages/:id/remove_embeddable/:embeddable_id" => 'interactive_pages#remove_embeddable', :as => 'page_remove_embeddable', :constraints => { :id => /\d+/, :embeddable_id => /\d+/ }
post "/pages/:id/hideshow_embeddable/:embeddable_id" => 'interactive_pages#toggle_hideshow_embeddable', :as => 'page_hideshow_embeddable', :constraints => { :id => /\d+/, :embeddable_id => /\d+/ }
get "/embeddable/multiple_choice/:id/check" => 'embeddable/multiple_choices#check', :as => 'check_multiple_choice_answer', :constraints => { :id => /\d+/ }
get "/activities/:activity_id/pages/:id/:run_key" => 'interactive_pages#show', :as => 'page_with_run', :constraints => { :id => /\d+/, :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/activities/:activity_id/summary/:run_key" => 'lightweight_activities#summary', :as => 'summary_with_run', :constraints => { :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/activities/:activity_id/resubmit_answers/:run_key" => 'lightweight_activities#resubmit_answers', :as => 'resubmit_answers_for_run', :constraints => { :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/activities/:id/:run_key" => 'lightweight_activities#show', :as => 'activity_with_run', :constraints => { :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/activities/:id/single_page/:run_key" => 'lightweight_activities#single_page', :as => 'activity_single_page_with_run', :constraints => { :id => /\d+/, :run_key => /[-\w]{36}/ }
get "/runs/dirty" => 'runs#dirty', :as => 'dirty_runs'
get "/runs/details" => 'runs#details', :as => 'run_details'
get "/sequences/:sequence_id/activities/:id/:run_key" => 'lightweight_activities#show', :as => 'sequence_activity_with_run', :constraints => { :sequence_id => /\d+/, :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/sequences/:sequence_id/activities/:activity_id/single_page/:run_key" => 'lightweight_activities#single_page', :as => 'sequence_activity_single_page_with_run', :constraints => { :sequence_id => /\d+/, :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/sequences/:sequence_id/activities/:activity_id/pages/:id" => 'interactive_pages#show', :as => 'sequence_page', :constraints => { :id => /\d+/, :sequence_id => /\d+/, :activity_id => /\d+/ }
get "/sequences/:sequence_id/activities/:activity_id/pages/:id/:run_key" => 'interactive_pages#show', :as => 'sequence_page_with_run', :constraints => { :id => /\d+/, :sequence_id => /\d+/, :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/sequences/:sequence_id/activities/:activity_id/summary/:run_key" => 'lightweight_activities#summary', :as => 'sequence_summary_with_run', :constraints => { :sequence_id => /\d+/, :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
get "/sequences/:id/sequence_run/:sequence_run_key" => 'sequences#show', :as => 'sequence_with_sequence_run_key', :constraints => { :id => /\d+/, :sequence_id => /\d+/, :activity_id => /\d+/, :run_key => /[-\w]{36}/ }
# TODO: Depricate this older dashboard route
get "/runs/dashboard" => 'api/v1/dashboard#runs'
match "/runs/fix_broken_portal_runs/:run_id" => 'runs#fix_broken_portal_runs', :as => 'fix_broken_portal_runs'
match "/runs/run_info/:run_id" => 'runs#run_info', :as => 'run_info'
# Simple image proxy used by Drawing Tool.
match "/image-proxy" => 'image_proxy#get'
match "/home/bad_browser" => "home#bad_browser"
match "/print_headers" => "home#print_headers"
# Web interface to show the delayed jobs for admins
# unfortunately this route has caused other route constraints to stop working?
match "/delayed_job" => DelayedJobWeb, :anchor => false, :via => [:get, :post], :constraints => lambda { |request|
warden = request.env['warden']
warden.user && warden.user.admin?
}
match "/dev/test_argblock" => 'dev#test_argblock', :as => 'test_argblock'
match "/dev/test_mail" => 'dev#test_mail', :as => 'test_mail'
match "/dev/test_exception" => 'dev#test_error', :as => 'test_exception'
match "/dev/test_error" => 'dev#test_error', :as => 'test_error'
end
| 51.794355 | 256 | 0.672168 |
e290904d7416b72959c203f46b57230bf06c8e1c | 209 | class AddNotNullToTransactionUuids < ActiveRecord::Migration[4.2]
def change
change_column_null :transactions, :listing_uuid, false
change_column_null :transactions, :community_uuid, false
end
end
| 29.857143 | 65 | 0.799043 |
e925ae0c3fd8e255b17d58cd0d1843c813e9d9d9 | 401 | class ChangeImageSearchLabelToNullableOnAffiliates < ActiveRecord::Migration
def self.up
change_column_null :affiliates, :image_search_label, true
rename_column :affiliates, :image_search_label, :old_image_search_label
end
def self.down
rename_column :affiliates, :old_image_search_label, :image_search_label
change_column_null :affiliates, :image_search_label, false
end
end
| 33.416667 | 76 | 0.812968 |
ab02f7948e09b1305cb18ad6790d9819f8391bf5 | 809 | module Twine
@@stdout = STDOUT
@@stderr = STDERR
def self.stdout
@@stdout
end
def self.stdout=(out)
@@stdout = out
end
def self.stderr
@@stderr
end
def self.stderr=(err)
@@stderr = err
end
class Error < StandardError
end
require 'twine/version'
require 'twine/plugin'
require 'twine/twine_file'
require 'twine/encoding'
require 'twine/output_processor'
require 'twine/placeholders'
require 'twine/formatters'
require 'twine/formatters/abstract'
require 'twine/formatters/android'
require 'twine/formatters/apple'
require 'twine/formatters/django'
require 'twine/formatters/flash'
require 'twine/formatters/gettext'
require 'twine/formatters/jquery'
require 'twine/formatters/tizen'
require 'twine/runner'
require 'twine/cli'
end
| 19.261905 | 37 | 0.710754 |
87a83dc96d36ad9d94d0a1b31d894629512971ec | 385 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_05_01
  module Models
    #
    # Defines values for ConnectionMonitorEndpointFilterItemType
    #
    module ConnectionMonitorEndpointFilterItemType
      # Wire value for the AgentAddress filter item type.
      AgentAddress = "AgentAddress"
    end
  end
end
| 25.666667 | 70 | 0.755844 |
1a2aae1e3ca85118a1fa5c7a8740adc2aa074f52 | 12,608 | module Blacklight
##
# This module contains methods that are specified by SolrHelper.solr_search_params_logic
# They transform user parameters into parameters that are sent as a request to Solr when
# RequestBuilders#solr_search_params is called.
#
module RequestBuilders
extend ActiveSupport::Concern
included do
# We want to install a class-level place to keep
# solr_search_params_logic method names. Compare to before_filter,
# similar design. Since we're a module, we have to add it in here.
# There are too many different semantic choices in ruby 'class variables',
# we choose this one for now, supplied by Rails.
class_attribute :solr_search_params_logic
# Set defaults. Each symbol identifies a _method_ that must be in
# this class, taking two parameters (solr_parameters, user_parameters)
# Can be changed in local apps or by plugins, eg:
# CatalogController.include ModuleDefiningNewMethod
# CatalogController.solr_search_params_logic += [:new_method]
# CatalogController.solr_search_params_logic.delete(:we_dont_want)
self.solr_search_params_logic = [:default_solr_parameters , :add_query_to_solr, :add_facet_fq_to_solr, :add_facetting_to_solr, :add_solr_fields_to_query, :add_paging_to_solr, :add_sorting_to_solr, :add_group_config_to_solr ]
end
# @returns a params hash for searching solr.
# The CatalogController #index action uses this.
# Solr parameters can come from a number of places. From lowest
# precedence to highest:
# 1. General defaults in blacklight config (are trumped by)
# 2. defaults for the particular search field identified by params[:search_field] (are trumped by)
# 3. certain parameters directly on input HTTP query params
# * not just any parameter is grabbed willy nilly, only certain ones are allowed by HTTP input)
# * for legacy reasons, qt in http query does not over-ride qt in search field definition default.
# 4. extra parameters passed in as argument.
#
# spellcheck.q will be supplied with the [:q] value unless specifically
# specified otherwise.
#
# Incoming parameter :f is mapped to :fq solr parameter.
def solr_search_params(user_params = params || {})
  request = Blacklight::Solr::Request.new
  # Run every configured pipeline step in order; each one mutates the
  # request in place.
  solr_search_params_logic.each do |step|
    send(step, request, user_params)
  end
  request
end
####
# Seed the request with the app-wide defaults from blacklight config.
# Each value is dup'ed so later mutation of the request cannot corrupt
# the shared configuration object.
def default_solr_parameters(solr_parameters, user_params)
  blacklight_config.default_solr_params.each_pair do |key, default|
    solr_parameters[key] =
      begin
        default.dup
      rescue StandardError
        # some values (e.g. numerics on older rubies) cannot be dup'ed
        default
      end
  end
end
##
# Take the user-entered query, and put it in the solr params,
# including config's "search field" params for current search field.
# also include setting spellcheck.q.
def add_query_to_solr(solr_parameters, user_parameters)
###
# Merge in search field configured values, if present, over-writing general
# defaults
###
# legacy behavior of user param :qt is passed through, but over-ridden
# by actual search field config if present. We might want to remove
# this legacy behavior at some point. It does not seem to be currently
# rspec'd.
solr_parameters[:qt] = user_parameters[:qt] if user_parameters[:qt]
search_field_def = search_field_def_for_key(user_parameters[:search_field])
if (search_field_def)
solr_parameters[:qt] = search_field_def.qt
solr_parameters.merge!( search_field_def.solr_parameters) if search_field_def.solr_parameters
end
##
# Create Solr 'q' including the user-entered q, prefixed by any
# solr LocalParams in config, using solr LocalParams syntax.
# http://wiki.apache.org/solr/LocalParams
##
# NOTE: the single `=` in the condition below is deliberate
# assignment-in-condition: it both captures the local-params hash into
# `hash` and tests that it is non-nil.
if (search_field_def && hash = search_field_def.solr_local_parameters)
local_params = hash.collect do |key, val|
key.to_s + "=" + solr_param_quote(val, :quote => "'")
end.join(" ")
solr_parameters[:q] = "{!#{local_params}}#{user_parameters[:q]}"
else
solr_parameters[:q] = user_parameters[:q] if user_parameters[:q]
end
##
# Set Solr spellcheck.q to be original user-entered query, without
# our local params, otherwise it'll try and spellcheck the local
# params! Unless spellcheck.q has already been set by someone,
# respect that.
#
# TODO: Change calling code to expect this as a symbol instead of
# a string, for consistency? :'spellcheck.q' is a symbol. Right now
# rspec tests for a string, and can't tell if other code may
# insist on a string.
solr_parameters["spellcheck.q"] = user_parameters[:q] unless solr_parameters["spellcheck.q"]
end
##
# Map app-level facet selections (HTTP query param :f) onto Solr :fq
# filter queries. Also normalizes any pre-existing String :fq into an
# Array so filters can be appended.
def add_facet_fq_to_solr(solr_parameters, user_params)
  # normalize a bare String fq into a one-element Array
  existing_fq = solr_parameters[:fq]
  solr_parameters[:fq] = [existing_fq] if existing_fq.is_a?(String)

  facet_selections = user_params[:f]
  return unless facet_selections

  facet_selections.each_pair do |facet_field, value_list|
    Array(value_list).each do |value|
      next if value.blank? # skip empty strings
      solr_parameters.append_filter_query facet_value_to_fq_string(facet_field, value)
    end
  end
end
##
# Add appropriate Solr facetting directives in, including
# taking account of our facet paging/'more'. This is not
# about solr 'fq', this is about solr facet.* params.
def add_facetting_to_solr(solr_parameters, user_params)
# While not used by BL core behavior, legacy behavior seemed to be
# to accept incoming params as "facet.field" or "facets", and add them
# on to any existing facet.field sent to Solr. Legacy behavior seemed
# to be accepting these incoming params as arrays (in Rails URL with []
# on end), or single values. At least one of these is used by
# Stanford for "faux hieararchial facets".
if user_params.has_key?("facet.field") || user_params.has_key?("facets")
solr_parameters[:"facet.field"].concat( [user_params["facet.field"], user_params["facets"]].flatten.compact ).uniq!
end
# A facet is sent to solr when it is explicitly enabled for the request,
# or when it doesn't say and the global add_facet_fields_to_solr_request
# flag is on.
blacklight_config.facet_fields.select { |field_name,facet|
facet.include_in_request || (facet.include_in_request.nil? && blacklight_config.add_facet_fields_to_solr_request)
}.each do |field_name, facet|
solr_parameters[:facet] ||= true
# A facet is exactly one of: a pivot, a set of facet queries, or a
# plain facet field; facet.ex tag exclusion applies to all three.
case
when facet.pivot
solr_parameters.append_facet_pivot with_ex_local_param(facet.ex, facet.pivot.join(","))
when facet.query
solr_parameters.append_facet_query facet.query.map { |k, x| with_ex_local_param(facet.ex, x[:fq]) }
else
solr_parameters.append_facet_fields with_ex_local_param(facet.ex, facet.field)
end
if facet.sort
solr_parameters[:"f.#{facet.field}.facet.sort"] = facet.sort
end
if facet.solr_params
facet.solr_params.each do |k, v|
solr_parameters[:"f.#{facet.field}.#{k}"] = v
end
end
# Support facet paging and 'more'
# links, by sending a facet.limit one more than what we
# want to page at, according to configured facet limits.
solr_parameters[:"f.#{facet.field}.facet.limit"] = (facet_limit_for(field_name) + 1) if facet_limit_for(field_name)
end
end
# Copy per-field Solr params (and index-field highlighting config) from
# the blacklight show/index field configuration onto the request.
def add_solr_fields_to_query(solr_parameters, user_parameters)
  blacklight_config.show_fields.select(&method(:should_add_to_solr)).each do |_field_name, field|
    next unless field.solr_params
    field.solr_params.each do |key, value|
      solr_parameters[:"f.#{field.field}.#{key}"] = value
    end
  end

  blacklight_config.index_fields.select(&method(:should_add_to_solr)).each do |_field_name, field|
    if field.highlight
      # any highlighted field turns highlighting on globally
      solr_parameters[:hl] = true
      solr_parameters.append_highlight_field field.field
    end

    next unless field.solr_params
    field.solr_params.each do |key, value|
      solr_parameters[:"f.#{field.field}.#{key}"] = value
    end
  end
end
###
# copy paging params from BL app over to solr, changing
# app level per_page and page to Solr rows and start.
def add_paging_to_solr(solr_params, user_params)
  # user-provided parameters should override any default row
  solr_params[:rows] = user_params[:rows].to_i unless user_params[:rows].blank?
  solr_params[:rows] = user_params[:per_page].to_i unless user_params[:per_page].blank?

  # configuration defaults should only set a default value, not override a value set elsewhere (e.g. search field parameters)
  solr_params[:rows] ||= blacklight_config.default_per_page unless blacklight_config.default_per_page.blank?
  solr_params[:rows] ||= blacklight_config.per_page.first unless blacklight_config.per_page.blank?

  # Set a reasonable default as a last resort. BUGFIX: only log when we
  # actually fall back -- previously this message was emitted on every
  # request, even when :rows had been set above.
  if solr_params[:rows].nil?
    Rails.logger.info "Solr :rows parameter not set (by the user, configuration, or default solr parameters); using 10 rows by default"
    solr_params[:rows] = 10
  end

  # ensure we don't exceed the max page size
  solr_params[:rows] = blacklight_config.max_per_page if solr_params[:rows].to_i > blacklight_config.max_per_page

  unless user_params[:page].blank?
    solr_params[:start] = solr_params[:rows].to_i * (user_params[:page].to_i - 1)
    # a negative page number would produce a negative offset; clamp to 0
    solr_params[:start] = 0 if solr_params[:start].to_i < 0
  end
end
###
# copy sorting params from BL app over to solr
def add_sorting_to_solr(solr_parameters, user_params)
  requested = user_params[:sort]

  if requested.blank? && (default_field = blacklight_config.default_sort_field)
    # no sort requested: fall back to the configured default sort field
    solr_parameters[:sort] = default_field.sort unless default_field.sort.blank?
  elsif (field_config = blacklight_config.sort_fields[requested])
    # the request names a configured sort field; use its solr sort spec
    solr_parameters[:sort] = field_config.sort unless field_config.sort.blank?
  else
    # unrecognized value: pass the key straight through to solr
    solr_parameters[:sort] = requested
  end
end
# Remove the group parameter if we've faceted on the group field (e.g. for the full results for a group)
def add_group_config_to_solr(solr_parameters, user_parameters)
  facet_selections = user_parameters[:f]
  solr_parameters[:group] = false if facet_selections && facet_selections[grouped_key_for_results]
end
# Prefix +value+ with a solr local-params tag exclusion ({!ex=...}) when
# an exclusion tag is supplied; otherwise return the value untouched.
def with_ex_local_param(ex, value)
  return value unless ex
  "{!ex=#{ex}}#{value}"
end
private
##
# Convert a facet/value pair into a solr fq parameter
#
# The shape of the fq depends on both the facet's configuration and the
# value's type; the `case` result assigned to `fq` is the method's
# implicit return value.
def facet_value_to_fq_string(facet_field, value)
facet_config = blacklight_config.facet_fields[facet_field]
# A configured tag becomes a solr local-param so the filter can later be
# excluded with {!ex=...} while faceting.
local_params = []
local_params << "tag=#{facet_config.tag}" if facet_config and facet_config.tag
prefix = ""
prefix = "{!#{local_params.join(" ")}}" unless local_params.empty?
fq = case
when (facet_config and facet_config.query)
# facet queries carry their own preconfigured fq
facet_config.query[value][:fq]
when (facet_config and facet_config.date)
# in solr 3.2+, this could be replaced by a !term query
"#{prefix}#{facet_field}:#{RSolr.escape(value)}"
when (value.is_a?(DateTime) or value.is_a?(Date) or value.is_a?(Time))
"#{prefix}#{facet_field}:#{RSolr.escape(value.to_time.utc.strftime("%Y-%m-%dT%H:%M:%SZ"))}"
# NOTE: the next three comma-continued lines form a SINGLE `when`
# clause, matching boolean-ish and numeric(-looking) values that can be
# passed to solr without escaping.
when (value.is_a?(TrueClass) or value.is_a?(FalseClass) or value == 'true' or value == 'false'),
(value.is_a?(Integer) or (value.to_i.to_s == value if value.respond_to? :to_i)),
(value.is_a?(Float) or (value.to_f.to_s == value if value.respond_to? :to_f))
"#{prefix}#{facet_field}:#{value}"
when value.is_a?(Range)
"#{prefix}#{facet_field}:[#{value.first} TO #{value.last}]"
else
# Fall back to a raw-field query so arbitrary strings need no escaping.
"{!raw f=#{facet_field}#{(" " + local_params.join(" ")) unless local_params.empty?}}#{value}"
end
end
end
end
| 43.32646 | 230 | 0.668544 |
0861de53695a1deaae8dc4a2ee1815a41f68c6d3 | 4,473 | # coding: utf-8
# Make lib/ loadable so the gem's version constant can be required below.
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'alchemy/version'
Gem::Specification.new do |gem|
gem.name = 'alchemy_cms'
gem.version = Alchemy::VERSION
gem.platform = Gem::Platform::RUBY
gem.authors = ['Thomas von Deyen', 'Robin Boening', 'Marc Schettke', 'Hendrik Mans', 'Carsten Fregin', 'Martin Meyerhoff']
gem.email = ['[email protected]']
gem.homepage = 'https://alchemy-cms.com'
gem.summary = 'A powerful, userfriendly and flexible CMS for Rails'
gem.description = 'Alchemy is a powerful, userfriendly and flexible Rails CMS.'
gem.requirements << 'ImageMagick (libmagick), v6.6 or greater.'
gem.required_ruby_version = '>= 2.3.0'
gem.license = 'BSD New'
# Ship every tracked file except the spec suite.
gem.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^spec/}) }
gem.require_paths = ['lib']
# Runtime dependencies.
gem.add_runtime_dependency 'active_model_serializers', ['~> 0.10.0']
gem.add_runtime_dependency 'acts_as_list', ['>= 0.3', '< 2']
gem.add_runtime_dependency 'awesome_nested_set', ['~> 3.1']
gem.add_runtime_dependency 'cancancan', ['>= 2.1', '< 4.0']
gem.add_runtime_dependency 'coffee-rails', ['>= 4.0', '< 6.0']
gem.add_runtime_dependency 'dragonfly', ['~> 1.0', '>= 1.0.7']
gem.add_runtime_dependency 'dragonfly_svg', ['~> 0.0.4']
gem.add_runtime_dependency 'gutentag', ['~> 2.2', '>= 2.2.1']
gem.add_runtime_dependency 'handlebars_assets', ['~> 0.23']
gem.add_runtime_dependency 'jquery-rails', ['~> 4.0', '>= 4.0.4']
gem.add_runtime_dependency 'jquery-ui-rails', ['~> 6.0']
gem.add_runtime_dependency 'kaminari', ['~> 1.1']
gem.add_runtime_dependency 'originator', ['~> 3.1']
gem.add_runtime_dependency 'non-stupid-digest-assets', ['~> 1.0.8']
gem.add_runtime_dependency 'rails', ['>= 5.2.0']
gem.add_runtime_dependency 'ransack', ['>= 1.8', '< 2.4.2'] # 2.4.2 dropped Ruby 2.5 support in a patch level release
gem.add_runtime_dependency 'request_store', ['~> 1.2']
gem.add_runtime_dependency 'responders', ['>= 2.0', '< 4.0']
gem.add_runtime_dependency 'sassc-rails', ['~> 2.1']
gem.add_runtime_dependency 'simple_form', ['>= 4.0', '< 6']
gem.add_runtime_dependency 'sprockets', ['>= 3.0', '< 5']
gem.add_runtime_dependency 'turbolinks', ['>= 2.5']
gem.add_runtime_dependency 'webpacker', ['>= 4.0', '< 6']
# Development/test-only dependencies.
gem.add_development_dependency 'capybara', ['~> 3.0']
gem.add_development_dependency 'capybara-screenshot', ['~> 1.0']
gem.add_development_dependency 'factory_bot_rails', ['~> 6.0']
gem.add_development_dependency 'puma', ['~> 5.0']
gem.add_development_dependency 'rails-controller-testing', ['~> 1.0']
gem.add_development_dependency 'rspec-activemodel-mocks', ['~> 1.0']
gem.add_development_dependency 'rspec-rails', ['>= 4.0.0.beta2']
gem.add_development_dependency 'simplecov', ['~> 0.20']
gem.add_development_dependency 'webdrivers', ['~> 4.0']
gem.add_development_dependency 'webmock', ['~> 3.3']
gem.add_development_dependency 'shoulda-matchers', ['~> 4.0']
gem.add_development_dependency 'timecop', ['~> 0.9']
# Printed by `gem install` after a successful install.
gem.post_install_message = <<-MSG
-------------------------------------------------------------
Thank you for installing Alchemy CMS
-------------------------------------------------------------
- Complete the installation in an existing Rails application:
$ bin/rake alchemy:install
- Complete the upgrade of an existing Alchemy installation:
$ bin/rake alchemy:upgrade
and follow the onscreen instructions.
Need help? Try:
* https://stackoverflow.com/questions/tagged/alchemy-cms
* https://slackin.alchemy-cms.com
-------------------------------------------------------------
MSG
end
| 55.222222 | 144 | 0.539235 |
28faef7a669fe0ea5f09e7bad4acc9e6e828833e | 2,684 | require 'middleman-core'
module Middleman
module Cells
# Middleman extension integrating the Cells view-component gem:
# registers a cells directory, wires Middleman helpers into
# Cell::ViewModel, and exposes a `cell` view helper.
class Extension < ::Middleman::Extension
option :cells_dir, 'cells', 'Directory where to place cells'
option :autoload, true, 'Whether to autoload cells or not'
def initialize(app, options_hash={}, &block)
super
require 'cells'
require 'active_support/inflector'
yield if block_given? # Expect to require template engines (like cells-erb).
# Mix in whichever template-engine adapters the host app loaded.
::Cell::ViewModel.send(:include, ::Cell::Erb) if defined?(::Cell::Erb)
::Cell::ViewModel.send(:include, ::Cell::Hamlit) if defined?(::Cell::Hamlit)
::Cell::ViewModel.send(:include, ::Cell::Haml) if defined?(::Cell::Haml)
if defined?(::Cell::Slim)
::Cell::ViewModel.send(:include, ::Cell::Slim)
# HACK: In Tilt's convention, All arguments of Template#initialize
# are optional. But Middleman breaks it when overriding
# Slim::Template and it causes ArgumentError. So we re-define it here
# to follow the convention for the nonce. This very smells...:hankey:
::Slim::Template.class_eval do
alias orig_initialize initialize
def initialize(file=nil, line=1, options={}, &block)
orig_initialize(file, line, options, &block)
end
end
end
end
def after_configuration
cells_dir = File.join(app.root, app.config[:source], options.cells_dir)
helper_modules = app.template_context_class.included_modules
app_proxy = app
# Extending Cell::ViewModel to adapt Middleman
::Cell::ViewModel.class_eval do
self.view_paths << cells_dir
# Required for Padrino's helpers
def current_engine
end
# Include view helpers
helper_modules.each {|helper| include helper }
# Shortcut to global values on the app instance
globals = %i[config logger sitemap server? build? environment? data extensions root]
globals.each do |name|
define_method(name) { app_proxy.send(name) }
end
end
if options.autoload
require 'active_support/dependencies'
::ActiveSupport::Dependencies.autoload_paths << cells_dir
end
# In development, reload cell classes on change.
if app.config[:environment] == :development
require 'cell/development'
::Cell::ViewModel.send(:include, ::Cell::Development)
end
end
helpers do
# Refer to Cell::ViewModel::Helpers#cell
def cell(name, *args, &block)
"#{name}_cell".camelize.constantize.(*args, &block)
end
end
end
end
end
| 34.410256 | 94 | 0.616617 |
acf034d294d96a0618d025f209b63061cb5116cc | 179 | class AddButtonFields < ActiveRecord::Migration[5.1]
# Adds optional call-to-action button columns to applications.
def change
add_column :applications, :button_url, :string
add_column :applications, :button_text, :string
end
end
| 25.571429 | 52 | 0.75419 |
1892365f12c0a96ecd0137f94d56a71377ec2553 | 131 | class AddQuantityIntegerToColors < ActiveRecord::Migration[5.2]
# Adds an integer quantity column to colors.
def change
add_column :colors, :quantity, :integer
end
end
| 21.833333 | 63 | 0.763359 |
616423c8e0fe3be17f175ef69ae896d19cefad8e | 503 | module Api
module V1x0
module Mixins
# Shared #index action for API controllers: renders the scoped,
# filtered collection as a paginated JSON response.
module IndexMixin
def index
raise_unless_primary_instance_exists
# `scoped`/`filtered`/`params_for_list` and the pagination helpers are
# expected to be provided by the host controller.
render :json => Insights::API::Common::PaginatedResponseV2.new(
:base_query => scoped(filtered.where(params_for_list)),
:request => request,
:limit => pagination_limit,
:offset => pagination_offset,
:sort_by => query_sort_by
).response
end
end
end
end
end
| 26.473684 | 73 | 0.5666 |
18425a77a30a821a027890c0090fd8a23aadfeb4 | 379 | # Program logger - wraps a block in "Beginning"/"Finished" messages and prints the block's return value
# Print a labelled "Beginning"/"Finished" bracket around the given
# block, then print the block's return value.
# Returns nil (the result of the final puts).
def log(description)
  puts "Beginning '#{description}'"
  result = yield
  puts "Finished '#{description}' returning:"
  puts result
end
log 'outer block' do
# Nested call: the inner bracket completes (and prints) before the
# outer one finishes.
log 'some little block' do
5
end
log 'yet another block' do
'I like Thai food!'
end
# The outer block's own return value, printed by the outer `log`.
false
end
| 19.947368 | 72 | 0.656992 |
d5aefa3343ae0f2545301006853de1737ddd5bf2 | 1,706 | require 'test_helper'
# Integration tests for the login/logout flow, including the
# "remember me" cookie behavior.
class UsersLoginTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
test "login with valid email/invalid password" do
get login_path
assert_template 'sessions/new'
post login_path, params: { session: { email: @user.email,
password: "invalid" } }
assert_not is_logged_in?
assert_template 'sessions/new'
assert_not flash.empty?
# The error flash must only survive for a single request.
get root_path
assert flash.empty?
end
test "login with valid information followed by logout" do
get login_path
post login_path, params: { session: { email: @user.email,
password: "password" } }
assert is_logged_in?
assert_redirected_to @user
follow_redirect!
assert_template 'users/show'
assert_select "a[href=?]", login_path, count: 0
assert_select "a[href=?]", logout_path
assert_select "a[href=?]", user_path(@user)
delete logout_path
assert_not is_logged_in?
assert_redirected_to root_url
# Simulate a user clicking logout in a second window; the repeated
# logout must not raise.
delete logout_path
follow_redirect!
assert_select "a[href=?]", login_path
assert_select "a[href=?]", logout_path, count: 0
assert_select "a[href=?]", user_path(@user), count: 0
end
test "login with remembering" do
log_in_as(@user, remember_me: '1')
assert_not cookies[:remember_token].blank?
end
test "login without remembering" do
# Log in to set the cookie.
log_in_as(@user, remember_me: '1')
# Log in again and verify that the cookie is deleted.
log_in_as(@user, remember_me: '0')
assert cookies[:remember_token].blank?
end
end | 31.018182 | 66 | 0.662368 |
912b67678ef00edbab1df74cbb7a9a4369172f17 | 1,456 | module Payments
AlreadyAuthorized = Class.new(StandardError)
NotAuthorized = Class.new(StandardError)
AlreadyCaptured = Class.new(StandardError)
AlreadyReleased = Class.new(StandardError)
# Event-sourced payment aggregate with a simple lifecycle:
# (new) -> authorized -> captured, or authorized -> released.
# State changes only by applying events; the `on` handlers below also
# rebuild state when the event stream is replayed.
class Payment
include AggregateRoot
# Authorize the payment. Raises AlreadyAuthorized on a second call.
def authorize(transaction_id, order_id)
raise AlreadyAuthorized if authorized?
apply(PaymentAuthorized.new(data: {
transaction_id: transaction_id,
order_id: order_id
}))
end
# Capture a previously authorized payment.
# Raises AlreadyCaptured / NotAuthorized when the lifecycle is violated.
def capture
raise AlreadyCaptured if captured?
raise NotAuthorized unless authorized?
apply(PaymentCaptured.new(data: {
transaction_id: @transaction_id,
order_id: @order_id
}))
end
# Release (void) an authorized, not-yet-captured payment.
def release
raise AlreadyReleased if released?
raise AlreadyCaptured if captured?
raise NotAuthorized unless authorized?
apply(PaymentReleased.new(data: {
transaction_id: @transaction_id,
order_id: @order_id
}))
end
private
# Event handlers: update in-memory state (also run on replay).
on PaymentAuthorized do |event|
@state = :authorized
@transaction_id = event.data.fetch(:transaction_id)
@order_id = event.data.fetch(:order_id)
end
on PaymentCaptured do |event|
@state = :captured
end
on PaymentReleased do |event|
@state = :released
end
def authorized?
@state == :authorized
end
def captured?
@state == :captured
end
def released?
@state == :released
end
end
end
| 21.731343 | 57 | 0.65522 |
e88e3d74905bf7747df53217c574c1d47a019b3e | 4,646 | # encoding: UTF-8
# Copyright 2012 Twitter, Inc
# http://www.apache.org/licenses/LICENSE-2.0
require 'spec_helper'
# Specs for locale-aware date/time formatting, calendar selection and
# timezone handling on TwitterCldr::Localized::LocalizedDateTime.
describe TwitterCldr::Localized::LocalizedDateTime do
let(:date_time) { DateTime.new(1987, 9, 20, 22, 5) }
describe '#initialize' do
it 'sets calendar type' do
expect(date_time.localize(:th, calendar_type: :buddhist).calendar_type).to eq(:buddhist)
end
it 'uses default calendar type' do
expect(date_time.localize(:en).calendar_type).to eq(TwitterCldr::DEFAULT_CALENDAR_TYPE)
end
end
describe "stringify" do
# These first two examples are smoke tests: they only assert that
# formatting does not raise for the given calendars.
it "should stringify with a default calendar" do
#date_time.localize(:th, :calendar_type => :buddhist).to_full_s # It doesn't support era
date_time.localize(:th).to_long_s
date_time.localize(:th).to_medium_s
date_time.localize(:th).to_short_s
end
it "should stringify with buddhist calendar" do
# Ensure that buddhist calendar data is present in th locale.
expect(TwitterCldr.get_locale_resource(:th, :calendars)[:th][:calendars][:buddhist]).not_to(
be_nil, 'buddhist calendar is missing for :th locale (check resources/locales/th/calendars.yml)'
)
#date_time.localize(:th, :calendar_type => :buddhist).to_full_s # It doesn't support era
date_time.localize(:th, calendar_type: :buddhist).to_long_s
date_time.localize(:th, calendar_type: :buddhist).to_medium_s
date_time.localize(:th, calendar_type: :buddhist).to_short_s
end
it "should remove quotes around plaintext tokens" do
# notice there are no single quotes around the "at"
expect(date_time.localize(:en).to_long_s).to eq("September 20, 1987 at 10:05:00 PM UTC")
end
it 'should stringify with proper time zone' do
expect(date_time.localize(:en).with_timezone('Asia/Tokyo').to_full_s).to(
eq('Monday, September 21, 1987 at 7:05:00 AM Japan Standard Time')
)
end
end
describe "#to_date" do
it "should convert to a date" do
expect(date_time.localize.to_date.base_obj.strftime("%Y-%m-%d")).to eq("1987-09-20")
end
it 'forwards calendar type' do
# BUGFIX: this was a bare `==` comparison without an expectation, so the
# example asserted nothing and could never fail.
expect(date_time.localize(:th, calendar_type: :buddhist).to_date.calendar_type).to eq(:buddhist)
end
end
describe "#to_time" do
it "should convert to a time" do
expect(date_time.localize.to_time.base_obj.getgm.strftime("%H:%M:%S")).to eq("22:05:00")
end
it 'forwards calendar type' do
# BUGFIX: same as above -- wrap the comparison in an expectation.
expect(date_time.localize(:th, calendar_type: :buddhist).to_time.calendar_type).to eq(:buddhist)
end
end
describe "#to_timespan" do
it "should return a localized timespan" do
expect(date_time.localize.to_timespan).to be_a(TwitterCldr::Localized::LocalizedTimespan)
end
end
describe 'formatters' do
# Exhaustive smoke tests over every supported locale: formatting must
# never raise, whatever the locale/format combination.
it "don't raise errors for any locale" do
TwitterCldr.supported_locales.each do |locale|
(TwitterCldr::DataReaders::CalendarDataReader.types - [:additional]).each do |type|
expect { date_time.localize(locale).send(:"to_#{type}_s") }.not_to raise_error
end
end
end
it "don't raise errors for additional date formats" do
TwitterCldr.supported_locales.each do |locale|
data_reader = TwitterCldr::DataReaders::CalendarDataReader.new(locale)
data_reader.additional_format_selector.patterns.each do |pattern|
expect do
date_time.localize(locale).to_additional_s(pattern.to_s)
end.to_not raise_error
end
end
end
end
describe "#to_additional_s" do
it "should format using additional patterns" do
expect(date_time.localize(:en).to_additional_s("EHms")).to eq("Sun 22:05:00")
end
it "should properly handle single quotes escaping" do
expect(date_time.localize(:ru).to_additional_s("GyMMMd")).to eq("20 сент. 1987 г. н. э.")
end
it "should unescape multiple groups" do
expect(date_time.localize(:es).to_additional_s("yMMMd")).to eq("20 sept 1987")
end
end
describe "#to_s" do
it "uses the default format if no :format is given" do
loc_date = date_time.localize
expect(loc_date).to receive(:to_default_s).and_call_original
expect(loc_date.to_s).to eq("Sep 20, 1987, 10:05:00 PM")
end
end
describe "#with_timezone" do
it "calculates the right time depending on the timezone" do
loc_date = date_time.localize
expect(loc_date.to_s).to eq("Sep 20, 1987, 10:05:00 PM")
expect(loc_date.with_timezone("America/Los_Angeles").to_s).to eq("Sep 20, 1987, 3:05:00 PM")
expect(loc_date.with_timezone("America/New_York").to_s).to eq("Sep 20, 1987, 6:05:00 PM")
end
end
end
| 34.932331 | 104 | 0.690486 |
b9947b91f114f5f161046ca6aeff621f0eaf1be9 | 205 | class WikipediaController < ApplicationController
include WikipediaHelper
# Renders the manufacturer summary (from WikipediaHelper) as plain text.
def manufacturer
render plain: get_manufacturer_summary
end
# Renders the model summary (from WikipediaHelper) as plain text.
def model
render plain: get_model_summary
end
end
| 18.636364 | 49 | 0.790244 |
3980cc07b8dd0d9dbfbf606ef374a7af551d92ea | 610 | require "test_helper"
class ServicesAndInformationFinderTest < ActiveSupport::TestCase
test "#find searches rummager for the services and information for an org" do
organisation = build_stubbed(:organisation)
search_client = mock()
# The finder must issue exactly this search query, scoped to the
# organisation's slug, when #find is called.
expected_search_query = {
count: "0",
filter_organisations: [organisation.slug],
facet_specialist_sectors: "1000,examples:4,example_scope:global,order:value.title",
}
finder = ServicesAndInformationFinder.new(organisation, search_client)
search_client.expects(:unified_search).with(expected_search_query)
finder.find
end
end
| 32.105263 | 89 | 0.759016 |
331deae894f91986a221440978f8f618eebc91e2 | 2,691 | require "formula"
class Babl < Formula
desc "Dynamic, any-to-any, pixel format translation library"
homepage "http://www.gegl.org/babl/"
stable do
# The official url is unreliable. Use Debian instead.
url "https://mirrors.kernel.org/debian/pool/main/b/babl/babl_0.1.10.orig.tar.bz2"
mirror "http://ftp.gtk.org/pub/babl/0.1/babl-0.1.10.tar.bz2"
sha1 "ee60089e8e9d9390e730d3ae5e41074549928b7a"
# There are two patches.
# The first one changes an include <values.h> (deleted on Mac OS X) to <limits.h>
# The second one fixes an error when compiling with clang. See:
# https://trac.macports.org/browser/trunk/dports/graphics/babl/files/clang.patch
# The patch bodies live in the __END__/DATA section below.
patch :DATA
end
bottle do
sha1 "d3ead1808b7c029ab864d3318d7009379cc205a5" => :yosemite
sha1 "4fcb4a9c92b59796d40ffc4312935ca756d5264f" => :mavericks
sha1 "a35994e97093d303d02d30c3369bccfd1f33af37" => :mountain_lion
end
head do
# Use Github instead of GNOME's git. The latter is unreliable.
url "https://github.com/GNOME/babl.git"
depends_on "automake" => :build
depends_on "autoconf" => :build
depends_on "libtool" => :build
end
depends_on "pkg-config" => :build
option :universal
def install
if build.universal?
ENV.universal_binary
if ENV.compiler == :gcc
opoo "Compilation may fail at babl-cpuaccel.c using gcc for a universal build"
end
end
# HEAD checkouts carry no pre-generated configure script.
system "./autogen.sh" if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
end
__END__
diff --git a/babl/babl-palette.c b/babl/babl-palette.c
index 7e72eaa..2f9bf8d 100644
--- a/babl/babl-palette.c
+++ b/babl/babl-palette.c
@@ -19,7 +19,7 @@
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
-#include <values.h>
+#include <limits.h>
#include <assert.h>
#include "config.h"
#include "babl-internal.h"
diff --git a/extensions/sse-fixups.c b/extensions/sse-fixups.c
index b44bb5e..7f633d1 100644
--- a/extensions/sse-fixups.c
+++ b/extensions/sse-fixups.c
@@ -21,7 +21,7 @@
#include "config.h"
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(USE_SSE) && defined(USE_MMX)
+#if !defined(__clang__) && defined(__GNUC__) && (__GNUC__ >= 4) && defined(USE_SSE) && defined(USE_MMX)
#include <stdint.h>
#include <stdlib.h>
@@ -177,7 +177,7 @@ int init (void);
int
init (void)
{
-#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(USE_SSE) && defined(USE_MMX)
+#if !defined(__clang__) && defined(__GNUC__) && (__GNUC__ >= 4) && defined(USE_SSE) && defined(USE_MMX)
const Babl *rgbaF_linear = babl_format_new (
babl_model ("RGBA"),
| 29.9 | 104 | 0.677443 |
b920b880db23c21571e87629812c61af1c382223 | 1,802 | require 'active_support/callbacks'
require 'active_support/core_ext/module/attribute_accessors_per_thread'
require 'concurrent'
module ActionCable
module Server
# Worker used by Server.send_async to do connection work in threads.
class Worker # :nodoc:
include ActiveSupport::Callbacks
# Per-thread record of the connection currently being serviced.
thread_mattr_accessor :connection
define_callbacks :work
include ActiveRecordConnectionManagement
def initialize(max_size: 5)
@pool = Concurrent::ThreadPoolExecutor.new(
min_threads: 1,
max_threads: max_size,
max_queue: 0,
)
end
# Run `receiver.send(method, *args)` asynchronously on a pool thread.
def async_invoke(receiver, method, *args)
@pool.post do
invoke(receiver, method, *args)
end
end
def invoke(receiver, method, *args)
begin
self.connection = receiver
run_callbacks :work do
receiver.send method, *args
end
# NOTE(review): rescues Exception (not StandardError) so that *any*
# failure on a pool thread is logged rather than silently lost;
# looks deliberate -- confirm before narrowing.
rescue Exception => e
logger.error "There was an exception - #{e.class}(#{e.message})"
logger.error e.backtrace.join("\n")
receiver.handle_exception if receiver.respond_to?(:handle_exception)
ensure
# always clear the per-thread connection before the thread is reused
self.connection = nil
end
end
def async_run_periodic_timer(channel, callback)
@pool.post do
run_periodic_timer(channel, callback)
end
end
def run_periodic_timer(channel, callback)
begin
self.connection = channel.connection
run_callbacks :work do
# a periodic callback may be a callable or a method name on the channel
callback.respond_to?(:call) ? channel.instance_exec(&callback) : channel.send(callback)
end
ensure
self.connection = nil
end
end
private
def logger
ActionCable.server.logger
end
end
end
end
| 25.027778 | 99 | 0.617092 |
e2e7ec40a2a478eef983059d238314ffec514e82 | 1,825 | class K3d < Formula
desc "Little helper to run Rancher Lab's k3s in Docker"
homepage "https://k3d.io"
url "https://github.com/rancher/k3d/archive/v3.1.5.tar.gz"
sha256 "1e4b88265c697704e5b0d12b167b41add73e327644ee1b27ec813590cd5170df"
license "MIT"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
cellar :any_skip_relocation
sha256 "4afec5f7051b7f906ab1a3de513618a74d3a08c496a4729f2497294dbf7aecd5" => :catalina
sha256 "3e1ac4f7f8afc37c7abbb3b0e130a5cc7ea103e39b1b1675fdb037f16971e81e" => :mojave
sha256 "7ba8d6fa3157a436d345cb34289250bdca6b9bd208cf33992b1ee7ca744e350e" => :high_sierra
end
depends_on "go" => :build
# Builds from the vendored Go modules, stamping version information into
# the binary via -ldflags, then installs shell completions.
def install
system "go", "build",
"-mod", "vendor",
"-ldflags", "-s -w -X github.com/rancher/k3d/v#{version.major}/version.Version=v#{version}"\
" -X github.com/rancher/k3d/v#{version.major}/version.K3sVersion=latest",
"-trimpath", "-o", bin/"k3d"
# Install bash completion
output = Utils.safe_popen_read("#{bin}/k3d", "completion", "bash")
(bash_completion/"k3d").write output
# Install zsh completion
output = Utils.safe_popen_read("#{bin}/k3d", "completion", "zsh")
(zsh_completion/"_k3d").write output
end
test do
assert_match "k3d version v#{version}\nk3s version latest (default)", shell_output("#{bin}/k3d --version")
# Either docker is not present or it is, where the command will fail in the first case.
# In any case I wouldn't expect a cluster with name 6d6de430dbd8080d690758a4b5d57c86 to be present
# (which is the md5sum of 'homebrew-failing-test')
output = shell_output("#{bin}/k3d cluster get 6d6de430dbd8080d690758a4b5d57c86 2>&1", 1).split("\n").pop
assert_match "No nodes found for given cluster", output
end
end
| 38.829787 | 110 | 0.700822 |
21d00c7ff78f703f398b1a3e957a95687e86f776 | 2,883 | # frozen_string_literal: true
module RuboCop
module Cop
module Style
# This cop checks for the formatting of empty method definitions.
# By default it enforces empty method definitions to go on a single
# line (compact style), but it can be configured to enforce the `end`
# to go on its own line (expanded style).
#
# Note: A method definition is not considered empty if it contains
# comments.
#
# @example EnforcedStyle: compact (default)
# # bad
# def foo(bar)
# end
#
# def self.foo(bar)
# end
#
# # good
# def foo(bar); end
#
# def foo(bar)
# # baz
# end
#
# def self.foo(bar); end
#
# @example EnforcedStyle: expanded
# # bad
# def foo(bar); end
#
# def self.foo(bar); end
#
# # good
# def foo(bar)
# end
#
# def self.foo(bar)
# end
class EmptyMethod < Cop
include ConfigurableEnforcedStyle
MSG_COMPACT = 'Put empty method definitions on a single line.'.freeze
MSG_EXPANDED = 'Put the `end` of empty method definitions on the ' \
'next line.'.freeze
def on_def(node)
return if node.body || comment_lines?(node)
return if correct_style?(node)
add_offense(node)
end
alias on_defs on_def
def autocorrect(node)
lambda do |corrector|
corrector.replace(node.source_range, corrected(node))
end
end
private
def message(_node)
compact_style? ? MSG_COMPACT : MSG_EXPANDED
end
def correct_style?(node)
compact_style? && compact?(node) ||
expanded_style? && expanded?(node)
end
# Rebuild the `def ... end` source in the enforced style, preserving
# receiver (for `def self.foo`), arguments and their parenthesization.
def corrected(node)
has_parentheses = parentheses?(node.arguments)
arguments = node.arguments? ? node.arguments.source : ''
extra_space = node.arguments? && !has_parentheses ? ' ' : ''
scope = node.receiver ? "#{node.receiver.source}." : ''
signature = [scope, node.method_name, extra_space, arguments].join
["def #{signature}", 'end'].join(joint(node))
end
# Separator between the signature and `end`: "; " for compact style,
# or a newline matching the def's indentation for expanded style.
def joint(node)
indent = ' ' * node.loc.column
compact_style? ? '; ' : "\n#{indent}"
end
def comment_lines?(node)
processed_source[line_range(node)].any? { |line| comment_line?(line) }
end
def compact?(node)
node.single_line?
end
def expanded?(node)
node.multiline?
end
def compact_style?
style == :compact
end
def expanded_style?
style == :expanded
end
end
end
end
end
| 24.853448 | 80 | 0.53035 |
e9f00a900dfddf76bfd298634fa6e62198fe6774 | 2,435 | class KeystoneDevelop < Formula
desc 'Securely share application secret with your team'
# NOTE(review): 'keytone.sh' looks like a typo for 'keystone.sh' -- confirm.
homepage 'https://keytone.sh'
head 'https://github.com/wearedevx/keystone.git', branch: 'develop'
url 'https://github.com/wearedevx/keystone/archive/develop.tar.gz'
sha256 '4c43cc962976c6b91f0c74a789ebdde6dd08bf147a4390aabf11d7cbd48e9e88'
version 'develop'
# Build-time and runtime dependencies; Themis itself is compiled from
# source in #install_themis below.
depends_on 'git'
depends_on 'gcc@11'
depends_on 'make'
depends_on '[email protected]'
depends_on '[email protected]'
depends_on 'libsodium'
def install_themis
system 'git', 'clone', '--depth', '1', '--branch', '0.13.13', 'https://github.com/cossacklabs/themis.git'
Dir.chdir 'themis' do
ENV['ENGINE'] = 'openssl'
ENV['ENGINE_INCLUDE_PATH'] = Formula['[email protected]'].include
ENV['ENGINE_LIB_PATH'] = Formula['[email protected]'].lib
ENV['PREFIX'] = prefix
system 'make', 'install'
end
end
def install
install_themis()
ENV['CGO_ENABLED'] = '1'
ENV['CGO_LDFLAGS'] = "-L#{prefix}/lib"
ENV['CGO_CFLAGS'] = "-I#{prefix}/include"
system 'ls', "#{prefix}/include"
packagePrefix = "github.com/wearedevx/keystone/cli"
clientPkg = "#{packagePrefix}/pkg/client"
constantsPkg = "#{packagePrefix}/pkg/constants"
authPkg = "#{packagePrefix}/pkg/client/auth"
apiFlag = "-X '#{clientPkg}.ApiURL=https://develop---keystone-server-esk4nrfqlq-oa.a.run.app'"
authProxyFlag = "-X '#{authPkg}.authRedirectURL=https://europe-west6-keystone-245200.cloudfunctions.net/auth-proxy'"
versionFlag = "-X '#{constantsPkg}.Version=develop'"
ghClientIdFlag = "-X '#{authPkg}.githubClientId=60165e42468cf5e34aa8'"
ghClientSecretFlag = "-X '#{authPkg}.githubClientSecret=016a30fed8fe9029b22272650af6aa18b3dcf590'"
glClientIdFlag = "-X '#{authPkg}.gitlabClientId=d372c2f3eebd9c498b41886667609fbdcf149254bcb618ddc199047cbbc46b78'"
glClientSecretFlag = "-X '#{authPkg}.gitlabClientSecret=ffe9317fd42d32ea7db24c79f9ee25a3e30637b886f3bc99f951710c8cdc3650'"
Dir.chdir 'cli' do
system(Formula['[email protected]'].bin + 'go', 'clean')
system(Formula['[email protected]'].bin + 'go', 'get')
system(Formula['[email protected]'].bin + 'go',
'build',
'-ldflags',
"#{apiFlag} #{authProxyFlag} #{versionFlag} #{ghClientIdFlag} #{ghClientSecretFlag} #{glClientIdFlag} #{glClientSecretFlag}",
'-o',
'ks')
end
bin.install "cli/ks" => "ks"
end
end
| 36.343284 | 138 | 0.67269 |
require "rails_helper"

# Spec for the deactivate_consumer_role data migration; only meaningful when
# the individual market feature is enabled for this exchange configuration.
if ExchangeTestingConfigurationHelper.individual_market_is_enabled?
  require File.join(Rails.root, "app", "data_migrations", "deactivate_consumer_role")

  describe DeactivateConsumerRole, dbclean: :after_each do
    let(:given_task_name) { "deactivate_consumer_role" }
    subject { DeactivateConsumerRole.new(given_task_name, double(:current_scope => nil)) }

    describe "given a task name" do
      it "has the given task name" do
        expect(subject.name).to eql given_task_name
      end
    end

    describe "deactivate consumer role" do
      let(:person) { FactoryGirl.create(:person, :with_consumer_role, hbx_id: "12345678")}

      before(:each) do
        # The migration looks the person up via the hbx_id environment
        # variable, so stub ENV to point at the factory person above.
        allow(ENV).to receive(:[]).with("hbx_id").and_return("12345678")
      end

      it "should change is_active field" do
        role_status = person.consumer_role
        role_status.is_active = true
        role_status.save
        subject.migrate
        person.reload
        expect(person.consumer_role.is_active).to eq false
      end
    end
  end
end
| 29.470588 | 90 | 0.733533 |
# Mixin implementing Pry's layered configuration: keys are stored as strings
# in a local lookup table, and reads of unknown keys fall through to a
# +default+ config object (usually Pry.config).
module Pry::Config::Behavior
  ASSIGNMENT = "=".freeze
  # Types that are safe to share without duplication (immutable or
  # intentionally shared).
  NODUP = [TrueClass, FalseClass, NilClass, Symbol, Numeric, Module, Proc].freeze
  INSPECT_REGEXP = /#{Regexp.escape "default=#<"}/

  module Builder
    # Build a new config populated from +hash+, delegating unknown keys
    # to +default+ (no delegation when default is nil).
    def from_hash(hash, default = nil)
      new(default).tap do |config|
        config.merge!(hash)
      end
    end
  end

  def self.included(klass)
    unless defined?(RESERVED_KEYS)
      # Names of the methods this module defines; they cannot be used as
      # configuration keys (see #[]=).
      const_set :RESERVED_KEYS, instance_methods(false).map(&:to_s).freeze
    end
    klass.extend(Builder)
  end

  def initialize(default = Pry.config)
    @default = default
    @lookup = {}
  end

  #
  # @return [Pry::Config::Behavior]
  #   returns the default used if a matching value for a key isn't found in self
  #
  def default
    @default
  end

  def [](key)
    @lookup[key.to_s]
  end

  # Store +value+ under +key+ (stringified). Raises ArgumentError for keys
  # that would shadow methods of this module.
  def []=(key, value)
    key = key.to_s
    if RESERVED_KEYS.include?(key)
      raise ArgumentError, "few things are reserved by pry, but using '#{key}' as a configuration key is."
    end
    @lookup[key] = value
  end

  # Dynamic accessors: `config.foo = 1` writes a key; `config.foo` reads the
  # local table first, then falls back to the default object.
  def method_missing(name, *args, &block)
    key = name.to_s
    if key[-1] == ASSIGNMENT
      short_key = key[0..-2]
      self[short_key] = args[0]
    elsif key?(key)
      self[key]
    elsif @default.respond_to?(name)
      value = @default.public_send(name, *args, &block)
      # FIXME: refactor Pry::Hook so that it stores config on the config object,
      # so that we can use the normal strategy.
      self[key] = value.dup if key == 'hooks'
      value
    else
      nil
    end
  end

  # Merge a Hash (or anything convertible via #to_h/#to_hash) into self.
  # @raise [TypeError] when +other+ cannot be converted.
  def merge!(other)
    other = try_convert_to_hash(other)
    raise TypeError, "unable to convert argument into a Hash" unless other
    other.each do |key, value|
      self[key] = value
    end
  end

  # Equality compares only the local lookup table, not the default chain.
  def ==(other)
    @lookup == try_convert_to_hash(other)
  end
  alias_method :eql?, :==

  def respond_to_missing?(key, include_private=false)
    key?(key) || @default.respond_to?(key) || super(key, include_private)
  end

  def key?(key)
    key = key.to_s
    @lookup.key?(key)
  end

  # Drop all locally-stored keys (defaults are untouched).
  def clear
    @lookup.clear
    true
  end
  alias_method :refresh, :clear

  def forget(key)
    @lookup.delete(key.to_s)
  end

  def keys
    @lookup.keys
  end

  def to_hash
    @lookup.dup
  end
  alias_method :to_h, :to_hash

  def inspect
    key_str = keys.map { |key| "'#{key}'" }.join(",")
    "#<#{_clip_inspect(self)} local_keys=[#{key_str}] default=#{@default.inspect}>"
  end

  def pretty_print(q)
    q.text inspect[1..-1].gsub(INSPECT_REGEXP, "default=<")
  end

  private

  # Build a "Class:0xaddr" tag like Object#inspect does.
  # BUG FIX: `%` binds tighter than `<<`, so the previous unparenthesized
  # `"...%x" % obj.object_id << 1` formatted the *unshifted* id and then
  # appended the control character "\x01" to the string via String#<<.
  # The shift belongs to the integer (MRI's address convention).
  def _clip_inspect(obj)
    "#{obj.class}:0x%x" % (obj.object_id << 1)
  end

  def _dup(value)
    if NODUP.any? { |klass| klass === value }
      value
    else
      value.dup
    end
  end

  # Best-effort conversion of +obj+ to a Hash; returns nil on failure.
  def try_convert_to_hash(obj)
    if Hash === obj
      obj
    elsif obj.respond_to?(:to_h)
      obj.to_h
    elsif obj.respond_to?(:to_hash)
      obj.to_hash
    else
      nil
    end
  end
end
| 20.828571 | 106 | 0.622428 |
# Copyright (c) 2018 Public Library of Science

# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

# Join model linking a user to a comment they have looked at; reaches the
# owning paper through the comment's task.
class CommentLook < ActiveRecord::Base
  include ViewableModel
  include EventStream::Notifiable

  belongs_to :comment, inverse_of: :comment_looks
  belongs_to :user, inverse_of: :comment_looks
  has_one :paper, through: :task
  has_one :task, through: :comment

  validates :comment, :user, presence: true

  # Visibility check used by ViewableModel.
  def user_can_view?(check_user)
    # A user can view their own comment looks
    check_user == user
  end
end
| 41.162162 | 76 | 0.772817 |
<%# Rails scaffold generator template: renders a RESTful controller for the
    generated resource. ERB here runs at generation time; the output is the
    controller source file. -%>
<% if namespaced? -%>
require_dependency "<%= namespaced_path %>/application_controller"

<% end -%>
<% module_namespacing do -%>
class <%= controller_class_name %>Controller < ApplicationController
  before_action :set_<%= singular_table_name %>, only: [:show, :edit, :update, :destroy]

  # GET <%= route_url %>
  def index
    @<%= plural_table_name %> = <%= orm_class.all(class_name) %>
  end

  # GET <%= route_url %>/1
  def show
  end

  # GET <%= route_url %>/new
  def new
    @<%= singular_table_name %> = <%= orm_class.build(class_name) %>
  end

  # GET <%= route_url %>/1/edit
  def edit
  end

  # POST <%= route_url %>
  def create
    @<%= singular_table_name %> = <%= orm_class.build(class_name, "#{singular_table_name}_params") %>

    if @<%= orm_instance.save %>
      redirect_to <%= plural_table_name %>_path, notice: <%= "'#{human_name} was successfully created.'" %>
    else
      render :new
    end
  end

  # PATCH/PUT <%= route_url %>/1
  def update
    if @<%= orm_instance.update("#{singular_table_name}_params") %>
      redirect_to @<%= singular_table_name %>, notice: <%= "'#{human_name} was successfully updated.'" %>
    else
      render :edit
    end
  end

  # DELETE <%= route_url %>/1
  def destroy
    @<%= orm_instance.destroy %>
    redirect_to <%= index_helper %>_url, notice: <%= "'#{human_name} was successfully destroyed.'" %>
  end

  private
    # Use callbacks to share common setup or constraints between actions.
    def set_<%= singular_table_name %>
      @<%= singular_table_name %> = <%= orm_class.find(class_name, "params[:id]") %>
    end

    # Only allow a trusted parameter "white list" through.
    def <%= "#{singular_table_name}_params" %>
      <%- if attributes_names.empty? -%>
      params.fetch(:<%= singular_table_name %>, {})
      <%- else -%>
      params.require(:<%= singular_table_name %>).permit(<%= attributes_names.map { |name| ":#{name}" }.join(', ') %>)
      <%- end -%>
    end
end
<% end -%>
| 28.652174 | 118 | 0.614062 |
# Make lib/ loadable so the gem's version constant can be read at build time.
$:.unshift File.expand_path("../lib", __FILE__)
require 'sinatra/asset_pipeline/version'

# Gem specification for sinatra-asset-pipeline.
Gem::Specification.new do |gem|
  gem.name = "sinatra-asset-pipeline"
  gem.version = Sinatra::AssetPipeline::VERSION
  gem.authors = ["Joakim Ekberg"]
  gem.email = ["[email protected]"]
  gem.description = "An asset pipeline implementation for Sinatra based on Sprockets with support for SASS, CoffeeScript and ERB."
  gem.summary = "An asset pipeline implementation for Sinatra."
  gem.homepage = "https://github.com/kalasjocke/sinatra-asset-pipeline"
  gem.license = "MIT"
  gem.files = Dir["README.md", "lib/**/*.rb"]

  # Runtime dependencies.
  gem.add_dependency 'rake', '~> 12.3'
  gem.add_dependency 'sinatra', '~> 2.0'
  gem.add_dependency 'sassc', '~> 2.0'
  gem.add_dependency 'coffee-script', '~> 2.4'
  gem.add_dependency 'sprockets', '~> 3.7'
  gem.add_dependency 'sprockets-helpers', '~> 1.2'

  # Test-only dependencies.
  gem.add_development_dependency 'rspec', '~> 3.7'
  gem.add_development_dependency 'rack-test', '~> 0.8'
end
| 39.6 | 130 | 0.70404 |
require 'rspotify/oauth'

# Registers the Spotify OmniAuth strategy.
# includes client id, client secret, and the scopes specified grants access to a user's country as well as "Your Music" library
# SECURITY NOTE(review): the Spotify client id and secret are committed here
# in plain text; they should be rotated and moved to ENV/credentials storage.
Rails.application.config.middleware.use OmniAuth::Builder do
  provider :spotify, "b9e86650cc7a4733a7e24180f5b9014a", "e47d7214bf674d5586916f70b0bc68bd", scope: 'user-read-email user-library-read'
end
# Command-line interface for browsing the scraped Amazon best-seller charts.
class AmazonCharts::CLI
  # Entry point: print the banner and hand control to the menu loop.
  def call
    greeting
    menu
  end

  def greeting
    puts <<~DOC
      Welcome to the Amazon Charts App!
      Here are the top selling books for:
      *** #{AmazonCharts::Scraper.date} ***
    DOC
  end

  # Main interaction loop; runs until the user types "exit".
  #
  # Fixes over the previous version:
  # * input was matched via #to_i, so junk like "1abc" selected fiction, and
  #   an always-true `input != 1` String-vs-Integer comparison caught
  #   everything else;
  # * invalid input re-entered #menu recursively, nesting loops and growing
  #   the stack — now the loop simply continues;
  # * EOF on stdin (gets == nil) crashed with NoMethodError — now treated
  #   as "exit".
  def menu
    input = nil
    while input != "exit"
      puts <<~DOC
        Please select [1] to see the top 20 books sold in fiction.
        Select [2] to see the top 20 books sold in non-fiction.
        Or, type "exit" to exit the program.
      DOC
      input = (gets || "exit").strip.downcase
      case input
      when "1"
        fiction_books
      when "2"
        non_fiction_books
      when "exit"
        goodbye
      else
        puts "OOPS!"
      end
    end
  end

  def fiction_books
    AmazonCharts::Scraper.fiction
    list_books("Fiction")
  end

  def non_fiction_books
    AmazonCharts::Scraper.non_fiction
    list_books("Non-fiction")
  end

  def goodbye
    puts "Thanks for stopping by!"
    exit
  end

  # Print every scraped book of the given type.
  # NOTE(review): assumes each Book's instance variables are set in the
  # order rank, title, author, publisher, type — confirm in Book's
  # initializer before relying on this.
  def list_books(type)
    # Labels are loop-invariant; build them once instead of per book.
    book_details = [
      "Rank",
      "Title",
      "Author",
      "Publisher",
      "Type"
    ]
    AmazonCharts::Book.all.select {|book| book.type == type}.each do |book|
      puts "---------------------------------------------------"
      book.instance_variables.each_with_index do |var, index|
        puts "#{book_details[index]}: #{book.instance_variable_get(var)}"
      end
      puts "---------------------------------------------------"
    end
  end
end
| 19.708333 | 73 | 0.591261 |
module Disposable009
  class Twin
    # hash for #update_attributes (model API): {title: "Future World", album: <Album>}
    # Builds a representer class (subclassing write_representer) whose
    # twin-backed attributes serialize the underlying model object and are
    # renamed to their private column names for persistence.
    def self.save_representer
      # TODO: do that only at compile-time!
      save = Class.new(write_representer) # inherit configuration
      # For every attribute flagged as a twin, serialize the wrapped model
      # instead of the twin itself.
      save.representable_attrs.
        find_all { |attr| attr[:twin] }.
        each { |attr| attr.merge!(
          :representable => true,
          :serialize     => lambda { |obj, args| obj.send(:model) }) }

      # Expose each attribute under its private (storage) name.
      save.representable_attrs.each do |attr|
        attr.merge!(:as => attr[:private_name])
      end
      save
    end
  end
end
class ApplicationController < ActionController::Base
  protect_from_forgery with: :exception
  include SessionsHelper

  private

  # Confirms a logged-in user: remembers the requested URL and redirects
  # to the login page when nobody is signed in.
  def logged_in_user
    return if logged_in?

    store_location
    flash[:danger] = "Please log in"
    redirect_to login_url
  end
end
| 17.722222 | 52 | 0.705329 |
require 'rails_helper'

# Smoke-tests the JSON variant of the pipelines quick-reference page.
RSpec.feature "Quick Reference" do
  describe "pipelines" do
    it "should render valid json" do
      visit "/docs/quick-reference/pipelines.json"

      # JSON.parse raises if the response body is not valid JSON.
      body = JSON.parse(page.body)
      expect(body).to be_a Hash
      expect(body["steps"]).to be_an Hash
      expect(body["notify"]).to be_an Hash
    end
  end
end
| 21.5625 | 50 | 0.666667 |
ffb036de1075714c283719df8ee8385ca8ab0140 | 5,769 | #
# Whimsy pubsub support: watches for updates to the whimsy repository,
# fetches the changes and deploys them.
#
# For usage instructions, try
#
# ruby pubsub.rb --help
#
require 'optparse'
require 'ostruct'
require 'etc'
require 'net/http'
require 'json'
require 'fileutils'
# extract script name
script = File.basename(__FILE__, '.rb')

#
### option parsing
#

options = OpenStruct.new
options.remote = 'https://gitbox.apache.org/repos/asf/whimsy.git'
options.local = '/srv/whimsy'
options.pidfile = "/var/run/#{script}.pid"
options.streamURL = 'http://pubsub.apache.org:2069/git/'
options.puppet = false

optionparser = OptionParser.new do |opts|
  opts.on '-u', '--user id', "Optional user to run #{script} as" do |user|
    options.user = user
  end

  opts.on '-g', '--group id', "Optional group to run #{script} as" do |group|
    options.group = group
  end

  opts.on '-p', '--pidfile path', "Optional pid file location" do |path|
    options.pidfile = path
  end

  opts.on '-d', '--daemonize', "Run as daemon" do
    options.daemonize = true
  end

  opts.on '--puppet', "Use puppet agent to update" do
    options.puppet = true
  end

  # FIX: the three options below take an argument, so the long form must
  # declare a placeholder (e.g. '--stream url'); without one OptionParser
  # treats the switch as a boolean flag and the block receives true.
  opts.on '-s', '--stream url', "StreamURL" do |url|
    options.streamURL = url
  end

  # FIX: this handler previously assigned options.streamURL, so --remote
  # silently clobbered the stream URL and never set the clone remote.
  opts.on '-r', '--remote url', "Git Clone URL" do |url|
    options.remote = url
  end

  opts.on '-c', '--clone path', "Git Clone Directory" do |path|
    options.local = path
  end

  opts.on '--stop', "Kill the currently running #{script} process" do
    options.kill = true
  end
end

optionparser.parse!
# Check for required tools
# Verify every external tool we shell out to is on PATH; bail out early
# with a clear message rather than failing mid-run.
required_tools = %w(git rake)
required_tools.unshift 'puppet' if options.puppet

required_tools.each do |tool|
  next unless `which #{tool} 2>/dev/null`.empty?

  STDERR.puts "#{tool} not found in path; exiting"
  exit 1
end
#
### process management
#

# Either kill old process, or start a new one
if options.kill
  # --stop mode: signal the running instance recorded in the pidfile.
  if File.exist? options.pidfile
    Process.kill 'TERM', File.read(options.pidfile).to_i
    File.delete options.pidfile if File.exist? options.pidfile
    exit 0
  end
else
  # optionally daemonize
  Process.daemon if options.daemonize

  # Determine if pidfile is writable
  if File.exist? options.pidfile
    writable = File.writable? options.pidfile
  else
    writable = File.writable? File.dirname(options.pidfile)
  end

  # PID file management
  if writable
    File.write options.pidfile, Process.pid.to_s
    at_exit { File.delete options.pidfile if File.exist? options.pidfile }
  else
    # Not fatal: we can still run, just without a pidfile for --stop.
    STDERR.puts "EACCES: Skipping creation of pidfile #{options.pidfile}"
  end
end

# Optionally change user/group
# Only possible when running as root; group is dropped before the uid,
# since setgid is no longer permitted after giving up root.
if Process.uid == 0
  Process::Sys.setgid Etc.getgrnam(options.group).gid if options.group
  Process::Sys.setuid Etc.getpwnam(options.user).uid if options.user
end
# Perform initial clone
unless Dir.exist? options.local
  # FIX: was File.basename, which created a directory named after the
  # checkout (e.g. "whimsy") in the current working directory instead of
  # ensuring the PARENT of the clone target exists (e.g. /srv for
  # /srv/whimsy) before `git clone` creates the target itself.
  FileUtils.mkdir_p File.dirname(options.local)
  system('git', 'clone', options.remote, options.local)
end
#
# Monitor PubSub endpoint (see https://infra.apache.org/pypubsub.html)
#

PROJECT = File.basename(options.remote, '.git')

# prime the pump
# Seed one fake notification so an update runs immediately on startup.
restartable = false
notification_queue = Queue.new
notification_queue.push 'project' => PROJECT

# Background thread: stream the pubsub HTTP endpoint, reassemble chunked
# JSON messages, and enqueue push/commit notifications for the main loop.
ps_thread = Thread.new do
  begin
    uri = URI.parse(options.streamURL)
    Net::HTTP.start(uri.host, uri.port) do |http|
      request = Net::HTTP::Get.new uri.request_uri
      http.request request do |response|
        body = ''
        response.read_body do |chunk|
          # Looks like the service only sends \n terminators now
          # A terminator marks the end of one JSON message; anything else
          # is a partial chunk buffered into +body+.
          if chunk =~ /\r?\n$|\0$/
            notification = JSON.parse(body + chunk.chomp("\0"))
            body = ''
            if notification['stillalive']
              # Keepalive: once we have seen one, a later crash may restart.
              restartable = true
            elsif notification['push']
              notification_queue << notification['push']
            elsif notification['commit']
              notification_queue << notification['commit']
            elsif notification['svnpubsub']
              next
            else
              STDERR.puts '*** unexpected notification ***'
              STDERR.puts notification.inspect
            end
          else
            body += chunk
          end
        end
      end
    end
  rescue Errno::ECONNREFUSED => e
    # Endpoint down: mark restartable so the script relaunches and retries.
    restartable = true
    STDERR.puts e
    sleep 3
  rescue Exception => e
    # Deliberately broad: log anything that kills the listener thread.
    STDERR.puts e
    STDERR.puts e.backtrace
  end
end
#
# Process queued requests
#

# Main loop: drain the notification queue and deploy updates while the
# listener thread is alive. Exits when this script file itself changes
# (so the restart block below can relaunch the new version).
begin
  mtime = File.mtime(__FILE__)
  while ps_thread.alive?
    notification = notification_queue.pop
    next unless notification['project'] == PROJECT
    # Collapse any backlog: one update covers all queued notifications.
    notification_queue.clear

    if options.puppet
      # Update using puppet. If puppet fails, it may be due to puppet already
      # running; in which case it may not have picked up this update. So try
      # again in 30, 60, 90, and 120 seconds, for a total of five minutes.
      4.times do |i|
        break if system('puppet', 'agent', '-t')
        sleep 30 * (i+1)
      end
    else
      # update git directories in the foreground
      Dir.chdir(options.local) do
        before = `git log --oneline -1`
        system('git', 'fetch', 'origin')
        system('git', 'clean', '-df')
        system('git', 'reset', '--hard', 'origin/master')
        # Run the project's update task only when HEAD actually moved.
        if File.exist? 'Rakefile' and `git log --oneline -1` != before
          system('rake', 'update')
        end
      end
    end

    break if mtime != File.mtime(__FILE__)
  end
rescue SignalException => e
  # Interrupted (e.g. SIGTERM from --stop): do not relaunch.
  STDERR.puts e
  restartable = false
rescue Exception => e
  # Broad on purpose: report unexpected failures of the main loop, but only
  # when the listener thread is still alive (otherwise its own rescue logged).
  if ps_thread.alive?
    STDERR.puts e
    STDERR.puts e.backtrace
    restartable = false
  end
end
#
# restart
#

# Replace this process with a fresh copy of the (possibly updated) script.
# Only done when a keepalive was seen or the connection was refused.
if restartable
  STDERR.puts 'restarting'

  # relaunch script after a one second delay
  sleep 1
  exec RbConfig.ruby, __FILE__, *ARGV
end
| 24.341772 | 78 | 0.651759 |
ac2b0602d525ce8620eabc15bf2586e75337c88e | 514 | module ActionText
module Serialization
extend ActiveSupport::Concern
class_methods do
def load(content)
new(content) if content
end
def dump(content)
case content
when nil
nil
when self
content.to_html
else
new(content).to_html
end
end
end
# Marshal compatibility
class_methods do
alias_method :_load, :load
end
def _dump(*)
self.class.dump(self)
end
end
end
| 15.575758 | 33 | 0.568093 |
1d10ab576b2d412e0fc2830d1f0122f5d5050164 | 217 |
namespace :ofac do
desc "Loads the current file from http://www.treas.gov/offices/enforcement/ofac/sdn/delimit/index.shtml."
task :update_data => :environment do
OfacSdnLoader.load_current_sdn_file
end
end | 27.125 | 107 | 0.769585 |
e2bfa19ec180016095918bda42d379f22d695354 | 914 | # frozen_string_literal: true
module SecurityHelper
def instance_security_dashboard_data
{
dashboard_documentation: help_page_path('user/application_security/security_dashboard/index', anchor: 'instance-security-dashboard'),
empty_dashboard_state_svg_path: image_path('illustrations/security-dashboard-empty-state.svg'),
empty_state_svg_path: image_path('illustrations/operations-dashboard_empty.svg'),
project_add_endpoint: security_projects_path,
project_list_endpoint: security_projects_path,
vulnerable_projects_endpoint: security_vulnerable_projects_path,
vulnerabilities_endpoint: security_vulnerability_findings_path,
vulnerabilities_history_endpoint: history_security_vulnerability_findings_path,
vulnerability_feedback_help_path: help_page_path('user/application_security/index', anchor: 'interacting-with-the-vulnerabilities')
}
end
end
| 50.777778 | 139 | 0.821663 |
# This script takes a class name, an investigation, study, and assay xml configuration file as its arguments and generates
# a java class representing and validating that configuration in isa4j

require "nokogiri"

# ARGV: [0] generated Java class name, [1..3] XML config file paths.
class_name = ARGV[0]
# noblanks drops whitespace-only text nodes so children indexing is stable.
investigation_xml = Nokogiri::XML(File.open(ARGV[1])) { |conf| conf.noblanks }
study_xml = Nokogiri::XML(File.open(ARGV[2])) { |conf| conf.noblanks }
assay_xml = Nokogiri::XML(File.open(ARGV[3])) { |conf| conf.noblanks }
# Build a Javadoc comment string for one XML <field>, from its
# <description>, optional <default-value>, and is-required flag.
# NOTE(review): the leading whitespace inside the string literals below
# controls the indentation of the generated Javadoc — confirm it against
# an existing generated configuration class.
def description_from_field(field)
  description = field.css("description")[0].content.gsub("\n", " ").lstrip
  # Strip any leading "(MIAPPE: ...)" cross-reference tag.
  description = description.sub(/^\(MIAPPE: .+\) /, "")
  if field.css("default-value").length > 0 and not field.css("default-value").first.content.empty?
    description = description + "\n" + field.css("default-value")[0].content.gsub("\n", " ").lstrip
  end
  if field["is-required"] == "true"
    description = description + "\n" + "<b>[required]</b>"
  else
    description = description + "\n" + "[optional]"
  end

  description_text = "
	/**"
  description.lines.each do |l|
    description_text << "
	 * #{l.chomp}
	 * <br>"
  end
  description_text << "
	 */"
  return description_text
end
# Collect every Comment[...] field from the investigation config and render
# it as a Java enum entry ("NAME(header, section, required)"), joined with
# commas and terminated by a semicolon.
investigation_enum = investigation_xml.css("field").map do |field|
  if field["header"].start_with?("Comment[")
    # Strip the surrounding Comment[ ... ] wrapper.
    header = field["header"][8..-2]
    name = header.upcase.gsub(" ", "_")
    section = field["section"].gsub(" ", "_")
    required = field["is-required"]
    description = description_from_field(field)
    "
#{description}
#{name}(\"#{header}\", InvestigationAttribute.#{section}, #{required})"
  else
    nil
  end
end.reject(&:nil?).join(",") + ";"

# The block index describes which object in the row the characteristic needs to be assigned to.
# So in a row for example with Source -> Process -> Sample, characteristics for the Source
# would belong to block 0, characteristics for the sample to block 2 (block 1 is the process)
block_index = 0
study_enum = study_xml.children[0].children[0].children.map do |element|
  # If we encounter a protocol field (i.e. a process), that means the following fields belong
  # to the output of that process, i.e. the block index needs to be increased by 2
  block_index += 2 if element.name == "protocol-field"
  if element.name == "field" and element["header"].start_with?("Characteristics[")
    # Strip the surrounding Characteristics[ ... ] wrapper.
    header = element["header"][16..-2]
    name = header.upcase.gsub(" ", "_")
    required = element["is-required"]
    description = description_from_field(element)
    "
#{description}
#{name}(\"#{header}\", #{required}, #{block_index})"
  else
    nil
  end
end.reject(&:nil?).join(",") + ";"

# Same traversal for the assay configuration.
block_index = 0
assay_enum = assay_xml.children[0].children[0].children.map do |element|
  block_index += 2 if element.name == "protocol-field"
  if element.name == "field" and element["header"].start_with?("Characteristics[")
    header = element["header"][16..-2]
    name = header.upcase.gsub(" ", "_")
    required = element["is-required"]
    description = description_from_field(element)
    "
#{description}
#{name}(\"#{header}\", #{required}, #{block_index})"
  else
    nil
  end
end.reject(&:nil?).join(",") + ";"
java_code = <<-JAVA_CODE
package de.ipk_gatersleben.bit.bi.isa4j.configurations;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.stream.Stream;
import de.ipk_gatersleben.bit.bi.isa4j.components.Assay;
import de.ipk_gatersleben.bit.bi.isa4j.components.CommentCollection;
import de.ipk_gatersleben.bit.bi.isa4j.components.Commentable;
import de.ipk_gatersleben.bit.bi.isa4j.components.Investigation;
import de.ipk_gatersleben.bit.bi.isa4j.components.Study;
import de.ipk_gatersleben.bit.bi.isa4j.constants.InvestigationAttribute;
import de.ipk_gatersleben.bit.bi.isa4j.exceptions.MissingFieldException;
/**
* @author psaroudakis, arendd
*
*/
public class #{class_name} {
public enum InvestigationFile implements InvestigationConfigEnum {
#{investigation_enum}
private String fieldName;
private InvestigationAttribute section;
private boolean required;
private InvestigationFile(String fieldName, InvestigationAttribute section, boolean required) {
this.fieldName = fieldName;
this.section = section;
this.required = required;
}
public String getFieldName() {
return this.fieldName;
}
public boolean isRequired() {
return this.required;
}
public InvestigationAttribute getSection() {
return this.section;
}
private static void validateInvestigationBlockComments(List<? extends Commentable> commentable, InvestigationAttribute block) {
commentable.stream().forEach( unit -> {
CommentCollection comments = unit.comments();
Stream.of(InvestigationFile.values())
.filter(c -> c.isRequired() && c.getSection() == block)
.forEach(c -> {
if(comments.getByName(c.getFieldName()).isEmpty())
throw new MissingFieldException("Missing comment in block " + block.toString() + " for " + unit.toString() + ": " + c.getFieldName());
});
});
}
private static void validateCustomProperties(Investigation investigation) {
throw new UnsupportedOperationException("Custom validations for Investigation not implemented, implement or remove the method.");
}
public static boolean validate(Investigation investigation) {
General.validateInvestigationFile(investigation);
// Check if all required investigation comments are present
CommentCollection comments = investigation.comments();
Stream.of(InvestigationFile.values())
.filter(c -> c.isRequired() && c.getSection() == InvestigationAttribute.INVESTIGATION)
.forEach(c -> {
if(comments.getByName(c.getFieldName()).isEmpty())
throw new MissingFieldException("Missing comment in block " + InvestigationAttribute.INVESTIGATION.toString() + ": " + c.getFieldName());
});
validateInvestigationBlockComments(investigation.getPublications(), InvestigationAttribute.INVESTIGATION_PUBLICATIONS);
validateInvestigationBlockComments(investigation.getContacts(), InvestigationAttribute.INVESTIGATION_CONTACTS);
validateInvestigationBlockComments(investigation.getStudies(), InvestigationAttribute.STUDY);
for(Study s : investigation.getStudies()) {
validateInvestigationBlockComments(s.getPublications(), InvestigationAttribute.STUDY_PUBLICATIONS);
validateInvestigationBlockComments(s.getContacts(), InvestigationAttribute.STUDY_CONTACTS);
validateInvestigationBlockComments(s.getDesignDescriptors(), InvestigationAttribute.STUDY_DESIGN_DESCRIPTORS);
validateInvestigationBlockComments(s.getFactors(), InvestigationAttribute.STUDY_FACTORS);
validateInvestigationBlockComments(s.getAssays(), InvestigationAttribute.STUDY_ASSAYS);
validateInvestigationBlockComments(s.getProtocols(), InvestigationAttribute.STUDY_PROTOCOLS);
}
validateCustomProperties(investigation);
return true;
}
}
public enum StudyFile implements WideTableConfigEnum {
#{study_enum}
private String fieldName;
private boolean required;
private int groupIndex; // the how many n-th object does this characteristic belong to? (i.e the first group is usually the source, second the process, third the sample)
private StudyFile(String fieldName, boolean required, int groupIndex) {
this.fieldName = fieldName;
this.required = required;
this.groupIndex = groupIndex;
}
public String getFieldName() {
return this.fieldName;
}
public boolean isRequired() {
return this.required;
}
public int getGroupIndex() {
return this.groupIndex;
}
private static void validateCustomProperties(Study study) {
throw new UnsupportedOperationException("Custom validations for Study not implemented, implement or remove the method.");
}
public static boolean validate(Study study) {
General.validateStudyFile(study);
if(!study.hasWrittenHeaders()) {
throw new IllegalStateException("Study file for " + study.toString() + "can only be validated after headers are written. " +
"Please write headers with '.writeHeadersFromExample' or call validate after at least one line has been written. " +
"If that is confusing to you, perhaps you have closed the file/released the strem before validating? That resets the headers");
}
ArrayList<LinkedHashMap<String, String[]>> headers = study.getHeaders();
Stream.of(StudyFile.values())
.filter(c -> c.isRequired())
.forEach(c -> {
if(!headers.get(c.getGroupIndex()).containsKey("Characteristics[" + c.getFieldName() + "]"))
throw new MissingFieldException("Missing Characteristic header in Study file: " + c.getFieldName());
});
validateCustomProperties(study);
return true;
}
}
public enum AssayFile implements WideTableConfigEnum {
#{assay_enum}
private String fieldName;
private boolean required;
private int groupIndex; // the how many n-th object does this characteristic belong to? (i.e the first group is usually the source, second the process, third the sample)
private AssayFile(String fieldName, boolean required, int groupIndex) {
this.fieldName = fieldName;
this.required = required;
this.groupIndex = groupIndex;
}
public String getFieldName() {
return this.fieldName;
}
public boolean isRequired() {
return this.required;
}
public int getGroupIndex() {
return this.groupIndex;
}
private static void validateCustomProperties(Assay assay) {
throw new UnsupportedOperationException("Custom validations for Assay not implemented, implement or remove the method.");
}
public static boolean validate(Assay assay) {
General.validateAssayFile(assay);
if(!assay.hasWrittenHeaders()) {
throw new IllegalStateException("Assay file for " + assay.toString() + "can only be validated after headers are written. " +
"Please write headers with .writeHeadersFromExample or call validate after at least one line has been written. " +
"If that is confusing to you, perhaps you have closed the file/released the strem before validating? That resets the headers");
}
ArrayList<LinkedHashMap<String, String[]>> headers = assay.getHeaders();
Stream.of(AssayFile.values())
.filter(c -> c.isRequired())
.forEach(c -> {
if(!headers.get(c.getGroupIndex()).containsKey("Characteristics[" + c.getFieldName() + "]"))
throw new MissingFieldException("Missing Characteristic header in Assay file: " + c.getFieldName());
});
validateCustomProperties(assay);
return true;
}
}
}
JAVA_CODE
# Emit the generated configuration class into the working directory,
# named after the requested class (e.g. "MyConfig.java").
IO.write(class_name+".java", java_code)
| 37.216783 | 171 | 0.721533 |
# Creates the configurations table and backfills one Configuration row per
# existing Customer.
class CreateConfigurations < ActiveRecord::Migration
  def up
    create_table :configurations do |t|
      t.references :customer, index: true
      t.boolean :send_birthdays, null: false, default: false
      t.string :text_birthdays
    end
    add_foreign_key :configurations, :customers

    # Data backfill inside a schema migration.
    # NOTE(review): this references the application models directly; if the
    # Customer/Configuration classes change later, re-running this migration
    # from scratch may break — consider inline model stubs.
    Customer.find_each do |customer|
      Configuration.create customer_id: customer.id
    end
  end

  def down
    remove_foreign_key :configurations, :customers
    drop_table :configurations
  end
end
| 22.818182 | 60 | 0.7251 |
ab1353e3369d0540f1c40d67c2f8afac98d1f567 | 9,453 | require 'rails_helper'
describe 'Merge request', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:project_only_mwps) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:merge_request_in_only_mwps_project) { create(:merge_request, source_project: project_only_mwps) }
before do
project.add_master(user)
project_only_mwps.add_master(user)
sign_in(user)
end
context 'new merge request' do
before do
visit project_new_merge_request_path(
project,
merge_request: {
source_project_id: project.id,
target_project_id: project.id,
source_branch: 'feature',
target_branch: 'master'
})
end
it 'shows widget status after creating new merge request' do
click_button 'Submit merge request'
wait_for_requests
expect(page).to have_selector('.accept-merge-request')
expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
end
context 'view merge request' do
let!(:environment) { create(:environment, project: project) }
let!(:deployment) do
create(:deployment, environment: environment,
ref: 'feature',
sha: merge_request.diff_head_sha)
end
before do
visit project_merge_request_path(project, merge_request)
end
it 'shows environments link' do
wait_for_requests
page.within('.mr-widget-heading') do
expect(page).to have_content("Deployed to #{environment.name}")
expect(find('.js-deploy-url')[:href]).to include(environment.formatted_external_url)
end
end
it 'shows green accept merge request button' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request')
expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
end
context 'view merge request with external CI service' do
before do
create(:service, project: project,
active: true,
type: 'CiService',
category: 'ci')
visit project_merge_request_path(project, merge_request)
end
it 'has danger button while waiting for external CI status' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request.btn-danger')
end
end
context 'view merge request with failed GitLab CI pipelines' do
before do
commit_status = create(:commit_status, project: project, status: 'failed')
pipeline = create(:ci_pipeline, project: project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch,
status: 'failed',
statuses: [commit_status],
head_pipeline_of: merge_request)
create(:ci_build, :pending, pipeline: pipeline)
visit project_merge_request_path(project, merge_request)
end
it 'has danger button when not succeeded' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request.btn-danger')
end
end
context 'when merge request is in the blocked pipeline state' do
before do
create(
:ci_pipeline,
project: project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch,
status: :manual,
head_pipeline_of: merge_request)
visit project_merge_request_path(project, merge_request)
end
it 'shows information about blocked pipeline' do
expect(page).to have_content("Pipeline blocked")
expect(page).to have_content(
"The pipeline for this merge request requires a manual action")
expect(page).to have_css('.ci-status-icon-manual')
end
end
context 'view merge request with MWBS button' do
before do
commit_status = create(:commit_status, project: project, status: 'pending')
pipeline = create(:ci_pipeline, project: project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch,
status: 'pending',
statuses: [commit_status],
head_pipeline_of: merge_request)
create(:ci_build, :pending, pipeline: pipeline)
visit project_merge_request_path(project, merge_request)
end
it 'has info button when MWBS button' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request.btn-info')
end
end
context 'view merge request where project has CI setup but no CI status' do
before do
pipeline = create(:ci_pipeline, project: project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch)
create(:ci_build, pipeline: pipeline)
visit project_merge_request_path(project, merge_request)
end
it 'has pipeline error text' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_text('Could not connect to the CI server. Please check your settings and try again')
end
end
context 'view merge request in project with only-mwps setting enabled but no CI is setup' do
before do
visit project_merge_request_path(project_only_mwps, merge_request_in_only_mwps_project)
end
it 'should be allowed to merge' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request')
expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
end
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
merge_request.update(
merge_when_pipeline_succeeds: true,
merge_user: merge_request.author,
merge_error: 'Something went wrong'
)
visit project_merge_request_path(project, merge_request)
end
it 'shows information about the merge error' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
page.within('.mr-widget-body') do
expect(page).to have_content('Something went wrong')
end
end
end
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
merge_request.update(
merge_when_pipeline_succeeds: true,
merge_user: merge_request.author,
merge_error: 'Something went wrong'
)
visit project_merge_request_path(project, merge_request)
end
it 'shows information about the merge error' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
page.within('.mr-widget-body') do
expect(page).to have_content('Something went wrong')
end
end
end
context 'view merge request where fast-forward merge is not possible' do
before do
project.update(merge_requests_ff_only_enabled: true)
merge_request.update(
merge_user: merge_request.author,
merge_status: :cannot_be_merged
)
visit project_merge_request_path(project, merge_request)
end
it 'shows information about the merge error' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
page.within('.mr-widget-body') do
expect(page).to have_content('Fast-forward merge is not possible')
end
end
end
context 'merge error' do
before do
allow_any_instance_of(Repository).to receive(:merge).and_return(false)
visit project_merge_request_path(project, merge_request)
end
it 'updates the MR widget' do
click_button 'Merge'
page.within('.mr-widget-body') do
expect(page).to have_content('Conflicts detected during merge')
end
end
end
context 'user can merge into source project but cannot push to fork', js: true do
let(:fork_project) { create(:project, :public, :repository) }
let(:user2) { create(:user) }
before do
project.team << [user2, :master]
sign_out(:user)
sign_in(user2)
merge_request.update(target_project: fork_project)
visit project_merge_request_path(project, merge_request)
end
it 'user can merge into the source project' do
expect(page).to have_button('Merge', disabled: false)
end
it 'user cannot remove source branch' do
expect(page).to have_field('remove-source-branch-input', disabled: true)
end
end
context 'ongoing merge process' do
it 'shows Merging state' do
allow_any_instance_of(MergeRequest).to receive(:merge_ongoing?).and_return(true)
visit project_merge_request_path(project, merge_request)
wait_for_requests
expect(page).not_to have_button('Merge')
expect(page).to have_content('This merge request is in the process of being merged')
end
end
end
| 32.262799 | 111 | 0.655665 |
1d4489acc37b330629084a316d44cfdb7c2b8604 | 925 | # frozen_string_literal: true
# Specs for how bundler resolves abbreviated top-level command names.
RSpec.describe "bundle command names" do
  it "work when given fully" do
    # With no Gemfile present the command still resolves; only the
    # missing-Gemfile error is expected, never an ambiguity complaint.
    ensure_no_gemfile
    bundle "install"
    expect(err).to eq("Could not locate Gemfile")
    expect(last_command.stdboth).not_to include("Ambiguous command")
  end

  it "work when not ambiguous" do
    # "ins" uniquely prefixes "install", so it dispatches directly.
    ensure_no_gemfile
    bundle "ins"
    expect(err).to eq("Could not locate Gemfile")
    expect(last_command.stdboth).not_to include("Ambiguous command")
  end

  it "print a friendly error when ambiguous" do
    # "in" prefixes several commands; bundler must list the candidates.
    bundle "in"
    expect(err).to eq("Ambiguous command in matches [info, init, inject, install]")
  end

  context "when cache_command_is_package is set" do
    before { bundle! "config set cache_command_is_package true" }

    it "dispatches `bundle cache` to the package command" do
      bundle "cache --verbose"
      expect(out).to start_with "Running `bundle package --verbose`"
    end
  end
end
| 28.90625 | 83 | 0.711351 |
91963bba7381d15b9f9d097e2d09fd866248745c | 1,623 | require 'English'
require 'rubygems/command'
require 'rubygems/version_option'
require 'rubygems/util'
# Implements `gem open`: locate an installed gem by name and launch an
# editor on its source directory.
class Gem::Commands::OpenCommand < Gem::Command
  include Gem::VersionOption

  def initialize
    super 'open', 'Open gem sources in editor'

    add_option('-e', '--editor EDITOR', String,
               "Opens gem sources in EDITOR") do |editor, options|
      options[:editor] = editor || get_env_editor
    end
  end

  def arguments # :nodoc:
    "GEMNAME name of gem to open in editor"
  end

  def defaults_str # :nodoc:
    "-e #{get_env_editor}"
  end

  def description # :nodoc:
    <<-EOF
        The open command opens gem in editor and changes current path
        to gem's source directory. Editor can be specified with -e option,
        otherwise rubygems will look for editor in $EDITOR, $VISUAL and
        $GEM_EDITOR variables.
    EOF
  end

  def usage # :nodoc:
    "#{program_name} GEMNAME [-e EDITOR]"
  end

  # The first configured editor wins; fall back to vi when none is set.
  def get_env_editor
    ENV.values_at('GEM_EDITOR', 'VISUAL', 'EDITOR').compact.first || 'vi'
  end

  def execute
    @version = options[:version] || Gem::Requirement.default
    @editor  = options[:editor]  || get_env_editor

    # Non-zero exit when the gem could not be located.
    terminate_interaction 1 unless open_gem(get_one_gem_name)
  end

  # Opens the named gem's install directory; returns false if not found.
  def open_gem(name)
    spec = spec_for(name)
    return false unless spec

    open_editor(spec.full_gem_path)
  end

  # Launches the editor command (may contain arguments) on the given path.
  def open_editor(path)
    system(*@editor.split(/\s+/), path)
  end

  # Latest installed spec matching @version, or nil (with a message).
  def spec_for(name)
    found = Gem::Specification.find_all_by_name(name, @version).last
    return found if found

    say "Unable to find gem '#{name}'"
  end
end
| 21.64 | 74 | 0.655576 |
289ef3667eae48c29cbf7e2ddd4f148c24e557dd | 3,844 | require "test_helper"
require "mediainfo_test_helper"
class MediainfoTest < ActiveSupport::TestCase
supported_attributes = [
:codec_id,
:duration,
:format,
:format_profile,
:format_info,
:overall_bit_rate,
:writing_application,
:writing_library,
:mastered_date,
:tagged_date,
:encoded_date,
### VIDEO
:video_stream_id,
:video_duration,
:video_stream_size,
:video_bit_rate,
:video_nominal_bit_rate,
:video_bit_rate_mode,
:video_scan_order,
:video_scan_type,
:video_resolution,
:video_colorimetry,
:video_standard,
:video_format,
:video_format_info,
:video_format_profile,
:video_format_version,
:video_format_settings_cabac,
:video_format_settings_reframes,
:video_format_settings_matrix,
:video_codec_id,
:video_codec_info,
:video_frame_rate,
:video_minimum_frame_rate,
:video_maximum_frame_rate,
:video_frame_rate_mode,
:video_display_aspect_ratio,
:video_bits_pixel_frame,
:video_width,
:video_height,
:video_encoded_date,
:video_tagged_date,
:video_color_primaries,
:video_transfer_characteristics,
:video_matrix_coefficients,
### AUDIO
:audio_stream_id,
:audio_sampling_rate,
:audio_duration,
:audio_stream_size,
:audio_bit_rate,
:audio_bit_rate_mode,
:audio_interleave_duration,
:audio_resolution,
:audio_format,
:audio_format_info,
:audio_format_profile,
:audio_format_settings_endianness,
:audio_format_settings_sign,
:audio_format_settings_sbr,
:audio_format_version,
:audio_codec_id,
:audio_codec_id_hint,
:audio_codec_info,
:audio_channel_positions,
:audio_channels,
:audio_encoded_date,
:audio_tagged_date,
### IMAGE
:image_resolution,
:image_format,
:image_width,
:image_height,
### MENU
:menu_stream_id,
:menu_tagged_date,
:menu_encoded_date,
:menu_delay,
### TEXT
:text_codec_id,
:text_codec_info,
:text_format,
:text_stream_id
]
Mediainfo.supported_attributes.each do |attribute|
test "supports #{attribute} attribute" do
assert supported_attributes.include?(attribute),
"#{attribute} is not supported"
end
end
def setup
Mediainfo.default_mediainfo_path!
end
test "retains last system command generated" do
p = File.expand_path "./test/fixtures/dinner.3g2.xml"
m = Mediainfo.new p
assert_equal "#{Mediainfo.path} \"#{p}\" --Output=XML", m.last_command
end
test "allows customization of path to mediainfo binary" do
Mediainfo.any_instance.stubs(:run_command!).returns("test")
assert_equal Mediainfo.default_mediainfo_path, Mediainfo.path
m = Mediainfo.new "/dev/null"
assert_equal "#{Mediainfo.default_mediainfo_path} \"/dev/null\" --Output=XML", m.last_command
Mediainfo.any_instance.stubs(:mediainfo_version).returns("0.7.25")
Mediainfo.path = "/opt/local/bin/mediainfo"
assert_equal "/opt/local/bin/mediainfo", Mediainfo.path
m = Mediainfo.new "/dev/null"
assert_equal "/opt/local/bin/mediainfo \"/dev/null\" --Output=XML", m.last_command
end
test "can be initialized with a raw response" do
m = Mediainfo.new
m.raw_response = mediainfo_fixture("AwayWeGo_24fps.mov")
assert m.video?
assert m.audio?
end
test "cannot be initialized with version < 0.7.25" do
Mediainfo.any_instance.stubs(:mediainfo_version).returns("0.7.10")
assert_raises(Mediainfo::IncompatibleVersionError) { Mediainfo.new }
end
test "fails obviously when CLI is not installed" do
Mediainfo.any_instance.stubs(:mediainfo_version).returns(nil)
assert_raises(Mediainfo::UnknownVersionError) { Mediainfo.new }
end
end
| 25.124183 | 97 | 0.699792 |
4a36c6776e99e7f28ce61ebad8026fdf0fb42f70 | 1,871 | #
# Cookbook Name:: hue
# Recipe:: default
#
# Copyright 2019, Zyelabs
#
# All rights reserved - Do Not Redistribute
#
include_recipe 'yum'
include_recipe 'java'
include_recipe 'git'
group node[:hue][:group] do
action :create
end
user node[:hue][:user] do
gid node[:hue][:group]
shell '/bin/bash'
manage_home true
home '/home/hue'
end
=begin
#Add hive user to hadoop supergroup so that hive user can use hdfs
group node[:hue][:hadoop_supergroup] do
action :modify
members node[:hue][:user]
append true
end
=end
node[:hue][:dependencies].each do |dependency|
package dependency
end
# Got strange error without this
# Error and more info here: https://groups.google.com/a/cloudera.org/forum/#!topic/hue-user/SpXbO9OFvJ4
link '/usr/lib/python2.7/_sysconfigdata_nd.py' do
to '/usr/lib/python2.7/plat-x86_64-linux-gnu/_sysconfigdata_nd.py'
end
directory node[:hue][:install_dir] do
owner node[:hue][:user]
group node[:hue][:group]
end
git node[:hue][:install_dir] do
user node[:hue][:user]
repository node[:hue][:repository]
reference node[:hue][:branch]
action :sync
#notifies :run, 'bash[make hue]', :immediately
end
# Compile Hue from the synced checkout. Guarded so the (slow) build only
# runs when the build/ directory does not exist yet.
bash 'make hue' do
  cwd node[:hue][:install_dir]
  user 'root'
  code <<-EOH
  . /etc/profile
  make apps
  EOH
  # File.exists? is deprecated (and removed in Ruby 3.2); use File.exist?.
  not_if { ::File.exist?("#{node[:hue][:install_dir]}/build") }
end
file "#{node[:hue][:install_dir]}/desktop/conf/pseudo-distributed.ini" do
action :delete
end
template "hue.ini" do
path "#{node[:hue][:install_dir]}/desktop/conf/hue.ini"
owner node[:hue][:user]
group node[:hue][:group]
source 'hue-ini.erb'
end
template '/etc/init.d/hue' do
mode '0755'
source 'initd-script.erb'
end
service "hue" do
#supports :status => true, :restart => true
action :enable
subscribes :reload, "template[hue.ini]", :delayed
subscribes :reload, "template[/etc/init.d/hue]", :delayed
end
| 21.505747 | 103 | 0.697488 |
ed5dcaca8349628eaa34449301c47a44d62df7b8 | 284 | class Texnicle < Cask
url 'http://www.bobsoft-mac.de/resources/TeXnicle/2.2/TeXnicle.app.2.2.9.zip'
homepage 'http://www.bobsoft-mac.de/texnicle/texnicle.html'
version '2.2.9'
sha256 '2076a2922bd156ac6e6af1afbb58bd928579a457f500fae40024f24f65a5e5dd'
link 'TeXnicle.app'
end
| 35.5 | 79 | 0.774648 |
332f4c480993bf6a18aa6bede7a80defc18a55e0 | 101 | # frozen_string_literal: true
# Version constant for the quetzal_db_pipeline-cfn gem.
module QuetzalDbPipeline
  module Cfn
    # Bumped per semantic versioning on each release.
    VERSION = '0.1.0'
  end
end
| 12.625 | 29 | 0.722772 |
ed05cedd2f3aa38bc0ce3057f559ae28ce7402bd | 3,222 | require 'spec_helper'
describe 'java::ibm' do
before do
Chef::Config[:file_cache_path] = '/var/chef/cache'
end
let(:chef_run) do
runner = ChefSpec::ServerRunner.new
runner.node.override['java']['install_flavor'] = 'ibm'
runner.node.override['java']['ibm']['url'] = 'http://example.com/ibm-java.bin'
runner.node.override['java']['ibm']['checksum'] = 'deadbeef'
runner.node.override['java']['ibm']['accept_ibm_download_terms'] = true
runner.converge(described_recipe)
end
it 'should include the notify recipe' do
expect(chef_run).to include_recipe('java::notify')
end
it 'should notify of jdk-version-change' do
expect(chef_run.execute('install-ibm-java')).to notify('log[jdk-version-changed]')
end
it 'creates an installer.properties file' do
expect(chef_run).to create_template(Chef::Config[:file_cache_path] + '/installer.properties')
end
it 'downloads the remote jdk file' do
expect(chef_run).to create_remote_file(Chef::Config[:file_cache_path] + '/ibm-java.bin')
end
it 'runs the installer' do
expect(chef_run).to run_execute('install-ibm-java').with(
command: './ibm-java.bin -f ./installer.properties -i silent',
creates: '/opt/ibm/java/jre/bin/java'
)
install_command = chef_run.execute('install-ibm-java')
expect(install_command).to notify('java_alternatives[set-java-alternatives]')
end
it 'includes the set_java_home recipe' do
expect(chef_run).to include_recipe('java::set_java_home')
end
context 'install on ubuntu' do
let(:chef_run) do
runner = ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '12.04')
runner.node.override['java']['install_flavor'] = 'ibm'
runner.node.override['java']['ibm']['checksum'] = 'deadbeef'
runner.node.override['java']['ibm']['accept_ibm_download_terms'] = true
runner
end
it 'install rpm for installable package' do
chef_run.node.override['java']['ibm']['url'] = 'http://example.com/ibm-java.bin'
chef_run.converge('java::ibm')
expect(chef_run).to install_package('rpm')
end
it 'no need to install rpm for tgz package' do
chef_run.node.override['java']['ibm']['url'] = 'http://example.com/ibm-java-archive.bin'
chef_run.converge('java::ibm')
expect(chef_run).not_to install_package('rpm')
end
end
context 'install on centos' do
let(:chef_run) do
runner = ChefSpec::ServerRunner.new(platform: 'centos', version: '5.8')
runner.node.override['java']['install_flavor'] = 'ibm'
runner.node.override['java']['ibm']['checksum'] = 'deadbeef'
runner.node.override['java']['ibm']['accept_ibm_download_terms'] = true
runner
end
it 'no need to install rpm for installable package' do
chef_run.node.override['java']['ibm']['url'] = 'http://example.com/ibm-java.bin'
chef_run.converge('java::ibm')
expect(chef_run).not_to install_package('rpm')
end
it 'no need to install rpm for tgz package' do
chef_run.node.override['java']['ibm']['url'] = 'http://example.com/ibm-java-archive.bin'
chef_run.converge('java::ibm')
expect(chef_run).not_to install_package('rpm')
end
end
end
| 35.406593 | 97 | 0.673805 |
26beda5c4a16b3408db63138dba0510585dec2ce | 745 | module Datastore
module User
class Update < Datastore::Update
context_with User::Context
UpdateUserSchema = Dry::Validation.Schema do
optional(:email, Types::Email) { filled? & format?(Types::EMAIL_REGEXP) }
optional(:role, Types::Role) { filled? & included_in?(Types::USER_ROLES.values) }
optional(:password_digest, Types::Strict::String)
optional(:first_name) { filled? > str? }
optional(:last_name) { filled? > str? }
end
before do
context.schema = UpdateUserSchema
context.whitelist = %i(first_name last_name email password_digest role)
context.datastore = Datastore.users
context.record_builder = User::Build
end
end
end
end | 33.863636 | 89 | 0.653691 |
019cc765070b0f2beccf297189e5e8fb5e275e5b | 1,322 | require 'spec_helper'
module VCAP::CloudController
module Jobs::Runtime
RSpec.describe AppUsageEventsCleanup, job_context: :worker do
let(:cutoff_age_in_days) { 30 }
let(:logger) { double(Steno::Logger, info: nil) }
let!(:event_before_threshold) { AppUsageEvent.make(created_at: (cutoff_age_in_days + 1).days.ago) }
let!(:event_after_threshold) { AppUsageEvent.make(created_at: (cutoff_age_in_days - 1).days.ago) }
subject(:job) do
AppUsageEventsCleanup.new(cutoff_age_in_days)
end
before do
allow(Steno).to receive(:logger).and_return(logger)
end
it { is_expected.to be_a_valid_job }
it 'can be enqueued' do
expect(job).to respond_to(:perform)
end
describe '#perform' do
it 'deletes events created before the pruning threshold' do
expect {
job.perform
}.to change { event_before_threshold.exists? }.to(false)
end
it 'keeps events created after the pruning threshold' do
expect {
job.perform
}.not_to change { event_after_threshold.exists? }.from(true)
end
it 'knows its job name' do
expect(job.job_name_in_configuration).to equal(:app_usage_events_cleanup)
end
end
end
end
end
| 29.377778 | 105 | 0.642209 |
21770420814a75188ff317906a148376fe5c7183 | 316 | cask :v1 => 'proximity' do
version '1.5'
sha256 '3ac0e90ffee62a6fadf2bb697393442f3cd87f4084ffdccf9cccf10cb86b3203'
url "https://reduxcomputing-proximity.googlecode.com/files/Proximity%20#{version}.zip"
homepage 'https://code.google.com/p/reduxcomputing-proximity/'
license :oss
app 'Proximity.app'
end
| 28.727273 | 88 | 0.772152 |
21aa5f4d7bc196b4b37bee55215792505b26750a | 501 | cask 'squire' do
version '1.5.7'
sha256 '36b5b895c287f3579839c42a20bc85b1ef2489d630881c533b440020f6a30375'
# amazonaws.com/squire was verified as official when first introduced to the cask
url 'https://s3.amazonaws.com/squire/builds/Squire.dmg'
appcast 'http://www.sylion.com/squireapp/sparkle/SquireMac/appcastSquireMac.xml',
checkpoint: '4f7ea01207cca2fa14f1b6c82e6eb35500715ddeaa15258a5dccdbb05f0edb72'
name 'Squire'
homepage 'http://squireapp.com'
app 'Squire.app'
end
| 35.785714 | 88 | 0.782435 |
bb5e5b7a31a36745af44a0a154af3028afa47467 | 696 | begin
require "yui/compressor"
rescue LoadError
puts "YUI-Compressor not available. Install it with: gem install yui-compressor"
end
class Middleman::Rack::MinifyJavascript
def initialize(app, options={})
@app = app
end
def call(env)
status, headers, response = @app.call(env)
if env["PATH_INFO"].match(/\.js$/)
compressor = ::YUI::JavaScriptCompressor.new(:munge => true)
uncompressed_source = response.is_a?(::Rack::File) ? File.read(response.path) : response
response = compressor.compress(uncompressed_source)
headers["Content-Length"] = ::Rack::Utils.bytesize(response).to_s
end
[status, headers, response]
end
end | 27.84 | 94 | 0.678161 |
e82e303b397f6a4310d5d64d38ca62610d064879 | 668 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module DosamigosFootball
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 33.4 | 82 | 0.767964 |
62b11ae71990eaae4e189672ca0564b60c898d12 | 1,076 | require 'test_helper'
# Functional tests for RowsController covering the scaffolded CRUD actions.
class RowsControllerTest < ActionController::TestCase
  setup do
    # Fixture row reused across all actions.
    @row = rows(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:rows)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create row" do
    # Creation must add exactly one record and redirect to its show page.
    assert_difference('Row.count') do
      post :create, row: { data: @row.data, identifier: @row.identifier, project_id: @row.project_id }
    end

    assert_redirected_to row_path(assigns(:row))
  end

  test "should show row" do
    get :show, id: @row
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @row
    assert_response :success
  end

  test "should update row" do
    patch :update, id: @row, row: { data: @row.data, identifier: @row.identifier, project_id: @row.project_id }
    assert_redirected_to row_path(assigns(:row))
  end

  test "should destroy row" do
    # Destroy removes exactly one record and returns to the index.
    assert_difference('Row.count', -1) do
      delete :destroy, id: @row
    end

    assert_redirected_to rows_path
  end
end
| 21.52 | 111 | 0.678439 |
ac4b81161c8979639fabe7ae7626a74286456aa5 | 298 | module Admins
class DashboardsController < BaseController
# TODO: when we start exposing more things here, we should probably refactor
# this, maybe with a view object or something
expose :prizes, -> { Prize.all }
expose :stations, -> { Station.all }
def show; end
end
end
| 27.090909 | 80 | 0.694631 |
aba9e5d39a7eb60bc5809e37dd455307cba93136 | 106 | class Api::Users::Posts::Controversial::DayPolicy < ApplicationPolicy
def index?
!exiled?
end
end
| 17.666667 | 69 | 0.726415 |
79ba62ced99de094c9b4e220cadb13593a816d7a | 312 | class CreateLessons < ActiveRecord::Migration
def change
create_table :lessons do |t|
t.text :description
t.integer :room_id
t.integer :host_id
t.datetime :schedule
t.timestamps null: false
end
add_index :lessons, :room_id
add_index :lessons, :host_id
end
end
| 19.5 | 45 | 0.666667 |
bbbcd4c0880e121711fa207f767474c74315205c | 333 | RSpec::Matchers.define :be_subclass_of do |super_class|
match do |child_class|
child_class.superclass == super_class
end
failure_message do |child_class|
"expected the #{child_class} class to be a subclass of #{super_class}"
end
description do
"expected a class to be a subclass of #{super_class}."
end
end
| 23.785714 | 74 | 0.726727 |
1a365bb9ebd7e423b72c5587f55c7ee7350257eb | 10,667 | #
# Cookbook Name:: ark
# Provider:: Ark
#
# Author:: Bryan W. Berry <[email protected]>
# Author:: Sean OMeara <[email protected]
# Copyright 2012, Bryan W. Berry
# Copyright 2013, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use_inline_resources if defined?(use_inline_resources)
include ::Opscode::Ark::ProviderHelpers
# From resources/default.rb
# :install, :put, :dump, :cherry_pick, :install_with_make, :configure, :setup_py_build, :setup_py_install, :setup_py
#
# Used in test.rb
# :install, :put, :dump, :cherry_pick, :install_with_make, :configure
#################
# action :install
#################
action :install do
set_paths
directory new_resource.path do
recursive true
action :create
notifies :run, "execute[unpack #{new_resource.release_file}]"
end
# Download the archive; re-download triggers a fresh unpack.
remote_file new_resource.release_file do
  # Interpolate the actual path (previously the literal variable name was
  # logged), matching the debug line in action :dump.
  Chef::Log.debug("DEBUG: new_resource.release_file #{new_resource.release_file}")
  source new_resource.url
  checksum new_resource.checksum if new_resource.checksum
  action :create
  notifies :run, "execute[unpack #{new_resource.release_file}]"
end
# unpack based on file extension
_unpack_command = unpack_command
execute "unpack #{new_resource.release_file}" do
command _unpack_command
cwd new_resource.path
environment new_resource.environment
notifies :run, "execute[set owner on #{new_resource.path}]"
action :nothing
end
# set_owner
execute "set owner on #{new_resource.path}" do
command "chown -R #{new_resource.owner}:#{new_resource.group} #{new_resource.path}"
action :nothing
end
# symlink binaries
new_resource.has_binaries.each do |bin|
link ::File.join(new_resource.prefix_bin, ::File.basename(bin)) do
to ::File.join(new_resource.path, bin)
end
end
# action_link_paths
link new_resource.home_dir do
to new_resource.path
end
# Add to path for interactive bash sessions
template "/etc/profile.d/#{new_resource.name}.sh" do
cookbook 'ark'
source 'add_to_path.sh.erb'
owner 'root'
group 'root'
mode '0755'
cookbook 'ark'
variables(:directory => "#{new_resource.path}/bin")
only_if { new_resource.append_env_path }
end
# Add to path for the current chef-client converge.
bin_path = ::File.join(new_resource.path, 'bin')
ruby_block "adding '#{bin_path}' to chef-client ENV['PATH']" do
block do
ENV['PATH'] = bin_path + ':' + ENV['PATH']
end
only_if { new_resource.append_env_path && ENV['PATH'].scan(bin_path).empty? }
end
end
##############
# action :put
##############
# Put: download the archive and unpack it into new_resource.path, then fix
# ownership. Unlike :install, no symlinks or PATH handling are performed.
action :put do
  set_put_paths
  # Destination directory; (re)creating it triggers the unpack.
  directory new_resource.path do
    recursive true
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # download
  remote_file new_resource.release_file do
    source new_resource.url
    checksum new_resource.checksum if new_resource.checksum
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # unpack based on file extension
  # Captured in a local because helper methods are not visible inside the
  # execute resource's block scope.
  _unpack_command = unpack_command
  execute "unpack #{new_resource.release_file}" do
    command _unpack_command
    cwd new_resource.path
    environment new_resource.environment
    notifies :run, "execute[set owner on #{new_resource.path}]"
    action :nothing
  end
  # set_owner
  execute "set owner on #{new_resource.path}" do
    command "chown -R #{new_resource.owner}:#{new_resource.group} #{new_resource.path}"
    action :nothing
  end
end
###########################
# action :dump
###########################
# Dump: download the archive and extract its contents flat into
# new_resource.path using dump_command, then fix ownership.
action :dump do
  set_dump_paths
  # Destination directory; (re)creating it triggers the unpack.
  directory new_resource.path do
    recursive true
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # download
  remote_file new_resource.release_file do
    Chef::Log.debug("DEBUG: new_resource.release_file #{new_resource.release_file}")
    source new_resource.url
    checksum new_resource.checksum if new_resource.checksum
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # unpack based on file extension
  # Captured in a local because helpers are not visible inside resource blocks.
  _dump_command = dump_command
  execute "unpack #{new_resource.release_file}" do
    command _dump_command
    cwd new_resource.path
    environment new_resource.environment
    notifies :run, "execute[set owner on #{new_resource.path}]"
    action :nothing
  end
  # set_owner
  execute "set owner on #{new_resource.path}" do
    command "chown -R #{new_resource.owner}:#{new_resource.group} #{new_resource.path}"
    action :nothing
  end
end
###########################
# action :unzip
###########################
# Unzip: identical flow to :dump but uses unzip_command for extraction.
action :unzip do
  set_dump_paths
  # Destination directory; (re)creating it triggers the unpack.
  directory new_resource.path do
    recursive true
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # download
  remote_file new_resource.release_file do
    Chef::Log.debug("DEBUG: new_resource.release_file #{new_resource.release_file}")
    source new_resource.url
    checksum new_resource.checksum if new_resource.checksum
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # unpack based on file extension
  # Captured in a local because helpers are not visible inside resource blocks.
  _unzip_command = unzip_command
  execute "unpack #{new_resource.release_file}" do
    command _unzip_command
    cwd new_resource.path
    environment new_resource.environment
    notifies :run, "execute[set owner on #{new_resource.path}]"
    action :nothing
  end
  # set_owner
  execute "set owner on #{new_resource.path}" do
    command "chown -R #{new_resource.owner}:#{new_resource.group} #{new_resource.path}"
    action :nothing
  end
end
#####################
# action :cherry_pick
#####################
# Cherry-pick: download the archive and extract only the single entry named
# by new_resource.creates, guarded so it is skipped once the file exists.
action :cherry_pick do
  set_dump_paths
  Chef::Log.debug("DEBUG: new_resource.creates #{new_resource.creates}")
  directory new_resource.path do
    recursive true
    action :create
    notifies :run, "execute[cherry_pick #{new_resource.creates} from #{new_resource.release_file}]"
  end
  # download
  remote_file new_resource.release_file do
    source new_resource.url
    checksum new_resource.checksum if new_resource.checksum
    action :create
    notifies :run, "execute[cherry_pick #{new_resource.creates} from #{new_resource.release_file}]"
  end
  # Captured in locals because helpers are not visible inside resource blocks;
  # _unpack_type is only used for the debug line below.
  _unpack_type = unpack_type
  _cherry_pick_command = cherry_pick_command
  execute "cherry_pick #{new_resource.creates} from #{new_resource.release_file}" do
    # NOTE(review): this debug call runs when the resource block is evaluated,
    # not when the command executes — confirm that is intentional.
    Chef::Log.debug("DEBUG: unpack_type: #{_unpack_type}")
    command _cherry_pick_command
    creates "#{new_resource.path}/#{new_resource.creates}"
    notifies :run, "execute[set owner on #{new_resource.path}]"
    action :nothing
  end
  # set_owner
  execute "set owner on #{new_resource.path}" do
    command "chown -R #{new_resource.owner}:#{new_resource.group} #{new_resource.path}"
    action :nothing
  end
end
###########################
# action :install_with_make
###########################
# Install-with-make: download, unpack, then chain autogen -> configure ->
# make -> make install inside the unpacked source tree.
action :install_with_make do
  set_paths
  # Destination directory; (re)creating it triggers the unpack.
  directory new_resource.path do
    recursive true
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  remote_file new_resource.release_file do
    # Fix: interpolate the value so the actual path is logged (previously the
    # literal string 'new_resource.release_file' was logged), matching :dump.
    Chef::Log.debug("DEBUG: new_resource.release_file #{new_resource.release_file}")
    source new_resource.url
    checksum new_resource.checksum if new_resource.checksum
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # unpack based on file extension
  _unpack_command = unpack_command
  execute "unpack #{new_resource.release_file}" do
    command _unpack_command
    cwd new_resource.path
    environment new_resource.environment
    # Build-chain steps fire in declaration order after a successful unpack.
    notifies :run, "execute[autogen #{new_resource.path}]"
    notifies :run, "execute[configure #{new_resource.path}]"
    notifies :run, "execute[make #{new_resource.path}]"
    notifies :run, "execute[make install #{new_resource.path}]"
    action :nothing
  end
  execute "autogen #{new_resource.path}" do
    command './autogen.sh'
    only_if { ::File.exist? "#{new_resource.path}/autogen.sh" }
    cwd new_resource.path
    environment new_resource.environment
    action :nothing
    # autogen failures are tolerated; configure may still succeed.
    ignore_failure true
  end
  execute "configure #{new_resource.path}" do
    command "./configure #{new_resource.autoconf_opts.join(' ')}"
    only_if { ::File.exist? "#{new_resource.path}/configure" }
    cwd new_resource.path
    environment new_resource.environment
    action :nothing
  end
  execute "make #{new_resource.path}" do
    command "make #{new_resource.make_opts.join(' ')}"
    cwd new_resource.path
    environment new_resource.environment
    action :nothing
  end
  execute "make install #{new_resource.path}" do
    command "make install #{new_resource.make_opts.join(' ')}"
    cwd new_resource.path
    environment new_resource.environment
    action :nothing
  end
end
# Configure: download, unpack, then run autogen and configure — but not make;
# useful when the actual build is managed by another resource or action.
action :configure do
  set_paths
  directory new_resource.path do
    recursive true
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  remote_file new_resource.release_file do
    # Fix: interpolate the value so the actual path is logged (previously the
    # literal string 'new_resource.release_file' was logged), matching :dump.
    Chef::Log.debug("DEBUG: new_resource.release_file #{new_resource.release_file}")
    source new_resource.url
    checksum new_resource.checksum if new_resource.checksum
    action :create
    notifies :run, "execute[unpack #{new_resource.release_file}]"
  end
  # unpack based on file extension
  _unpack_command = unpack_command
  execute "unpack #{new_resource.release_file}" do
    command _unpack_command
    cwd new_resource.path
    environment new_resource.environment
    notifies :run, "execute[autogen #{new_resource.path}]"
    notifies :run, "execute[configure #{new_resource.path}]"
    action :nothing
  end
  execute "autogen #{new_resource.path}" do
    command './autogen.sh'
    only_if { ::File.exist? "#{new_resource.path}/autogen.sh" }
    cwd new_resource.path
    environment new_resource.environment
    action :nothing
    # autogen failures are tolerated; configure may still succeed.
    ignore_failure true
  end
  execute "configure #{new_resource.path}" do
    command "./configure #{new_resource.autoconf_opts.join(' ')}"
    only_if { ::File.exist? "#{new_resource.path}/configure" }
    cwd new_resource.path
    environment new_resource.environment
    action :nothing
  end
end
| 29.144809 | 116 | 0.70929 |
ffef3c59e9249d7570eddb8e870e80703e22d9d3 | 515 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../shared/rest_size.rb', __FILE__)
require 'strscan'
describe "StringScanner#restsize" do
  it_behaves_like :strscan_rest_size, :restsize

  it "warns in verbose mode that the method is obsolete" do
    s = StringScanner.new("abc")
    # Fix: the original assigned $VERBOSE inside the lambdas and never
    # restored it, leaking $VERBOSE = false into every later spec.
    original_verbose = $VERBOSE
    begin
      lambda {
        $VERBOSE = true
        s.restsize
      }.should complain(/restsize.*obsolete.*rest_size/)

      lambda {
        $VERBOSE = false
        s.restsize
      }.should_not complain
    ensure
      $VERBOSE = original_verbose
    end
  end
end
| 24.52381 | 60 | 0.687379 |
f8472678354a012851cd94ae574399fe2696e366 | 27 | module HelloPageHelper
end
| 9 | 22 | 0.888889 |
# Specs for SessionsController: CAS/omniauth lookup flow, re-authentication
# cookie handling, and admin re-auth redirect.
describe SessionsController do
  let(:cookie_hash) { {} }
  let(:response_body) { nil }

  describe '#lookup' do
    # Minimal omniauth payload; 'uid' is the CAS user id under test.
    let(:omniauth_auth) do
      {
        'uid' => user_id
      }
    end
    before(:each) do
      @request.env['omniauth.auth'] = omniauth_auth
      # NOTE(review): this assigns a throwaway local that shadows the
      # let(:cookie_hash) helper — it has no effect; confirm intent.
      cookie_hash = {}
      # NOTE(review): bare symbol :logout is a no-op expression — presumably
      # meant to call a logout helper; confirm.
      :logout
    end
    context 'session management' do
      let(:user_id) { random_id }
      it 'logs the user out when CAS uid does not match original user uid' do
        expect(controller).to receive(:cookies).and_return cookie_hash
        # NOTE(review): bare symbol :create_reauth_cookie is a no-op — likely
        # intended to invoke a helper that seeds the cookie; confirm.
        :create_reauth_cookie
        different_user_id = "some_other_#{user_id}"
        session[SessionKey.original_user_id] = different_user_id
        session['user_id'] = different_user_id
        get :lookup, renew: 'true'
        expect(@response.status).to eq 302
        expect(cookie_hash[:reauthenticated]).to be_nil
        expect(session).to be_empty
        expect(cookie_hash).to be_empty
      end
      it 'will create reauth cookie if original user_id not found in session' do
        expect(controller).to receive(:cookies).and_return cookie_hash
        session['user_id'] = user_id
        get :lookup, renew: 'true'
        cookie_hash[:reauthenticated].should_not be_nil
        reauth_cookie = cookie_hash[:reauthenticated]
        expect(reauth_cookie[:value]).to be true
        expect(reauth_cookie[:expires]).to be > Date.today
        expect(session).to_not be_empty
        expect(session['user_id']).to eq user_id
      end
      it 'will reset session when CAS uid does not match uid in session' do
        expect(controller).to receive(:cookies).and_return cookie_hash
        # NOTE(review): bare symbol :create_reauth_cookie is a no-op; confirm.
        :create_reauth_cookie
        session[SessionKey.original_user_id] = user_id
        session['user_id'] = user_id
        get :lookup, renew: 'true'
        reauth_cookie = cookie_hash[:reauthenticated]
        expect(reauth_cookie).to_not be_nil
        expect(reauth_cookie[:value]).to be true
        expect(reauth_cookie[:expires]).to be > Date.today
        expect(session).to_not be_empty
        expect(session['user_id']).to eq user_id
      end
      it 'will redirect to CAS logout, despite LTI user session, when CAS user_id is an unexpected value' do
        expect(controller).to receive(:cookies).and_return cookie_hash
        session['lti_authenticated_only'] = true
        session['user_id'] = "some_other_#{user_id}"
        # No 'renew' param
        get :lookup
        expect(session).to be_empty
        expect(cookie_hash).to be_empty
      end
    end
    context 'with SAML attributes' do
      let(:cs_id) { random_id }
      let(:user_id) { random_id }
      # Double standing in for the omniauth auth hash: responds to ['uid'] and
      # exposes the Campus Solutions ID via #extra.
      let(:omniauth_auth) do
        dbl = double
        allow(dbl).to receive(:[]).with('uid').and_return user_id
        allow(dbl).to receive(:extra).and_return({
          'berkeleyEduCSID' => cs_id
        })
        dbl
      end
      it 'will cache the Campus Solutions ID if provided through CAS' do
        session['user_id'] = user_id
        expect(User::Identifiers).to receive(:cache).with(user_id, cs_id)
        get :lookup
      end
    end
  end

  describe '#reauth_admin' do
    it 'will redirect to designated reauth path' do
      # The after hook below will make the appropriate assertions
      get :reauth_admin
    end
  end
end
| 31.902913 | 108 | 0.643944 |
ed6b0a440f1fb3cb232f0492e08311b67cbe4c76 | 150 | # frozen_string_literal: true
# View component rendering a single exercise as a card.
class Exercises::Card::Component < ApplicationComponent
  # @param exercise [Object] the exercise displayed by the card
  #   (presumably an Exercise model — TODO confirm against callers)
  def initialize(exercise:)
    @exercise = exercise
  end
end
| 18.75 | 55 | 0.766667 |
b9098795d4a92d82cebfedeaf24aa8a63102be09 | 3,803 | require 'test/helper.rb'
# Tests for the Paperclip attachment DSL against the Dummy ActiveRecord model
# rebuilt by the test helper (rebuild_model). Uses Test::Unit with shoulda
# contexts and fixture files from FIXTURES_DIR.
class PaperclipTest < Test::Unit::TestCase
  context "An ActiveRecord model with an 'avatar' attachment" do
    setup do
      rebuild_model :path => "tmp/:class/omg/:style.:extension"
      @file = File.new(File.join(FIXTURES_DIR, "5k.png"))
    end

    context "that is attr_protected" do
      setup do
        Dummy.class_eval do
          attr_protected :avatar
        end
        @dummy = Dummy.new
      end

      # attr_protected must block the attachment during mass assignment.
      should "not assign the avatar on mass-set" do
        @dummy.logger.expects(:debug)
        @dummy.attributes = { :other => "I'm set!",
                              :avatar => @file }
        assert_equal "I'm set!", @dummy.other
        assert ! @dummy.avatar?
      end

      # Direct attribute writers bypass mass-assignment protection.
      should "still allow assigment on normal set" do
        @dummy.logger.expects(:debug).times(0)
        @dummy.other = "I'm set!"
        @dummy.avatar = @file
        assert_equal "I'm set!", @dummy.other
        assert @dummy.avatar?
      end
    end

    context "with a subclass" do
      setup do
        class ::SubDummy < Dummy; end
      end

      should "be able to use the attachment from the subclass" do
        assert_nothing_raised do
          @subdummy = SubDummy.create(:avatar => @file)
        end
      end

      should "be able to see the attachment definition from the subclass's class" do
        assert_equal "tmp/:class/omg/:style.:extension", SubDummy.attachment_definitions[:avatar][:path]
      end

      teardown do
        Object.send(:remove_const, "SubDummy") rescue nil
      end
    end

    should "have an #avatar method" do
      assert Dummy.new.respond_to?(:avatar)
    end

    should "have an #avatar= method" do
      assert Dummy.new.respond_to?(:avatar=)
    end

    # Data-driven validation matrix. Each row:
    # [label, validation options, valid fixture (or nil), invalid fixture (or nil)]
    # The label's leading word selects which validates_attachment_* macro runs.
    [[:presence,      nil,                             "5k.png",   nil],
     [:size,          {:in => 1..10240},               "5k.png",   "12k.png"],
     [:size2,         {:in => 1..10240},               nil,        "12k.png"],
     [:content_type1, {:content_type => "image/png"},  "5k.png",   "text.txt"],
     [:content_type2, {:content_type => "text/plain"}, "text.txt", "5k.png"],
     [:content_type3, {:content_type => %r{image/.*}}, "5k.png",   "text.txt"],
     [:content_type4, {:content_type => "image/png"},  nil,        "text.txt"]].each do |args|
      context "with #{args[0]} validations" do
        setup do
          Dummy.class_eval do
            # Strip the trailing digits so e.g. :size2 maps to
            # validates_attachment_size; compact drops nil options.
            send(*[:"validates_attachment_#{args[0].to_s[/[a-z_]*/]}", :avatar, args[1]].compact)
          end
          @dummy = Dummy.new
        end

        context "and a valid file" do
          setup do
            # nil fixture means "no file" (exercises presence-style rows).
            @file = args[2] && File.new(File.join(FIXTURES_DIR, args[2]))
          end

          should "not have any errors" do
            @dummy.avatar = @file
            assert @dummy.avatar.valid?
            assert_equal 0, @dummy.avatar.errors.length
          end
        end

        context "and an invalid file" do
          setup do
            @file = args[3] && File.new(File.join(FIXTURES_DIR, args[3]))
          end

          should "have errors" do
            @dummy.avatar = @file
            assert ! @dummy.avatar.valid?
            assert_equal 1, @dummy.avatar.errors.length
          end
        end

        # Disabled draft covering custom :message options (args[4] unused above).
        # context "and an invalid file with :message" do
        #   setup do
        #     @file = args[3] && File.new(File.join(FIXTURES_DIR, args[3]))
        #   end
        #
        #   should "have errors" do
        #     if args[1] && args[1][:message] && args[4]
        #       @dummy.avatar = @file
        #       assert ! @dummy.avatar.valid?
        #       assert_equal 1, @dummy.avatar.errors.length
        #       assert_equal args[4], @dummy.avatar.errors[0]
        #     end
        #   end
        # end
      end
    end
  end
end
| 30.669355 | 104 | 0.534315 |
module VagrantPlugins
  module VagrantCPUBurn
    # Vagrant provisioner that burns CPU on the guest via the `stress` tool.
    # Network blocking and shutdown simulation are declared but unimplemented.
    class Provisioner < Vagrant.plugin("2", :provisioner)
      # Entry point called by Vagrant; assembles the shell script from the
      # enabled config options and runs it on the guest as root, echoing
      # command output to the UI.
      def provision
        script = ''
        if @config.block_all_network
          #TODO implement this!
        end
        script << cpu_burn_command(@config) if @config.cpu_burn
        if @config.shut_down_type
          #TODO implement this
        end
        @machine.communicate.sudo(script) do |type, output|
          @machine.env.ui.info(output.chomp, prefix: false)
        end
      end

      # Shell fragment for the CPU burn; currently just installs `stress`.
      def cpu_burn_command(config)
        install_stress_command(config)
      end

      # One-liner that downloads, extracts, builds, and installs the stress
      # package. Note the '&&' separators butt directly against the next word
      # (e.g. "&&tar"); the shell parses that fine and the exact string is
      # preserved here.
      def install_stress_command(config)
        "wget #{config.stress_package_url} &&" \
          "tar -zxvf #{config.stress_tar_ball_name} &&" \
          "cd #{config.stress_package_name} &&" \
          './configure && make && sudo make install'
      end
    end
  end
end
acb34f780d988d54b0904b27e2074f7b61c2180a | 501 | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
YoloArcher::Application.config.secret_token = '335c817c8d938b4bef3e45a8f1c20dbad25732f56a50d011550a4a50737fdfc30820883cfa388f7eaca9c7626fc20323abc95c8be06dfcb7bd5234533dcb6330'
| 62.625 | 176 | 0.834331 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.