hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
7a6f038aed9191c7eb6ce23567e97a0dbbe27201 | 958 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
# Serverspec checks: on Debian-family hosts each core service (mysql,
# redis, nginx) must be enabled at boot and currently running.
%w[mysql redis nginx].each do |svc|
  describe service(svc), if: os[:family] == 'debian' do
    it { should be_enabled }
    it { should be_running }
  end
end
| 30.903226 | 74 | 0.717119 |
b97665495f296fa555b4a93c0216b17f04a67bb7 | 44 | require 'gnome2'
require 'panelapplet2.so'
| 11 | 25 | 0.772727 |
# Composed in Melbourne Australia (Not in Berlin, livecoding the-world?)
# By the Yarra River
#
# Sonic Pi live-coding piece.  The :schedule loop broadcasts :start, :bar
# and :quart cues; every other loop syncs against them to stay in time.
bar = 1         # length of one bar, in beats
quart = 2*bar   # NOTE(review): named "quart" but equals two bars — confirm intent
live_loop :schedule do
  cue :start
  7.times do
    cue :bar
    sleep bar;cue :quart
    # NOTE(review): :quart is cued twice in a row (above and again below) —
    # possibly a deliberate double-trigger, possibly a leftover; confirm.
    cue :quart
    sleep bar
  end
end
# Sonifies a string: each character's codepoint selects a note from a
# shuffled A-major-pentatonic scale; one note is played per character.
def say(words)
  sync :bar
  with_fx :reverb, room: 100 do
    with_fx :lpf do
      use_synth :dark_sea_horn
      t = scale(:a1, :major_pentatonic, num_octaves: 2).shuffle
      notes = words.strip.split("").map(&:ord).map{|n| t[n%t.length]}
      # NOTE(review): `n` here is the mapped note value, not the original
      # character, so `n.chr == " "` almost certainly never matches the
      # intended "space gets a longer swell" branch — confirm.
      notes.map{|n| if n.chr == " "
        play n, release: 6, attack: 3
        sleep 4
      else
        play n, release: 4.0, attack: 2
        sleep 2.0
      end
      }
    end end end
live_loop :poem do
  with_fx :level, amp: 0.2 do
    say <<-FOREST
Tiger tiger burning bright
    FOREST
  end
end
live_loop :drums do
  use_synth :beep
  sync :bar
  sample :drum_bass_soft
  play :A3
end
live_loop :dark do; use_synth :dark_ambience; sync :bar
  play :A1
end
n_inc = 0
# Walks a ring of chord degrees, one step per :quart cue.
live_loop :high do; sync :quart; use_synth_defaults release: quart
  use_synth :singer
  play chord_degree(ring(1,3,4, 1,3,5)[n_inc], :A2, :major_pentatonic)
  n_inc+=1
end
# NOTE(review): this second :drums loop redefines the one above — in Sonic Pi
# the later definition wins, so the :beep/:drum_bass_soft loop is replaced.
live_loop :drums do; use_synth :growl; sync :bar
  play degree(1, :A2, :major), release: bar
  sleep bar
end
live_loop :chords do;with_fx :level, amp: 1.0 do;sync :bar ;with_fx :reverb, room: 0.8, mix: 0.5 do
  with_fx :slicer, phase: bar/4.0 do
    use_synth :tri; use_synth_defaults decay: bar*2, release: 1, attack: 0.2, amp: 0.2
    play chord_degree(4, :A2, :major)[0]
    sleep bar/2
    play chord_degree(4, :A2, :major)[1..2]
    sync :bar
    play chord_degree(4, :A2, :major)[1..2]
  end
end
end
end
set_volume! 1.0
| 21.705128 | 99 | 0.638512 |
f8c49595951f63e931133dd56a49bfd6f3f1177b | 2,054 | #
# Cookbook Name:: td-agent
# Provider:: filter
#
# Author:: Anthony Scalisi <[email protected]>
#
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include ::TdAgent::Version
# Renders /etc/td-agent/conf.d/<filter_name>.conf from the cookbook's
# filter template and notifies td-agent to reload (or restart) on change.
action :create do
  # Guard: conf.d includes must be enabled, or the rendered file would
  # never be picked up by td-agent.
  fail 'You should set the node[:td_agent][:includes] attribute to true to use this resource.' unless node['td_agent']['includes']
  template "/etc/td-agent/conf.d/#{new_resource.filter_name}.conf" do
    source 'filter.conf.erb'
    owner 'root'
    group 'root'
    mode '0644'
    variables(type: new_resource.type,
              params: params_to_text(new_resource.params),
              tag: new_resource.tag)
    cookbook 'td-agent'
    # reload_action resolves to :reload when supported, :restart otherwise.
    notifies reload_action, 'service[td-agent]'
  end
  # NOTE(review): unconditionally reports the resource as updated, even when
  # the template was already up to date — confirm this is intended.
  new_resource.updated_by_last_action(true)
end
# Removes the filter's conf.d file (when present) and notifies td-agent.
action :delete do
  file "/etc/td-agent/conf.d/#{new_resource.filter_name}.conf" do
    action :delete
    only_if { ::File.exist?("/etc/td-agent/conf.d/#{new_resource.filter_name}.conf") }
    notifies reload_action, 'service[td-agent]'
  end
  new_resource.updated_by_last_action(true)
end
# Service action to notify: :reload when the installed td-agent supports
# it, falling back to :restart otherwise.
def reload_action
  reload_available? ? :reload : :restart
end
# Serializes a params Hash into td-agent (fluentd) configuration text.
#
# Scalar values become "key value" lines; Hash values become nested
# <key>...</key> sections; Array values emit one <key> section per element.
# Recursion renders the inner text first, and every line of the finished
# body is then indented once more, so nesting depth accumulates naturally.
#
# @param params [Hash] configuration keys mapped to scalars, Hashes or Arrays
# @return [String] indented config fragment (one trailing newline per line)
def params_to_text(params)
  body = ''
  params.each do |key, value|
    case value
    when Hash
      body << "<#{key}>\n" << params_to_text(value) << "</#{key}>\n"
    when Array
      # One repeated <key> section per element.  (Fix: the original
      # shadowed its own block variable with `v.each do |v|`.)
      value.each do |element|
        body << "<#{key}>\n" << params_to_text(element) << "</#{key}>\n"
      end
    else
      body << "#{key} #{value}\n"
    end
  end
  indent = ' ' # indent width preserved from the original implementation
  body.each_line.map { |line| "#{indent}#{line}" }.join
end
| 25.675 | 130 | 0.662123 |
# Capybara/RSpec matcher: asserts that the element found by a CSS selector
# carries exactly the given inline styles.  Comparison goes through
# StylingExpectation, which normalizes strings/arrays/hashes of CSS rules.
RSpec::Matchers.define :have_styling do |rules|
  normalized_rules = StylingExpectation.new(rules)
  # Optional chain: .at_selector('...') overrides the default target element.
  chain(:at_selector) { |selector| @selector = selector }
  match { |document|
    @selector ||= 'body > *:first'
    normalized_rules == styles_at_selector(document)
  }
  description {
    "have styles #{normalized_rules.inspect} at selector #{@selector.inspect}"
  }
  failure_message { |document|
    "expected styles at #{@selector.inspect} to be:\n#{normalized_rules}\nbut was:\n#{styles_at_selector(document)}"
  }
  failure_message_when_negated {
    "expected styles at #{@selector.inspect} to not be:\n#{normalized_rules}"
  }
  # Parses the target element's style attribute into a StylingExpectation;
  # fails fast (with a nicer message) when the selector matches nothing.
  def styles_at_selector(document)
    expect(document).to have_selector(@selector)
    StylingExpectation.new document.at_css(@selector)['style']
  end
end
# Normalizes a description of inline CSS — a raw style string, an array of
# [property, value] pairs, a hash, or nil — into a comparable rule list.
class StylingExpectation
  def initialize(styling)
    @rules =
      if styling.nil?
        []
      elsif styling.is_a?(String)
        parse_rules(styling)
      elsif styling.is_a?(Array)
        styling
      elsif styling.is_a?(Hash)
        styling.to_a
      else
        fail "I don't understand #{styling.inspect}!"
      end
  end

  # Two expectations are equal when their normalized rule lists match
  # (order-sensitive, like the original style attribute).
  def ==(other)
    rules == other.rules
  end

  def to_s
    rules.to_s
  end

  protected

  attr_reader :rules

  private

  # "color: red; width: 100%" -> [["color", "red"], ["width", "100%"]]
  def parse_rules(css)
    css.split(';').map { |declaration| parse_property(declaration) }
  end

  def parse_property(declaration)
    name, value = declaration.split(':', 2).map(&:strip)
    [name, normalize_quotes(value)]
  end

  # JRuby's Nokogiri encodes quotes
  def normalize_quotes(text)
    text.gsub '%22', '"'
  end
end
| 24.47619 | 116 | 0.692607 |
5da7bf2d7b6d33446dbdeb5651b5f76458f87e6d | 6,939 | require 'time'
require 'openssl'
module Aws
  module Signers
    # AWS Signature Version 4 request signer.  Derives a signing key from
    # credentials/date/region/service and either signs an HTTP request via
    # headers (#sign) or produces a presigned URL (#presigned_url).
    class V4

      def self.sign(context)
        new(
          context.config.credentials,
          context.config.sigv4_name,
          context.config.sigv4_region
        ).sign(context.http_request)
      end

      # @param [Credentials] credentials
      # @param [String] service_name The name used by the service in
      #   signing signature version 4 requests.  This is generally
      #   the endpoint prefix.
      # @param [String] region The region (e.g. 'us-west-1') the request
      #   will be made to.
      def initialize(credentials, service_name, region)
        @service_name = service_name
        @credentials = credentials.credentials
        @region = region
      end

      # Signs the request in place by adding the SigV4 headers.
      # @param [Seahorse::Client::Http::Request] req
      # @return [Seahorse::Client::Http::Request] the signed request.
      def sign(req)
        datetime = Time.now.utc.strftime("%Y%m%dT%H%M%SZ")
        # Respect a precomputed payload hash when the caller already set one.
        body_digest = req.headers['X-Amz-Content-Sha256'] || hexdigest(req.body)
        req.headers['X-Amz-Date'] = datetime
        req.headers['Host'] = req.endpoint.host
        req.headers['X-Amz-Security-Token'] = @credentials.session_token if
          @credentials.session_token
        req.headers['X-Amz-Content-Sha256'] ||= body_digest
        req.headers['Authorization'] = authorization(req, datetime, body_digest)
        req
      end

      # Generates and returns a presigned URL.
      # @param [Seahorse::Client::Http::Request] request
      # @option options [required, Integer<Seconds>] :expires_in
      # @option options [optional, String] :body_digest The SHA256 hexdigest of
      #   the payload to sign.  For S3, this should be the string literal
      #   `UNSIGNED-PAYLOAD`.
      # @return [Seahorse::Client::Http::Request] the signed request.
      # @api private
      def presigned_url(request, options = {})
        now = Time.now.utc.strftime("%Y%m%dT%H%M%SZ")
        body_digest = options[:body_digest] || hexdigest(request.body)
        request.headers['Host'] = request.endpoint.host
        request.headers.delete('User-Agent')
        params = Aws::Query::ParamList.new
        # Presigned URLs carry everything in the query string, so hoist any
        # x-amz-* headers out of the header set and into the params.
        request.headers.keys.each do |key|
          if key.match(/^x-amz/i)
            params.set(key, request.headers.delete(key))
          end
        end
        params.set("X-Amz-Algorithm", "AWS4-HMAC-SHA256")
        params.set("X-Amz-Credential", credential(now))
        params.set("X-Amz-Date", now)
        params.set("X-Amz-Expires", options[:expires_in].to_s)
        params.set("X-Amz-SignedHeaders", signed_headers(request))
        params.set('X-Amz-Security-Token', @credentials.session_token) if
          @credentials.session_token
        endpoint = request.endpoint
        if endpoint.query
          endpoint.query += '&' + params.to_s
        else
          endpoint.query = params.to_s
        end
        # The signature itself is appended last and covers the query above.
        endpoint.to_s + '&X-Amz-Signature=' + signature(request, now, body_digest)
      end

      # Builds the Authorization header value.
      def authorization(request, datetime, body_digest)
        parts = []
        parts << "AWS4-HMAC-SHA256 Credential=#{credential(datetime)}"
        parts << "SignedHeaders=#{signed_headers(request)}"
        parts << "Signature=#{signature(request, datetime, body_digest)}"
        parts.join(', ')
      end

      def credential(datetime)
        "#{@credentials.access_key_id}/#{credential_scope(datetime)}"
      end

      # Derives the SigV4 signing key (date -> region -> service ->
      # "aws4_request") and signs the string-to-sign with it.
      def signature(request, datetime, body_digest)
        k_secret = @credentials.secret_access_key
        k_date = hmac("AWS4" + k_secret, datetime[0,8])
        k_region = hmac(k_date, @region)
        k_service = hmac(k_region, @service_name)
        k_credentials = hmac(k_service, 'aws4_request')
        hexhmac(k_credentials, string_to_sign(request, datetime, body_digest))
      end

      def string_to_sign(request, datetime, body_digest)
        parts = []
        parts << 'AWS4-HMAC-SHA256'
        parts << datetime
        parts << credential_scope(datetime)
        parts << hexdigest(canonical_request(request, body_digest))
        parts.join("\n")
      end

      # "<yyyymmdd>/<region>/<service>/aws4_request"
      def credential_scope(datetime)
        parts = []
        parts << datetime[0,8]
        parts << @region
        parts << @service_name
        parts << 'aws4_request'
        parts.join("/")
      end

      # Canonical request text; the exact line order is dictated by the
      # SigV4 specification and must not change.
      def canonical_request(request, body_digest)
        [
          request.http_method,
          path(request.endpoint),
          normalized_querystring(request.endpoint.query || ''),
          canonical_headers(request) + "\n",
          signed_headers(request),
          body_digest
        ].join("\n")
      end

      def path(uri)
        path = uri.path == '' ? '/' : uri.path
        if @service_name == 's3'
          # S3 signs the raw, unescaped path.
          path
        else
          path.gsub(/[^\/]+/) { |segment| Seahorse::Util.uri_escape(segment) }
        end
      end

      def normalized_querystring(querystring)
        params = querystring.split('&')
        params = params.map { |p| p.match(/=/) ? p : p + '=' }
        # We have to sort by param name and preserve order of params that
        # have the same name. Default sort <=> in JRuby will swap members
        # occasionally when <=> is 0 (considered still sorted), but this
        # causes our normalized query string to not match the sent querystring.
        # When names match, we then sort by their original order
        params = params.each.with_index.sort do |a, b|
          a, a_offset = a
          a_name = a.split('=')[0]
          b, b_offset = b
          b_name = b.split('=')[0]
          if a_name == b_name
            a_offset <=> b_offset
          else
            a_name <=> b_name
          end
        end.map(&:first).join('&')
      end

      # Semicolon-joined, sorted list of signed header names.
      def signed_headers(request)
        headers = request.headers.keys
        headers.delete('authorization')
        headers.sort.join(';')
      end

      def canonical_headers(request)
        headers = []
        request.headers.each_pair do |k,v|
          k = k.downcase
          headers << [k,v] unless k == 'authorization'
        end
        headers = headers.sort_by(&:first)
        headers.map{|k,v| "#{k}:#{canonical_header_value(v.to_s)}" }.join("\n")
      end

      # Collapses internal whitespace unless the value is fully quoted.
      def canonical_header_value(value)
        value.match(/^".*"$/) ? value : value.gsub(/\s+/, ' ').strip
      end

      # SHA256 hex digest of a string or IO-like body; IO bodies are read
      # in 1 MB chunks and rewound afterwards.
      def hexdigest(value)
        digest = OpenSSL::Digest::SHA256.new
        if value.respond_to?(:read)
          chunk = nil
          chunk_size = 1024 * 1024 # 1 megabyte
          digest.update(chunk) while chunk = value.read(chunk_size)
          value.rewind
        else
          digest.update(value)
        end
        digest.hexdigest
      end

      def hmac(key, value)
        OpenSSL::HMAC.digest(OpenSSL::Digest.new('sha256'), key, value)
      end

      def hexhmac(key, value)
        OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha256'), key, value)
      end

    end
  end
end
| 33.684466 | 82 | 0.593601 |
039b0605ae8d97372ee70359df87c7ff3af69fd8 | 1,201 | require File.dirname(__FILE__) + '/../../spec_helper'
include Mingle4r::API
# Specs for Mingle4r::API::Wiki: ActiveResource naming, identifier-as-id
# behavior, and one-time configuration of the Attachment class.
describe Wiki do
  it "should set the collection name to 'wiki'" do
    Wiki.collection_name.should == 'wiki'
  end

  it "should element_name to 'page'" do
    Wiki.element_name.should == 'page'
  end

  it "should return the identifier as the id of the wiki" do
    Wiki.site = 'http://localhost'
    wiki = Wiki.new :identifier => 'some_wiki'
    wiki.id.should == 'some_wiki'
  end

  context "post setup hook" do
    it "should set the primary key as identifier" do
      Wiki.site = 'http://localhost'
      primary_key = Wiki.new.class.primary_key
      primary_key.should == 'identifier'
    end
  end

  context "for attachments" do
    before(:each) do
      Wiki.site = 'http://localhost:9090/'
      Wiki.user = 'test'
      Wiki.password = 'test'
    end

    # Calling #attachments twice must configure Attachment only once.
    it "should set attributes for Attachment class only once" do
      wiki = Wiki.new({:identifier => 'hello_world'})
      Attachment.stub!(:find)
      Attachment.should_receive(:site=).once
      Attachment.should_receive(:user=).once
      Attachment.should_receive(:password=).once
      wiki.attachments
      wiki.attachments
    end
  end
end
| 25.020833 | 64 | 0.658618 |
f836d863f09f3546929c3312869576956481577c | 1,822 | require 'rails_helper'
# rubocop:disable Metrics/BlockLength, Performance/HashEachMethods, RSpec/ContextWording
# Feature specs asserting that GA event hooks (onclick/onsubmit handlers)
# are wired up on the header links, help links and the search form.
# NOTE(review): the /HZApp.GA.openLink/ regexes use unescaped dots, so `.`
# matches any character — harmless here, but `\.` was probably intended.
describe "Google Analytics", type: :feature do
  before do
    visit map_path
  end

  context "Header links" do
    links = { title: { selector: '#logo a.title-link',
                       description: 'Title link' },
              logo: { selector: '#logo a.logo-link',
                      description: 'Logo link' },
              program: { selector: 'a#hubzone-program-link',
                         description: 'Program link' },
              help: { selector: 'a#map-help-guide',
                      description: 'Help Link' } }
    links.each do |_key, info|
      it "will be ready to send an event for the #{info[:description]} link" do
        link = find(:css, info[:selector])
        expect(link[:onclick]).to match(/HZApp.GA.openLink/)
      end
    end
  end

  context "Help links" do
    before do
      visit help_path
    end

    links = { help_overview: { selector: 'a#help-overview-link',
                               description: 'Help Overview Link' },
              help_faq: { selector: 'a#help-faq-link',
                          description: 'Help FAQ Link' } }
    links.each do |_key, info|
      it "will be ready to send an event for the #{info[:description]} link" do
        link = find(:css, info[:selector])
        expect(link[:onclick]).to match(/HZApp.GA.openLink/)
      end
    end
  end

  context "Searching" do
    it "will be ready to send an event when a user searches an address" do
      form = find(:css, 'form.usa-search')
      expect(form[:onsubmit]).to match(/HZApp.GA.trackSubmit/)
    end
  end
end
# rubocop:enable Metrics/BlockLength, Performance/HashEachMethods, RSpec/ContextWording
| 34.377358 | 88 | 0.573546 |
918a7572d8e326be436fe758e905a5801eaf0841 | 2,989 | require 'ip_ranger'
# Specs for IPRanger::IPRange#cidrs: converting an inclusive start/end IP
# pair into the minimal covering list of CIDR blocks.
RSpec.describe IPRanger::IPRange do
  it 'converts an IP range to two IPs' do
    ip_range = described_class.new('1.1.1.1', '1.1.1.2')
    expect(ip_range.cidrs).to contain_exactly(IPAddr.new('1.1.1.1/32'), IPAddr.new('1.1.1.2/32'))
  end

  it 'converts an IP range with two common subnets to CIDR blocks' do
    ip_range = described_class.new('1.1.3.5', '1.1.11.50')
    expect(ip_range.cidrs).to contain_exactly(
      IPAddr.new('1.1.3.5/32'),
      IPAddr.new('1.1.3.6/31'),
      IPAddr.new('1.1.3.8/29'),
      IPAddr.new('1.1.3.16/28'),
      IPAddr.new('1.1.3.32/27'),
      IPAddr.new('1.1.3.64/26'),
      IPAddr.new('1.1.3.128/25'),
      IPAddr.new('1.1.4.0/22'),
      IPAddr.new('1.1.8.0/23'),
      IPAddr.new('1.1.10.0/24'),
      IPAddr.new('1.1.11.0/27'),
      IPAddr.new('1.1.11.32/28'),
      IPAddr.new('1.1.11.48/31'),
      IPAddr.new('1.1.11.50/32')
    )
  end

  it 'converts an IP range with no common subnets to CIDR blocks' do
    ip_range = described_class.new('192.168.0.1', '200.50.50.50')
    expect(ip_range.cidrs).to contain_exactly(
      IPAddr.new('192.168.0.1/32'),
      IPAddr.new('192.168.0.2/31'),
      IPAddr.new('192.168.0.4/30'),
      IPAddr.new('192.168.0.8/29'),
      IPAddr.new('192.168.0.16/28'),
      IPAddr.new('192.168.0.32/27'),
      IPAddr.new('192.168.0.64/26'),
      IPAddr.new('192.168.0.128/25'),
      IPAddr.new('192.168.1.0/24'),
      IPAddr.new('192.168.2.0/23'),
      IPAddr.new('192.168.4.0/22'),
      IPAddr.new('192.168.8.0/21'),
      IPAddr.new('192.168.16.0/20'),
      IPAddr.new('192.168.32.0/19'),
      IPAddr.new('192.168.64.0/18'),
      IPAddr.new('192.168.128.0/17'),
      IPAddr.new('192.169.0.0/16'),
      IPAddr.new('192.170.0.0/15'),
      IPAddr.new('192.172.0.0/14'),
      IPAddr.new('192.176.0.0/12'),
      IPAddr.new('192.192.0.0/10'),
      IPAddr.new('193.0.0.0/8'),
      IPAddr.new('194.0.0.0/7'),
      IPAddr.new('196.0.0.0/6'),
      IPAddr.new('200.0.0.0/11'),
      IPAddr.new('200.32.0.0/12'),
      IPAddr.new('200.48.0.0/15'),
      IPAddr.new('200.50.0.0/19'),
      IPAddr.new('200.50.32.0/20'),
      IPAddr.new('200.50.48.0/23'),
      IPAddr.new('200.50.50.0/27'),
      IPAddr.new('200.50.50.32/28'),
      IPAddr.new('200.50.50.48/31'),
      IPAddr.new('200.50.50.50/32')
    )
  end

  it 'returns one address when the range is a single IP' do
    ip_range = described_class.new('192.168.1.1', '192.168.1.1')
    expect(ip_range.cidrs).to contain_exactly(IPAddr.new('192.168.1.1/32'))
  end

  it 'converts an IPv6 range to CIDR blocks' do
    ip_range = described_class.new('2001:db8::', '2001:db8:0000:0000:0000:0000:0000:0001')
    expect(ip_range.cidrs).to contain_exactly(IPAddr.new('2001:db8::/127'))
  end

  it 'raises if given incompatible IP addresses' do
    expect { described_class.new('192.168.1.1', '2001:0db8:0000:0042:0000:8a2e:0370:7334') }.
      to raise_error('IP sequence cannot contain both IPv4 and IPv6!')
  end
end
| 33.58427 | 97 | 0.592171 |
e22667336575e34a88c690284be5c74e4dcbe2cf | 945 | class RemoveColumns < ActiveRecord::Migration
def change
remove_column :test_target_instances, :remark
remove_column :test_targets, :remark
remove_column :test_case_templates, :remark
remove_columns :test_case_test_case_template_param_instance_associations, :created_at, :updated_at
remove_columns :test_case_template_params, :remark, :created_at, :updated_at
remove_columns :test_case_template_param_instances, :created_at, :updated_at
remove_columns :test_cases, :remark, :test_case_status_type_id, :description, :created_at, :lowercase_name, :updated_at, :command, :test_type_id, :first_test_case_id, :test_execution_time, :created_by, :version, :next_test_case_id
remove_column :test_suites, :remark
remove_column :projects, :remark
remove_column :pictures, :remark
remove_column :members, :remark
remove_column :change_lists, :remark
drop_table :test_case_status_types
end
end
| 37.8 | 234 | 0.791534 |
# /*******************************************************************************************
# *
# *   raylib [audio] example - Sound loading and playing
# *
# *   NOTE: This example requires OpenAL Soft library installed
# *
# *   This example has been created using raylib 1.0 (www.raylib.com)
# *   raylib is licensed under an unmodified zlib/libpng license (View raylib.h for details)
# *
# *   Copyright (c) 2014 Ramon Santamaria (@raysan5)
# *
# ********************************************************************************************/
# Ported to ruby by Aldrin Martoq (@aldrinmartoq)

require 'raylib'

# Initialization: open an 800x450 window and the audio device, then load
# the two sound effects triggered from the main loop below.
screen_w = 800
screen_h = 450
RayWindow.init screen_w, screen_h, 'ruby raylib [audio] example - sound loading and playing'
RayAudioDevice.init # Initialize audio device
fx_wav = RaySound.load 'resources/sound.wav'
fx_ogg = RaySound.load 'resources/tanatana.ogg'
RayWindow.target_fps = 60
# Main game loop
until RayWindow.should_close? # Detect window close button or ESC key
  # Update
  fx_wav.play if RayKey.pressed? :space # Play WAV sound
  fx_ogg.play if RayKey.pressed? :enter # Play OGG sound
  # Draw
  RayDraw.drawing do
    RayDraw.clear_background :white
    RayDraw.text 'Press SPACE to PLAY the WAV sound!', 200, 180, 20, :lightgray
    RayDraw.text 'Press ENTER to PLAY the OGG sound!', 200, 220, 20, :lightgray
  end
end
# De-Initialization
fx_wav.unload # Unload sound data
fx_ogg.unload # Unload sound data
RayAudioDevice.close # Close audio device (music streaming is automatically stopped)
RayWindow.close # Close window and OpenGL context
| 32.56 | 95 | 0.641278 |
626a549d319516c343210042e1ef9b70deb0ca8f | 4,676 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
# NOTE(review): this file is auto-generated by Rails (see the header above);
# regenerate it via migrations rather than hand-editing — any comments added
# here will be lost on the next `db:migrate`.
ActiveRecord::Schema.define(version: 2022_01_09_175753) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "pgcrypto"
  enable_extension "plpgsql"

  create_table "customers", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
    t.string "name"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.datetime "registered_at"
  end

  # event_store_* tables follow the Rails Event Store schema (presumably).
  create_table "event_store_events", id: :serial, force: :cascade do |t|
    t.uuid "event_id", null: false
    t.string "event_type", null: false
    t.jsonb "metadata"
    t.jsonb "data", null: false
    t.datetime "created_at", null: false
    t.datetime "valid_at"
    t.index ["created_at"], name: "index_event_store_events_on_created_at"
    t.index ["event_id"], name: "index_event_store_events_on_event_id", unique: true
    t.index ["event_type"], name: "index_event_store_events_on_event_type"
    t.index ["valid_at"], name: "index_event_store_events_on_valid_at"
  end

  create_table "event_store_events_in_streams", id: :serial, force: :cascade do |t|
    t.string "stream", null: false
    t.integer "position"
    t.uuid "event_id", null: false
    t.datetime "created_at", null: false
    t.index ["created_at"], name: "index_event_store_events_in_streams_on_created_at"
    t.index ["stream", "event_id"], name: "index_event_store_events_in_streams_on_stream_and_event_id", unique: true
    t.index ["stream", "position"], name: "index_event_store_events_in_streams_on_stream_and_position", unique: true
  end

  create_table "invoice_items", force: :cascade do |t|
    t.bigint "invoice_id"
    t.string "name"
    t.decimal "unit_price", precision: 8, scale: 2
    t.decimal "vat_rate", precision: 4, scale: 1
    t.integer "quantity"
    t.decimal "value", precision: 8, scale: 2
    t.index ["invoice_id"], name: "index_invoice_items_on_invoice_id"
  end

  create_table "invoices", force: :cascade do |t|
    t.string "order_uid", null: false
    t.string "number"
    t.string "tax_id_number"
    t.string "address_line_1"
    t.string "address_line_2"
    t.string "address_line_3"
    t.string "address_line_4"
    t.boolean "address_present", default: false
    t.boolean "issued", default: false
    t.date "issue_date"
    t.date "disposal_date"
    t.date "payment_date"
    t.decimal "total_value", precision: 8, scale: 2
  end

  create_table "invoices_orders", force: :cascade do |t|
    t.uuid "uid", null: false
    t.boolean "submitted", default: false
  end

  create_table "order_lines", force: :cascade do |t|
    t.uuid "order_uid", null: false
    t.string "product_name"
    t.integer "quantity"
    t.decimal "price", precision: 8, scale: 2
    t.uuid "product_id"
  end

  create_table "orders", force: :cascade do |t|
    t.uuid "uid", null: false
    t.string "number"
    t.string "customer"
    t.string "state"
    t.decimal "percentage_discount", precision: 8, scale: 2
    t.decimal "total_value", precision: 8, scale: 2
    t.decimal "discounted_value", precision: 8, scale: 2
  end

  create_table "orders_customers", force: :cascade do |t|
    t.uuid "uid", null: false
    t.string "name"
  end

  create_table "orders_products", force: :cascade do |t|
    t.uuid "uid", null: false
    t.string "name"
    t.decimal "price", precision: 8, scale: 2
  end

  create_table "products", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
    t.string "name"
    t.decimal "price", precision: 8, scale: 2
    t.integer "stock_level"
    t.datetime "registered_at"
    t.string "vat_rate_code"
  end

  create_table "shipments", force: :cascade do |t|
    t.string "order_uid", null: false
    t.string "address_line_1"
    t.string "address_line_2"
    t.string "address_line_3"
    t.string "address_line_4"
  end

  create_table "shipments_orders", force: :cascade do |t|
    t.uuid "uid", null: false
    t.boolean "submitted", default: false
  end
end
| 35.694656 | 116 | 0.704448 |
# Monkey-patch for Riddle (Sphinx client): pin the search/excerpt protocol
# versions and rebuild the excerpts request for that protocol revision.
Riddle::Client::Versions[:search]  = 0x117
Riddle::Client::Versions[:excerpt] = 0x102

class Riddle::Client
  private

  # Generation of the message to send to Sphinx for an excerpts request.
  # Field order below is the Sphinx excerpt wire format — do not reorder.
  def excerpts_message(options)
    message = Message.new
    message.append [0, excerpt_flags(options)].pack('N2') # 0 = mode
    message.append_string options[:index]
    message.append_string options[:words]
    # options
    message.append_string options[:before_match]
    message.append_string options[:after_match]
    message.append_string options[:chunk_separator]
    message.append_ints options[:limit], options[:around]
    message.append_ints options[:limit_passages], options[:limit_words]
    message.append_ints options[:start_passage_id]
    message.append_string options[:html_strip_mode]
    message.append_array options[:docs]
    message.to_s
  end
end
33ebc7ba695f6941fb893e65e287de68f9d2f09b | 940 | require "gfa/record"
# Minimal GFA (Graphical Fragment Assembly) container: parses GFA lines
# into GFA::Record objects grouped by record type.
class GFA
  # Class-level
  MIN_VERSION = "1.0"
  MAX_VERSION = "1.0"

  # Reads a GFA file line by line into a new GFA object.
  def self.load(file)
    gfa = GFA.new
    fh = File.open(file, "r")
    fh.each { |ln| gfa << ln }
    fh.close
    gfa
  end

  def self.supported_version?(v)
    v.to_f >= MIN_VERSION.to_f and v.to_f <= MAX_VERSION.to_f
  end

  # Instance-level
  # Appends a record (or a raw GFA line, which is parsed first) to the
  # per-type collection; a Header record's VN field sets the GFA version.
  # NOTE(review): @records and the #gfa_version reader appear to be defined
  # elsewhere (presumably when this class is first opened) — confirm.
  def <<(obj)
    obj = parse_line(obj) unless obj.is_a? GFA::Record
    return if obj.nil? or obj.empty?
    @records[obj.type] << obj
    if obj.type==:Header and not obj.fields[:VN].nil?
      set_gfa_version(obj.fields[:VN].value)
    end
  end

  def set_gfa_version(v)
    # NOTE(review): @gfa_version is assigned before validation, so it stays
    # set even when the version is rejected — confirm that is intended.
    @gfa_version = v
    raise "GFA version currently unsupported: #{v}." unless
      GFA::supported_version? gfa_version
  end

  private

  # Splits a tab-separated GFA line into a typed record; blank lines -> nil.
  def parse_line(ln)
    ln.chomp!
    return nil if ln =~ /^\s*$/
    cols = ln.split("\t")
    GFA::Record.code_class(cols.shift).new(*cols)
  end
end
| 20.434783 | 61 | 0.605319 |
# Gem packaging manifest for the `news` Rails engine.
# Make the engine's lib/ directory loadable before requiring the version file.
$:.push File.expand_path("../lib", __FILE__)

# Maintain your gem's version:
require "news/version"

# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name        = "news"
  s.version     = News::VERSION
  s.authors     = ["Seb Ashton"]
  s.email       = ["[email protected]"]
  s.homepage    = "http://www.madebymade.co.uk/"
  s.summary     = "News engine."
  s.description = "Rails engine that provides the functionality required to add news to a site"

  s.files = Dir["{app,config,db,lib}/**/*"] + ["LICENSE", "Rakefile", "README.md"]
  s.test_files = Dir["test/**/*"]

  s.add_dependency "rails", "~> 3.2.11"
  s.add_dependency "paperclip", "~> 3.0"
  s.add_dependency "stringex", "~> 1.5.1"
  s.add_dependency "kaminari"
end
| 32 | 95 | 0.638021 |
module Support
  # Seeds and manages the MongoDB fixture data used by these specs.
  module MongoSeed
    # Creates the category/post/comment/entry/company/address graph the
    # specs query against.  Creation order is preserved from the original;
    # return values are only kept in locals when referenced again later
    # (fix: dropped the 13 never-read local assignments the original made).
    def seed_db
      category1 = Mongoid::Category.create(:name => 'first')
      category2 = Mongoid::Category.create(:name => 'second')

      post1  = category1.posts.create(:name => 'first')
      post1a = category1.posts.create(:name => 'like first')
      post2  = category2.posts.create(:name => 'second')

      post1.users << Mongoid::User.create(:name => 'first')
      post1.users << Mongoid::User.create(:name => 'another')
      post2.users << Mongoid::User.create(:name => 'second')

      post1.comments.create(:name => 'first')
      post1.comments.create(:name => 'first2')
      post1.comments.create(:name => 'first3')
      post1.comments.create(:name => 'second')
      post1a.comments.create(:name => 'like first 1')
      post1a.comments.create(:name => 'like first 2')
      post2.comments.create(:name => 'third')
      post2.comments.create(:name => 'fourth')
      post2.comments.create(:name => 'fourth')

      category1.entries.create(:name => 'first')
      category1.entries.create(:name => 'second')

      company1 = Mongoid::Company.create(:name => 'first')
      company2 = Mongoid::Company.create(:name => 'second')
      Mongoid::Address.create(:name => 'first', :company => company1)
      Mongoid::Address.create(:name => 'second', :company => company2)
    end

    # Configures the Mongoid connection for the supported major versions.
    def setup_db
      # Fix: the original character class /\A2\.[4|5]/ also matched a
      # literal '|'; [45] expresses the intended "2.4 or 2.5".
      if Mongoid::VERSION =~ /\A2\.[45]/
        Mongoid.configure do |config|
          config.master = Mongo::Connection.new.db("bullet")
        end
      elsif Mongoid::VERSION =~ /\A3/
        Mongoid.configure do |config|
          config.connect_to("bullet")
        end
      end
    end

    # Drops all seeded collections between examples.
    def teardown_db
      Mongoid.purge!
    end

    extend self
  end
end
4a11d8cc106ea7df20f3f924ab48d810676792eb | 10,345 | #
# these tests taken from the HTML5 sanitization project and modified for use with Loofah
# see the original here: http://code.google.com/p/html5lib/source/browse/ruby/test/test_sanitizer.rb
#
# license text at the bottom of this file
#
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'helper'))
require 'json'
class Html5TestSanitizer < Test::Unit::TestCase
include Loofah
def sanitize_xhtml stream
Loofah.fragment(stream).scrub!(:escape).to_xhtml
end
def sanitize_html stream
Loofah.fragment(stream).scrub!(:escape).to_html
end
def check_sanitization(input, htmloutput, xhtmloutput, rexmloutput)
## libxml uses double-quotes, so let's swappo-boppo our quotes before comparing.
sane = sanitize_html(input).gsub('"',"'")
## HTML5's parsers are shit. there's so much inconsistency with what has closing tags, etc, that
## it would require a lot of manual hacking to make the tests match libxml's output.
## instead, I'm taking the shotgun approach, and trying to match any of the described outputs.
assert((htmloutput == sane) || (rexmloutput == sane) || (xhtmloutput == sane), input)
end
HTML5::WhiteList::ALLOWED_ELEMENTS.each do |tag_name|
define_method "test_should_allow_#{tag_name}_tag" do
input = "<#{tag_name} title='1'>foo <bad>bar</bad> baz</#{tag_name}>"
htmloutput = "<#{tag_name.downcase} title='1'>foo <bad>bar</bad> baz</#{tag_name.downcase}>"
xhtmloutput = "<#{tag_name} title='1'>foo <bad>bar</bad> baz</#{tag_name}>"
rexmloutput = xhtmloutput
if %w[caption colgroup optgroup option tbody td tfoot th thead tr].include?(tag_name)
htmloutput = "foo <bad>bar</bad> baz"
xhtmloutput = htmloutput
elsif tag_name == 'col'
htmloutput = "<col title='1'>foo <bad>bar</bad> baz"
xhtmloutput = htmloutput
rexmloutput = "<col title='1' />"
elsif tag_name == 'table'
htmloutput = "foo <bad>bar</bad>baz<table title='1'> </table>"
xhtmloutput = htmloutput
elsif tag_name == 'image'
htmloutput = "<img title='1'/>foo <bad>bar</bad> baz"
xhtmloutput = htmloutput
rexmloutput = "<image title='1'>foo <bad>bar</bad> baz</image>"
elsif HTML5::WhiteList::VOID_ELEMENTS.include?(tag_name)
htmloutput = "<#{tag_name} title='1'>foo <bad>bar</bad> baz"
xhtmloutput = htmloutput
htmloutput += '<br/>' if tag_name == 'br'
rexmloutput = "<#{tag_name} title='1' />"
end
check_sanitization(input, htmloutput, xhtmloutput, rexmloutput)
end
end
##
## libxml2 downcases elements, so this is moot.
##
# HTML5::WhiteList::ALLOWED_ELEMENTS.each do |tag_name|
# define_method "test_should_forbid_#{tag_name.upcase}_tag" do
# input = "<#{tag_name.upcase} title='1'>foo <bad>bar</bad> baz</#{tag_name.upcase}>"
# output = "<#{tag_name.upcase} title=\"1\">foo <bad>bar</bad> baz</#{tag_name.upcase}>"
# check_sanitization(input, output, output, output)
# end
# end
HTML5::WhiteList::ALLOWED_ATTRIBUTES.each do |attribute_name|
next if attribute_name == 'style'
define_method "test_should_allow_#{attribute_name}_attribute" do
input = "<p #{attribute_name}='foo'>foo <bad>bar</bad> baz</p>"
if %w[checked compact disabled ismap multiple nohref noshade nowrap readonly selected].include?(attribute_name)
output = "<p #{attribute_name}>foo <bad>bar</bad> baz</p>"
htmloutput = "<p #{attribute_name.downcase}>foo <bad>bar</bad> baz</p>"
else
output = "<p #{attribute_name}='foo'>foo <bad>bar</bad> baz</p>"
htmloutput = "<p #{attribute_name.downcase}='foo'>foo <bad>bar</bad> baz</p>"
end
check_sanitization(input, htmloutput, output, output)
end
end
##
## libxml2 downcases attributes, so this is moot.
##
# HTML5::WhiteList::ALLOWED_ATTRIBUTES.each do |attribute_name|
# define_method "test_should_forbid_#{attribute_name.upcase}_attribute" do
# input = "<p #{attribute_name.upcase}='display: none;'>foo <bad>bar</bad> baz</p>"
# output = "<p>foo <bad>bar</bad> baz</p>"
# check_sanitization(input, output, output, output)
# end
# end
HTML5::WhiteList::ALLOWED_PROTOCOLS.each do |protocol|
define_method "test_should_allow_#{protocol}_uris" do
input = %(<a href="#{protocol}">foo</a>)
output = "<a href='#{protocol}'>foo</a>"
check_sanitization(input, output, output, output)
end
end
HTML5::WhiteList::ALLOWED_PROTOCOLS.each do |protocol|
define_method "test_should_allow_uppercase_#{protocol}_uris" do
input = %(<a href="#{protocol.upcase}">foo</a>)
output = "<a href='#{protocol.upcase}'>foo</a>"
check_sanitization(input, output, output, output)
end
end
HTML5::WhiteList::SVG_ALLOW_LOCAL_HREF.each do |tag_name|
next unless HTML5::WhiteList::ALLOWED_ELEMENTS.include?(tag_name)
define_method "test_#{tag_name}_should_allow_local_href" do
input = %(<#{tag_name} xlink:href="#foo"/>)
output = "<#{tag_name.downcase} xlink:href='#foo'></#{tag_name.downcase}>"
xhtmloutput = "<#{tag_name} xlink:href='#foo'></#{tag_name}>"
check_sanitization(input, output, xhtmloutput, xhtmloutput)
end
define_method "test_#{tag_name}_should_allow_local_href_with_newline" do
input = %(<#{tag_name} xlink:href="\n#foo"/>)
output = "<#{tag_name.downcase} xlink:href='\n#foo'></#{tag_name.downcase}>"
xhtmloutput = "<#{tag_name} xlink:href='\n#foo'></#{tag_name}>"
check_sanitization(input, output, xhtmloutput, xhtmloutput)
end
define_method "test_#{tag_name}_should_forbid_nonlocal_href" do
input = %(<#{tag_name} xlink:href="http://bad.com/foo"/>)
output = "<#{tag_name.downcase}></#{tag_name.downcase}>"
xhtmloutput = "<#{tag_name}></#{tag_name}>"
check_sanitization(input, output, xhtmloutput, xhtmloutput)
end
define_method "test_#{tag_name}_should_forbid_nonlocal_href_with_newline" do
input = %(<#{tag_name} xlink:href="\nhttp://bad.com/foo"/>)
output = "<#{tag_name.downcase}></#{tag_name.downcase}>"
xhtmloutput = "<#{tag_name}></#{tag_name}>"
check_sanitization(input, output, xhtmloutput, xhtmloutput)
end
end
# Characters outside the Basic Multilingual Plane (astral-plane code points)
# must survive sanitization; the expected strings spell the same characters
# as raw UTF-8 byte escapes.
def test_should_handle_astral_plane_characters
input = "<p>𝒵 𝔸</p>"
output = "<p>\360\235\222\265 \360\235\224\270</p>"
check_sanitization(input, output, output, output)
# Same code point nested inside an SVG <tspan>.
input = "<p><tspan>\360\235\224\270</tspan> a</p>"
output = "<p><tspan>\360\235\224\270</tspan> a</p>"
check_sanitization(input, output, output, output)
end
# This affects only NS4. Is it worth fixing?
# def test_javascript_includes
# input = %(<div size="&{alert('XSS')}">foo</div>)
# output = "<div>foo</div>"
# check_sanitization(input, output, output, output)
# end
##
## these tests primarily test the parser logic, not the sanitizer
## logic. i call bullshit. we're not writing a test suite for
## libxml2 here, so let's rely on the unit tests above to take care
## of our valid elements and attributes.
##
# Dir[File.join(File.dirname(__FILE__), 'testdata', '*.*')].each do |filename|
# JSON::parse(open(filename).read).each do |test|
# define_method "test_#{test['name']}" do
# check_sanitization(
# test['input'],
# test['output'],
# test['xhtml'] || test['output'],
# test['rexml'] || test['output']
# )
# end
# end
# end
## added because we don't have any coverage above on SVG_ATTR_VAL_ALLOWS_REF
# For every SVG attribute whose value may contain a url(...) reference:
# local "url(#id)" references are kept, absolute URLs inside url(...) are
# stripped, and a leading newline must not defeat either rule.
HTML5::WhiteList::SVG_ATTR_VAL_ALLOWS_REF.each do |attr_name|
  define_method "test_should_allow_uri_refs_in_svg_attribute_#{attr_name}" do
    input = "<rect fill='url(#foo)' />"
    output = "<rect fill='url(#foo)'></rect>"
    check_sanitization(input, output, output, output)
  end

  define_method "test_absolute_uri_refs_in_svg_attribute_#{attr_name}" do
    input = "<rect fill='url(http://bad.com/) #fff' />"
    output = "<rect fill=' #fff'></rect>"
    check_sanitization(input, output, output, output)
  end

  # BUG FIX: the two tests below previously built `input`/`rexml` and then
  # returned without calling check_sanitization, so they always passed while
  # asserting nothing. They now assert the expected output for all formats.
  define_method "test_uri_ref_with_space_in_svg_attribute_#{attr_name}" do
    input = "<rect fill='url(\n#foo)' />"
    rexml = "<rect fill='url(\n#foo)'></rect>"
    check_sanitization(input, rexml, rexml, rexml)
  end

  define_method "test_absolute_uri_ref_with_space_in_svg_attribute_#{attr_name}" do
    input = "<rect fill=\"url(\nhttp://bad.com/)\" />"
    rexml = "<rect fill=' '></rect>"
    check_sanitization(input, rexml, rexml, rexml)
  end
end
end
# <html5_license>
#
# Copyright (c) 2006-2008 The Authors
#
# Contributors:
# James Graham - [email protected]
# Anne van Kesteren - [email protected]
# Lachlan Hunt - [email protected]
# Matt McDonald - [email protected]
# Sam Ruby - [email protected]
# Ian Hickson (Google) - [email protected]
# Thomas Broyer - [email protected]
# Jacques Distler - [email protected]
# Henri Sivonen - [email protected]
# The Mozilla Foundation (contributions from Henri Sivonen since 2008)
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# </html5_license>
| 42.052846 | 118 | 0.671629 |
21dd3d044473deaaeaeceb7b8f14454d17c6dc5f | 1,365 | #!/usr/bin/env ruby
require 'nokogiri'
require 'open-uri'
require 'json'
# Index page listing every ALEC model bill; individual bill URLs are scraped
# from it below. Results are written to FILENAME (first CLI arg or default).
page = Nokogiri::HTML(open('http://www.alec.org/model-legislation/'))
FILENAME = ARGV.first || "alec-model-bills.json"
bill_urls = []
bills = []
# helper functions
# Fetches one model-bill detail page and extracts its metadata.
#
# url - String URL of the bill page.
#
# Returns a Hash with :title, :source_url, :content, :text, :html and :tags.
def get_bill_info(url)
  bill_page = Nokogiri::HTML(open(url))
  bill_title = bill_page.css('#title').text
  puts "#{bill_title}: #{url}"
  # Drop the first paragraph (the bill title) and the last one (the tag
  # line). Guard against pages with fewer than three paragraphs, where the
  # [1..-2] slice would be nil and `.map` would raise.
  content_paragraphs = bill_page.css('#main p').to_a
  text = (content_paragraphs[1..-2] || []).map(&:text).join(' ')
  # Extract the keyword tags, if the page has a "Keyword Tags:" paragraph.
  # BUG FIX: this used `gsub!`, which returns nil when nothing is replaced;
  # the paragraph is selected case-insensitively above, so any casing other
  # than the exact "Keyword Tags:" crashed on `.split`. Non-bang `gsub` with
  # a case-insensitive pattern keeps the two checks in agreement.
  tags = []
  tags_paragraph = bill_page.css('#content p').select { |p| p.text.downcase.include?('keyword tags') }
  if tags_paragraph.count > 0
    tags = tags_paragraph[0].text.gsub(/keyword tags:/i, '').split(',').map(&:strip)
  end
  {
    title: bill_title,
    source_url: url,
    content: bill_page.css('#main')[0].text,
    text: text,
    html: bill_page.css('#main')[0].inner_html,
    tags: tags
  }
end
# Serialises +results+ as JSON and writes it to FILENAME, replacing any
# previous contents.
def save_results(results)
  File.write(FILENAME, results.to_json)
end
# process
# Collect the detail-page URL of every model bill listed on the index page.
page.css('#features .model-legislation').each do |bill|
  title = bill.css('h3').text.strip # NOTE(review): `title` is never used here
  url = bill.css('a')[0]['href']
  bill_urls << url
end
# Scrape each bill, re-saving the accumulated results after every page so an
# interrupted run still leaves a usable JSON file on disk.
bill_urls.each do |url|
  bills << get_bill_info(url)
  save_results(bills)
end
| 23.534483 | 110 | 0.663736 |
183f6e99517e1b1bf1d75a8799c9b1d3156aae45 | 776 | cask 'axure-rp' do
version '9.0.0.3704'
sha256 '1b175588b5248db1abfb644b64e5165ac511055edb79be5c1d294b92ec5008b7'
# axure.cachefly.net/ was verified as official when first introduced to the cask
url 'https://axure.cachefly.net/AxureRP-Setup.dmg'
appcast 'https://www.axure.com/release-history'
name 'Axure RP'
homepage 'https://www.axure.com/'
app "Axure RP #{version.major}.app"
zap trash: [
"~/Library/Preferences/com.axure.AxureRP#{version.major}.plist",
"~/Library/Saved Application State/com.axure.AxureRP#{version.major}.savedState",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.axure.axurerp#{version.major}.sfl*",
]
end
| 40.842105 | 165 | 0.710052 |
910e525c5cacb73462140c7c8178cbdfdcaa7f3c | 1,146 | module SunspotCell
module Indexer
def self.included(base)
base.class_eval do
def add_documents(documents)
documents_arr = Sunspot::Util.Array(documents)
docs_attach = []
docs_no_attach = []
documents_arr.each do |document|
if document.contains_attachment?
docs_attach << document
else
docs_no_attach << document
end
end
begin
if !docs_no_attach.empty?
@connection.add(docs_no_attach)
end
if !docs_attach.empty?
Sunspot::Util.Array(docs_attach).each do |document|
document.add(@connection)
end
end
rescue Exception => e
@batch = nil
raise e
end
end
def document_for(model)
Sunspot::RichDocument.new(
:id => Sunspot::Adapters::InstanceAdapter.adapt(model).index_id,
:type => Sunspot::Util.superclasses_for(model.class).map { |clazz| clazz.name }
)
end
end
end
end
end
| 24.913043 | 91 | 0.526178 |
6aedce2b48bad5bc961385288dc97ff2da9abd0e | 3,516 | class Kapacitor < Formula
desc "Open source time series data processor"
homepage "https://github.com/influxdata/kapacitor"
url "https://github.com/influxdata/kapacitor.git",
:tag => "v1.5.1",
:revision => "89828ffff6cf5cd4cb2b34bf883e134395f734de"
head "https://github.com/influxdata/kapacitor.git"
bottle do
cellar :any_skip_relocation
sha256 "1a4e79d992f1f1ad814d966bd0e92f1406644e47382b421e025c17729103cd77" => :mojave
sha256 "0402f7ec1ec87fc16ee4c1c7ed963e27fbb25c2e0d8704a933fd21a1036bb113" => :high_sierra
sha256 "ece9de540c79c9b311e99b245855d2233ab685668e6ee65c308eaeabc97289c3" => :sierra
sha256 "89991463ddc94584786e4190811d3b624bd6ce559a55264df1a8782aa29c7ea5" => :el_capitan
sha256 "3f4719e55c19889403e7221b32b57c7a9b2937c5e34c0cc815007b0209afbfbd" => :x86_64_linux
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
kapacitor_path = buildpath/"src/github.com/influxdata/kapacitor"
kapacitor_path.install Dir["*"]
revision = Utils.popen_read("git rev-parse HEAD").strip
version = Utils.popen_read("git describe --tags").strip
cd kapacitor_path do
system "go", "install",
"-ldflags", "-X main.version=#{version} -X main.commit=#{revision}",
"./cmd/..."
end
inreplace kapacitor_path/"etc/kapacitor/kapacitor.conf" do |s|
s.gsub! "/var/lib/kapacitor", "#{var}/kapacitor"
s.gsub! "/var/log/kapacitor", "#{var}/log"
end
bin.install "bin/kapacitord"
bin.install "bin/kapacitor"
etc.install kapacitor_path/"etc/kapacitor/kapacitor.conf" => "kapacitor.conf"
(var/"kapacitor/replay").mkpath
(var/"kapacitor/tasks").mkpath
end
plist_options :manual => "kapacitord -config #{HOMEBREW_PREFIX}/etc/kapacitor.conf"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/kapacitord</string>
<string>-config</string>
<string>#{HOMEBREW_PREFIX}/etc/kapacitor.conf</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/kapacitor.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/kapacitor.log</string>
</dict>
</plist>
EOS
end
test do
(testpath/"config.toml").write shell_output("#{bin}/kapacitord config")
inreplace testpath/"config.toml" do |s|
s.gsub! /disable-subscriptions = false/, "disable-subscriptions = true"
s.gsub! %r{data_dir = "/.*/.kapacitor"}, "data_dir = \"#{testpath}/kapacitor\""
s.gsub! %r{/.*/.kapacitor/replay}, "#{testpath}/kapacitor/replay"
s.gsub! %r{/.*/.kapacitor/tasks}, "#{testpath}/kapacitor/tasks"
s.gsub! %r{/.*/.kapacitor/kapacitor.db}, "#{testpath}/kapacitor/kapacitor.db"
end
begin
pid = fork do
exec "#{bin}/kapacitord -config #{testpath}/config.toml"
end
sleep 2
shell_output("#{bin}/kapacitor list tasks")
ensure
Process.kill("SIGINT", pid)
Process.wait(pid)
end
end
end
| 34.470588 | 106 | 0.646758 |
ff915b79bdb6342865ce4a353cdb281776b5e708 | 424 | require 'spec_helper'
require 'challenge_code'
include Challenge
# Fixture inputs in "value,low,high" CSV form; presumably checked against a
# range by Challenge#challenge -- TODO confirm against challenge_code.
data1 = '86,2,3'
data2 = '125,1,2'
describe 'Stuff' do
  it 'should not throw an error' do
    expect{ challenge(data1) }.not_to raise_error
    expect{ challenge(data2) }.not_to raise_error
  end
  # NOTE(review): "shoud" typos below are in the example-name strings, which
  # show up in spec output; renaming them is cosmetic but safe.
  it 'shoud work 1' do
    expect( challenge(data1)).to be true
  end
  # NOTE(review): `focus: true` is committed -- with `filter_run :focus`
  # configured, ONLY this example runs. Remove before merging.
  it 'shoud work 2', focus: true do
    expect( challenge(data2)).to be false
  end
end
| 20.190476 | 49 | 0.693396 |
26d7302ff24c1b6c53fec28de35b6a8ddb90e480 | 3,434 | shared_examples_for "#autoload_all syntactic sugar" do
before :each do
@file_list = [
"#{@base_dir}/module1/a.rb",
"#{@base_dir}/module2/longer_name.rb",
"#{@base_dir}/module2/module3/b.rb"
]
end
it "accepts files with and without extensions" do
should_not be_loaded("Autoloaded::Module2::LongerName")
send(@method, @base_dir + '/module2/longer_name', :base_dir => @autoload_base_dir).should be_true
should be_loaded("Autoloaded::Module2::LongerName")
should_not be_loaded("Autoloaded::Module1::A")
send(@method, @base_dir + '/module1/a.rb', :base_dir => @autoload_base_dir).should be_true
should be_loaded("Autoloaded::Module1::A")
end
it "accepts lists of files" do
should_not be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
send(@method, @file_list, :base_dir => @autoload_base_dir).should be_true
should be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
end
it "is totally cool with a splatted list of arguments" do
should_not be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
send(@method, *(@file_list << {:base_dir => @autoload_base_dir})).should be_true
should be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
end
it "will load all .rb files under a directory without a trailing slash" do
should_not be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
send(@method, @base_dir, :base_dir => @autoload_base_dir).should be_true
should be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
end
it "will load all .rb files under a directory with a trailing slash" do
should_not be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
send(@method, "#{@base_dir}/", :base_dir => @autoload_base_dir).should be_true
should be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
end
it "will load all files specified by a glob" do
should_not be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
send(@method, "#{@base_dir}/**/*.rb", :base_dir => @autoload_base_dir).should be_true
should be_loaded("Autoloaded::Module1::A", "Autoloaded::Module2::LongerName",
"Autoloaded::Module2::Module3::B")
end
it "returns false if an empty input was given" do
send(@method, [])
send(@method, []).should be_false
send(@method).should be_false
end
it "raises LoadError if no file or directory found" do
lambda {send(@method, "not_found")}.should raise_error(LoadError)
end
it "raises LoadError if :base_dir doesn't exist" do
lambda {send(@method, @base_dir, :base_dir => @base_dir + "/non_existing_dir")}.
should raise_exception(LoadError)
end
end
| 45.786667 | 102 | 0.639779 |
bb0a1562a3d6a69a071b2a51ffcabfc399016a66 | 118 | require_relative "holidays/version"
# Namespace for the Holidays gem (standard `bundle gem` scaffold).
module Holidays
  # Base error class; rescue this to catch any gem-specific failure.
  class Error < StandardError; end
  # NOTE(review): empty instance-level `initialize` on a module -- leftover
  # scaffold; it only has an effect if this module is included into a class.
  # Confirm nothing includes Holidays before deleting it.
  def initialize
  end
end
| 13.111111 | 35 | 0.762712 |
bbc6e8f6b66cbfdab21019ab823fb5ea761b011d | 2,838 | class ZncConfig
def initialize(config_path)
@config = parse_config(config_path)
end
def user_exists(username)
return true if @config.key?(username)
false
end
def auth_user(username, password)
return false unless user_exists(username)
hash = @config[username]['password']['hash']
salt = @config[username]['password']['salt']
authed = Digest::SHA256.hexdigest(password + salt)
authed == hash
end
private
def parse_config(config_path)
users = {}
current_user = nil
current_network = nil
current_channel = nil
current_password = nil
cur_obj = nil
File.open(config_path).each do |line|
words = line.split(' ')
if words[0] == '<User'
current_user = {
'networks' => [],
'modules' => [],
'channels' => [],
'username' => words[1].chomp('>').downcase
}
cur_obj = current_user
elsif words[0] == '</User>'
unless current_user.nil?
users[current_user['username']] = current_user
current_user = cur_obj
cur_obj = nil
end
elsif words[0] == '<Network'
unless current_user.nil?
current_network = {
'name' => words[1].chomp('>'),
'channels' => []
}
cur_obj = current_network
end
elsif words[0] == '</Network>'
unless current_user.nil? || current_network.nil?
current_user['networks'].push current_network
current_network = cur_obj
cur_obj = nil
end
elsif words[0] == '<Chan'
unless current_network.nil?
current_channel = {
'name' => words[1].chomp('>')
}
cur_obj = current_channel
end
elsif words[0] == '</Chan>'
unless current_channel.nil? || current_network.nil?
current_network['channels'].push current_channel
current_channel = nil
cur_obj = current_network
end
elsif words[0] == '<Pass'
unless current_user.nil?
current_password = {}
cur_obj = current_password
end
elsif words[0] == '</Pass>'
unless current_user.nil? || current_password.nil?
current_user['password'] = current_password
current_password = cur_obj
cur_obj = nil
end
elsif words[0] != '//' && words[0] != '' && !words[0].nil?
next if cur_obj.nil?
key = words[0].downcase
value = words[2, 500].join(' ')
if cur_obj.key?(key) && !cur_obj[key].is_a?(Array)
cur_obj[key] = [cur_obj[key]]
end
if cur_obj.key?(key) && cur_obj[key].is_a?(Array)
cur_obj[key].push value
else
cur_obj[key] = value
end
end
end
users
end
end
| 26.523364 | 64 | 0.551092 |
1ca31f4f61c3a1af849db605020e0330da9d45f3 | 2,258 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::RecoveryServicesBackup::Mgmt::V2019_05_13
module Models
#
# Encapsulates information regarding data directory
#
class SQLDataDirectoryMapping
include MsRestAzure
# @return [SQLDataDirectoryType] Type of data directory mapping. Possible
# values include: 'Invalid', 'Data', 'Log'
attr_accessor :mapping_type
# @return [String] Restore source logical name path
attr_accessor :source_logical_name
# @return [String] Restore source path
attr_accessor :source_path
# @return [String] Target path
attr_accessor :target_path
#
# Mapper for SQLDataDirectoryMapping class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'SQLDataDirectoryMapping',
type: {
name: 'Composite',
class_name: 'SQLDataDirectoryMapping',
model_properties: {
mapping_type: {
client_side_validation: true,
required: false,
serialized_name: 'mappingType',
type: {
name: 'String'
}
},
source_logical_name: {
client_side_validation: true,
required: false,
serialized_name: 'sourceLogicalName',
type: {
name: 'String'
}
},
source_path: {
client_side_validation: true,
required: false,
serialized_name: 'sourcePath',
type: {
name: 'String'
}
},
target_path: {
client_side_validation: true,
required: false,
serialized_name: 'targetPath',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.225 | 79 | 0.522586 |
6aff7531210bbaee6b3b17db8b96b1b2eb379140 | 1,834 | # == Schema Information
#
# Table name: audio_segments
#
# id :bigint not null, primary key
# duration :integer
# duration_ms :integer
# percentile :float
# relative_segments :jsonb
# segments :jsonb
# silent_duration :integer
# silent_durrelative_silent_durationation :integer
# timestamp_from :integer
# timestamp_median :integer
# timestamp_to :integer
# verse_key :string
# verse_number :integer
# created_at :datetime not null
# updated_at :datetime not null
# audio_file_id :bigint
# audio_recitation_id :bigint
# chapter_id :bigint
# verse_id :bigint
#
# Indexes
#
# index_audio_segments_on_audio_file_id (audio_file_id)
# index_audio_segments_on_audio_file_id_and_verse_number (audio_file_id,verse_number) UNIQUE
# index_audio_segments_on_audio_recitation_id (audio_recitation_id)
# index_audio_segments_on_chapter_id (chapter_id)
# index_audio_segments_on_verse_id (verse_id)
# index_audio_segments_on_verse_number (verse_number)
# index_on_audio_segments_median_time (audio_recitation_id,chapter_id,verse_id,timestamp_median)
#
require 'rails_helper'
# Model spec scaffold for Audio::Segment -- no examples written yet; the
# `pending` call keeps the file visible in spec output until filled in.
RSpec.describe Audio::Segment, type: :model do
  pending "add some examples to (or delete) #{__FILE__}"
end
| 45.85 | 117 | 0.522901 |
f75e5374846a6b5893cd798a168e2d94f1190224 | 987 | cask 'folx' do
version '5.1.13647'
sha256 'c15ff8719b31a98cfe8566f5bc875aa46278da5dd18fc7c3e26445ef864db150'
url "http://www.eltima.com/download/folx-update/downloader_mac_#{version}.dmg"
appcast 'http://mac.eltima.com/download/folx-updater/folx.xml',
checkpoint: '3e558df977be7022303f45aae20be26c652c4fcc9721bc4b1d49b80916e29996'
name 'Folx'
homepage 'http://mac.eltima.com/download-manager.html'
auto_updates true
app 'Folx.app'
zap delete: [
'~/Library/Application Support/Eltima Software/Folx3',
'~/Library/Caches/com.eltima.Folx3',
'~/Library/Internet Plug-Ins/Folx3Plugin.plugin',
'~/Library/Logs/Folx.log',
'~/Library/Logs/Folx3.log',
'~/Library/Preferences/com.eltima.Folx3.plist',
'~/Library/Preferences/com.eltima.FolxAgent.plist',
'~/Library/Saved Application State/com.eltima.Folx3.savedState',
]
end
| 37.961538 | 88 | 0.653495 |
6227d10815091b19dd44e14f2fbb454d5dc932e2 | 185 | module TokenAuthenticateMe
module Generators
class ControllersGenerator < ::Rails::Generators::Base
source_root File.expand_path('../templates', __FILE__)
end
end
end
| 23.125 | 60 | 0.740541 |
ff683ccff1502564822ac16afa78e7731feca505 | 334 | class Admin::FetchTrackInfosController < ApplicationController
def update
track_info = TrackInfo.find params[:id]
WikiDataJob.perform_later(track_info: track_info)
GoogleJob.perform_later(track_info: track_info)
LastfmJob.perform_later(track_info: track_info)
redirect_to track_info_path(track_info)
end
end
| 27.833333 | 62 | 0.796407 |
335adbc51210580eb62fb35a969cb99b78409a0c | 1,812 | class CategsController < ApplicationController
before_action :set_categ, only: [:show, :edit, :update, :destroy]
# GET /categs
# GET /categs.json
def index
@categs = Categ.all
end
# GET /categs/1
# GET /categs/1.json
def show
end
# GET /categs/new
def new
@categ = Categ.new
end
# GET /categs/1/edit
def edit
end
# POST /categs
# POST /categs.json
def create
@categ = Categ.new(categ_params)
respond_to do |format|
if @categ.save
format.html { redirect_to @categ, notice: 'Categ was successfully created.' }
format.json { render :show, status: :created, location: @categ }
else
format.html { render :new }
format.json { render json: @categ.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /categs/1
# PATCH/PUT /categs/1.json
def update
respond_to do |format|
if @categ.update(categ_params)
format.html { redirect_to @categ, notice: 'Categ was successfully updated.' }
format.json { render :show, status: :ok, location: @categ }
else
format.html { render :edit }
format.json { render json: @categ.errors, status: :unprocessable_entity }
end
end
end
# DELETE /categs/1
# DELETE /categs/1.json
def destroy
@categ.destroy
respond_to do |format|
format.html { redirect_to categs_url, notice: 'Categ was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_categ
@categ = Categ.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def categ_params
params.require(:categ).permit(:name)
end
end
| 24.16 | 89 | 0.645143 |
21dcf79a5e32c188780f890b015e72966b0979c4 | 4,047 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
# Verifies Cluster#create_backup against a mocked admin client: the call must
# return a pending Backup::Job whose reload! resolves to a Backup, accepting
# the source table either as a String id or as a Table object.
describe Google::Cloud::Bigtable::Cluster, :create_backup, :mock_bigtable do
  let(:instance_id) { "test-instance" }
  let(:cluster_id) { "test-cluster" }
  let :cluster_grpc do
    Google::Cloud::Bigtable::Admin::V2::Cluster.new(
      name: cluster_path(instance_id, cluster_id),
      serve_nodes: 3,
      location: location_path("us-east-1b"),
      default_storage_type: :SSD,
      state: :READY
    )
  end
  let(:cluster) { Google::Cloud::Bigtable::Cluster.from_grpc cluster_grpc, bigtable.service }
  let(:ops_name) { "operations/1234567890" }
  # Long-running-operation payloads: first pending, then done with the backup.
  let(:job_grpc) do
    operation_pending_grpc ops_name, "type.googleapis.com/google.bigtable.admin.v2.CreateBackupMetadata"
  end
  let :job_done_grpc do
    operation_done_grpc(
      ops_name,
      "type.googleapis.com/google.bigtable.admin.v2.CreateBackupMetadata",
      Google::Cloud::Bigtable::Admin::V2::CreateBackupMetadata.new,
      "type.googleapis.com/google.bigtable.admin.v2.Backup",
      backup_grpc
    )
  end
  let(:backup_id) { "test-backup" }
  let(:source_table_id) { "test-table-source" }
  let :source_table_grpc do
    Google::Cloud::Bigtable::Admin::V2::Table.new table_hash(name: table_path(instance_id, source_table_id))
  end
  let(:source_table) { Google::Cloud::Bigtable::Table.from_grpc source_table_grpc, bigtable.service }
  # Expiry 7 hours out; rounded so the mock's argument equality check passes.
  let(:expire_time) { Time.now.round(0) + 60 * 60 * 7 }
  let :backup_grpc do
    Google::Cloud::Bigtable::Admin::V2::Backup.new source_table: table_path(instance_id, source_table_id),
                                                   expire_time: expire_time
  end
  it "creates a backup with table as string ID" do
    mock = Minitest::Mock.new
    mock.expect :create_backup, operation_grpc(job_grpc, mock), [parent: cluster_path(instance_id, cluster_id), backup_id: backup_id, backup: backup_grpc]
    mock.expect :get_operation, operation_grpc(job_done_grpc, mock), [{name: ops_name}, Gapic::CallOptions]
    bigtable.service.mocked_tables = mock
    job = cluster.create_backup source_table_id, backup_id, expire_time
    _(job).must_be_kind_of Google::Cloud::Bigtable::Backup::Job
    _(job).wont_be :done?
    _(job).wont_be :error?
    _(job.error).must_be :nil?
    _(job.backup).must_be :nil?
    job.reload!
    backup = job.backup
    _(backup).wont_be :nil?
    _(backup).must_be_kind_of Google::Cloud::Bigtable::Backup
    mock.verify
  end
  it "creates a backup with table object" do
    mock = Minitest::Mock.new
    mock.expect :create_backup, operation_grpc(job_grpc, mock), [parent: cluster_path(instance_id, cluster_id), backup_id: backup_id, backup: backup_grpc]
    mock.expect :get_operation, operation_grpc(job_done_grpc, mock), [{name: ops_name}, Gapic::CallOptions]
    bigtable.service.mocked_tables = mock
    job = cluster.create_backup source_table, backup_id, expire_time
    _(job).must_be_kind_of Google::Cloud::Bigtable::Backup::Job
    _(job).wont_be :done?
    _(job).wont_be :error?
    _(job.error).must_be :nil?
    _(job.backup).must_be :nil?
    job.reload!
    backup = job.backup
    _(backup).wont_be :nil?
    _(backup).must_be_kind_of Google::Cloud::Bigtable::Backup
    mock.verify
  end
  # Wraps a longrunning proto in a Gapic::Operation backed by the given mock.
  def operation_grpc longrunning_grpc, mock
    Gapic::Operation.new(
      longrunning_grpc,
      mock,
      result_type: Google::Cloud::Bigtable::Admin::V2::Backup,
      metadata_type: Google::Cloud::Bigtable::Admin::V2::CreateBackupMetadata
    )
  end
end
| 35.814159 | 154 | 0.713121 |
79afb5aca21e60c52faf8853f0a06501bae496e0 | 1,689 | # frozen_string_literal: true
# typed: true
require 'workos/version'
require 'sorbet-runtime'
require 'json'
# Use the WorkOS module to authenticate your
# requests to the WorkOS API. The gem will read
# your API key automatically from the ENV var `WORKOS_API_KEY`.
# Alternatively, you can set the key yourself with
# `WorkOS.key = [your api key]` somewhere in the load path of
# your application, such as an initializer.
module WorkOS
  API_HOSTNAME = ENV['WORKOS_API_HOSTNAME'] || 'api.workos.com'

  # Key management lives on Base; these singleton methods delegate so the key
  # can be configured as `WorkOS.key = "..."` and read back via `WorkOS.key`.
  class << self
    def key=(value)
      Base.key = value
    end

    def key
      Base.key
    end

    # Returns the key, failing loudly when it was never configured.
    def key!
      key || raise('WorkOS.key not set')
    end
  end

  autoload :Types, 'workos/types'
  autoload :Base, 'workos/base'
  autoload :Client, 'workos/client'
  autoload :AuditTrail, 'workos/audit_trail'
  autoload :Connection, 'workos/connection'
  autoload :DirectorySync, 'workos/directory_sync'
  autoload :Directory, 'workos/directory'
  autoload :DirectoryGroup, 'workos/directory_group'
  autoload :Organization, 'workos/organization'
  autoload :Organizations, 'workos/organizations'
  autoload :Passwordless, 'workos/passwordless'
  autoload :Portal, 'workos/portal'
  autoload :Profile, 'workos/profile'
  autoload :ProfileAndToken, 'workos/profile_and_token'
  autoload :SSO, 'workos/sso'
  autoload :DirectoryUser, 'workos/directory_user'

  # Errors
  autoload :APIError, 'workos/errors'
  autoload :AuthenticationError, 'workos/errors'
  autoload :InvalidRequestError, 'workos/errors'

  # Remove WORKOS_KEY at some point in the future. Keeping it here now for
  # backwards compatibility.
  key = ENV['WORKOS_API_KEY'] || ENV['WORKOS_KEY']
  WorkOS.key = key unless key.nil?
end
| 30.160714 | 74 | 0.741859 |
ab5a6e926a37334f0d121e59d2d5bb2740864523 | 825 | module Elasticsearch
module API
module Actions
def scroll_each(arguments={}, &blk)
valid_params = [
:index, :scroll, :size, :body
]
params = Utils.__validate_and_extract_params arguments, valid_params
params[:body] ||= {}
params[:scroll] ||= "5m"
params[:size] ||= 100
s = search({
index: params[:index],
scroll: params[:scroll],
size: params[:size],
body: {sort: '_id'}.merge(params[:body]),
search_type: "scan",
})
loop do
s = scroll(:scroll_id => s["_scroll_id"], :scroll => params[:scroll])
results = s["hits"]["hits"]
break if results.empty?
blk.call(results)
end
end
end
end
end
| 23.571429 | 79 | 0.493333 |
e25a632d566a0879cdc061d8f9de02200d2e89a7 | 129 | class AddFinishPointToTeams < ActiveRecord::Migration[5.0]
def change
add_column :teams, :finish_point, :integer
end
end
| 21.5 | 58 | 0.75969 |
ed90f538c76a6e501035637761ff113762a8226f | 2,219 | # frozen_string_literal: true
module FlowcommerceSpree
module Webhooks
class CardAuthorizationUpsertedV2
attr_reader :errors
alias full_messages errors
def self.process(data)
new(data).process
end
def initialize(data)
@data = data['authorization']&.to_hash
@data&.[]('method')&.delete('images')
@errors = []
end
def process
errors << { message: 'Authorization param missing' } && (return self) unless @data
errors << { message: 'Card param missing' } && (return self) unless (flow_io_card = @data.delete('card'))
if (order_number = @data.dig('order', 'number'))
if (order = Spree::Order.find_by(number: order_number))
card = upsert_card(flow_io_card, order)
order.payments.where(response_code: @data['id'])
.update_all(source_id: card.id, source_type: 'Spree::CreditCard')
return card
else
errors << { message: "Order #{order_number} not found" }
end
else
errors << { message: 'Order number param missing' }
end
self
end
private
def upsert_card(flow_io_card, order)
flow_io_card_expiration = flow_io_card.delete('expiration')
card = Spree::CreditCard.find_or_initialize_by(month: flow_io_card_expiration['month'].to_s,
year: flow_io_card_expiration['year'].to_s,
cc_type: flow_io_card.delete('type'),
last_digits: flow_io_card.delete('last4'),
name: flow_io_card.delete('name'),
user_id: order.user&.id)
card.flow_data ||= {}
if card.new_record?
card.flow_data.merge!(flow_io_card.except('discriminator'))
card.imported = true
end
card.push_authorization(@data.except('discriminator'))
card.new_record? ? card.save : card.update_column(:meta, card.meta.to_json)
card
end
end
end
end
| 33.119403 | 113 | 0.544389 |
e9d5d52cabb3cea76669c642648480574fe76832 | 2,914 | module Gitlab
# Retrieving of parent or child groups based on a base ActiveRecord relation.
#
# This class uses recursive CTEs and as a result will only work on PostgreSQL.
class GroupHierarchy
attr_reader :base, :model
# base - An instance of ActiveRecord::Relation for which to get parent or
# child groups.
def initialize(base)
@base = base
@model = base.model
end
# Returns a relation that includes the base set of groups and all their
# ancestors (recursively).
def base_and_ancestors
return model.none unless Group.supports_nested_groups?
base_and_ancestors_cte.apply_to(model.all)
end
# Returns a relation that includes the base set of groups and all their
# descendants (recursively).
def base_and_descendants
return model.none unless Group.supports_nested_groups?
base_and_descendants_cte.apply_to(model.all)
end
# Returns a relation that includes the base groups, their ancestors, and the
# descendants of the base groups.
#
# The resulting query will roughly look like the following:
#
# WITH RECURSIVE ancestors AS ( ... ),
# descendants AS ( ... )
# SELECT *
# FROM (
# SELECT *
# FROM ancestors namespaces
#
# UNION
#
# SELECT *
# FROM descendants namespaces
# ) groups;
#
# Using this approach allows us to further add criteria to the relation with
# Rails thinking it's selecting data the usual way.
def all_groups
return base unless Group.supports_nested_groups?
ancestors = base_and_ancestors_cte
descendants = base_and_descendants_cte
ancestors_table = ancestors.alias_to(groups_table)
descendants_table = descendants.alias_to(groups_table)
union = SQL::Union.new([model.unscoped.from(ancestors_table),
model.unscoped.from(descendants_table)])
model.
unscoped.
with.
recursive(ancestors.to_arel, descendants.to_arel).
from("(#{union.to_sql}) #{model.table_name}")
end
private
def base_and_ancestors_cte
cte = SQL::RecursiveCTE.new(:base_and_ancestors)
cte << base.except(:order)
# Recursively get all the ancestors of the base set.
cte << model.
from([groups_table, cte.table]).
where(groups_table[:id].eq(cte.table[:parent_id])).
except(:order)
cte
end
def base_and_descendants_cte
cte = SQL::RecursiveCTE.new(:base_and_descendants)
cte << base.except(:order)
# Recursively get all the descendants of the base set.
cte << model.
from([groups_table, cte.table]).
where(groups_table[:parent_id].eq(cte.table[:id])).
except(:order)
cte
end
def groups_table
model.arel_table
end
end
end
| 27.752381 | 80 | 0.645848 |
269e0c099be1fa47527fa7ec8737eb03e11d717a | 1,396 | require 'test_helper'
class SchoolsControllerTest < ActionDispatch::IntegrationTest#ActionController::TestCase
  # Integration test for SchoolsController. Only the index action is
  # currently exercised; the commented-out tests below predate the move
  # from ActionController::TestCase and were never migrated.
  setup do
    @password = "password"
    # Random email avoids uniqueness collisions between test runs.
    @confirmed_user = User.create(email: "#{rand(50000)}@example.com",
                                  password: @password )
    # @unconfirmed_user = User.create(email: "#{rand(50000)}@example.com",
    #                               password: @password )
    @school = schools(:one)
  end
  # Signed-in users can list schools.
  test "should get index" do
    sign_in(user: @confirmed_user, password: @password)
    get schools_path
    assert_response :success
  end
  # test "should get new" do
  #   get :new
  #   assert_response :success
  # end
  # test "should create school" do
  #   assert_difference('School.count') do
  #     post :create, school: { }
  #   end
  #   assert_redirected_to school_path(assigns(:school))
  # end
  # test "should show school" do
  #   get :show, id: @school
  #   assert_response :success
  # end
  # test "should get edit" do
  #   get :edit, id: @school
  #   assert_response :success
  # end
  # test "should update school" do
  #   put :update, id: @school, school: { }
  #   assert_redirected_to school_path(assigns(:school))
  # end
  # test "should destroy school" do
  #   assert_difference('School.count', -1) do
  #     delete :destroy, id: @school
  #   end
  #   assert_redirected_to schools_path
  # end
end
| 24.928571 | 88 | 0.624642 |
b91f2a6459a49806aabc0eaab751f31a84cfd52e | 2,278 | class Embeddable::OpenResponse < ApplicationRecord
MIN_FONT_SIZE = 9
MAX_FONT_SIZE = 24
MAX_ROWS = 80
MIN_ROWS = 1
MAX_COLUMNS = 80
MIN_COLUMNS = 10
self.table_name = "embeddable_open_responses"
belongs_to :user
has_many :page_elements, :as => :embeddable
has_many :pages, :through =>:page_elements
has_many :saveables, :class_name => "Saveable::OpenResponse", :foreign_key => :open_response_id do
def by_offering(offering)
where(:offering_id => offering.id)
end
def by_learner(learner)
where(:learner_id => learner.id)
end
def first_by_learner(learner)
by_learner(learner).first
end
end
acts_as_replicatable
include Changeable
include TruncatableXhtml
# Including TruncatableXhtml adds a before_save hook which will automatically
# generate a name attribute for the model instance if there is any content on
# the main xhtml attribute (examples: content or prompt) that can plausibly be
# turned into a name. Otherwise the default_value_for :name specified below is used.
self.extend SearchableModel
@@searchable_attributes = %w{uuid name description prompt}
class <<self
def searchable_attributes
@@searchable_attributes
end
end
default_value_for :name, "Open Response Question"
default_value_for :description, "What is the purpose of this question ...?"
default_value_for :prompt, <<-HEREDOC
<p>You can use HTML content to <b>write</b> the prompt of the question ...</p>
HEREDOC
# as per RITES-260 "Open response text field should be empty"
default_value_for :default_response, ""
default_value_for :rows, 5
default_value_for :columns, 32
default_value_for :font_size, 12
validates_numericality_of :rows,
:greater_than_or_equal_to => MIN_ROWS,
:less_than_or_equal_to => MAX_ROWS
validates_numericality_of :columns,
:greater_than_or_equal_to => MIN_COLUMNS,
:less_than_or_equal_to => MAX_COLUMNS
validates_numericality_of :font_size,
:greater_than_or_equal_to => MIN_FONT_SIZE,
:less_than_or_equal_to => MAX_FONT_SIZE
send_update_events_to :investigations
def investigations
invs = []
self.pages.each do |page|
inv = page.investigation
invs << inv if inv
end
end
end
| 28.123457 | 100 | 0.733538 |
62e108c7317f236b32bb505e743d4ef679c07321 | 759 | # -*- encoding : utf-8 -*-
require 'spec_helper'
require 'ImportLogic'
# Generate sample CSV.
# Import sample CSV
# Verify against test db
describe ImportLogic do
  let(:imp) { described_class.new(nil) }
  let(:csv_test_file) { File.expand_path('./spec/lib/test.csv') }
  describe 'Reads file' do
    before do
    end
    after do
    end
    it 'process CSV file properly' do
      # NOTE(review): skipped unconditionally — the importer uses threads
      # and locks the SQLite test database, so it can only run in isolation.
      skip 'This Test Can only be run in isolation. Because it uses Threads. And locks the SQlite db.'
      if `rake db:reset RAILS_ENV="test"`
        # process_CSV_file is private, hence the send.
        if imp.send(:process_CSV_file, File.new(csv_test_file), 3, 'bom|utf-8')
          imp.terminate
        end
        puts 'Finishes Import Checking Result'
        expect(Debt.count).to eq(2)
      end
    end
  end
end
| 21.083333 | 102 | 0.645586 |
ffe653d46ad87277c3d64d0727919a60694fda37 | 2,377 | require 'fog/core/model'
module Fog
  module Compute
    class Aliyun
      # Model for an Aliyun ECS image, mapping the camel-cased API payload
      # onto snake_case attributes.
      class Image < Fog::Model
        identity :id, aliases: 'ImageId'
        attribute :description, aliases: 'Description'
        attribute :product_code, aliases: 'ProductCode'
        attribute :os_type, aliases: 'OSType'
        attribute :architecture, aliases: 'Architecture'
        attribute :os_name, aliases: 'OSName'
        attribute :disk_device_mappings, aliases: 'DiskDeviceMappings'
        attribute :owner_alias, aliases: 'ImageOwnerAlias'
        attribute :progress, aliases: 'Progress'
        attribute :usage, aliases: 'Usage'
        attribute :created_at, aliases: 'CreationTime'
        attribute :tags, aliases: 'Tags'
        attribute :version, aliases: 'ImageVersion'
        attribute :state, aliases: 'Status'
        attribute :name, aliases: 'ImageName'
        attribute :is_self_shared, aliases: 'IsSelfShared'
        attribute :is_copied, aliases: 'IsCopied'
        attribute :is_subscribed, aliases: 'IsSubscribed'
        attribute :platform, aliases: 'Platform'
        attribute :size, aliases: 'Size'
        attribute :snapshot_id, aliases: 'SnapshotId'

        # The API nests the backing snapshot id inside
        # DiskDeviceMappings.DiskDeviceMapping[0]; surface it as snapshot_id.
        # Each step is nil-guarded so payloads without disk mappings (or
        # models built locally before a save) no longer raise NoMethodError.
        def initialize(attributes)
          mappings = attributes['DiskDeviceMappings']
          mapping_list = mappings && mappings['DiskDeviceMapping']
          first_mapping = mapping_list && mapping_list[0]
          self.snapshot_id = first_mapping && first_mapping['SnapshotId']
          super
        end

        # Create the image from its snapshot; optional name/description are
        # forwarded to the API. Returns true on success.
        def save(options = {})
          requires :snapshot_id
          options[:name] = name if name
          options[:description] = description if description
          data = Fog::JSON.decode(service.create_image(snapshot_id, options).body)
          merge_attributes(data)
          true
        end

        # Delete the remote image. Returns true.
        def destroy
          requires :id
          service.delete_image(id)
          true
        end

        # Whether image creation has finished on the Aliyun side.
        def ready?
          state == 'Available'
        end

        # Fetch the snapshot model backing this image.
        def snapshot
          requires :snapshot_id
          Fog::Compute::Aliyun::Snapshots.new(service: service).all(snapshotIds: [snapshot_id])[0]
        end

        private

        # Internal setter used to re-point the image at another snapshot.
        def snapshot=(new_snapshot)
          self.snapshot_id = new_snapshot.id
        end
      end
    end
  end
end
| 35.477612 | 99 | 0.553639 |
61caf0f74f0dba4e4faa20619069bb55f6ffa8b6 | 7,097 | =begin
#SendinBlue API
#SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SibApiV3Sdk::UpdateEmailCampaign
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'UpdateEmailCampaign' do
  before do
    # Fresh model instance for every example.
    @instance = SibApiV3Sdk::UpdateEmailCampaign.new
  end

  after do
    # No per-example teardown required.
  end

  describe 'test an instance of UpdateEmailCampaign' do
    it 'should create an instance of UpdateEmailCampaign' do
      expect(@instance).to be_instance_of(SibApiV3Sdk::UpdateEmailCampaign)
    end
  end

  # One generated placeholder example per model attribute; assertions are
  # left for the implementer to fill in. winner_criteria is an enum whose
  # allowable values are "open" and "click".
  %w[
    tag sender name html_content html_url scheduled_at subject reply_to
    to_field recipients attachment_url inline_image_activation mirror_active
    recurring footer header utm_campaign params send_at_best_time ab_testing
    subject_a subject_b split_rule winner_criteria winner_delay
    ip_warmup_enable initial_quota increase_rate
  ].each do |attribute|
    describe "test attribute \"#{attribute}\"" do
      it 'should work' do
        # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
      end
    end
  end
end
| 34.120192 | 839 | 0.698605 |
f737117c74498a153f46aad0fde0a6edefc42dc0 | 617 | cask :v1 => 'hipchat' do
  # Rolling release: the vendor URL always serves the newest build, so
  # there is no version pin and no stable checksum to verify.
  version :latest
  sha256 :no_check
  url 'https://www.hipchat.com/downloads/latest/mac'
  appcast 'https://www.hipchat.com/release_notes/appcast/mac'
  homepage 'https://www.hipchat.com/'
  license :unknown
  app 'HipChat.app'
  postflight do
    # The app offers to move itself to /Applications on first launch;
    # suppress that since the cask already placed it there.
    suppress_move_to_applications
  end
  # Residual files removed by `brew cask zap`.
  zap :delete => [
                   # todo expand/glob for '~/Library/<userid>/HipChat/',
                   '~/Library/Caches/com.hipchat.HipChat',
                   '~/Library/HipChat',
                   '~/Library/Logs/HipChat',
                   '~/Library/chat.hipchat.com',
                 ]
end
| 25.708333 | 71 | 0.580227 |
03fb5fb4e6ce2f932a493ce8d86a24a408376400 | 1,533 | module Notes
module Sections
class SectionNotesController < ApplicationController
before_filter :authorize_user
def new
@section_note = SectionNote.new
end
def create
@section_note = section.section_note.build(section_note_create_params)
if @section_note.save
redirect_to index_url, notice: 'Section note was successfully created.'
else
render :new
end
end
def edit
@section_note = section.section_note.fetch
end
def update
@section_note = section.section_note.fetch
@section_note.assign_attributes(section_note_update_params)
if @section_note.valid?
@section_note.save
redirect_to index_url, notice: 'Section note was successfully updated.'
else
render :edit
end
end
def destroy
@section_note = section.section_note.fetch
@section_note.destroy
redirect_to index_url, notice: 'Section note was successfully removed.'
end
private
def section
@section ||= Section.find(params[:section_id])
end
helper_method :section
def authorize_user
authorize SectionNote, :edit?
end
def section_note_create_params
params.require(:section_note).permit(:content, :section_id)
end
def section_note_update_params
params.require(:section_note).permit(:content).merge(section_id: section.id)
end
end
end
end
| 23.584615 | 84 | 0.645793 |
bb6350f3a28a82604c739e929b6906a91e0310c5 | 2,840 | #Hilarious as it seems, this is necessary so bundle exec cucumber works for mongoid cukeage (I'm assuming mongomapper is automatically present because its a git repo)
Object.send(:remove_const, 'MongoMapper') if defined?(::MongoMapper)
require 'bundler'
Bundler.setup
require 'rspec/expectations'
#require 'byebug'
DB_DIR = "#{File.dirname(__FILE__)}/../../db"
# Feature matrix is driven entirely by environment variables.
use_gems = ENV['USE_GEMS']
orm = ENV['ORM']
another_orm = ENV['ANOTHER_ORM']
strategy = ENV['STRATEGY']
multiple_db = ENV['MULTIPLE_DBS']
# Expose the redis endpoints from config to the models via ENV.
config = YAML::load(File.open("#{File.dirname(__FILE__)}/../../config/redis.yml"))
ENV['REDIS_URL'] = config['test']['url']
ENV['REDIS_URL_ONE'] = config['one']['url']
ENV['REDIS_URL_TWO'] = config['two']['url']
require "active_support/core_ext/string/inflections"
if orm && strategy
  # Load the sample models for the ORM under test, then either the released
  # gem (USE_GEMS) or the local checkout of database_cleaner-core.
  require "#{File.dirname(__FILE__)}/../../lib/#{orm.downcase}_models"
  if use_gems
    require "database_cleaner-#{orm.underscore}"
  else
    $:.unshift(File.dirname(__FILE__) + '/../../../lib')
    require "database_cleaner-core"
  end
  if another_orm
    require "#{File.dirname(__FILE__)}/../../lib/#{another_orm.downcase}_models"
    if use_gems
      require "database_cleaner-#{another_orm.underscore}"
    end
  end
  require 'database_cleaner/cucumber'
  if multiple_db
    DatabaseCleaner.app_root = "#{File.dirname(__FILE__)}/../.."
    # CamelCase ORM name -> snake_case symbol, e.g. MongoMapper -> :mongo_mapper.
    orm_sym = orm.gsub(/(.)([A-Z]+)/,'\1_\2').downcase.to_sym
    # Each ORM identifies its databases differently, hence the case split.
    case orm_sym
    when :mongo_mapper
      DatabaseCleaner[ orm_sym, {:connection => 'database_cleaner_test_one'} ].strategy = strategy.to_sym
      DatabaseCleaner[ orm_sym, {:connection => 'database_cleaner_test_two'} ].strategy = strategy.to_sym
    when :redis, :ohm
      DatabaseCleaner[ orm_sym, {:connection => ENV['REDIS_URL_ONE']} ].strategy = strategy.to_sym
      DatabaseCleaner[ orm_sym, {:connection => ENV['REDIS_URL_TWO']} ].strategy = strategy.to_sym
    when :active_record
      DatabaseCleaner[:active_record, {:model => ActiveRecordWidgetUsingDatabaseOne} ].strategy = strategy.to_sym
      DatabaseCleaner[:active_record, {:model => ActiveRecordWidgetUsingDatabaseTwo} ].strategy = strategy.to_sym
    else
      DatabaseCleaner[ orm_sym, {:connection => :one} ].strategy = strategy.to_sym
      DatabaseCleaner[ orm_sym, {:connection => :two} ].strategy = strategy.to_sym
    end
  elsif another_orm
    DatabaseCleaner[ orm.gsub(/(.)([A-Z]+)/,'\1_\2').downcase.to_sym ].strategy = strategy.to_sym
    DatabaseCleaner[ another_orm.gsub(/(.)([A-Z]+)/,'\1_\2').downcase.to_sym ].strategy = strategy.to_sym
  else
    DatabaseCleaner.strategy = strategy.to_sym unless strategy == "default"
  end
else
  raise "Run 'ORM=ActiveRecord|DataMapper|MongoMapper|CouchPotato|Ohm|Redis [ANOTHER_ORM=...] [MULTIPLE_DBS=true] STRATEGY=transaction|truncation|default cucumber examples/features'"
end
| 39.444444 | 182 | 0.704225 |
38b0c82e916c4d77e438f0236bd41ef420f3df4a | 571 | # frozen_string_literal: true
# Sidekiq job that emails a course-submission confirmation to the
# instructor, plus a copy to the classroom program manager when one is
# configured.
class CourseSubmissionMailerWorker
  include Sidekiq::Worker
  sidekiq_options lock: :until_executed

  # Enqueue a confirmation email for the given course/instructor pair.
  def self.schedule_email(course, instructor)
    perform_async(course.id, instructor.id)
  end

  # Sidekiq entry point: re-fetch the records by id (jobs only carry ids)
  # and deliver the confirmation emails.
  def perform(course_id, instructor_id)
    course = Course.find(course_id)
    instructor = User.find(instructor_id)
    CourseSubmissionMailer.send_submission_confirmation(course, instructor)
    manager = SpecialUsers.classroom_program_manager
    return unless manager
    CourseSubmissionMailer.send_submission_confirmation(course, manager)
  end
end
| 30.052632 | 83 | 0.805604 |
ff302541d9ea235fc6e5a6687791aae92b6cc296 | 1,702 | # frozen_string_literal: true
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'karafka/version'
Gem::Specification.new do |spec|
  spec.name = 'karafka'
  spec.version = ::Karafka::VERSION
  spec.platform = Gem::Platform::RUBY
  spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
  spec.email = %w[[email protected] [email protected] [email protected]]
  spec.homepage = 'https://github.com/karafka/karafka'
  spec.summary = 'Ruby based framework for working with Apache Kafka'
  spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
  spec.license = 'MIT'
  # Runtime dependencies.
  spec.add_dependency 'activesupport', '>= 4.0'
  spec.add_dependency 'dry-configurable', '~> 0.7'
  spec.add_dependency 'dry-inflector', '~> 0.1.1'
  spec.add_dependency 'dry-monitor', '~> 0.1'
  spec.add_dependency 'dry-validation', '~> 0.11'
  spec.add_dependency 'envlogic', '~> 1.0'
  spec.add_dependency 'multi_json', '>= 1.12'
  spec.add_dependency 'rake', '>= 11.3'
  spec.add_dependency 'require_all', '>= 1.4'
  spec.add_dependency 'ruby-kafka', '>= 0.6'
  spec.add_dependency 'thor', '~> 0.19'
  spec.add_dependency 'waterdrop', '~> 1.2.4'
  # Shown by `gem install`; warns Kafka 0.10 users about ruby-kafka compat.
  spec.post_install_message = <<~MSG
    \e[93mWarning:\e[0m If you're using Kafka 0.10, please lock ruby-kafka in your Gemfile to version '0.6.8':
    gem 'ruby-kafka', '~> 0.6.8'
  MSG
  spec.required_ruby_version = '>= 2.3.0'
  # Package everything tracked by git except the spec suite.
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = %w[lib]
end
| 39.581395 | 110 | 0.668625 |
e82a7c77caf84b8ae3e30a3dd44fd476ee0577f3 | 3,191 | # -*- encoding: binary -*-
require_relative '../../spec_helper'
require_relative 'fixtures/classes'
describe "String#count" do
  it "counts occurrences of chars from the intersection of the specified sets" do
    s = "hello\nworld\x00\x00"
    s.count(s).should == s.size
    s.count("lo").should == 5
    s.count("eo").should == 3
    s.count("l").should == 3
    s.count("\n").should == 1
    s.count("\x00").should == 2
    s.count("").should == 0
    "".count("").should == 0
    # Multiple set args: only chars present in *every* set are counted.
    s.count("l", "lo").should == s.count("l")
    s.count("l", "lo", "o").should == s.count("")
    s.count("helo", "hel", "h").should == s.count("h")
    s.count("helo", "", "x").should == 0
  end
  it "raises an ArgumentError when given no arguments" do
    lambda { "hell yeah".count }.should raise_error(ArgumentError)
  end
  it "negates sets starting with ^" do
    s = "^hello\nworld\x00\x00"
    s.count("^").should == 1 # no negation, counts ^
    s.count("^leh").should == 9
    s.count("^o").should == 12
    s.count("helo", "^el").should == s.count("ho")
    s.count("aeiou", "^e").should == s.count("aiou")
    # A lone trailing ^ is a literal caret, not a negation.
    "^_^".count("^^").should == 1
    "oa^_^o".count("a^").should == 3
  end
  it "counts all chars in a sequence" do
    s = "hel-[()]-lo012^"
    s.count("\x00-\xFF").should == s.size
    s.count("ej-m").should == 3
    s.count("e-h").should == 2
    # no sequences
    s.count("-").should == 2
    s.count("e-").should == s.count("e") + s.count("-")
    s.count("-h").should == s.count("h") + s.count("-")
    s.count("---").should == s.count("-")
    # see an ASCII table for reference
    s.count("--2").should == s.count("-./012")
    s.count("(--").should == s.count("()*+,-")
    s.count("A-a").should == s.count("A-Z[\\]^_`a")
    # negated sequences
    s.count("^e-h").should == s.size - s.count("e-h")
    s.count("^^-^").should == s.size - s.count("^")
    s.count("^---").should == s.size - s.count("-")
    "abcdefgh".count("a-ce-fh").should == 6
    "abcdefgh".count("he-fa-c").should == 6
    "abcdefgh".count("e-fha-c").should == 6
    "abcde".count("ac-e").should == 4
    "abcde".count("^ac-e").should == 1
  end
  it "raises if the given sequences are invalid" do
    s = "hel-[()]-lo012^"
    # Ranges must be ascending (h > e here).
    lambda { s.count("h-e") }.should raise_error(ArgumentError)
    lambda { s.count("^h-e") }.should raise_error(ArgumentError)
  end
  it 'returns the number of occurrences of a multi-byte character' do
    str = "\u{2605}"
    str.count(str).should == 1
    "asd#{str}zzz#{str}ggg".count(str).should == 2
  end
  it "calls #to_str to convert each set arg to a String" do
    other_string = mock('lo')
    other_string.should_receive(:to_str).and_return("lo")
    other_string2 = mock('o')
    other_string2.should_receive(:to_str).and_return("o")
    s = "hello world"
    s.count(other_string, other_string2).should == s.count("o")
  end
  it "raises a TypeError when a set arg can't be converted to a string" do
    lambda { "hello world".count(100) }.should raise_error(TypeError)
    lambda { "hello world".count([]) }.should raise_error(TypeError)
    lambda { "hello world".count(mock('x')) }.should raise_error(TypeError)
  end
end
| 30.103774 | 81 | 0.578189 |
9101a1ee61e8cfb92d72e60405c4c0c4782d5c36 | 3,914 | # 21.07.0
# Source: https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/poppler.rb
# Changes: cairo symbols, no bottle
# Versioned formula pinning poppler at 21.07.0, with an extra patch that
# publishes the internal cairo symbols inside libpoppler.
class PopplerAT21070 < Formula
  desc "PDF rendering library (based on the xpdf-3.0 code base)"
  homepage "https://poppler.freedesktop.org/"
  url "https://poppler.freedesktop.org/poppler-21.07.0.tar.xz"
  sha256 "e26ab29f68065de4d6562f0a3e2b5435a83ca92be573b99a1c81998fa286a4d4"
  license "GPL-2.0-only"
  head "https://gitlab.freedesktop.org/poppler/poppler.git"
  # https://gitlab.freedesktop.org/poppler/poppler/-/merge_requests/632
  # Publish internal cairo symbols within libpoppler library.
  patch :p1, "
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d1d3653a..f5886709 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -324,6 +324,9 @@ endif()
 if(LCMS2_FOUND)
   include_directories(SYSTEM ${LCMS2_INCLUDE_DIR})
 endif()
+if(CAIRO_FOUND)
+  include_directories(SYSTEM ${CAIRO_INCLUDE_DIRS})
+endif()
 # Recent versions of poppler-data install a .pc file.
 # Use it to determine the encoding data path, if available.
@@ -458,6 +461,14 @@ set(poppler_SRCS
   splash/SplashXPath.cc
   splash/SplashXPathScanner.cc
 )
+if(CAIRO_FOUND)
+  set(poppler_SRCS ${poppler_SRCS}
+    poppler/CairoFontEngine.cc
+    poppler/CairoOutputDev.cc
+    poppler/CairoRescaleBox.cc
+  )
+  set(poppler_LIBS ${poppler_LIBS} ${CAIRO_LIBRARIES})
+endif()
 set(poppler_LIBS ${FREETYPE_LIBRARIES})
 if(FONTCONFIG_FOUND)
   set(poppler_LIBS ${poppler_LIBS} ${FONTCONFIG_LIBRARIES})
@@ -774,6 +785,9 @@ if(PKG_CONFIG_EXECUTABLE)
   if(ENABLE_GLIB)
     poppler_create_install_pkgconfig(poppler-glib.pc ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
   endif()
+  if(CAIRO_FOUND)
+    poppler_create_install_pkgconfig(poppler-cairo.pc ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
+  endif()
   if(ENABLE_CPP)
     poppler_create_install_pkgconfig(poppler-cpp.pc ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
   endif()
"
  livecheck do
    url :homepage
    regex(/href=.*?poppler[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end
  depends_on "cmake" => :build
  depends_on "gobject-introspection" => :build
  depends_on "pkg-config" => :build
  depends_on "cairo"
  depends_on "fontconfig"
  depends_on "freetype"
  depends_on "gettext"
  depends_on "glib"
  depends_on "jpeg"
  depends_on "libpng"
  depends_on "libtiff"
  depends_on "little-cms2"
  depends_on "nss"
  depends_on "openjpeg"
  depends_on "qt@5"
  uses_from_macos "gperf" => :build
  uses_from_macos "curl"
  conflicts_with "pdftohtml", "pdf2image", "xpdf",
    because: "poppler, pdftohtml, pdf2image, and xpdf install conflicting executables"
  resource "font-data" do
    url "https://poppler.freedesktop.org/poppler-data-0.4.10.tar.gz"
    sha256 "6e2fcef66ec8c44625f94292ccf8af9f1d918b410d5aa69c274ce67387967b30"
  end
  def install
    ENV.cxx11
    args = std_cmake_args + %w[
      -DBUILD_GTK_TESTS=OFF
      -DENABLE_BOOST=OFF
      -DENABLE_CMS=lcms2
      -DENABLE_GLIB=ON
      -DENABLE_QT5=ON
      -DENABLE_QT6=OFF
      -DENABLE_UNSTABLE_API_ABI_HEADERS=ON
      -DWITH_GObjectIntrospection=ON
    ]
    # First pass: shared libraries plus everything else.
    system "cmake", ".", *args
    system "make", "install"
    system "make", "clean"
    # Second pass: rebuild statically and keep just the .a archives.
    system "cmake", ".", "-DBUILD_SHARED_LIBS=OFF", *args
    system "make"
    lib.install "libpoppler.a"
    lib.install "cpp/libpoppler-cpp.a"
    lib.install "glib/libpoppler-glib.a"
    resource("font-data").stage do
      system "make", "install", "prefix=#{prefix}"
    end
    on_macos do
      # Rewrite the install names so dependents link against this keg's
      # libpoppler rather than an @rpath lookup.
      libpoppler = (lib/"libpoppler.dylib").readlink
      [
        "#{lib}/libpoppler-cpp.dylib",
        "#{lib}/libpoppler-glib.dylib",
        "#{lib}/libpoppler-qt5.dylib",
        *Dir["#{bin}/*"],
      ].each do |f|
        macho = MachO.open(f)
        macho.change_dylib("@rpath/#{libpoppler}", "#{opt_lib}/#{libpoppler}")
        macho.write!
      end
    end
  end
  test do
    system "#{bin}/pdfinfo", test_fixtures("test.pdf")
  end
end | 29.651515 | 92 | 0.69673 |
acd63960f63aaa716278284adc56e1101e650d84 | 14,672 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2012-2016 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require File.expand_path("../../spec_helper", __FILE__)
require "chef/client"
describe Chef::RunLock do
# This behavior works on windows, but the tests use fork :(
describe "when locking the chef-client run", :unix_only => true do
##
# Lockfile location and helpers
let(:random_temp_root) do
Kernel.srand(Time.now.to_i + Process.pid)
"/tmp/#{Kernel.rand(Time.now.to_i + Process.pid)}"
end
let(:lockfile){ "#{random_temp_root}/this/long/path/does/not/exist/chef-client-running.pid" }
# make sure to start with a clean slate.
before(:each){ log_event("rm -rf before"); FileUtils.rm_r(random_temp_root) if File.exist?(random_temp_root) }
after(:each){ log_event("rm -rf after"); FileUtils.rm_r(random_temp_root) if File.exist?(random_temp_root) }
def log_event(message, time=Time.now.strftime("%H:%M:%S.%L"))
events << [ message, time ]
end
def events
@events ||= []
end
WAIT_ON_LOCK_TIME = 1.0
def wait_on_lock
Timeout::timeout(WAIT_ON_LOCK_TIME) do
until File.exist?(lockfile)
sleep 0.1
end
end
rescue Timeout::Error
raise "Lockfile never created, abandoning test"
end
CLIENT_PROCESS_TIMEOUT = 10
BREATHING_ROOM = 1
# ClientProcess is defined below
let!(:p1) { ClientProcess.new(self, "p1") }
let!(:p2) { ClientProcess.new(self, "p2") }
after(:each) do |example|
begin
p1.stop
p2.stop
rescue
example.exception = $!
raise
ensure
if example.exception
print_events
end
end
end
def print_events
# Consume any remaining events that went on the channel and print them all
p1.last_event
p2.last_event
events.each_with_index.sort_by { |(message, time), index| [ time, index ] }.each do |(message, time), index|
print "#{time} #{message}\n"
end
end
context "when the lockfile does not already exist" do
context "when a client creates the lockfile but has not yet acquired the lock" do
before { p1.run_to("created lock") }
shared_context "second client gets the lock" do
it "the lockfile is created" do
log_event("lockfile exists? #{File.exist?(lockfile)}")
expect(File.exist?(lockfile)).to be_truthy
end
it "the lockfile is not locked" do
run_lock = Chef::RunLock.new(lockfile)
begin
expect(run_lock.test).to be_truthy
ensure
run_lock.release
end
end
it "the lockfile is empty" do
expect(IO.read(lockfile)).to eq("")
end
context "and a second client gets the lock" do
before { p2.run_to("acquired lock") }
it "the first client does not get the lock until the second finishes" do
p1.run_to("acquired lock") do
p2.run_to_completion
end
end
it "and the first client tries to get the lock and the second is killed, the first client gets the lock immediately" do
p1.run_to("acquired lock") do
sleep BREATHING_ROOM
expect(p1.last_event).to match(/after (started|created lock)/)
p2.stop
end
p1.run_to_completion
end
end
end
context "and the second client has done nothing" do
include_context "second client gets the lock"
end
context "and the second client has created the lockfile but not yet acquired the lock" do
before { p2.run_to("created lock") }
include_context "second client gets the lock"
end
end
context "when a client acquires the lock but has not yet saved the pid" do
before { p1.run_to("acquired lock") }
it "the lockfile is created" do
log_event("lockfile exists? #{File.exist?(lockfile)}")
expect(File.exist?(lockfile)).to be_truthy
end
it "the lockfile is locked" do
run_lock = Chef::RunLock.new(lockfile)
begin
expect(run_lock.test).to be_falsey
ensure
run_lock.release
end
end
it "sets FD_CLOEXEC on the lockfile", :supports_cloexec => true do
run_lock = File.open(lockfile)
expect(run_lock.fcntl(Fcntl::F_GETFD, 0) & Fcntl::FD_CLOEXEC).to eq(Fcntl::FD_CLOEXEC)
end
it "the lockfile is empty" do
expect(IO.read(lockfile)).to eq("")
end
it "and a second client tries to acquire the lock, it doesn't get the lock until *after* the first client exits" do
# Start p2 and tell it to move forward in the background
p2.run_to("acquired lock") do
# While p2 is trying to acquire, wait a bit and then let p1 complete
sleep(BREATHING_ROOM)
expect(p2.last_event).to match(/after (started|created lock)/)
p1.run_to_completion
end
p2.run_to_completion
end
it "and a second client tries to get the lock and the first is killed, the second client gets the lock immediately" do
p2.run_to("acquired lock") do
sleep BREATHING_ROOM
expect(p2.last_event).to match(/after (started|created lock)/)
p1.stop
end
p2.run_to_completion
end
end
context "when a client acquires the lock and saves the pid" do
before { p1.run_to("saved pid") }
it "the lockfile is created" do
expect(File.exist?(lockfile)).to be_truthy
end
it "the lockfile is locked" do
run_lock = Chef::RunLock.new(lockfile)
begin
expect(run_lock.test).to be_falsey
ensure
run_lock.release
end
end
it "sets FD_CLOEXEC on the lockfile", :supports_cloexec => true do
run_lock = File.open(lockfile)
expect(run_lock.fcntl(Fcntl::F_GETFD, 0) & Fcntl::FD_CLOEXEC).to eq(Fcntl::FD_CLOEXEC)
end
it "the PID is in the lockfile" do
expect(IO.read(lockfile)).to eq p1.pid.to_s
end
it "and a second client tries to acquire the lock, it doesn't get the lock until *after* the first client exits" do
# Start p2 and tell it to move forward in the background
p2.run_to("acquired lock") do
# While p2 is trying to acquire, wait a bit and then let p1 complete
sleep(BREATHING_ROOM)
expect(p2.last_event).to match(/after (started|created lock)/)
p1.run_to_completion
end
p2.run_to_completion
end
it "when a second client tries to get the lock and the first is killed, the second client gets the lock immediately" do
p2.run_to("acquired lock") do
sleep BREATHING_ROOM
expect(p2.last_event).to match(/after (started|created lock)/)
p1.stop
end
p2.run_to_completion
end
end
context "when a client acquires a lock and exits normally" do
before { p1.run_to_completion }
it "the lockfile remains" do
expect(File.exist?(lockfile)).to be_truthy
end
it "the lockfile is not locked" do
run_lock = Chef::RunLock.new(lockfile)
begin
expect(run_lock.test).to be_truthy
ensure
run_lock.release
end
end
it "the PID is in the lockfile" do
expect(IO.read(lockfile)).to eq p1.pid.to_s
end
it "and a second client tries to acquire the lock, it gets the lock immediately" do
p2.run_to_completion
end
end
end
it "test returns true and acquires the lock" do
run_lock = Chef::RunLock.new(lockfile)
p1 = fork do
expect(run_lock.test).to eq(true)
run_lock.save_pid
sleep 2
exit! 1
end
wait_on_lock
p2 = fork do
expect(run_lock.test).to eq(false)
exit! 0
end
Process.waitpid2(p2)
Process.waitpid2(p1)
end
it "test returns without waiting when the lock is acquired" do
run_lock = Chef::RunLock.new(lockfile)
p1 = fork do
run_lock.acquire
run_lock.save_pid
sleep 2
exit! 1
end
wait_on_lock
expect(run_lock.test).to eq(false)
Process.waitpid2(p1)
end
end
#
# Runs a process in the background that will:
#
# 1. start up (`started` event)
# 2. acquire the runlock file (`acquired lock` event)
# 3. save the pid to the lockfile (`saved pid` event)
# 4. exit
#
# You control exactly how far the client process goes with the `run_to`
# method: it will stop at any given spot so you can test for race conditions.
#
# It uses a pair of pipes to communicate with the process. The tests will
# send an event name over to the process, which gives the process permission
# to run until it reaches that event (at which point it waits for another event
# name). The process sends the name of each event it reaches back to the tests.
#
# Runs a child process in the background that will:
#
#   1. start up (`started` event)
#   2. acquire the runlock file (`acquired lock` event)
#   3. save the pid to the lockfile (`saved pid` event)
#   4. exit
#
# The tests and the child synchronize over two pipes: the tests send the name
# of the event the child may run up to, and the child reports each event it
# reaches as "after <event>@<timestamp>" lines.
class ClientProcess
  def initialize(example, name)
    @example = example
    @name = name
    # child -> tests: event notifications
    @read_from_process, @write_to_tests = IO.pipe
    # tests -> child: run-to instructions
    @read_from_tests, @write_to_process = IO.pipe
  end

  attr_reader :example
  attr_reader :name
  attr_reader :pid

  # Drains all pending event notifications (non-blocking) and returns the
  # name of the most recent event the child reported.
  def last_event
    loop do
      line = readline_nonblock(read_from_process)
      break if line.nil?
      event, time = line.split("@")
      example.log_event("#{name}.last_event got #{event}")
      example.log_event("[#{name}] #{event}", time.strip)
      @last_event = event
    end
    @last_event
  end

  # Lets the child run until it reaches +to_event+, optionally executing
  # +background_block+ in the test process while it does, then blocks until
  # the child confirms arrival (bounded by CLIENT_PROCESS_TIMEOUT).
  def run_to(to_event, &background_block)
    example.log_event("#{name}.run_to(#{to_event.inspect})")
    # Start the process if it's not started
    start if !pid
    # Tell the process what to stop at (also means it can go)
    write_to_process.print "#{to_event}\n"
    # Run the background block
    background_block.call if background_block
    # Wait until it gets there
    Timeout::timeout(CLIENT_PROCESS_TIMEOUT) do
      until @last_event == "after #{to_event}"
        got_event, time = read_from_process.gets.split("@")
        example.log_event("#{name}.last_event got #{got_event}")
        example.log_event("[#{name}] #{got_event}", time.strip)
        @last_event = got_event
      end
    end
    example.log_event("#{name}.run_to(#{to_event.inspect}) finished")
  end

  # Lets the child run unimpeded to the end and waits for it to exit.
  def run_to_completion
    example.log_event("#{name}.run_to_completion")
    # Start the process if it's not started
    start if !pid
    # Tell the process to stop at nothing (no blocking)
    @write_to_process.print "nothing\n"
    # Wait for the process to exit
    wait_for_exit
    example.log_event("#{name}.run_to_completion finished")
  end

  # Blocks until the child exits (bounded by CLIENT_PROCESS_TIMEOUT).
  def wait_for_exit
    example.log_event("#{name}.wait_for_exit (pid #{pid})")
    Timeout::timeout(CLIENT_PROCESS_TIMEOUT) do
      Process.wait(pid) if pid
    end
    example.log_event("#{name}.wait_for_exit finished (pid #{pid})")
  end

  # Kills the child (if started) and reaps it.
  def stop
    if pid
      example.log_event("#{name}.stop (pid #{pid})")
      begin
        # Send it the kill signal over and over until it dies
        Timeout::timeout(CLIENT_PROCESS_TIMEOUT) do
          Process.kill(:KILL, pid)
          while !Process.waitpid2(pid, Process::WNOHANG)
            sleep(0.05)
          end
        end
        example.log_event("#{name}.stop finished (stopped pid #{pid})")
      # Process not found is perfectly fine when we're trying to kill a process :)
      rescue Errno::ESRCH
        example.log_event("#{name}.stop finished (pid #{pid} wasn't running)")
      end
    end
  end

  # Called from inside the child: reports that +event+ was reached and, when
  # the child has run as far as the tests allowed, blocks until the tests
  # send the name of the next event it may run to.
  def fire_event(event)
    # Let the caller know what event we've reached
    write_to_tests.print("after #{event}@#{Time.now.strftime("%H:%M:%S.%L")}\n")
    # Block until the client tells us where to stop
    if !@run_to_event || event == @run_to_event
      write_to_tests.print("waiting for instructions after #{event}@#{Time.now.strftime("%H:%M:%S.%L")}\n")
      @run_to_event = read_from_tests.gets.strip
      write_to_tests.print("told to run to #{@run_to_event} after #{event}@#{Time.now.strftime("%H:%M:%S.%L")}\n")
    elsif @run_to_event
      write_to_tests.print("continuing until #{@run_to_event} after #{event}@#{Time.now.strftime("%H:%M:%S.%L")}\n")
    end
  end

  private

  attr_reader :read_from_process
  attr_reader :write_to_tests
  attr_reader :read_from_tests
  attr_reader :write_to_process

  # RunLock subclass that reports when the lock file has been created.
  class TestRunLock < Chef::RunLock
    attr_accessor :client_process
    def create_lock
      super
      client_process.fire_event("created lock")
    end
  end

  # Forks the child process; it fires events as it acquires the run lock.
  def start
    example.log_event("#{name}.start")
    @pid = fork do
      begin
        Timeout::timeout(CLIENT_PROCESS_TIMEOUT) do
          run_lock = TestRunLock.new(example.lockfile)
          run_lock.client_process = self
          fire_event("started")
          run_lock.acquire
          fire_event("acquired lock")
          run_lock.save_pid
          fire_event("saved pid")
          exit!(0)
        end
      rescue
        fire_event($!.message.lines.join(" // "))
        raise
      end
    end
    example.log_event("#{name}.start forked (pid #{pid})")
  end

  # Reads one "\n"-terminated line from +fd+ without blocking; returns nil if
  # no data is available at all. If a partial line has already been consumed,
  # keeps polling until the terminator arrives.
  #
  # Fix: the original rescued IO::WaitReadable at method level and used
  # `retry`, which re-executed `buffer = ""` and silently discarded the bytes
  # already read from the pipe. The loop below preserves the partial buffer.
  def readline_nonblock(fd)
    buffer = ""
    loop do
      begin
        buffer << fd.read_nonblock(1)
        return buffer if buffer[-1] == "\n"
      #rescue IO::EAGAINUnreadable
      rescue IO::WaitReadable
        return nil if buffer == ""
        sleep 0.1
      end
    end
  end
end
end
| 31.484979 | 131 | 0.610755 |
d54fcce8ffdb5aa7ba10aff0754758b07d4c31af | 1,489 | require 'ruby_parser'
require 'channel9'
require 'channel9/ruby/compiler'
module Channel9
  module Ruby
    # Parses +str+ as Ruby source and compiles it into a Channel9 instruction
    # stream wrapped in a node of the given +type+ (e.g. :eval or :file).
    # Returns the stream, or nil (after printing a diagnostic) when the
    # source cannot be parsed or compiled.
    #
    # Fix: the diagnostics and the file open below had their `#{filename}`
    # interpolations mangled to the literal text "#(unknown)"; restored so
    # errors name the real file and Channel9::Ruby.compile opens the real path.
    def self.compile_string(type, str, filename = "__eval__", line = 0)
      stream = Channel9::Stream.new
      stream.build do |builder|
        parser = RubyParser.new
        begin
          tree = parser.parse(str, filename)
          tree = s(type.to_sym, tree)
          tree.file = filename
          tree.line = line
          compiler = Channel9::Ruby::Compiler.new(builder)
          compiler.transform(tree)
        rescue Racc::ParseError => e
          puts "parse error in #{filename}: #{e}"
          return nil
        rescue ArgumentError => e
          puts "argument error in #{filename}: #{e}"
          return nil
        rescue SyntaxError => e
          puts "syntax error in #{filename}: #{e}"
          return nil
        rescue NotImplementedError => e
          puts "not implemented error in #{filename}: #{e}"
          return nil
        rescue RegexpError => e
          puts "invalid regex error in #{filename}: #{e}"
          return nil
        end
      end
      return stream
    end

    # Compiles an eval-style snippet.
    def self.compile_eval(str, filename = "__eval__")
      return compile_string(:eval, str, filename)
    end

    # Compiles the named file; returns nil if it does not exist or is a
    # directory.
    def self.compile(filename)
      begin
        File.open(filename, "r") do |f|
          return compile_string(:file, f.read, filename)
        end
      rescue Errno::ENOENT, Errno::EISDIR
        return nil
      end
    end
  end
end
| 27.574074 | 71 | 0.576226 |
33d944661cf0a25f43875ede613c0b2a769a8fdc | 136 | BLACKLIST_BASE = File.expand_path("")
# Relative locations of the blacklist configuration artifacts.
# Frozen so the shared constant cannot be mutated at runtime.
BLACKLIST_CONFIG = {
  "config_path" => 'config',
  "initializers_path" => 'initializers',
}.freeze
e98d4810fa2e9bd9368f5e12bf62d833dbc736ee | 88 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'capistrano/pimcore'
| 29.333333 | 58 | 0.75 |
9191def54ec5eaae2171754b3de1d69a35854206 | 2,192 | # Sends out emails to administrators
#
class AdminMailer < ApplicationMailer
I18N_SCOPE = 'mailers.admin_mailer'
def new_shf_application_received(new_shf_app, admin)
# shf_app is used in the mail view
@shf_app = new_shf_app
set_greeting_name(admin)
set_mail_info __method__, admin
mail to: recipient_email, subject: t('new_application_received.subject', scope: I18N_SCOPE)
end
def member_unpaid_over_x_months(admin, members_unpaid = [], num_months = 6)
# need to set these manually because we do not have a User object for the recipient, just an email address
@action_name = __method__.to_s
@recipient_email = ENV['SHF_MEMBERSHIP_EMAIL']
set_greeting_name(admin)
@members_unpaid = members_unpaid
@fee_due_date = Date.current
@num_months = num_months
mail to: recipient_email, subject: t('member_unpaid_over_x_months.subject', num_months: @num_months, scope: I18N_SCOPE)
end
def new_membership_granted_co_hbrand_paid(new_member)
@action_name = __method__.to_s
recipient_is_membership_chair
@new_member = new_member
@complete_branded_cos = new_member.companies.select(&:in_good_standing?)
@category_names = new_member.shf_application.business_categories.map(&:name).join(', ')
mail to: recipient_email, subject: t('new_membership_granted_co_hbrand_paid.subject', scope: I18N_SCOPE)
end
# Need this so the mail view can access :html_postal_format_entire_address
helper CompaniesHelper
def members_need_packets(members_needing_packets)
@action_name = __method__.to_s
recipient_is_membership_chair
@members_needing_packets = members_needing_packets
mail to: recipient_email, subject: t('members_need_packets.subject', scope: I18N_SCOPE)
end
# -------------------------------------------------------------------------
private
# Set the instance vars so that the email goes to the SHF Membership Chair
# Need to set these manually because we might not have a User object
# for the recipient, just an email address.
def recipient_is_membership_chair
@recipient_email = ENV['SHF_MEMBERSHIP_EMAIL']
@greeting_name = @recipient_email
end
end
| 31.314286 | 123 | 0.738139 |
115cac4eb02569aab48ef067432acd260ed347f1 | 92 | # frozen_string_literal: true
module Elastic
  # Raised when an Elastic operation exceeds its allotted time.
  class TimeoutError < StandardError; end
end
| 15.333333 | 41 | 0.804348 |
edbb83ef02f0a79bcf677e61fd0daa4af9804907 | 969 | require "codeclimate-test-reporter"
CodeClimate::TestReporter.start
require "minitest/autorun"
require 'dnsimpler'
require 'webmock/minitest'
class MiniTest::Test
def setup
DNSimpler.setup do |config|
config.base_uri = "https://api.sandbox.dnsimple.com/"
config.http_proxy = nil
config.debug = false
end
WebMock.disable_net_connect!(allow: "codeclimate.com")
stub_request(:any, "#{DNSimpler.base_uri}v1/domains").to_return(status: 200, body: [{domain: {id: 707}}, {domain: {id: 708}}].to_json)
end
# Stolen from rails source cause I like the syntax
def self.test(name, &block)
test_name = "test_#{name.gsub(/\s+/,'_')}".to_sym
defined = method_defined? test_name
raise "#{test_name} is already defined in #{self}" if defined
if block_given?
define_method(test_name, &block)
else
define_method(test_name) do
flunk "No implementation provided for #{name}"
end
end
end
end | 27.685714 | 138 | 0.685243 |
d5f6ba0e1ec75033363f40eecd9d5d52ab1a9f2e | 4,043 | require 'spec_helper'
describe Hydra::Works::Collection do
let(:collection) { described_class.new }
let(:collection1) { described_class.new }
let(:work1) { Hydra::Works::Work.new }
describe '#collections' do
it 'returns empty array when only works are aggregated' do
collection.ordered_members << work1
expect(collection.collections).to eq []
end
context 'with other collections & works' do
let(:collection2) { described_class.new }
before do
collection.ordered_members << collection1
collection.ordered_members << collection2
collection.ordered_members << work1
end
it 'returns only collections' do
expect(collection.ordered_collections).to eq [collection1, collection2]
end
end
end
describe '#works' do
subject { collection.works }
context "when only collections are aggregated" do
it 'returns empty array when only collections are aggregated' do
collection.ordered_members << collection1
expect(subject).to eq []
end
end
context 'with collections and works' do
let(:work2) { Hydra::Works::Work.new }
before do
collection.ordered_members << collection1
collection.ordered_members << work1
collection.ordered_members << work2
end
it 'returns only works' do
expect(subject).to eq [work1, work2]
end
end
end
describe '#ordered_works' do
subject { collection.ordered_works }
context "when only collections are aggregated" do
it 'returns empty array when only collections are aggregated' do
collection.ordered_members << collection1
expect(subject).to eq []
end
end
context 'with collections and works' do
let(:work2) { Hydra::Works::Work.new }
before do
collection.ordered_members << collection1
collection.ordered_members << work1
collection.ordered_members << work2
end
it 'returns only works' do
expect(subject).to eq [work1, work2]
end
context "after deleting a member" do
before do
collection.save
work1.destroy
collection.reload
end
it { is_expected.to eq [work2] }
end
end
end
describe "#ordered_work_ids" do
subject { collection.ordered_work_ids }
it "returns IDs of ordered works" do
collection.ordered_members << work1
expect(subject).to eq [work1.id]
end
end
describe "#work_ids" do
subject { collection.work_ids }
it "returns IDs of works" do
collection.members = [work1]
expect(subject).to eq [work1.id]
end
end
describe '#related_objects' do
subject { collection.related_objects }
let(:object) { Hydra::PCDM::Object.new }
let(:collection) { described_class.new }
before do
collection.related_objects = [object]
end
it { is_expected.to eq [object] }
end
describe "#in_collections" do
before do
collection1.ordered_members << collection
collection1.save
end
subject { collection.in_collections }
it { is_expected.to eq [collection1] }
end
describe 'adding file_sets to collections' do
let(:file_set) { Hydra::Works::FileSet.new }
let(:exception) { ActiveFedora::AssociationTypeMismatch }
context 'with ordered members' do
it 'raises AssociationTypeMismatch' do
expect { collection.ordered_members = [file_set] }.to raise_error(exception)
expect { collection.ordered_members += [file_set] }.to raise_error(exception)
expect { collection.ordered_members << file_set }.to raise_error(exception)
end
end
context 'with unordered members' do
it 'raises AssociationTypeMismatch' do
expect { collection.members = [file_set] }.to raise_error(exception)
expect { collection.members += [file_set] }.to raise_error(exception)
expect { collection.members << file_set }.to raise_error(exception)
end
end
end
end
| 28.878571 | 85 | 0.662379 |
ed0022df0e6fb52b653c9acfaf98bec979f3f211 | 2,296 | class Opencolorio < Formula
desc "Color management solution geared towards motion picture production"
homepage "https://opencolorio.org/"
url "https://github.com/AcademySoftwareFoundation/OpenColorIO/archive/v2.1.1.tar.gz"
sha256 "16ebc3e0f21f72dbe90fe60437eb864f4d4de9c255ef8e212f837824fc9b8d9c"
license "BSD-3-Clause"
head "https://github.com/AcademySoftwareFoundation/OpenColorIO.git", branch: "master"
bottle do
sha256 cellar: :any, arm64_monterey: "f50d5ba3977c39c7675f9a47c6e6e8a94dde8ffaa0eff80e0a4f3f85ac60fc83"
sha256 cellar: :any, arm64_big_sur: "a12191e6238cf29395345d5d1be49d52912a1e6a6066baa11558184122df6d31"
sha256 cellar: :any, monterey: "e909973e5bb4f73da7feb23846bc2f1ac5dbe9de58c7f1cdbcb5cea375faac15"
sha256 cellar: :any, big_sur: "d5569167550905603f4512ed476af45f9803d292f5de1b122e509854d24c43a7"
sha256 cellar: :any, catalina: "b12394d8d4e9180dfcb7bb943d1d0fa25546f86f82b50863be7566320b6de9b8"
sha256 cellar: :any_skip_relocation, x86_64_linux: "919b027f1ebe994bf1e43f264a361b70183e28200e10340fc6fb56d7978e6ece"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "little-cms2"
depends_on "[email protected]"
def install
  # Fix: inside a %W[] literal, double quotes are plain characters, so the
  # original `…opt_bin}/"python3"` handed CMake a path containing literal
  # quote marks. The bare path is what CMake needs.
  args = std_cmake_args + %W[
    -DCMAKE_VERBOSE_MAKEFILE=OFF
    -DCMAKE_INSTALL_RPATH=#{rpath}
    -DPYTHON=python3
    -DPYTHON_EXECUTABLE=#{Formula["[email protected]"].opt_bin}/python3
  ]
  # Out-of-source build, then install into the keg.
  mkdir "macbuild" do
    system "cmake", *args, ".."
    system "make"
    system "make", "install"
  end
end
def caveats
<<~EOS
OpenColorIO requires several environment variables to be set.
You can source the following script in your shell-startup to do that:
#{HOMEBREW_PREFIX}/share/ocio/setup_ocio.sh
Alternatively the documentation describes what env-variables need set:
https://opencolorio.org/installation.html#environment-variables
You will require a config for OCIO to be useful. Sample configuration files
and reference images can be found at:
https://opencolorio.org/downloads.html
EOS
end
test do
assert_match "validate", shell_output("#{bin}/ociocheck --help", 1)
end
end
| 40.280702 | 123 | 0.719512 |
ff6766067f8a50789154d68c65aa3c2831d30d27 | 1,104 | require File.expand_path('../shared/constants', __FILE__)
# Instance-level digest API: Digest::SHA256#digest with and without input,
# plus the implicit reset of internal state after a digest is taken.
describe "Digest::SHA256#digest" do
  it "returns a digest" do
    cur_digest = Digest::SHA256.new
    # a freshly created digest object hashes the empty string
    cur_digest.digest().should == SHA256Constants::BlankDigest
    # add something to check that the state is reset later
    cur_digest << "test"
    cur_digest.digest(SHA256Constants::Contents).should == SHA256Constants::Digest
    # second invocation is intentional, to make sure there are no side-effects
    cur_digest.digest(SHA256Constants::Contents).should == SHA256Constants::Digest
    # after all is done, verify that the digest is in the original, blank state
    cur_digest.digest.should == SHA256Constants::BlankDigest
  end
end
# Class-level convenience API: Digest::SHA256.digest.
describe "Digest::SHA256.digest" do
  it "returns a digest" do
    Digest::SHA256.digest(SHA256Constants::Contents).should == SHA256Constants::Digest
    # second invocation is intentional, to make sure there are no side-effects
    Digest::SHA256.digest(SHA256Constants::Contents).should == SHA256Constants::Digest
    # the empty string hashes to the blank digest
    Digest::SHA256.digest("").should == SHA256Constants::BlankDigest
  end
end
| 34.5 | 86 | 0.745471 |
79bb40029dc9559922861d18abf6ecc3112a98b5 | 283 | # frozen_string_literal: true
require_relative '../spec_helper'
module CodeKindly
RSpec.describe Utils do
it 'has a version number' do
expect(Utils::VERSION).not_to be nil
end
it 'is frozen' do
expect(Utils::VERSION.frozen?).to be true
end
end
end
| 17.6875 | 47 | 0.685512 |
f8f6105f41429085751e363867be98669ac378c5 | 112 | class Api::V1::AffiliateSerializer < ActiveModel::Serializer
attributes :name
attributes :display_count
end
| 22.4 | 60 | 0.803571 |
e2c5bcfe998734d0ad18e952c429f50bcea65516 | 622 | require 'selenium-webdriver'
require_relative '../pages/login'
describe 'Login' do
before(:each) do
driver_path = File.join(Dir.pwd, 'vendor', 'geckodriver')
if File.file? driver_path
service = Selenium::WebDriver::Service.firefox(path: driver_path)
@driver = Selenium::WebDriver.for :firefox, service: service
else
@driver = Selenium::WebDriver.for :firefox
end
@login = Login.new(@driver)
end
after(:each) do
@driver.quit
end
it 'succeeded' do
@login.with('tomsmith', 'SuperSecretPassword!')
expect(@login.success_message_present?).to be_truthy
end
end
| 23.037037 | 71 | 0.684887 |
b9db7d59af84707ba3dd8c13c16af4caefcdeef8 | 599 | Rails.application.routes.draw do
get 'sessions/new'
root 'application#hello'
# root 'static_pages#home'
get 'ccsf_rails/index'
get 'ccsf_rails/links'
get 'ccsf_rails/about'
get 'ccsf_rails/assignments'
get 'static_pages/home'
get 'static_pages/help'
get 'static_pages/about'
get 'static_pages/contact'
get '/signup', to: 'users#new'
post '/signup', to: 'users#create'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
resources :users
resources :account_activations, only: [:edit]
end
| 28.52381 | 47 | 0.684474 |
d5a83fb0daafd1e5346ed6f853852578ac47c39b | 2,588 | # frozen_string_literal: true
require_relative 'test_helper'
require 'okapi/hashing/v1/hashing_pb'
require 'okapi'
require 'okapi/hashing'
class HashTest < Minitest::Test
def before_setup
Okapi.load_native_library
end
# rubocop:disable Layout/LineLength
# taken from: https://raw.githubusercontent.com/BLAKE3-team/BLAKE3/master/test_vectors/test_vectors.json
def test_blake3_hash
hash = 'e1be4d7a8ab5560aa4199eea339849ba8e293d55ca0a81006726d184519e647f5b49b82f805a538c68915c1ae8035c900fd1d4b13902920fd05e1450822f36de9454b7e9996de4900c8e723512883f93f4345f8a58bfe64ee38d3ad71ab027765d25cdd0e448328a8e7a683b9a6af8b0af94fa09010d9186890b096a08471e4230a134'
request = Okapi::Hashing::V1::Blake3HashRequest.new(data: "\x00\x01\x02")
response = Okapi::Hashing.blake3_hash(request)
assert !response.nil?
assert !response.digest.nil?
assert hash.start_with?(response.digest.unpack1('H*'))
end
def test_blake3_keyed_hash
keyed_hash = '39e67b76b5a007d4921969779fe666da67b5213b096084ab674742f0d5ec62b9b9142d0fab08e1b161efdbb28d18afc64d8f72160c958e53a950cdecf91c1a1bbab1a9c0f01def762a77e2e8545d4dec241e98a89b6db2e9a5b070fc110caae2622690bd7b76c02ab60750a3ea75426a6bb8803c370ffe465f07fb57def95df772c39f'
request = Okapi::Hashing::V1::Blake3KeyedHashRequest.new(data: "\x00\x01\x02", key: 'whats the Elvish word for friend')
response = Okapi::Hashing.blake3_keyed_hash(request)
assert !response.nil?
assert !response.digest.nil?
assert keyed_hash.start_with?(response.digest.unpack1('H*'))
end
def test_blake3_derive_key
derive_key = '440aba35cb006b61fc17c0529255de438efc06a8c9ebf3f2ddac3b5a86705797f27e2e914574f4d87ec04c379e12789eccbfbc15892626042707802dbe4e97c3ff59dca80c1e54246b6d055154f7348a39b7d098b2b4824ebe90e104e763b2a447512132cede16243484a55a4e40a85790038bb0dcf762e8c053cabae41bbe22a5bff7'
request = Okapi::Hashing::V1::Blake3DeriveKeyRequest.new(key_material: "\x00\x01\x02", context: 'BLAKE3 2019-12-27 16:29:52 test vectors context')
response = Okapi::Hashing.blake3_derive_key(request)
assert !response.nil?
assert !response.digest.nil?
assert derive_key.start_with?(response.digest.unpack1('H*'))
end
# rubocop:enable Layout/LineLength
def test_sha256_hash
hash = '71b3af35d9d53d24e7462177da41b8acd5e2ef4afc333dd9272cb2ab8743b3db'
request = Okapi::Hashing::V1::SHA256HashRequest.new(data: '4113')
response = Okapi::Hashing.sha256_hash(request)
assert !response.nil?
assert !response.digest.nil?
assert_equal(hash, response.digest.unpack1('H*'))
end
end
| 44.62069 | 281 | 0.816074 |
87339acec890b5f4a3baed22dc9845824734584a | 1,036 | class InboundMailParser
def initialize(msg)
@msg = msg
end
def proposal
@proposal ||= find_proposal(find_public_id(msg)) or return
end
def comment_user
@comment_user ||= find_comment_user(msg)
end
def comment_text
@comment_text ||= find_comment_text(msg)
end
private
attr_reader :msg
def find_proposal(public_id)
Proposal.find_by_public_id(public_id) || Proposal.find(public_id)
end
def find_public_id(msg)
references = msg["headers"]["References"]
ref_re = /<proposal-(\d+)\@.+?>/
sbj_re = /Request\ #?([\w\-]+)/
if references.match(ref_re)
references.match(ref_re)[1]
elsif msg["subject"].match(sbj_re)
msg["subject"].match(sbj_re)[1]
else
fail "Failed to find public_id in msg #{msg.inspect}"
end
end
def find_comment_user(msg)
User.find_by(email_address: msg["from_email"]) || User.find_by(email_address: msg["headers"]["Sender"])
end
def find_comment_text(msg)
EmailReplyParser.parse_reply(msg["text"])
end
end
| 21.583333 | 107 | 0.67471 |
61e4e517e8a15de75adb97738c346b28e773e371 | 1,140 | RSpec.feature "Users can view fund level activities" do
context "when the user belongs to BEIS" do
let(:user) { create(:beis_user) }
before { authenticate!(user: user) }
scenario "can view a fund level activity" do
fund_activity = create(:fund_activity)
programme = create(:programme_activity, parent: fund_activity)
visit organisation_activity_path(programme.extending_organisation, programme)
click_on "Details"
within(".activity-details") do
click_on fund_activity.title
end
page_displays_an_activity(activity_presenter: ActivityPresenter.new(fund_activity))
end
scenario "can view and create programme level activities" do
fund_activity = create(:fund_activity, organisation: user.organisation)
programme_activity = create(:programme_activity)
fund_activity.child_activities << programme_activity
activity_presenter = ActivityPresenter.new(programme_activity)
visit organisation_activity_children_path(fund_activity.organisation, fund_activity)
expect(page).to have_link activity_presenter.display_title
end
end
end
| 36.774194 | 90 | 0.750877 |
f7ef889f514fa1df5bad1e1862d229a2458684e1 | 386 | require "spec_helper"
describe String do
it "acts as proxy for StringMaster using #prep method" do
"<b>This</b> is quite a long text".prep do |s|
s.cut(11, 7, :append => '...')
s.html_escape
end.should == "<b>This...</b>"
"<b>This</b> is quite a long text".prep.cut(11, 7, :append => '...').html_escape == "<b>This...</b>"
end
end
| 27.571429 | 116 | 0.582902 |
62138d3555172a205e398ffddd5a29c2e58c1a42 | 664 | RSpec.shared_context 'Stubbed Apple Receipt Verification' do
let(:verify_url) { 'http://example.com/receipt-verify' }
before do
require 'apple_receipt_service'
stub_const('AppleReceiptService::VERIFICATION_URL', verify_url)
end
subject { described_class.new('fake_receipt') }
def stub_receipt_verification(latest_receipt_info: {}, status: 0)
response = { status: status, latest_receipt_info: latest_receipt_info }.to_json
stub_request(:post, verify_url).to_return(
status: 200,
body: response,
headers: {
'Content-Type' => 'application/json',
'Content-Length' => response.length
}
)
end
end
| 31.619048 | 83 | 0.698795 |
61f6fa222ba710b864dabed1ece384fba4a66205 | 903 | require "vector_salad/standard_shapes/basic_shape"
require "vector_salad/standard_shapes/path"
require "vector_salad/standard_shapes/n"
require "vector_salad/mixins/at"
module VectorSalad
  module StandardShapes
    # Rectangle shape.
    class Rect < BasicShape
      include VectorSalad::Mixins::At

      attr_reader :width, :height

      # Create a rectangle with specified width and height.
      # The origin starts at (0, 0); Mixins::At presumably provides the
      # repositioning API — TODO confirm against the mixin.
      #
      # @example
      #   new(100, 200)
      Contract Pos, Pos, {} => Rect
      def initialize(width, height, **options)
        @width, @height = width, height
        @options = options
        @x, @y = 0, 0
        # NOTE(review): `new` returns the instance regardless, but the
        # explicit `self` makes initialize's return value a Rect, matching
        # the `=> Rect` contract above — keep it.
        self
      end

      # Convert the shape to a path: the four corner nodes starting at
      # (@x, @y), with the rectangle's options passed through to the Path.
      def to_path
        Path.new(
          N.n(@x, @y),
          N.n(@x, @y + @height),
          N.n(@x + @width, @y + @height),
          N.n(@x + @width, @y),
          **@options
        )
      end
    end
  end
end
| 23.763158 | 59 | 0.569214 |
3992cdd6c22d3bf7c7624b89b50c3af08d440dd8 | 4,861 | require_relative 'test_persister'
module TargetedRefreshSpecHelper
def create_persister
TestPersister.new(@ems, ManagerRefresh::TargetCollection.new(:manager => @ems))
end
def expected_ext_management_systems_count
2
end
def base_inventory_counts
{
:auth_private_key => 0,
:availability_zone => 0,
:cloud_network => 0,
:cloud_subnet => 0,
:cloud_volume => 0,
:cloud_volume_backup => 0,
:cloud_volume_snapshot => 0,
:custom_attribute => 0,
:disk => 0,
:ext_management_system => expected_ext_management_systems_count,
:firewall_rule => 0,
:flavor => 0,
:floating_ip => 0,
:guest_device => 0,
:hardware => 0,
:miq_template => 0,
:network => 0,
:network_port => 0,
:network_router => 0,
:operating_system => 0,
:orchestration_stack => 0,
:orchestration_stack_output => 0,
:orchestration_stack_parameter => 0,
:orchestration_stack_resource => 0,
:orchestration_template => 0,
:security_group => 0,
:snapshot => 0,
:system_service => 0,
:vm => 0,
:vm_or_template => 0
}
end
def assert_counts(expected_table_counts)
expected_counts = base_inventory_counts.merge(expected_table_counts)
assert_table_counts(expected_counts)
assert_ems(expected_counts)
end
def assert_table_counts(expected_table_counts)
actual = {
:auth_private_key => AuthPrivateKey.count,
:cloud_volume => CloudVolume.count,
:cloud_volume_backup => CloudVolumeBackup.count,
:cloud_volume_snapshot => CloudVolumeSnapshot.count,
:ext_management_system => ExtManagementSystem.count,
:flavor => Flavor.count,
:availability_zone => AvailabilityZone.count,
:vm_or_template => VmOrTemplate.count,
:vm => Vm.count,
:miq_template => MiqTemplate.count,
:disk => Disk.count,
:guest_device => GuestDevice.count,
:hardware => Hardware.count,
:network => Network.count,
:operating_system => OperatingSystem.count,
:snapshot => Snapshot.count,
:system_service => SystemService.count,
:orchestration_template => OrchestrationTemplate.count,
:orchestration_stack => OrchestrationStack.count,
:orchestration_stack_parameter => OrchestrationStackParameter.count,
:orchestration_stack_output => OrchestrationStackOutput.count,
:orchestration_stack_resource => OrchestrationStackResource.count,
:security_group => SecurityGroup.count,
:firewall_rule => FirewallRule.count,
:network_port => NetworkPort.count,
:cloud_network => CloudNetwork.count,
:floating_ip => FloatingIp.count,
:network_router => NetworkRouter.count,
:cloud_subnet => CloudSubnet.count,
:custom_attribute => CustomAttribute.count
}
expect(actual).to eq expected_table_counts
end
def assert_ems(expected_table_counts)
expect(@ems).to have_attributes(
:api_version => nil, # TODO: Should be 3.0
:uid_ems => nil
)
expect(@ems.flavors.size).to eql(expected_table_counts[:flavor])
expect(@ems.availability_zones.size).to eql(expected_table_counts[:availability_zone])
expect(@ems.vms_and_templates.size).to eql(expected_table_counts[:vm_or_template])
expect(@ems.security_groups.size).to eql(expected_table_counts[:security_group])
expect(@ems.network_ports.size).to eql(expected_table_counts[:network_port])
expect(@ems.cloud_networks.size).to eql(expected_table_counts[:cloud_network])
expect(@ems.floating_ips.size).to eql(expected_table_counts[:floating_ip])
expect(@ems.network_routers.size).to eql(expected_table_counts[:network_router])
expect(@ems.cloud_subnets.size).to eql(expected_table_counts[:cloud_subnet])
expect(@ems.miq_templates.size).to eq(expected_table_counts[:miq_template])
expect(@ems.orchestration_stacks.size).to eql(expected_table_counts[:orchestration_stack])
end
end
| 45.009259 | 94 | 0.575396 |
b9909b5f2ae7993459ef61abb70133c859a4508a | 1,104 | name "php"
maintainer "Opscode, Inc."
maintainer_email "[email protected]"
license "Apache 2.0"
description "Installs and maintains php and php modules"
version "1.1.0"
depends "build-essential"
depends "xml"
depends "mysql"
%w{ debian ubuntu centos redhat fedora scientific amazon }.each do |os|
supports os
end
recipe "php", "Installs php"
recipe "php::package", "Installs php using packages."
recipe "php::source", "Installs php from source."
recipe "php::module_apc", "Install the php5-apc package"
recipe "php::module_curl", "Install the php5-curl package"
recipe "php::module_fileinfo", "Install the php5-fileinfo package"
recipe "php::module_fpdf", "Install the php-fpdf package"
recipe "php::module_gd", "Install the php5-gd package"
recipe "php::module_ldap", "Install the php5-ldap package"
recipe "php::module_memcache", "Install the php5-memcache package"
recipe "php::module_mysql", "Install the php5-mysql package"
recipe "php::module_pgsql", "Install the php5-pgsql packag"
recipe "php::module_sqlite3", "Install the php5-sqlite3 package"
| 38.068966 | 71 | 0.73279 |
acf0eace8cdfdd024246e3db6bf19d625bc7b10e | 727 | class DashboardController < ApplicationController
before_action :authenticate_user!
# Dashboard landing action: gathers the policy-scoped volunteers, CASA
# cases, recent case contacts, and supervisors for the current organization.
def show
  authorize :dashboard
  # Return all active/inactive volunteers, inactive will be filtered by default
  @volunteers = policy_scope(
    current_organization.volunteers.includes(:versions, :supervisor, :casa_cases, case_assignments: [:casa_case]).references(:supervisor, :casa_cases)
  ).decorate
  @casa_cases = policy_scope(current_organization.casa_cases.includes(:case_assignments, :volunteers))
  # Contacts limited to the cases visible above, newest first
  @case_contacts = policy_scope(CaseContact.where(
    casa_case_id: @casa_cases.map(&:id)
  )).order(occurred_at: :desc).decorate
  @supervisors = policy_scope(current_organization.supervisors)
end
end
| 34.619048 | 152 | 0.766162 |
bba571c5e0eea4d99f37f66784f0e7a4b4df6f2a | 118 | json.liquid_template do
json.id @liquid_template.id
json.href api_admin_liquid_template_url(@liquid_template)
end
| 23.6 | 59 | 0.838983 |
1daf1ed40756593f9c5e225ac570d31440155274 | 930 | cask "kindavim" do
version "7.0.0"
sha256 :no_check
url "https://kindavim.app/releases/kindaVim.zip"
name "kindaVim"
desc "Use Vim in input fields and non input fields"
homepage "https://kindavim.app/"
livecheck do
url "https://kindavim.app/releases/appcast.xml"
strategy :sparkle
end
depends_on macos: ">= :monterey"
app "kindaVim.app"
zap trash: [
"~/Library/Application Scripts/mo.com.sleeplessmind.kindaVim",
"~/Library/Application Scripts/mo.com.sleeplessmind.kindaVim-LaunchAtLoginHelper",
"~/Library/Application Support/kindaVim",
"~/Library/Caches/mo.com.sleeplessmind.kindaVim",
"~/Library/Containers/mo.com.sleeplessmind.kindaVim",
"~/Library/Containers/mo.com.sleeplessmind.kindaVim-LaunchAtLoginHelper",
"~/Library/Preferences/mo.com.sleeplessmind.kindaVim.plist",
"~/Library/Saved Application State/mo.com.sleeplessmind.kindaVim.savedState",
]
end
| 31 | 86 | 0.727957 |
f88aad9fdbf158f4d92b6a8dca2230ff9f7b5e14 | 400 | Rails.application.config.middleware.use OmniAuth::Builder do
provider :google_oauth2, ENV['GOOGLE_OMNIAUTH_PROVIDER_KEY'], ENV['GOOGLE_OMNIAUTH_PROVIDER_SECRET']
provider :facebook, ENV['FACEBOOK_APP_ID'], ENV['FACEBOOK_SECRET'], {:provider_ignores_state => true, :client_options => {:ssl => {:ca_path => "/etc/ssl/certs"}}}
provider :twitter, ENV['TWITTER_API_KEY'], ENV['TWITTER_SECRET']
end
| 66.666667 | 164 | 0.76 |
26b5230b64b4920c19d6d31a7486b5ff970e3c46 | 2,527 | require 'rails_helper'
RSpec.describe "Followings", type: :request do
let!(:user1) { create(:user) }
let!(:user2) { create(:other_user) }
let!(:user3) { create(:taro) }
before do
create_list(:post1, 30, user_id: user1.id)
create_list(:post2, 30, user_id: user2.id)
create_list(:post3, 30, user_id: user3.id)
Relationship.create(follower_id: user1.id, followed_id: user2.id)
Relationship.create(follower_id: user2.id, followed_id: user3.id)
Relationship.create(follower_id: user3.id, followed_id: user1.id)
allow_any_instance_of(ActionDispatch::Request)
.to receive(:session).and_return(user_id: user1.id)
end
describe `following page` do
before { get following_user_path(user1) }
it `has folloing` do
expect(user1.following.empty?).to eq false
expect(response.body).to include user1.following.count.to_s
user1.following.each do |user|
expect(response.body).to include user_path(user)
end
end
end
describe `followers page` do
before { get followers_user_path(user1) }
it `has folloing` do
expect(user1.followers.empty?).to eq false
expect(response.body).to include user1.followers.count.to_s
user1.followers.each do |user|
expect(response.body).to include user_path(user)
end
end
end
describe `#create` do
context `not use Ajax` do
it `adds followings` do
expect { post relationships_path, params: { followed_id: user3.id } }.to change { user1.following.count }.by(1)
end
end
context `user Ajax` do
it `adds followings` do
expect { post relationships_path, xhr: true, params: { followed_id: user3.id } }.to change { user1.following.count }.by(1)
end
end
end
describe `#destroy` do
let(:relationship) { user1.active_relationships.find_by(followed_id: user3.id) }
before { user1.follow(user3) }
context `not use Ajax` do
it `decrease followings` do
expect { delete relationship_path(relationship) }.to change { user1.following.count }.by(-1)
end
end
context `user Ajax` do
it `decrease followings` do
expect { delete relationship_path(relationship), xhr: true }.to change { user1.following.count }.by(-1)
end
end
end
describe `Home Page` do
before { get root_path }
it `shows feed` do
user1.feed.paginate(page: 1).each do |micropost|
expect(response.body).to include CGI.escapeHTML(micropost.content)
end
end
end
end
| 32.397436 | 130 | 0.671943 |
080504d27349d7d5b828dfcb16c2f8786204f097 | 29,822 | #
# a language data file for Ruby/CLDR
#
# Generated by: CLDR::Generator
#
# CLDR version: 1.3
#
# Original file name: common/main/pt.xml
# Original file revision: 1.42 $
#
# Copyright (C) 2006 Masao Mutoh
#
# This file is distributed under the same license as the Ruby/CLDR.
#
private

# Populates the per-locale CLDR data tables for Portuguese ("pt").
# NOTE: this file is machine-generated by CLDR::Generator from
# common/main/pt.xml (CLDR 1.3) — do not hand-edit the data; regenerate
# it from the CLDR source instead.
def init_data
  # ISO 639 language code => Portuguese display name.
  @languages = {}
  @languages["aa"] = "afar"
  @languages["ab"] = "abkhazian"
  @languages["ace"] = "achinese"
  @languages["ach"] = "acoli"
  @languages["ada"] = "adangme"
  @languages["ady"] = "adyghe"
  @languages["ae"] = "avéstico"
  @languages["af"] = "africâner"
  @languages["afa"] = "afro-asiático (outros)"
  @languages["afh"] = "Afrihili"
  @languages["ak"] = "Akan"
  @languages["akk"] = "acadiano"
  @languages["ale"] = "aleúte"
  @languages["alg"] = "idiomas algonquianos"
  @languages["am"] = "amárico"
  @languages["an"] = "aragonês"
  @languages["ang"] = "inglês, arcaico (aprox. 450-1100)"
  @languages["apa"] = "idiomas apache"
  @languages["ar"] = "árabe"
  @languages["arc"] = "aramaico"
  @languages["arn"] = "araucano"
  @languages["arp"] = "arapaho"
  @languages["art"] = "artificiais (outros)"
  @languages["arw"] = "arauaqui"
  @languages["as"] = "assamês"
  @languages["ast"] = "asturiano"
  @languages["ath"] = "idiomas atabascanos"
  @languages["aus"] = "idiomas australianos"
  @languages["av"] = "avaric"
  @languages["awa"] = "Awadhi"
  @languages["ay"] = "aimara"
  @languages["az"] = "azerbaijano"
  @languages["ba"] = "bashkir"
  @languages["bad"] = "banda"
  @languages["bai"] = "bamileke Languages"
  @languages["bal"] = "balúchi"
  @languages["ban"] = "balinês"
  @languages["bas"] = "basa"
  @languages["bat"] = "bálticos (outros)"
  @languages["be"] = "bielo-russo"
  @languages["bej"] = "beja"
  @languages["bem"] = "bemba"
  @languages["ber"] = "berbere"
  @languages["bg"] = "búlgaro"
  @languages["bh"] = "biari"
  @languages["bho"] = "bhojpuri"
  @languages["bi"] = "bislamá"
  @languages["bik"] = "bikol"
  @languages["bin"] = "bini"
  @languages["bla"] = "siksika"
  @languages["bm"] = "bambara"
  @languages["bn"] = "bengali"
  @languages["bnt"] = "banto"
  @languages["bo"] = "tibetano"
  @languages["br"] = "bretão"
  @languages["bra"] = "braj"
  @languages["bs"] = "bósnio"
  @languages["btk"] = "bataque"
  @languages["bua"] = "Buriat"
  @languages["bug"] = "Buginese"
  @languages["byn"] = "Blin"
  @languages["ca"] = "catalão"
  @languages["cad"] = "caddo"
  @languages["cai"] = "indígenas centro-americanos (outros)"
  @languages["car"] = "caribe"
  @languages["cau"] = "caucasianos (outros)"
  @languages["ce"] = "chechene"
  @languages["ceb"] = "cebuano"
  @languages["cel"] = "célticos (outros)"
  @languages["ch"] = "chamorro"
  @languages["chb"] = "chibcha"
  @languages["chg"] = "chagatai"
  @languages["chk"] = "chuukese"
  @languages["chm"] = "mari"
  @languages["chn"] = "chinook jargon"
  @languages["cho"] = "choctaw"
  @languages["chp"] = "chipewyan"
  @languages["chr"] = "cheroqui"
  @languages["chy"] = "cheiene"
  @languages["cmc"] = "chamic languages"
  @languages["co"] = "córsico"
  @languages["cop"] = "copta"
  @languages["cpe"] = "crioulos e pídgin, inglês (outros)"
  @languages["cpf"] = "crioulos e pídgin, francês (outros)"
  @languages["cpp"] = "crioulos e pídgin, português (outros)"
  @languages["cr"] = "cree"
  @languages["crh"] = "crimean turkish; crimean tatar"
  @languages["crp"] = "crioulos e pídgins (outros)"
  @languages["cs"] = "tcheco"
  @languages["csb"] = "kashubian"
  @languages["cu"] = "eslavo eclesiástico"
  @languages["cus"] = "cuxitas (outros)"
  @languages["cv"] = "chuvash"
  @languages["cy"] = "galês"
  @languages["da"] = "dinamarquês"
  @languages["dak"] = "dacota"
  @languages["dar"] = "dargwa"
  @languages["day"] = "dayak"
  @languages["de"] = "alemão"
  @languages["del"] = "delaware"
  @languages["den"] = "slave"
  @languages["dgr"] = "dogrib"
  @languages["din"] = "dinka"
  @languages["doi"] = "dogri"
  @languages["dra"] = "dravídicos (outros)"
  @languages["dsb"] = "Lower Sorbian"
  @languages["dua"] = "duala"
  @languages["dum"] = "holandês, medieval (aprox. 1050-1350)"
  @languages["dv"] = "divehi"
  @languages["dyu"] = "diúla"
  @languages["dz"] = "dzonga"
  @languages["ee"] = "eve"
  @languages["efi"] = "efique"
  @languages["egy"] = "egípcio (arcaico)"
  @languages["eka"] = "ekajuk"
  @languages["el"] = "grego"
  @languages["elx"] = "elamite"
  @languages["en"] = "inglês"
  @languages["enm"] = "inglês, medieval (1100-1500)"
  @languages["eo"] = "esperanto"
  @languages["es"] = "espanhol"
  @languages["et"] = "estoniano"
  @languages["eu"] = "basco"
  @languages["ewo"] = "ewondo"
  @languages["fa"] = "persa"
  @languages["fan"] = "fangue"
  @languages["fat"] = "fanti"
  @languages["ff"] = "fula"
  @languages["fi"] = "finlandês"
  @languages["fiu"] = "ugro-finês (outros)"
  @languages["fj"] = "fijiano"
  @languages["fo"] = "feroês"
  @languages["fon"] = "fom"
  @languages["fr"] = "francês"
  @languages["frm"] = "francês, medieval (aprox.1400-1600)"
  @languages["fro"] = "francês, arcaico (842-aprox.1400)"
  @languages["fur"] = "friulano"
  @languages["fy"] = "frisão"
  @languages["ga"] = "irlandês"
  @languages["gaa"] = "ga"
  @languages["gay"] = "gayo"
  @languages["gba"] = "gbaia"
  @languages["gd"] = "gaélico escocês"
  @languages["gem"] = "germânicos (outros)"
  @languages["gez"] = "geez"
  @languages["gil"] = "gilbertês"
  @languages["gl"] = "galego"
  @languages["gmh"] = "alemão, medieval alto (aprox.1050-1500)"
  @languages["gn"] = "guarani"
  @languages["goh"] = "alemão, arcaico alto (aprox.750-1050)"
  @languages["gon"] = "gondi"
  @languages["gor"] = "gorontalo"
  @languages["got"] = "gótico"
  @languages["grb"] = "Gerbo"
  @languages["grc"] = "grego, arcaico (até 1453)"
  @languages["gu"] = "guzerate"
  @languages["gv"] = "manx"
  @languages["gwi"] = "gwichʼin"
  @languages["ha"] = "hauçá"
  @languages["hai"] = "haida"
  @languages["haw"] = "havaiano"
  @languages["he"] = "hebraico"
  @languages["hi"] = "hindi"
  @languages["hil"] = "hiligaynon"
  @languages["him"] = "himachali"
  @languages["hit"] = "hitita"
  @languages["hmn"] = "hmong"
  @languages["ho"] = "hiri motu"
  @languages["hr"] = "croata"
  @languages["hsb"] = "upper sorbian"
  @languages["ht"] = "haitiano"
  @languages["hu"] = "húngaro"
  @languages["hup"] = "hupa"
  @languages["hy"] = "armênio"
  @languages["hz"] = "herero"
  @languages["ia"] = "interlíngua"
  @languages["iba"] = "Iban"
  @languages["id"] = "indonésio"
  @languages["ie"] = "interlingue"
  @languages["ig"] = "ibo"
  @languages["ii"] = "sichuan yi"
  @languages["ik"] = "Inupiaq"
  @languages["ilo"] = "ilocano"
  @languages["inc"] = "índicos (outros)"
  @languages["ine"] = "indo-europeus (outros)"
  @languages["inh"] = "inguche"
  @languages["io"] = "ido"
  @languages["ira"] = "iraniano"
  @languages["iro"] = "idiomas iroqueses"
  @languages["is"] = "islandês"
  @languages["it"] = "italiano"
  @languages["iu"] = "inuktitut"
  @languages["ja"] = "japonês"
  @languages["jbo"] = "lojban"
  @languages["jpr"] = "judaico-persa"
  @languages["jrb"] = "judaico-arábico"
  @languages["ka"] = "georgiano"
  @languages["kaa"] = "kara-Kalpak"
  @languages["kab"] = "kabyle"
  @languages["kac"] = "kachin"
  @languages["kam"] = "kamba"
  @languages["kar"] = "karen"
  @languages["kaw"] = "kawi"
  @languages["kbd"] = "kabardian"
  @languages["kg"] = "congolês"
  @languages["kha"] = "khasi"
  @languages["khi"] = "khoisan (other)"
  @languages["kho"] = "khotanese"
  @languages["ki"] = "quicuio"
  @languages["kj"] = "Kuanyama"
  @languages["kk"] = "cazaque"
  @languages["kl"] = "groenlandês"
  @languages["km"] = "cmer"
  @languages["kmb"] = "quimbundo"
  @languages["kn"] = "canarês"
  @languages["ko"] = "coreano"
  @languages["kok"] = "concani"
  @languages["kos"] = "kosraean"
  @languages["kpe"] = "kpelle"
  @languages["kr"] = "canúri"
  @languages["krc"] = "karachay-Balkar"
  @languages["kro"] = "kru"
  @languages["kru"] = "kurukh"
  @languages["ks"] = "kashmiri"
  @languages["ku"] = "curdo"
  @languages["kum"] = "kumyk"
  @languages["kut"] = "kutenai"
  @languages["kv"] = "komi"
  @languages["kw"] = "córnico"
  @languages["ky"] = "quirguiz"
  @languages["la"] = "latim"
  @languages["lad"] = "ladino"
  @languages["lah"] = "lahnda"
  @languages["lam"] = "lamba"
  @languages["lb"] = "luxemburguês"
  @languages["lez"] = "lezghian"
  @languages["lg"] = "luganda"
  @languages["li"] = "limburgish"
  @languages["ln"] = "lingala"
  @languages["lo"] = "laosiano"
  @languages["lol"] = "mongo"
  @languages["loz"] = "lozi"
  @languages["lt"] = "lituano"
  @languages["lu"] = "luba-catanga"
  @languages["lua"] = "luba-Lulua"
  @languages["lui"] = "luiseno"
  @languages["lun"] = "lunda"
  @languages["lus"] = "lushai"
  @languages["lv"] = "letão"
  @languages["mad"] = "madurês"
  @languages["mag"] = "magahi"
  @languages["mai"] = "maithili"
  @languages["mak"] = "makasar"
  @languages["man"] = "mandinga"
  @languages["map"] = "austronésio"
  @languages["mas"] = "massai"
  @languages["mdf"] = "mocsa"
  @languages["mdr"] = "mandar"
  @languages["men"] = "mende"
  @languages["mg"] = "malgaxe"
  @languages["mga"] = "irlandês, medieval (900-1200)"
  @languages["mh"] = "marshallês"
  @languages["mi"] = "maori"
  @languages["mic"] = "miquemaque"
  @languages["min"] = "minangkabau"
  @languages["mis"] = "idiomas diversos"
  @languages["mk"] = "macedônio"
  @languages["mkh"] = "mon-khmer (other)"
  @languages["ml"] = "malaiala"
  @languages["mn"] = "mongol"
  @languages["mnc"] = "manchu"
  @languages["mni"] = "manipuri"
  @languages["mno"] = "manobo languages"
  @languages["mo"] = "moldávio"
  @languages["moh"] = "mohawk"
  @languages["mos"] = "mossi"
  @languages["mr"] = "marata"
  @languages["ms"] = "malaio"
  @languages["mt"] = "maltês"
  @languages["mul"] = "idiomas múltiplos"
  @languages["mun"] = "idiomas munda"
  @languages["mus"] = "creek"
  @languages["mwr"] = "marwari"
  @languages["my"] = "birmanês"
  @languages["myn"] = "maia"
  @languages["myv"] = "erzya"
  @languages["na"] = "nauruano"
  @languages["nah"] = "náuatle"
  @languages["nai"] = "indígenas norte-americanos (outros)"
  @languages["nap"] = "napolitano"
  @languages["nb"] = "bokmål norueguês"
  @languages["nd"] = "ndebele, north"
  @languages["nds"] = "alto alemão; baixo saxão"
  @languages["ne"] = "nepali"
  @languages["new"] = "newari"
  @languages["ng"] = "dongo"
  @languages["nia"] = "nias"
  @languages["nic"] = "niger - kordofanian (other)"
  @languages["niu"] = "niueano"
  @languages["nl"] = "holandês"
  @languages["nn"] = "nynorsk norueguês"
  @languages["no"] = "norueguês"
  @languages["nog"] = "nogai"
  @languages["non"] = "norse, old"
  @languages["nr"] = "ndebele, south"
  @languages["nso"] = "soto, setentrional"
  @languages["nub"] = "idiomas núbios"
  @languages["nv"] = "navajo"
  @languages["ny"] = "nianja; chicheua; cheua"
  @languages["nym"] = "nyamwezi"
  @languages["nyn"] = "nyankole"
  @languages["nyo"] = "nyoro"
  @languages["nzi"] = "nzima"
  @languages["oc"] = "occitânico (após 1500); provençal"
  @languages["oj"] = "ojibwa"
  @languages["om"] = "oromo"
  @languages["or"] = "oriya"
  @languages["os"] = "ossetic"
  @languages["osa"] = "osage"
  @languages["ota"] = "turco, otomano (1500-1928)"
  @languages["oto"] = "idiomas otomanos"
  @languages["pa"] = "panjabi"
  @languages["paa"] = "papuanos (outros)"
  @languages["pag"] = "pangasinã"
  @languages["pal"] = "pálavi"
  @languages["pam"] = "pampanga"
  @languages["pap"] = "papiamento"
  @languages["pau"] = "palauano"
  @languages["peo"] = "persa arcaico (aprox. 600-400 a.C.)"
  @languages["phi"] = "filipinos (outros)"
  @languages["phn"] = "fenício"
  @languages["pi"] = "páli"
  @languages["pl"] = "polonês"
  @languages["pon"] = "pohnpeian"
  @languages["pra"] = "idiomas prácrito"
  @languages["pro"] = "provençal, arcaico (até 1500)"
  @languages["ps"] = "pashto (pushto)"
  @languages["pt"] = "português"
  @languages["qu"] = "quíchua"
  @languages["raj"] = "rajastani"
  @languages["rap"] = "rapanui"
  @languages["rar"] = "rarotongano"
  @languages["rm"] = "rhaeto-romance"
  @languages["rn"] = "rundi"
  @languages["ro"] = "romeno"
  @languages["roa"] = "romances (outros)"
  @languages["rom"] = "romani"
  @languages["ru"] = "russo"
  @languages["rw"] = "kinyarwanda"
  @languages["sa"] = "sânscrito"
  @languages["sad"] = "sandawe"
  @languages["sah"] = "iacuto"
  @languages["sai"] = "indígenas sul-americanos (outros)"
  @languages["sal"] = "salishan languages"
  @languages["sam"] = "aramaico samaritano"
  @languages["sas"] = "sasak"
  @languages["sat"] = "santali"
  @languages["sc"] = "sardo"
  @languages["sco"] = "escocês"
  @languages["sd"] = "sindi"
  @languages["se"] = "northern sami"
  @languages["sel"] = "selkup"
  @languages["sem"] = "semíticos (outros)"
  @languages["sg"] = "sango"
  @languages["sga"] = "irlandês, arcaico (até 900)"
  @languages["sgn"] = "linguages de sinais"
  @languages["sh"] = "servo-croata"
  @languages["shn"] = "shan"
  @languages["si"] = "cingalês"
  @languages["sid"] = "sidamo"
  @languages["sio"] = "idiomas sioux"
  @languages["sit"] = "sino-tibetanos (outros)"
  @languages["sk"] = "eslovaco"
  @languages["sl"] = "eslovênio"
  @languages["so"] = "somali"
  @languages["sog"] = "sogdien"
  @languages["son"] = "songai"
  @languages["sq"] = "albanês"
  @languages["sr"] = "sérvio"
  @languages["srr"] = "serere"
  @languages["ss"] = "swati"
  @languages["ssa"] = "nilo-saarianos (outros)"
  @languages["st"] = "soto, do sul"
  @languages["su"] = "sundanês"
  @languages["suk"] = "sukuma"
  @languages["sus"] = "sosso"
  @languages["sux"] = "sumério"
  @languages["sv"] = "sueco"
  @languages["sw"] = "suaíli"
  @languages["syr"] = "siríaco"
  @languages["ta"] = "tâmil"
  @languages["tai"] = "tai (outros)"
  @languages["te"] = "telugu"
  @languages["tem"] = "timne"
  @languages["ter"] = "tereno"
  @languages["tet"] = "tétum"
  @languages["tg"] = "tadjique"
  @languages["th"] = "tailandês"
  @languages["ti"] = "tigrínia"
  @languages["tig"] = "tigré"
  @languages["tk"] = "turcomano"
  @languages["tkl"] = "toquelauano"
  @languages["tli"] = "tlinguite"
  @languages["tmh"] = "tamaxeque"
  @languages["tn"] = "tswana"
  @languages["to"] = "tonga (ilhas tonga)"
  @languages["tog"] = "toganês (Nyasa)"
  @languages["tpi"] = "tok pisin"
  @languages["tr"] = "turco"
  @languages["ts"] = "tsonga"
  @languages["tsi"] = "tsimshian"
  @languages["tt"] = "tatar"
  @languages["tum"] = "tumbuka"
  @languages["tup"] = "idiomas tupi"
  @languages["tut"] = "altaicos (outros)"
  @languages["tvl"] = "tuvaluano"
  @languages["tw"] = "twi"
  @languages["ty"] = "taitiano"
  @languages["tyv"] = "tuvinian"
  @languages["udm"] = "udmurt"
  @languages["ug"] = "uighur"
  @languages["uga"] = "ugarítico"
  @languages["uk"] = "ucraniano"
  @languages["umb"] = "umbundu"
  @languages["und"] = "indeterminado"
  @languages["ur"] = "urdu"
  @languages["uz"] = "usbeque"
  @languages["ve"] = "venda"
  @languages["vi"] = "vietnamita"
  @languages["vo"] = "volapuque"
  @languages["vot"] = "votic"
  @languages["wa"] = "walloon"
  @languages["wak"] = "wakashan languages"
  @languages["wal"] = "walamo"
  @languages["war"] = "waray"
  @languages["was"] = "washo"
  @languages["wen"] = "sorbian languages"
  @languages["wo"] = "uolofe"
  @languages["xal"] = "kalmyk"
  @languages["xh"] = "xosa"
  @languages["yao"] = "iao"
  @languages["yap"] = "yapese"
  @languages["yi"] = "iídiche"
  @languages["yo"] = "ioruba"
  @languages["ypk"] = "idiomas iúpique"
  @languages["za"] = "zhuang"
  @languages["zap"] = "zapoteca"
  @languages["zen"] = "zenaga"
  @languages["zh"] = "chinês"
  @languages["znd"] = "zande"
  @languages["zu"] = "zulu"
  @languages["zun"] = "zunhi"
  # ISO 3166 territory code => Portuguese display name.
  @territories = {}
  @territories["AD"] = "Andorra"
  @territories["AE"] = "Emirados Árabes Unidos"
  @territories["AF"] = "Afeganistão"
  @territories["AG"] = "Antígua e Barbuda"
  @territories["AI"] = "Anguilla"
  @territories["AL"] = "Albânia"
  @territories["AM"] = "Armênia"
  @territories["AN"] = "Antilhas Holandesas"
  @territories["AO"] = "Angola"
  @territories["AQ"] = "Antártida"
  @territories["AR"] = "Argentina"
  @territories["AS"] = "Samoa Americana"
  @territories["AT"] = "Áustria"
  @territories["AU"] = "Austrália"
  @territories["AW"] = "Aruba"
  @territories["AZ"] = "Azerbaijão"
  @territories["BA"] = "Bósnia-Herzegóvina"
  @territories["BB"] = "Barbados"
  @territories["BD"] = "Bangladesh"
  @territories["BE"] = "Bélgica"
  @territories["BF"] = "Burquina Faso"
  @territories["BG"] = "Bulgária"
  @territories["BH"] = "Bareine"
  @territories["BI"] = "Burundi"
  @territories["BJ"] = "Benin"
  @territories["BM"] = "Bermudas"
  @territories["BN"] = "Brunei"
  @territories["BO"] = "Bolívia"
  @territories["BR"] = "Brasil"
  @territories["BS"] = "Bahamas"
  @territories["BT"] = "Butão"
  @territories["BV"] = "Ilha Bouvet"
  @territories["BW"] = "Botsuana"
  @territories["BY"] = "Belarus"
  @territories["BZ"] = "Belize"
  @territories["CA"] = "Canadá"
  @territories["CC"] = "Ilhas Cocos (Keeling)"
  @territories["CD"] = "Congo, República Democrática do"
  @territories["CF"] = "República Centro-Africana"
  @territories["CG"] = "Congo"
  @territories["CH"] = "Suíça"
  @territories["CI"] = "Costa do Marfim"
  @territories["CK"] = "Ilhas Cook"
  @territories["CL"] = "Chile"
  @territories["CM"] = "República dos Camarões"
  @territories["CN"] = "China"
  @territories["CO"] = "Colômbia"
  @territories["CR"] = "Costa Rica"
  @territories["CS"] = "Sérvia e Montenegro"
  @territories["CU"] = "Cuba"
  @territories["CV"] = "Cabo Verde"
  @territories["CX"] = "Ilhas Natal"
  @territories["CY"] = "Chipre"
  @territories["CZ"] = "República Tcheca"
  @territories["DE"] = "Alemanha"
  @territories["DJ"] = "Djibuti"
  @territories["DK"] = "Dinamarca"
  @territories["DM"] = "Dominica"
  @territories["DO"] = "República Dominicana"
  @territories["DZ"] = "Argélia"
  @territories["EC"] = "Equador"
  @territories["EE"] = "Estônia"
  @territories["EG"] = "Egito"
  @territories["EH"] = "Saara Ocidental"
  @territories["ER"] = "Eritréia"
  @territories["ES"] = "Espanha"
  @territories["ET"] = "Etiópia"
  @territories["FI"] = "Finlândia"
  @territories["FJ"] = "Fiji"
  @territories["FK"] = "Ilhas Malvinas"
  @territories["FM"] = "Micronésia, Estados Federados da"
  @territories["FO"] = "Ilhas Faroe"
  @territories["FR"] = "França"
  @territories["GA"] = "Gabão"
  @territories["GB"] = "Reino Unido"
  @territories["GD"] = "Granada"
  @territories["GE"] = "Geórgia"
  @territories["GF"] = "Guiana Francesa"
  @territories["GH"] = "Gana"
  @territories["GI"] = "Gibraltar"
  @territories["GL"] = "Groênlandia"
  @territories["GM"] = "Gâmbia"
  @territories["GN"] = "Guiné"
  @territories["GP"] = "Guadalupe"
  @territories["GQ"] = "Guiné Equatorial"
  @territories["GR"] = "Grécia"
  @territories["GS"] = "Geórgia do Sul e Ilhas Sandwich do Sul"
  @territories["GT"] = "Guatemala"
  @territories["GU"] = "Guam"
  @territories["GW"] = "Guiné Bissau"
  @territories["GY"] = "Guiana"
  @territories["HK"] = "Hong Kong, Região Admin. Especial da China"
  @territories["HM"] = "Ilha Heard e Ilhas McDonald"
  @territories["HN"] = "Honduras"
  @territories["HR"] = "Croácia"
  @territories["HT"] = "Haiti"
  @territories["HU"] = "Hungria"
  @territories["ID"] = "Indonésia"
  @territories["IE"] = "Irlanda"
  @territories["IL"] = "Israel"
  @territories["IN"] = "Índia"
  @territories["IO"] = "Território Britânico do Oceano Índico"
  @territories["IQ"] = "Iraque"
  @territories["IR"] = "Irã"
  @territories["IS"] = "Islândia"
  @territories["IT"] = "Itália"
  @territories["JM"] = "Jamaica"
  @territories["JO"] = "Jordânia"
  @territories["JP"] = "Japão"
  @territories["KE"] = "Quênia"
  @territories["KG"] = "Quirguistão"
  @territories["KH"] = "Camboja"
  @territories["KI"] = "Quiribati"
  @territories["KM"] = "Comores"
  @territories["KN"] = "São Cristovão e Nevis"
  @territories["KP"] = "Coréia, Norte"
  @territories["KR"] = "Coréia, Sul"
  @territories["KW"] = "Kuwait"
  @territories["KY"] = "Ilhas Caiman"
  @territories["KZ"] = "Casaquistão"
  @territories["LA"] = "República Democrática Popular de Lao"
  @territories["LB"] = "Líbano"
  @territories["LC"] = "Santa Lúcia"
  @territories["LI"] = "Liechtenstein"
  @territories["LK"] = "Sri Lanka"
  @territories["LR"] = "Libéria"
  @territories["LS"] = "Lesoto"
  @territories["LT"] = "Lituânia"
  @territories["LU"] = "Luxemburgo"
  @territories["LV"] = "Letônia"
  @territories["LY"] = "Líbia"
  @territories["MA"] = "Marrocos"
  @territories["MC"] = "Mônaco"
  @territories["MD"] = "Moldova, República de"
  @territories["MG"] = "Madagascar"
  @territories["MH"] = "Ilhas Marshall"
  @territories["MK"] = "Macedônia, República da"
  @territories["ML"] = "Mali"
  @territories["MM"] = "Mianmá"
  @territories["MN"] = "Mongólia"
  @territories["MO"] = "Macau, Região Admin. Especial da China"
  @territories["MP"] = "Ilhas Marianas do Norte"
  @territories["MQ"] = "Martinica"
  @territories["MR"] = "Mauritânia"
  @territories["MS"] = "Montserrat"
  @territories["MT"] = "Malta"
  @territories["MU"] = "Maurício"
  @territories["MV"] = "Maldivas"
  @territories["MW"] = "Malawi"
  @territories["MX"] = "México"
  @territories["MY"] = "Malásia"
  @territories["MZ"] = "Moçambique"
  @territories["NA"] = "Namíbia"
  @territories["NC"] = "Nova Caledônia"
  @territories["NE"] = "Níger"
  @territories["NF"] = "Ilha Norfolk"
  @territories["NG"] = "Nigéria"
  @territories["NI"] = "Nicarágua"
  @territories["NL"] = "Países Baixos"
  @territories["NO"] = "Noruega"
  @territories["NP"] = "Nepal"
  @territories["NR"] = "Nauru"
  @territories["NU"] = "Niue"
  @territories["NZ"] = "Nova Zelândia"
  @territories["OM"] = "Omã"
  @territories["PA"] = "Panamá"
  @territories["PE"] = "Peru"
  @territories["PF"] = "Polinésia Francesa"
  @territories["PG"] = "Papua-Nova Guiné"
  @territories["PH"] = "Filipinas"
  @territories["PK"] = "Paquistão"
  @territories["PL"] = "Polônia"
  @territories["PM"] = "Saint Pierre e Miquelon"
  @territories["PN"] = "Pitcairn"
  @territories["PR"] = "Porto Rico"
  @territories["PS"] = "Território da Palestina"
  @territories["PT"] = "Portugal"
  @territories["PW"] = "Palau"
  @territories["PY"] = "Paraguai"
  @territories["QA"] = "Catar"
  @territories["RE"] = "Reunião"
  @territories["RO"] = "Romênia"
  @territories["RU"] = "Rússia"
  @territories["RW"] = "Ruanda"
  @territories["SA"] = "Arábia Saudita"
  @territories["SB"] = "Ilhas Salomão"
  @territories["SC"] = "Seychelles"
  @territories["SD"] = "Sudão"
  @territories["SE"] = "Suécia"
  @territories["SG"] = "Cingapura"
  @territories["SH"] = "Santa Helena"
  @territories["SI"] = "Eslovênia"
  @territories["SJ"] = "Svalbard e Jan Mayen"
  @territories["SK"] = "Eslováquia"
  @territories["SL"] = "Serra Leoa"
  @territories["SM"] = "San Marino"
  @territories["SN"] = "Senegal"
  @territories["SO"] = "Somália"
  @territories["SR"] = "Suriname"
  @territories["ST"] = "São Tomé e Príncipe"
  @territories["SV"] = "El Salvador"
  @territories["SY"] = "Síria"
  @territories["SZ"] = "Suazilândia"
  @territories["TC"] = "Ilhas Turks e Caicos"
  @territories["TD"] = "Chade"
  @territories["TF"] = "Territórios Franceses do Sul"
  @territories["TG"] = "Togo"
  @territories["TH"] = "Tailândia"
  @territories["TJ"] = "Tadjiquistão"
  @territories["TK"] = "Tokelau"
  @territories["TL"] = "Timor Leste"
  @territories["TM"] = "Turcomenistão"
  @territories["TN"] = "Tunísia"
  @territories["TO"] = "Tonga"
  @territories["TR"] = "Turquia"
  @territories["TT"] = "Trinidad e Tobago"
  @territories["TV"] = "Tuvalu"
  @territories["TW"] = "Taiwan"
  @territories["TZ"] = "Tanzânia"
  @territories["UA"] = "Ucrânia"
  @territories["UG"] = "Uganda"
  @territories["UM"] = "Ilhas Menores Distantes dos Estados Unidos"
  @territories["US"] = "Estados Unidos"
  @territories["UY"] = "Uruguai"
  @territories["UZ"] = "Uzbequistão"
  @territories["VA"] = "Vaticano"
  @territories["VC"] = "São Vicente e Granadinas"
  @territories["VE"] = "Venezuela"
  @territories["VG"] = "Ilhas Virgens Britânicas"
  @territories["VI"] = "Ilhas Virgens dos EUA"
  @territories["VN"] = "Vietnã"
  @territories["VU"] = "Vanuatu"
  @territories["WF"] = "Wallis e Futuna"
  @territories["WS"] = "Samoa"
  @territories["YE"] = "Iêmen"
  @territories["YT"] = "Mayotte"
  @territories["ZA"] = "África do Sul"
  @territories["ZM"] = "Zâmbia"
  @territories["ZW"] = "Zimbábwe"
  # ISO 15924 script code => Portuguese display name.
  @scripts = {}
  @scripts["Arab"] = "árabe"
  @scripts["Armn"] = "armênio"
  @scripts["Beng"] = "bengali"
  @scripts["Bopo"] = "bopomofo"
  @scripts["Brai"] = "braile"
  @scripts["Buhd"] = "buhid"
  @scripts["Cans"] = "símbolos aborígenes do Canadá Unificado"
  @scripts["Cher"] = "cheroqui"
  @scripts["Copt"] = "cóptico"
  @scripts["Cprt"] = "cipriota"
  @scripts["Cyrl"] = "cirílico"
  @scripts["Deva"] = "devanágari"
  @scripts["Dsrt"] = "deseret"
  @scripts["Ethi"] = "etiópico"
  @scripts["Geor"] = "georgiano"
  @scripts["Goth"] = "gótico"
  @scripts["Grek"] = "grego"
  @scripts["Gujr"] = "gujerati"
  @scripts["Guru"] = "gurmuqui"
  @scripts["Hang"] = "hangul"
  @scripts["Hani"] = "han"
  @scripts["Hano"] = "hanunoo"
  @scripts["Hans"] = "han simplificado"
  @scripts["Hant"] = "han tradicional"
  @scripts["Hebr"] = "hebraico"
  @scripts["Hira"] = "hiragana"
  @scripts["Ital"] = "itálico antigo"
  @scripts["Kana"] = "katakana"
  @scripts["Khmr"] = "khmer"
  @scripts["Knda"] = "kannada"
  @scripts["Laoo"] = "lao"
  @scripts["Latn"] = "latim"
  @scripts["Limb"] = "limbu"
  @scripts["Linb"] = "b linear"
  @scripts["Mlym"] = "malaiala"
  @scripts["Mong"] = "mongol"
  @scripts["Mymr"] = "myanmar"
  @scripts["Ogam"] = "ogâmico"
  @scripts["Orya"] = "oriya"
  @scripts["Osma"] = "osmanya"
  @scripts["Qaai"] = "herdado"
  @scripts["Runr"] = "rúnico"
  @scripts["Shaw"] = "shaviano"
  @scripts["Sinh"] = "cingalês"
  @scripts["Syrc"] = "siríaco"
  @scripts["Tagb"] = "tagbanwa"
  @scripts["Tale"] = "tai Le"
  @scripts["Taml"] = "tâmil"
  @scripts["Telu"] = "télugu"
  @scripts["Tglg"] = "tagalo"
  @scripts["Thaa"] = "thaana"
  @scripts["Thai"] = "tailandês"
  @scripts["Tibt"] = "tibetano"
  @scripts["Ugar"] = "ugarítico"
  @scripts["Yiii"] = "yi"
  @scripts["Zyyy"] = "comum"
  # Exemplar character sets (regex-like character classes).
  @character_exemplar_characters = {}
  @character_exemplar_characters["default"] = "[a-zãõçáéíóúàâêôüò]"
  @character_exemplar_characters["auxiliary"] = "[]"
  # Quotation delimiters (primary and alternate).
  @delimiter_quotation_start = "“"
  @delimiter_quotation_end = "”"
  @delimiter_alternate_quotation_start = "‘"
  @delimiter_alternate_quotation_end = "’"
  # Measurement system and paper size (values in millimetres, A4).
  @measurement_system = "metric"
  @measurement_papersize_height = "297"
  @measurement_papersize_width = "210"
  # Locale variant, key and type display names.
  @variants = {}
  @variants["REVISED"] = "Revisado"
  @keys = {}
  @keys["calendar"] = "Calendário"
  @keys["collation"] = "Intercalação"
  @keys["currency"] = "Moeda"
  @types = {}
  @types["buddhist"] = "Calendário Budista"
  @types["chinese"] = "Calendário Chinês"
  @types["direct"] = "Ordem Direta"
  @types["gregorian"] = "Calendário Gregoriano"
  @types["hebrew"] = "Calendário Hebraico"
  @types["islamic"] = "Calendário Islâmico"
  @types["islamic-civil"] = "Calendário Civil Islâmico"
  @types["japanese"] = "Calendário Japonês"
  @types["phonebook"] = "Ordem de Lista Telefônica"
  @types["pinyin"] = "Ordem Pin-yin"
  @types["stroke"] = "Ordem dos Traços"
  @types["traditional"] = "Ordem Tradicional"
  # POSIX-style yes/no strings and match expressions.
  @yesstr = "sim:s"
  @nostr = "não:nao:n"
  @yesexpr = "^([yY]([eE][sS])?)|([sS]([iI][mM])?)"
  @noexpr = "^[nN]([ãÃaA]?[oO])?"
  @references = {}
end
public
# Read-only access to the locale tables populated by +init_data+.
# NOTE(review): some of these attributes (e.g. layout_orientation,
# layout_inlist_casing, character_mapping) are never assigned in
# init_data for this locale and therefore read as nil.
attr_reader :languages
attr_reader :territories
attr_reader :scripts
attr_reader :layout_orientation
attr_reader :layout_inlist_casing
attr_reader :character_exemplar_characters
attr_reader :character_mapping
attr_reader :delimiter_quotation_start
attr_reader :delimiter_quotation_end
attr_reader :delimiter_alternate_quotation_start
attr_reader :delimiter_alternate_quotation_end
attr_reader :measurement_system
attr_reader :measurement_papersize_height
attr_reader :measurement_papersize_width
attr_reader :variants
attr_reader :keys
attr_reader :types
attr_reader :yesstr
attr_reader :nostr
attr_reader :yesexpr
attr_reader :noexpr
attr_reader :references
| 36.060459 | 69 | 0.583998 |
acbe6f55f0dc6af41f1cca62ffe359a2da5a4def | 1,794 | require 'test_helper'
# Integration tests for the signup flow, including e-mail account activation.
class UsersSignupTest < ActionDispatch::IntegrationTest
  # Clear the mailer queue so delivery-count assertions start from zero.
  def setup
    ActionMailer::Base.deliveries.clear
  end
  # An invalid submission must not create a user and must re-render the
  # signup form with error markup.
  test "invalid signup information" do
    get signup_path
    assert_no_difference 'User.count' do
      post signup_path, params: { user: { name: "",
                                          email: "user@invalid",
                                          password: "foo",
                                          password_confirmation: "bar" } }
    end
    assert_template 'users/new'
    assert_select 'div#error_explanation'
    assert_select 'div.field_with_errors'
    assert_select 'form[action="/signup"]'
  end
  # Happy path: signup creates a user and sends exactly one activation mail;
  # the account cannot log in until the correct token/email pair is visited.
  # NOTE(review): the recipient literal "[email protected]" below appears to be
  # redacted by the data dump — confirm against the original source.
  test "valid signup information with account activation" do
    get signup_path
    assert_difference 'User.count', 1 do
      post users_path, params: { user: { name: "Example User",
                                         email: "[email protected]",
                                         password: "password",
                                         password_confirmation: "password" } }
    end
    assert_equal 1, ActionMailer::Base.deliveries.size
    user = assigns(:user)
    assert_not user.activated?
    # Try to log in before activation; it must be rejected.
    log_in_as(user)
    assert_not is_logged_in?
    # Invalid activation token.
    get edit_account_activation_path("invalid token", email: user.email)
    assert_not is_logged_in?
    # Correct token but wrong e-mail address.
    get edit_account_activation_path(user.activation_token, email: 'wrong')
    assert_not is_logged_in?
    # Correct token and e-mail: the account becomes activated and logged in.
    get edit_account_activation_path(user.activation_token, email: user.email)
    assert user.reload.activated?
    follow_redirect!
    assert_template 'users/show'
    assert_not flash.empty?
    assert is_logged_in?
  end
end
| 34.5 | 80 | 0.615385 |
87e0657363b768c7b361d11ffdedf242d070adaa | 248 | class CreateCarts < ActiveRecord::Migration[5.2]
def change
create_table :carts do |t|
t.belongs_to :user, foreign_key: true
t.belongs_to :product, foreign_key: true
t.integer :quantity
t.timestamps
end
end
end
| 20.666667 | 48 | 0.673387 |
edfc4c08c8e8687240d73c48342825077f2cabac | 1,123 | module RssNewsBrasil
class Rss
attr_reader :title,
:description,
:link,
:image_url,
:last_build_date,
:items
def initialize(rss)
@rss = rss
@channel = @rss.channel
set_required_data
set_optionals_data
end
private
def set_required_data
@title = @channel.title ||= ""
@description = @channel.description ||= ""
@link = @channel.link ||= ""
@items = get_items(@channel)
end
def set_optionals_data
@image_url = get_image_url_from_channel
@last_build_date = @channel.lastBuildDate ||= ""
end
def get_image_url_from_channel
@channel.image ? @channel.image.url : ""
end
def get_items(channel)
channel.items.map { |element| item = create_item(element)}
end
def create_item(item_data)
title = item_data.title ||= ""
description = item_data.description ||= ""
link = item_data.link ||= ""
pub_date = item_data.pubDate ||= ""
Item.new(title, description, link, pub_date)
end
end
end
| 23.395833 | 65 | 0.581478 |
8707cb705ebb76427f7c5390642518357836c68d | 4,478 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
##
# Original script comments by nick[at]executionflow.org:
# Meterpreter script to deliver and execute powershell scripts using
# a compression/encoding method based on the powershell PoC code
# from rel1k and winfang98 at DEF CON 18. This script furthers the
# idea by bypassing Windows' command character lmits, allowing the
# execution of very large scripts. No files are ever written to disk.
##
require 'zlib' # TODO: check if this can be done with REX
require 'msf/core'
require 'rex'
# Post module that compresses/Base64-encodes a local PowerShell script and
# executes it over an existing meterpreter session entirely in memory
# (no files are written to the target's disk). Large scripts are staged
# through environment variables to sidestep cmd.exe's command-length limit.
class Metasploit3 < Msf::Post

  include Msf::Post::Windows::Powershell

  def initialize(info={})
    super(update_info(info,
      'Name'          => "Windows Manage PowerShell Download and/or Execute",
      'Description'   => %q{
        This module will download and execute a PowerShell script over a meterpreter session.
        The user may also enter text substitutions to be made in memory before execution.
        Setting VERBOSE to true will output both the script prior to execution and the results.
      },
      'License'       => MSF_LICENSE,
      'Platform'      => ['win'],
      'SessionTypes'  => ['meterpreter'],
      'Author'        => [
        'Nicholas Nam (nick[at]executionflow.org)', # original meterpreter script
        'RageLtMan' # post module
      ]
    ))

    register_options(
      [
        OptPath.new( 'SCRIPT',  [true, 'Path to the local PS script', ::File.join(Msf::Config.install_root, "scripts", "ps", "msflag.ps1") ]),
      ], self.class)

    register_advanced_options(
      [
        OptString.new('SUBSTITUTIONS', [false, 'Script subs in gsub format - original,sub;original,sub' ]),
        OptBool.new(  'DELETE',        [false, 'Delete file after execution', false ]),
        OptBool.new(  'DRY_RUN',       [false, 'Only show what would be done', false ]),
        OptInt.new('TIMEOUT', [false, 'Execution timeout', 15]),
      ], self.class)
  end

  # Reads the local script, applies substitutions, compresses/encodes it,
  # stages it if it is too large for a single command line, executes it on
  # the target, logs the output locally, and cleans up.
  def run
    # Make sure we meet the requirements before running the script, note no need to return
    # unless error
    return 0 if ! (session.type == "meterpreter" || have_powershell?)

    # End of file marker
    # NOTE(review): `eof` is presumably used by the Powershell mixin helpers
    # to delimit script output; confirm against Msf::Post::Windows::Powershell.
    eof = Rex::Text.rand_text_alpha(8)
    env_suffix = Rex::Text.rand_text_alpha(8)

    # check/set vars
    subs = process_subs(datastore['SUBSTITUTIONS'])
    script_in = read_script(datastore['SCRIPT'])
    print_status(script_in)

    # Make substitutions in script if needed
    script_in = make_subs(script_in, subs) unless subs.empty?

    # Get target's computer name
    computer_name = session.sys.config.sysinfo['Computer']

    # Create unique log directory (one per target computer name)
    log_dir = ::File.join(Msf::Config.log_directory,'scripts', computer_name)
    ::FileUtils.mkdir_p(log_dir)

    # Define log filename: <script-basename>-<timestamp>.txt
    script_ext  = ::File.extname(datastore['SCRIPT'])
    script_base = ::File.basename(datastore['SCRIPT'], script_ext)
    time_stamp  = ::Time.now.strftime('%Y%m%d:%H%M%S')
    log_file    = ::File.join(log_dir,"#{script_base}-#{time_stamp}.txt")

    # Compress
    print_status('Compressing script contents.')
    compressed_script = compress_script(script_in, eof)
    # DRY_RUN: print the exact command that would run and stop.
    if datastore['DRY_RUN']
      print_good("powershell -EncodedCommand #{compressed_script}")
      return
    end

    # If the compressed size is > 8100 bytes, launch stager
    # (8100 leaves headroom under cmd.exe's ~8kB command-line limit).
    if (compressed_script.size > 8100)
      print_error("Compressed size: #{compressed_script.size}")
      error_msg =  "Compressed size may cause command to exceed "
      error_msg += "cmd.exe's 8kB character limit."
      print_error(error_msg)
      print_status('Launching stager:')
      script = stage_to_env(compressed_script, env_suffix)
      print_good("Payload successfully staged.")
    else
      print_good("Compressed size: #{compressed_script.size}")
      script = compressed_script
    end

    # Execute the powershell script
    print_status('Executing the script.')
    cmd_out, running_pids, open_channels = execute_script(script, datastore['TIMEOUT'])

    # Write output to log
    print_status("Logging output to #{log_file}.")
    write_to_log(cmd_out, log_file, eof)

    # Clean up residual processes/channels and, if requested, the script file
    print_status('Cleaning up residual objects and processes.')
    clean_up(datastore['SCRIPT'], eof, running_pids, open_channels, env_suffix)

    # That's it
    print_good('Finished!')
  end
end
| 35.539683 | 142 | 0.670835 |
abbf53ac557a024e6e26e525f0b39e99edaa527d | 1,049 | # Be sure to restart your server when you modify this file.
# Bump this version string to expire every precompiled asset at once.
Rails.application.config.assets.version = '1.0'

# Additional asset load paths could be appended here, e.g.:
# Rails.application.config.assets.paths << Emoji.images_path

# Precompile additional assets. application.js, application.css, and all
# non-JS/CSS files under app/assets are already included by default.
assets = Rails.application.config.assets

# Pick up every non-JS/CSS file that lives under app/themes.
assets.precompile += [proc { |path, fn| fn =~ /app\/themes/ && !%w(.js .css).include?(File.extname(path)) }]

# Each theme ships its own entry-point script and stylesheet.
theme_names = Dir["app/themes/*"].map { |theme_path| theme_path.split('/').last }
assets.precompile += theme_names.map { |theme| "#{theme}/application.js" }
assets.precompile += theme_names.map { |theme| "#{theme}/application.css" }

# Everything bundled with flat-ui.
assets.precompile += ['flat-ui/**/*']

# Make each theme's asset subdirectories resolvable by Sprockets.
assets.paths.concat(Dir.glob("#{Rails.root}/app/themes/*/assets/*"))
4a6f57b49e9296d455eb933056f758e6444c9193 | 1,444 | # This file should contain all the record creation needed to seed the database with its default values.
# Seed data: one known admin account, 99 generated users, sample microposts
# for the six oldest users, and a web of follow relationships.
# Load with `rails db:seed` (or alongside the schema via `rails db:setup`).

# Primary sample user (admin, pre-activated).
User.create!(
  name: "Example User",
  email: "[email protected]",
  password: "foobar",
  password_confirmation: "foobar",
  admin: true,
  activated: true,
  activated_at: Time.zone.now
)

# A batch of additional generated users (example-1 .. example-99).
(1..99).each do |i|
  User.create!(
    name: Faker::Name.name,
    email: "example-#{i}@railstutorial.org",
    password: "password",
    password_confirmation: "password",
    activated: true,
    activated_at: Time.zone.now
  )
end

# Give the six oldest users a batch of shared-content microposts.
sample_authors = User.order(:created_at).take(6)
50.times do
  text = Faker::Lorem.sentence(word_count: 5)
  sample_authors.each { |author| author.microposts.create!(content: text) }
end

# Wire up follow relationships around the first user.
all_users = User.all
primary   = all_users.first
all_users[2..50].each { |other| primary.follow(other) }
all_users[3..40].each { |other| other.follow(primary) }
0133820251d8c1082f043185c73617a5553c6bfe | 1,731 | require 'puppet/configurer'
require 'puppet/resource/catalog'
# Downloads a file tree (e.g. plugins) by applying a one-resource catalog.
class Puppet::Configurer::Downloader
  attr_reader :name, :path, :source, :ignore

  # Apply the download catalog and return the paths of every changed file.
  # Any Puppet::Error raised during the run is logged rather than re-raised.
  # Yields each changed resource to the caller's block, when one is given.
  def evaluate
    Puppet.info _("Retrieving %{name}") % { name: name }

    changed_paths = []
    begin
      catalog.apply do |trans|
        trans.changed?.each do |resource|
          yield resource if block_given?
          changed_paths << resource[:path]
        end
      end
    rescue Puppet::Error => detail
      Puppet.log_exception(detail, _("Could not retrieve %{name}: %{detail}") % { name: name, detail: detail })
    end

    changed_paths
  end

  def initialize(name, path, source, ignore = nil, environment = nil, source_permissions = :ignore)
    @name               = name
    @path               = path
    @source             = source
    @ignore             = ignore
    @environment        = environment
    @source_permissions = source_permissions
  end

  # Build a non-host-config catalog containing only the file resource.
  def catalog
    Puppet::Resource::Catalog.new("PluginSync", @environment).tap do |cat|
      cat.host_config = false
      cat.add_resource(file)
    end
  end

  # The recursive file resource describing what to fetch and where to put it.
  def file
    opts = default_arguments.merge(:path => path, :source => source)
    opts[:ignore] = ignore.split if ignore
    Puppet::Type.type(:file).new(opts)
  end

  private

  # Baseline resource arguments; on non-Windows platforms the files are
  # owned by the current process uid/gid.
  def default_arguments
    args = {
      :path => path,
      :recurse => true,
      :links => :follow,
      :source => source,
      :source_permissions => @source_permissions,
      :tag => name,
      :purge => true,
      :force => true,
      :backup => false,
      :noop => false
    }
    unless Puppet::Util::Platform.windows?
      args[:owner] = Process.uid
      args[:group] = Process.gid
    end
    args
  end
end
| 25.455882 | 131 | 0.616984 |
d5b9d956aa928f8e42240fb63c2c45f594ebae2c | 4,177 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# NOTE: AutoRest-generated model; regenerate rather than editing by hand.
module Azure::ServiceFabric::V7_0_0_42
  module Models
    #
    # Represents the health of the stateful service replica.
    # Contains the replica aggregated health state, the health events and the
    # unhealthy evaluations.
    #
    class StatefulServiceReplicaHealth < ReplicaHealth

      include MsRestAzure


      # Fixes the polymorphic discriminator for this subtype.
      def initialize
        @ServiceKind = "Stateful"
      end

      # Polymorphic discriminator; always "Stateful" for this model.
      attr_accessor :ServiceKind

      # @return [String] Id of a stateful service replica. ReplicaId is used by
      # Service Fabric to uniquely identify a replica of a partition. It is
      # unique within a partition and does not change for the lifetime of the
      # replica. If a replica gets dropped and another replica gets created on
      # the same node for the same partition, it will get a different value for
      # the id. Sometimes the id of a stateless service instance is also
      # referred as a replica id.
      attr_accessor :replica_id


      #
      # Mapper for StatefulServiceReplicaHealth class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'Stateful',
          type: {
            name: 'Composite',
            class_name: 'StatefulServiceReplicaHealth',
            # Includes inherited ReplicaHealth properties plus replica_id.
            model_properties: {
              aggregated_health_state: {
                client_side_validation: true,
                required: false,
                serialized_name: 'AggregatedHealthState',
                type: {
                  name: 'String'
                }
              },
              health_events: {
                client_side_validation: true,
                required: false,
                serialized_name: 'HealthEvents',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'HealthEventElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'HealthEvent'
                      }
                  }
                }
              },
              unhealthy_evaluations: {
                client_side_validation: true,
                required: false,
                serialized_name: 'UnhealthyEvaluations',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'HealthEvaluationWrapperElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'HealthEvaluationWrapper'
                      }
                  }
                }
              },
              health_statistics: {
                client_side_validation: true,
                required: false,
                serialized_name: 'HealthStatistics',
                type: {
                  name: 'Composite',
                  class_name: 'HealthStatistics'
                }
              },
              partition_id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'PartitionId',
                type: {
                  name: 'String'
                }
              },
              ServiceKind: {
                client_side_validation: true,
                required: true,
                serialized_name: 'ServiceKind',
                type: {
                  name: 'String'
                }
              },
              replica_id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ReplicaId',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 32.379845 | 79 | 0.486713 |
916de01e9632743e1482c0ef5ae4a632e084abb0 | 130 | class AddAltNamesToNcbiNode < ActiveRecord::Migration[5.2]
def change
add_column :ncbi_nodes, :alt_names, :string
end
end
| 21.666667 | 58 | 0.761538 |
87af0867e1aeb7fbd5e944e8dad17c910418dd19 | 48 | module Urlogger
module VisitsHelper
end
end
| 9.6 | 21 | 0.791667 |
1ce0b6e532801dee9d2cd20983a3f398e3c26938 | 1,629 | module FbGraph
class Photo < Node
include Connections::Comments
include Connections::Likes
include Connections::Likes::Likable
include Connections::Picture
include Connections::Tags
include Connections::Tags::Taggable
attr_accessor :from, :name, :icon, :source, :height, :width, :images, :link, :created_time, :updated_time, :place
def initialize(identifier, attributes = {})
super
if (from = attributes[:from])
@from = if from[:category]
Page.new(from[:id], from)
else
User.new(from[:id], from)
end
end
# NOTE:
# for some reason, facebook uses different parameter names.
# "name" in GET & "message" in POST
@name = attributes[:name] || attributes[:message]
@icon = attributes[:icon]
@source = attributes[:source]
@height = attributes[:height]
@width = attributes[:width]
@link = attributes[:link]
@images = []
if attributes[:images]
attributes[:images].each do |image|
@images << Image.new(image)
end
end
if attributes[:created_time]
@created_time = Time.parse(attributes[:created_time]).utc
end
if attributes[:updated_time]
@updated_time = Time.parse(attributes[:updated_time]).utc
end
if attributes[:place].is_a? Hash
@place = Page.new(attributes[:place][:id], :name => attributes[:place][:name], :location => attributes[:place][:location])
end
# cached connection
cache_collections attributes, :comments, :likes, :tags
end
end
end
| 31.941176 | 130 | 0.610804 |
e9e31cca9ba60956232cf165fb7888f077dd6ac8 | 117 | class RemoveLikesColumn < ActiveRecord::Migration[5.0]
def change
remove_column :posts, :likes_count
end
end
| 19.5 | 54 | 0.760684 |
08021600565b3d8b6f9fb25cbbf3e29d56c33b94 | 1,832 | # -*- encoding: utf-8 -*-
# stub: addressable 2.3.8 ruby lib
# NOTE: auto-generated gemspec stub (written at install time); regenerate
# from the upstream project rather than editing by hand.
Gem::Specification.new do |s|
  s.name = "addressable"
  s.version = "2.3.8"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib"]
  s.authors = ["Bob Aman"]
  s.date = "2015-03-27"
  s.description = "Addressable is a replacement for the URI implementation that is part of\nRuby's standard library. It more closely conforms to the relevant RFCs and\nadds support for IRIs and URI templates.\n"
  s.email = "[email protected]"
  s.extra_rdoc_files = ["README.md"]
  s.files = ["README.md"]
  s.homepage = "https://github.com/sporkmonger/addressable"
  s.licenses = ["Apache License 2.0"]
  s.rdoc_options = ["--main", "README.md"]
  s.rubygems_version = "2.4.8"
  s.summary = "URI Implementation"
  s.installed_by_version = "2.4.8" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    # RubyGems >= 1.2 distinguishes development dependencies; older
    # versions (and specs without a specification_version) only support
    # plain runtime dependencies, hence the duplicated lists below.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rake>, [">= 10.4.2", "~> 10.4"])
      s.add_development_dependency(%q<rspec>, ["~> 3.0"])
      s.add_development_dependency(%q<rspec-its>, ["~> 1.1"])
      s.add_development_dependency(%q<launchy>, [">= 2.4.3", "~> 2.4"])
    else
      s.add_dependency(%q<rake>, [">= 10.4.2", "~> 10.4"])
      s.add_dependency(%q<rspec>, ["~> 3.0"])
      s.add_dependency(%q<rspec-its>, ["~> 1.1"])
      s.add_dependency(%q<launchy>, [">= 2.4.3", "~> 2.4"])
    end
  else
    s.add_dependency(%q<rake>, [">= 10.4.2", "~> 10.4"])
    s.add_dependency(%q<rspec>, ["~> 3.0"])
    s.add_dependency(%q<rspec-its>, ["~> 1.1"])
    s.add_dependency(%q<launchy>, [">= 2.4.3", "~> 2.4"])
  end
end
| 40.711111 | 212 | 0.612991 |
28032df6ec8040fbf67431795d6264616ef4ce18 | 827 | cask 'colorsnapper' do
version '1.3.2'
sha256 'd866cf1040cfab7f4036050d1b50f9d9e05a65316607a0c55c3d96eb7443eaef'
# amazonaws.com/cs2-binaries was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/cs2-binaries/ColorSnapper2-#{version.dots_to_underscores}.zip"
appcast 'https://colorsnapper.com/app/appcast.xml',
checkpoint: '189b0b3b11a3116495c11cc64d59d78e6c5fa1cc8ed7645bb3551296f3a5476b'
name 'ColorSnapper 2'
homepage 'https://colorsnapper.com/'
app 'ColorSnapper2.app'
uninstall quit: 'com.koolesache.ColorSnapper2'
zap delete: [
'~/Library/Application Support/ColorSnapper2',
'~/Library/Caches/com.koolesache.ColorSnapper2',
'~/Library/Preferences/com.koolesache.ColorSnapper2.plist',
]
end
| 37.590909 | 94 | 0.725514 |
e2af4e24bb17bd9ebe9a03a418bfcfdfe6fa7ff2 | 1,132 |
# Make lib/ requirable so the version constant can be loaded below.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "inspec-test-fixture/version"

# Gemspec for a deliberately minimal plugin used to exercise InSpec's
# plugin search/install machinery; not a template for real plugins.
Gem::Specification.new do |spec|
  spec.name          = "inspec-test-fixture"
  spec.version       = InspecPlugins::TestFixture::VERSION
  spec.authors       = ["InSpec Engineering Team"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{A simple test plugin gem for InSpec}
  spec.description   = %q{This gem is used to test the gem search and install capabilities of InSpec's plugin V2 system. It is not a good example or starting point for plugin development.}
  spec.homepage      = "https://github.com/inspec/inspec"

  spec.files = [
    'inspec-test-fixture.gemspec',
    'lib/inspec-test-fixture.rb',
    'lib/inspec-test-fixture/plugin.rb',
    'lib/inspec-test-fixture/mock_plugin.rb',
    'lib/inspec-test-fixture/version.rb',
  ]
  spec.executables   = []
  spec.require_paths = ["lib"]

  spec.add_development_dependency "rake", "~> 10.0"
  # Version 0.2.0 of the fixture intentionally carries an extra runtime
  # dependency so dependency-resolution code paths can be tested.
  if InspecPlugins::TestFixture::VERSION == '0.2.0'
    spec.add_dependency "ordinal_array", "~> 0.2.0"
  end
end
| 36.516129 | 189 | 0.676678 |
2157caf73d5fbe495744ee4a0ecac047c9d3c48d | 75 | class Space
# Entry-point stub; ignores its argument and reports success.
#
# @param arg [Object] unused; kept for the main(arg)-style interface
# @return [Integer] always 0
def main(arg)
  # Implicit return of the last expression is idiomatic Ruby; the
  # explicit `return 0` was redundant.
  0
end
end
# 5.5 local
# 42 on pi
| 8.333333 | 15 | 0.613333 |
1dd9fe9fef152f931e6287317b98bbc8e2909268 | 551 | # frozen_string_literal: true
class Syscase
  class Web
    # Base page
    class Page < LB::Project::Page
      LB::Project::View.setup(self)

      # Number of covered addresses, memoized per page instance.
      def covered
        @covered ||=
          Syscase::Web.db
                      .repository(Syscase::Web::Persistence::Repositories::Paths)
                      .coverage_count
      end

      # Covered fraction of all known addresses, memoized.
      def coverage
        @coverage ||= covered.to_f / total.to_f
      end

      # Total number of known addresses, memoized.
      def total
        @total ||=
          Syscase::Web.db
                      .repository(Syscase::Web::Persistence::Repositories::Addresses)
                      .count
      end
    end
  end
end
| 20.407407 | 60 | 0.593466 |
018686851dc44bd4544dc8799b4f3a21e00af949 | 1,466 | cask 'forklift' do
  version '3.2.7'
  sha256 '0e96fb834cb12628e3d9715361411fe9bf1a5e7fc28971920e55668ee2008cc7'

  url "https://download.binarynights.com/ForkLift#{version}.zip"
  appcast "https://updates.binarynights.com/ForkLift#{version.major}/update.xml"
  name 'ForkLift'
  homepage 'https://binarynights.com/forklift/'

  # The app updates itself via Sparkle, so Homebrew skips upgrade checks.
  auto_updates true
  depends_on macos: '>= :el_capitan'

  app 'ForkLift.app'

  # Remove the privileged helper, unload its launchd jobs, and quit both
  # the main app and the menu-bar mini app before uninstalling.
  uninstall delete:    '/Library/PrivilegedHelperTools/com.binarynights.ForkLiftHelper',
            launchctl: [
                         'com.binarynights.ForkLiftHelper',
                         'com.binarynights.ForkLiftMini',
                       ],
            quit:      [
                         "com.binarynights.ForkLift-#{version.major}",
                         'com.binarynights.ForkLiftMini',
                       ]

  # User-level leftovers removed by `brew cask zap`.
  zap trash: [
               '~/Library/Application Support/ForkLift',
               "~/Library/Caches/com.binarynights.ForkLift-#{version.major}",
               "~/Library/Cookies/com.binarynights.ForkLift-#{version.major}.binarycookies",
               '~/Library/Logs/ForkLift',
               '~/Library/Logs/ForkLiftMini',
               "~/Library/Preferences/com.binarynights.ForkLift-#{version.major}.plist",
               '~/Library/Preferences/com.binarynights.ForkLiftMini.plist',
               "~/Library/Saved Application State/com.binarynights.ForkLift-#{version.major}.savedState",
             ]
| 40.722222 | 105 | 0.606412 |
799a8662b94aacdd935bbc91a8b18ef6ebed2c63 | 645 | require 'spec_helper'
# Contract tests for the GraphQL Issue type exposed by GitlabSchema.
describe GitlabSchema.types['Issue'] do
  # Permission flags are exposed through the dedicated Issue permission type.
  it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Issue) }

  it { expect(described_class.graphql_name).to eq('Issue') }

  # Reading an issue through GraphQL requires the read_issue ability.
  it { expect(described_class).to require_graphql_authorizations(:read_issue) }

  # Issues participate in the Noteable interface (notes/discussions).
  it { expect(described_class.interfaces).to include(Types::Notes::NoteableType.to_graphql) }

  it 'has specific fields' do
    fields = %i[title_html description_html relative_position web_path web_url
                reference]

    fields.each do |field_name|
      expect(described_class).to have_graphql_field(field_name)
    end
  end
end
| 30.714286 | 93 | 0.750388 |
d53fc62463f599b0e8f2239031de1cb13118bfeb | 9,664 | # frozen_string_literal: true
module RocketChat
  module Messages
    #
    # Rocket.Chat Room messages template (groups&channels)
    #
    # Subclasses (e.g. Channel, Group) inherit the REST wrappers below; the
    # inherited hook derives the API field/collection names from the
    # subclass name.
    #
    class Room # rubocop:disable Metrics/ClassLength
      include RoomSupport
      include UserSupport

      # Defines `field`/`collection` singleton helpers on each subclass,
      # derived from the subclass name (e.g. Channel -> "channel"/"channels").
      def self.inherited(subclass)
        # Call super so cooperative inherited hooks elsewhere keep working
        # (the original implementation silently swallowed them).
        super
        field = subclass.name.split('::')[-1].downcase
        collection = field + 's'
        subclass.send(:define_singleton_method, :field) { field }
        subclass.send(:define_singleton_method, :collection) { collection }
      end

      #
      # @param [Session] session Session
      #
      def initialize(session)
        @session = session
      end

      # Full API path to call, e.g. "/api/v1/channels.create"
      def self.api_path(method)
        "/api/v1/#{collection}.#{method}"
      end

      #
      # *.create REST API
      # @param [String] name Room name
      # @param [Hash] options Additional options
      # @return [Room]
      # @raise [HTTPError, StatusError]
      #
      def create(name, options = {})
        response = session.request_json(
          self.class.api_path('create'),
          method: :post,
          body: {
            name: name
          }.merge(room_option_hash(options))
        )
        RocketChat::Room.new response[self.class.field]
      end

      #
      # *.delete REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def delete(room_id: nil, name: nil)
        session.request_json(
          self.class.api_path('delete'),
          method: :post,
          body: room_params(room_id, name),
          upstreamed_errors: ['error-room-not-found']
        )['success']
      end

      #
      # *.addAll REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] active_users_only Add active users only
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def add_all(room_id: nil, name: nil, active_users_only: false)
        session.request_json(
          self.class.api_path('addAll'),
          method: :post,
          body: room_params(room_id, name)
            .merge(activeUsersOnly: active_users_only)
        )['success']
      end

      #
      # *.add_owner REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] user_id Rocket.Chat user id
      # @param [String] username Rocket.Chat username
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def add_owner(room_id: nil, name: nil, user_id: nil, username: nil)
        session.request_json(
          self.class.api_path('addOwner'),
          method: :post,
          body: room_params(room_id, name)
            .merge(user_params(user_id, username))
        )['success']
      end

      #
      # *.remove_owner REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] user_id Rocket.Chat user id
      # @param [String] username Rocket.Chat username
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def remove_owner(room_id: nil, name: nil, user_id: nil, username: nil)
        session.request_json(
          self.class.api_path('removeOwner'),
          method: :post,
          body: room_params(room_id, name)
            .merge(user_params(user_id, username))
        )['success']
      end

      #
      # *.add_moderator REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] user_id Rocket.Chat user id
      # @param [String] username Rocket.Chat username
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def add_moderator(room_id: nil, name: nil, user_id: nil, username: nil)
        session.request_json(
          self.class.api_path('addModerator'),
          method: :post,
          body: room_params(room_id, name)
            .merge(user_params(user_id, username))
        )['success']
      end

      #
      # *.remove_moderator REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] user_id Rocket.Chat user id
      # @param [String] username Rocket.Chat username
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def remove_moderator(room_id: nil, name: nil, user_id: nil, username: nil)
        session.request_json(
          self.class.api_path('removeModerator'),
          method: :post,
          body: room_params(room_id, name)
            .merge(user_params(user_id, username))
        )['success']
      end

      #
      # *.info REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Room name (channels since 0.56)
      # @return [Room, nil] nil when the room does not exist
      # @raise [HTTPError, StatusError]
      #
      def info(room_id: nil, name: nil)
        response = session.request_json(
          self.class.api_path('info'),
          body: room_params(room_id, name),
          upstreamed_errors: ['error-room-not-found']
        )

        RocketChat::Room.new response[self.class.field] if response['success']
      end

      #
      # *.invite REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] user_id Rocket.Chat user id
      # @param [String] username Rocket.Chat username
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def invite(room_id: nil, name: nil, user_id: nil, username: nil)
        session.request_json(
          self.class.api_path('invite'),
          method: :post,
          body: room_params(room_id, name)
            .merge(user_params(user_id, username))
        )['success']
      end

      #
      # *.kick REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [String] user_id Rocket.Chat user id
      # @param [String] username Rocket.Chat username
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def kick(room_id: nil, name: nil, user_id: nil, username: nil)
        session.request_json(
          self.class.api_path('kick'),
          method: :post,
          body: room_params(room_id, name)
            .merge(user_params(user_id, username))
        )['success']
      end

      #
      # *.archive REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def archive(room_id: nil, name: nil)
        session.request_json(
          self.class.api_path('archive'),
          method: :post,
          body: room_params(room_id, name)
        )['success']
      end

      #
      # *.unarchive REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def unarchive(room_id: nil, name: nil)
        session.request_json(
          self.class.api_path('unarchive'),
          method: :post,
          body: room_params(room_id, name)
        )['success']
      end

      #
      # *.leave REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def leave(room_id: nil, name: nil)
        session.request_json(
          self.class.api_path('leave'),
          method: :post,
          body: room_params(room_id, name)
        )['success']
      end

      #
      # *.rename REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] new_name New room name
      # @return [Boolean]
      # @raise [HTTPError, StatusError]
      #
      def rename(room_id, new_name)
        session.request_json(
          self.class.api_path('rename'),
          method: :post,
          body: { roomId: room_id, name: new_name }
        )['success']
      end

      #
      # *.set* REST API
      # @param [String] room_id Rocket.Chat room id
      # @param [String] name Rocket.Chat room name (coming soon)
      # @param [Hash] setting Single key-value
      # @return [Boolean]
      # @raise [ArgumentError, HTTPError, StatusError]
      #
      def set_attr(room_id: nil, name: nil, **setting)
        attribute, value = setting.first
        validate_attribute(attribute)
        session.request_json(
          self.class.api_path(Util.camelize("set_#{attribute}")),
          method: :post,
          body: room_params(room_id, name)
            .merge(Util.camelize(attribute) => value)
        )['success']
      end

      private

      attr_reader :session

      # Slice the supported creation options and camelize their keys for
      # the REST API; returns {} when none of them are present.
      def room_option_hash(options)
        options = Util.slice_hash(options, :members, :read_only, :custom_fields)
        options.each_with_object({}) do |(key, value), camelized|
          camelized[Util.camelize(key)] = value
        end
      end

      # Raise unless the attribute is one the subclass declares settable.
      def validate_attribute(attribute)
        return if self.class.settable_attributes.include?(attribute)

        raise ArgumentError, "Unsettable attribute: #{attribute || 'nil'}"
      end
    end
  end
end
| 31.376623 | 83 | 0.583092 |
4affb3bfb25a17ec107d7f4a134207e03699a894 | 122 | require "test_helper"
# Placeholder test case for SendEmailJob; no assertions implemented yet.
class SendEmailJobTest < ActiveJob::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 15.25 | 44 | 0.704918 |
edb4718bbdb2aa60cbebea056482f44bf18ea085 | 873 | #
# Author:: Seth Chisamore (<[email protected]>)
# Cookbook Name:: iis
# Recipe:: mod_security
#
# Copyright 2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Ensure the base IIS install/config has been applied first.
include_recipe "iis"

# NOTE(review): this recipe is named mod_security, yet the WebPI products
# installed below are URLAuthorization, RequestFiltering and IPSecurity —
# confirm the recipe name matches its intent.
%w{ URLAuthorization RequestFiltering IPSecurity }.each do |product|
  # Install each IIS security feature via the Web Platform Installer,
  # accepting the EULA according to the node attribute.
  webpi_product product do
    accept_eula node['iis']['accept_eula']
    action :install
  end
end
| 30.103448 | 74 | 0.74685 |
3375b8fcf9fb9737eca51ba7515779899fde39bb | 80 | class Labelling < ApplicationRecord
belongs_to :label
belongs_to :photo
end
| 16 | 35 | 0.8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.