hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
3922ecf33a02216c6f8d4eae01931252bffface3 | 69 | describe 'One', :wip do
it 'passes' do end
it 'fails' do end
end
| 13.8 | 23 | 0.637681 |
6a30b3190c3cdbaaf89776f9ce40094026b59f8e | 1,456 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "kansuji/version"
Gem::Specification.new do |spec|
spec.name = "kansuji"
spec.version = Kansuji::VERSION
spec.authors = ["Lê Hoàng Tâm"]
spec.email = ["[email protected]"]
spec.summary = %q{Convert number to kanji and kanji to number in Japanese}
spec.description = %q{Re-define Number and String to convert number to kanji and kanji to number in Japanese}
spec.homepage = "https://github.com/kokorolee/kansuji"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
# if spec.respond_to?(:metadata)
# spec.metadata["allowed_push_host"] = "'# TODO: https://github.com/kokorolee/kansuji'"
# else
# raise "RubyGems 2.0 or newer is required to protect against " \
# "public gem pushes."
# end
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 39.351351 | 113 | 0.664835 |
793aabe6c6d66903fd974a44bef33b264e05d7fe | 1,198 | customers = Customer.all
staff_members = StaffMember.where(suspended: false).all
s = 2.years.ago
23.times do |n|
m = CustomerMessage.create!(
customer: customers.sample,
subject: 'これは問い合わせです。' * 4,
body: "これは問い合わせです。\n" * 8,
created_at: s.advance(months: n)
)
r = StaffMessage.create!(
customer: m.customer,
staff_member: staff_members.sample,
root: m,
parent: m,
subject: 'これは返信です。' * 4,
body: "これは返信です。\n" * 8,
created_at: s.advance(months: n, hours: 1)
)
if n % 6 == 0
m2 = CustomerMessage.create!(
customer: r.customer,
root: m,
parent: r,
subject: 'これは返信への返信です。',
body: "これは返信への返信です。",
created_at: s.advance(months: n, hours: 2)
)
StaffMessage.create!(
customer: m2.customer,
staff_member: staff_members.sample,
root: m,
parent: m2,
subject: 'これは返信の返信への返信です。',
body: "これは返信の返信への返信です。",
created_at: s.advance(months: n, hours: 3)
)
end
end
s = 24.hours.ago
8.times do |n|
CustomerMessage.create!(
customer: customers.sample,
subject: 'これは問い合わせです。' * 4,
body: "これは問い合わせです。\n" * 8,
created_at: s.advance(hours: n * 3)
)
end
| 23.490196 | 55 | 0.610184 |
91345063e09f8f9acdbb94683efe1c58997725bf | 1,318 | RSpec.describe Admin::GroupUsersController, faketenant: true do
let(:group) { FactoryBot.create(:group) }
context 'as an anonymous user' do
describe 'GET #index' do
subject { get :index, params: { group_id: group.id } }
it { is_expected.to redirect_to root_path }
end
end
context 'as an admin user' do
before { sign_in create(:admin) }
describe 'GET #index' do
subject { get :index, params: { group_id: group.id } }
it { is_expected.to render_template('layouts/hyrax/dashboard') }
it { is_expected.to render_template('admin/groups/users') }
end
context 'modifying group membership' do
let(:user) { FactoryBot.create(:user) }
describe 'POST #add' do
it 'adds a user to a group when it receives a group ID' do
expect do
post :add, params: { group_id: group.id, user_ids: user.id }
end.to change(group.members, :count).by(1)
end
end
describe 'DELETE #remove' do
before { group.add_members_by_id(user.id) }
it 'removes a user from a group when it receives a group ID' do
expect do
delete :remove, params: { group_id: group.id, user_ids: user.id }
end.to change(group.members, :count).by(-1)
end
end
end
end
end
| 29.288889 | 77 | 0.61912 |
79c2b128c4ba6b26b3099d87aa2c6f2f611e93c5 | 22,972 | # encoding: utf-8
require 'helper'
describe T::List do
before do
rcfile = RCFile.instance
rcfile.path = fixture_path + "/.trc"
@list = T::List.new
@old_stderr = $stderr
$stderr = StringIO.new
@old_stdout = $stdout
$stdout = StringIO.new
Timecop.freeze(Time.utc(2011, 11, 24, 16, 20, 0))
end
after do
Timecop.return
$stderr = @old_stderr
$stdout = @old_stdout
end
describe "#add" do
before do
@list.options = @list.options.merge("profile" => fixture_path + "/.trc")
stub_get("/1/account/verify_credentials.json").
to_return(:body => fixture("sferik.json"), :headers => {:content_type => "application/json; charset=utf-8"})
stub_post("/1/lists/members/create_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.add("presidents", "BarackObama")
a_get("/1/account/verify_credentials.json").
should have_been_made
a_post("/1/lists/members/create_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
should have_been_made
end
it "should have the correct output" do
@list.add("presidents", "BarackObama")
$stdout.string.should =~ /@testcli added 1 member to the list "presidents"\./
end
context "--id" do
before do
@list.options = @list.options.merge("id" => true)
stub_post("/1/lists/members/create_all.json").
with(:body => {:user_id => "7505382", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.add("presidents", "7505382")
a_get("/1/account/verify_credentials.json").
should have_been_made
a_post("/1/lists/members/create_all.json").
with(:body => {:user_id => "7505382", :slug => "presidents", :owner_screen_name => "sferik"}).
should have_been_made
end
end
context "Twitter is down" do
it "should retry 3 times and then raise an error" do
stub_post("/1/lists/members/create_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:status => 502)
lambda do
@list.add("presidents", "BarackObama")
end.should raise_error("Twitter is down or being upgraded.")
a_post("/1/lists/members/create_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
should have_been_made.times(3)
end
end
end
describe "#create" do
before do
@list.options = @list.options.merge("profile" => fixture_path + "/.trc")
stub_post("/1/lists/create.json").
with(:body => {:name => "presidents"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.create("presidents")
a_post("/1/lists/create.json").
with(:body => {:name => "presidents"}).
should have_been_made
end
it "should have the correct output" do
@list.create("presidents")
$stdout.string.chomp.should == "@testcli created the list \"presidents\"."
end
end
describe "#information" do
before do
@list.options = @list.options.merge("profile" => fixture_path + "/.trc")
stub_get("/1/lists/show.json").
with(:query => {:owner_screen_name => "testcli", :slug => "presidents"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.information("presidents")
a_get("/1/lists/show.json").
with(:query => {:owner_screen_name => "testcli", :slug => "presidents"}).
should have_been_made
end
it "should have the correct output" do
@list.information("presidents")
$stdout.string.should == <<-eos
ID 8863586
Description Presidents of the United States of America
Slug presidents
Screen name @sferik
Created at Mar 15 2010
Members 2
Subscribers 1
Status Not following
Mode public
URL https://twitter.com/sferik/presidents
eos
end
context "with a user passed" do
it "should request the correct resource" do
@list.information("testcli/presidents")
a_get("/1/lists/show.json").
with(:query => {:owner_screen_name => "testcli", :slug => "presidents"}).
should have_been_made
end
context "--id" do
before do
@list.options = @list.options.merge("id" => true)
stub_get("/1/lists/show.json").
with(:query => {:owner_id => "7505382", :slug => "presidents"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.information("7505382/presidents")
a_get("/1/lists/show.json").
with(:query => {:owner_id => "7505382", :slug => "presidents"}).
should have_been_made
end
end
end
context "--csv" do
before do
@list.options = @list.options.merge("csv" => true)
end
it "should have the correct output" do
@list.information("presidents")
$stdout.string.should == <<-eos
ID,Description,Slug,Screen name,Created at,Members,Subscribers,Following,Mode,URL
8863586,Presidents of the United States of America,presidents,sferik,2010-03-15 12:10:13 +0000,2,1,false,public,https://twitter.com/sferik/presidents
eos
end
end
end
describe "#members" do
before do
stub_get("/1/lists/members.json").
with(:query => {:cursor => "-1", :include_entities => "false", :owner_screen_name => "testcli", :skip_status => "true", :slug => "presidents"}).
to_return(:body => fixture("users_list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.members("presidents")
a_get("/1/lists/members.json").
with(:query => {:cursor => "-1", :include_entities => "false", :owner_screen_name => "testcli", :skip_status => "true", :slug => "presidents"}).
should have_been_made
end
it "should have the correct output" do
@list.members("presidents")
$stdout.string.rstrip.should == "@pengwynn @sferik"
end
context "--csv" do
before do
@list.options = @list.options.merge("csv" => true)
end
it "should output in CSV format" do
@list.members("presidents")
$stdout.string.should == <<-eos
ID,Since,Tweets,Favorites,Listed,Following,Followers,Screen name,Name
14100886,2008-03-08 16:34:22 +0000,3913,32,185,1871,2767,pengwynn,Wynn Netherland
7505382,2007-07-16 12:59:01 +0000,2962,727,29,88,898,sferik,Erik Michaels-Ober
eos
end
end
context "--favorites" do
before do
@list.options = @list.options.merge("favorites" => true)
end
it "should sort by number of favorites" do
@list.members("presidents")
$stdout.string.rstrip.should == "@pengwynn @sferik"
end
end
context "--followers" do
before do
@list.options = @list.options.merge("followers" => true)
end
it "should sort by number of followers" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "--friends" do
before do
@list.options = @list.options.merge("friends" => true)
end
it "should sort by number of friends" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "--listed" do
before do
@list.options = @list.options.merge("listed" => true)
end
it "should sort by number of list memberships" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "--long" do
before do
@list.options = @list.options.merge("long" => true)
end
it "should output in long format" do
@list.members("presidents")
$stdout.string.should == <<-eos
ID Since Tweets Favorites Listed Following Followers Scre...
14100886 Mar 8 2008 3,913 32 185 1,871 2,767 @pen...
7505382 Jul 16 2007 2,962 727 29 88 898 @sfe...
eos
end
end
context "--posted" do
before do
@list.options = @list.options.merge("posted" => true)
end
it "should sort by the time wshen Twitter account was created" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "--reverse" do
before do
@list.options = @list.options.merge("reverse" => true)
end
it "should reverse the order of the sort" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "--tweets" do
before do
@list.options = @list.options.merge("tweets" => true)
end
it "should sort by number of Tweets" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "--unsorted" do
before do
@list.options = @list.options.merge("unsorted" => true)
end
it "should not be sorted" do
@list.members("presidents")
$stdout.string.rstrip.should == "@sferik @pengwynn"
end
end
context "with a user passed" do
it "should request the correct resource" do
@list.members("testcli/presidents")
a_get("/1/lists/members.json").
with(:query => {:cursor => "-1", :include_entities => "false", :owner_screen_name => "testcli", :skip_status => "true", :slug => "presidents"}).
should have_been_made
end
context "--id" do
before do
@list.options = @list.options.merge("id" => true)
stub_get("/1/lists/members.json").
with(:query => {:cursor => "-1", :include_entities => "false", :owner_id => "7505382", :skip_status => "true", :slug => "presidents"}).
to_return(:body => fixture("users_list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.members("7505382/presidents")
a_get("/1/lists/members.json").
with(:query => {:cursor => "-1", :include_entities => "false", :owner_id => "7505382", :skip_status => "true", :slug => "presidents"}).
should have_been_made
end
end
end
end
describe "#remove" do
before do
@list.options = @list.options.merge("profile" => fixture_path + "/.trc")
stub_get("/1/account/verify_credentials.json").
to_return(:body => fixture("sferik.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
stub_post("/1/lists/members/destroy_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
@list.remove("presidents", "BarackObama")
a_get("/1/account/verify_credentials.json").
should have_been_made
a_post("/1/lists/members/destroy_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
should have_been_made
end
it "should have the correct output" do
stub_post("/1/lists/members/destroy_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
@list.remove("presidents", "BarackObama")
$stdout.string.should =~ /@testcli removed 1 member from the list "presidents"\./
end
context "--id" do
before do
@list.options = @list.options.merge("id" => true)
stub_post("/1/lists/members/destroy_all.json").
with(:body => {:user_id => "7505382", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:body => fixture("list.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.remove("presidents", "7505382")
a_get("/1/account/verify_credentials.json").
should have_been_made
a_post("/1/lists/members/destroy_all.json").
with(:body => {:user_id => "7505382", :slug => "presidents", :owner_screen_name => "sferik"}).
should have_been_made
end
end
context "Twitter is down" do
it "should retry 3 times and then raise an error" do
stub_post("/1/lists/members/destroy_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
to_return(:status => 502)
lambda do
@list.remove("presidents", "BarackObama")
end.should raise_error("Twitter is down or being upgraded.")
a_post("/1/lists/members/destroy_all.json").
with(:body => {:screen_name => "BarackObama", :slug => "presidents", :owner_screen_name => "sferik"}).
should have_been_made.times(3)
end
end
end
describe "#timeline" do
before do
stub_get("/1/lists/statuses.json").
with(:query => {:owner_screen_name => "testcli", :per_page => "20", :slug => "presidents", :include_entities => "false"}).
to_return(:body => fixture("statuses.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.timeline("presidents")
a_get("/1/lists/statuses.json").
with(:query => {:owner_screen_name => "testcli", :per_page => "20", :slug => "presidents", :include_entities => "false"}).
should have_been_made
end
it "should have the correct output" do
@list.timeline("presidents")
$stdout.string.should =~ /@natevillegas/
$stdout.string.should =~ /RT @gelobautista #riordan RT @WilI_Smith: Yesterday is history\. Tomorrow is a /
$stdout.string.should =~ /mystery\. Today is a gift\. That's why it's called the present\./
$stdout.string.should =~ /7 months ago/
end
context "--csv" do
before do
@list.options = @list.options.merge("csv" => true)
end
it "should output in long format" do
@list.timeline("presidents")
$stdout.string.should == <<-eos
ID,Posted at,Screen name,Text
194548121416630272,2011-04-23 22:07:41 +0000,natevillegas,RT @gelobautista #riordan RT @WilI_Smith: Yesterday is history. Tomorrow is a mystery. Today is a gift. That's why it's called the present.
194547993607806976,2011-04-23 22:07:10 +0000,TD,@kelseysilver how long will you be in town?
194547987593183233,2011-04-23 22:07:09 +0000,rusashka,@maciej hahaha :) @gpena together we're going to cover all core 28 languages!
194547824690597888,2011-04-23 22:06:30 +0000,fat,@stevej @xc i'm going to picket when i get back.
194547658562605057,2011-04-23 22:05:51 +0000,wil,@0x9900 @paulnivin http://t.co/bwVdtAPe
194547528430137344,2011-04-23 22:05:19 +0000,wangtian,"@tianhonghe @xiangxin72 oh, you can even order specific items?"
194547402550689793,2011-04-23 22:04:49 +0000,shinypb,"@kpk Pfft, I think you're forgetting mechanical television, which depended on a clever German. http://t.co/JvLNQCDm @skilldrick @hoverbird"
194547260233760768,2011-04-23 22:04:16 +0000,0x9900,@wil @paulnivin if you want to take you seriously don't say daemontools!
194547084349804544,2011-04-23 22:03:34 +0000,kpk,@shinypb @skilldrick @hoverbird invented it
194546876782092291,2011-04-23 22:02:44 +0000,skilldrick,@shinypb Well played :) @hoverbird
194546811480969217,2011-04-23 22:02:29 +0000,sam,"Can someone project the date that I'll get a 27"" retina display?"
194546738810458112,2011-04-23 22:02:11 +0000,shinypb,"@skilldrick @hoverbird Wow, I didn't even know they *had* TV in Britain."
194546727670390784,2011-04-23 22:02:09 +0000,bartt,"@noahlt @gaarf Yup, now owning @twitter -> FB from FE to daemons. Lot’s of fun. Expect improvements in the weeks to come."
194546649203347456,2011-04-23 22:01:50 +0000,skilldrick,"@hoverbird @shinypb You guys must be soooo old, I don't remember the words to the duck tales intro at all."
194546583608639488,2011-04-23 22:01:34 +0000,sean,@mep Thanks for coming by. Was great to have you.
194546388707717120,2011-04-23 22:00:48 +0000,hoverbird,"@shinypb @trammell it's all suck a ""duck blur"" sometimes."
194546264212385793,2011-04-23 22:00:18 +0000,kelseysilver,San Francisco here I come! (@ Newark Liberty International Airport (EWR) w/ 92 others) http://t.co/eoLANJZw
eos
end
end
context "--long" do
before do
@list.options = @list.options.merge("long" => true)
end
it "should output in long format" do
@list.timeline("presidents")
$stdout.string.should == <<-eos
ID Posted at Screen name Text
194548121416630272 Apr 23 2011 @natevillegas RT @gelobautista #riordan RT...
194547993607806976 Apr 23 2011 @TD @kelseysilver how long will ...
194547987593183233 Apr 23 2011 @rusashka @maciej hahaha :) @gpena tog...
194547824690597888 Apr 23 2011 @fat @stevej @xc i'm going to pic...
194547658562605057 Apr 23 2011 @wil @0x9900 @paulnivin http://t....
194547528430137344 Apr 23 2011 @wangtian @tianhonghe @xiangxin72 oh, ...
194547402550689793 Apr 23 2011 @shinypb @kpk Pfft, I think you're fo...
194547260233760768 Apr 23 2011 @0x9900 @wil @paulnivin if you want ...
194547084349804544 Apr 23 2011 @kpk @shinypb @skilldrick @hoverb...
194546876782092291 Apr 23 2011 @skilldrick @shinypb Well played :) @hov...
194546811480969217 Apr 23 2011 @sam Can someone project the date...
194546738810458112 Apr 23 2011 @shinypb @skilldrick @hoverbird Wow, ...
194546727670390784 Apr 23 2011 @bartt @noahlt @gaarf Yup, now owni...
194546649203347456 Apr 23 2011 @skilldrick @hoverbird @shinypb You guys...
194546583608639488 Apr 23 2011 @sean @mep Thanks for coming by. W...
194546388707717120 Apr 23 2011 @hoverbird @shinypb @trammell it's all ...
194546264212385793 Apr 23 2011 @kelseysilver San Francisco here I come! (...
eos
end
context "--reverse" do
before do
@list.options = @list.options.merge("reverse" => true)
end
it "should reverse the order of the sort" do
@list.timeline("presidents")
$stdout.string.should == <<-eos
ID Posted at Screen name Text
194546264212385793 Apr 23 2011 @kelseysilver San Francisco here I come! (...
194546388707717120 Apr 23 2011 @hoverbird @shinypb @trammell it's all ...
194546583608639488 Apr 23 2011 @sean @mep Thanks for coming by. W...
194546649203347456 Apr 23 2011 @skilldrick @hoverbird @shinypb You guys...
194546727670390784 Apr 23 2011 @bartt @noahlt @gaarf Yup, now owni...
194546738810458112 Apr 23 2011 @shinypb @skilldrick @hoverbird Wow, ...
194546811480969217 Apr 23 2011 @sam Can someone project the date...
194546876782092291 Apr 23 2011 @skilldrick @shinypb Well played :) @hov...
194547084349804544 Apr 23 2011 @kpk @shinypb @skilldrick @hoverb...
194547260233760768 Apr 23 2011 @0x9900 @wil @paulnivin if you want ...
194547402550689793 Apr 23 2011 @shinypb @kpk Pfft, I think you're fo...
194547528430137344 Apr 23 2011 @wangtian @tianhonghe @xiangxin72 oh, ...
194547658562605057 Apr 23 2011 @wil @0x9900 @paulnivin http://t....
194547824690597888 Apr 23 2011 @fat @stevej @xc i'm going to pic...
194547987593183233 Apr 23 2011 @rusashka @maciej hahaha :) @gpena tog...
194547993607806976 Apr 23 2011 @TD @kelseysilver how long will ...
194548121416630272 Apr 23 2011 @natevillegas RT @gelobautista #riordan RT...
eos
end
end
end
context "--number" do
before do
@list.options = @list.options.merge("number" => 1)
stub_get("/1/lists/statuses.json").
with(:query => {:owner_screen_name => "testcli", :per_page => "1", :slug => "presidents", :include_entities => "false"}).
to_return(:body => fixture("statuses.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should limit the number of results" do
@list.timeline("presidents")
a_get("/1/lists/statuses.json").
with(:query => {:owner_screen_name => "testcli", :per_page => "1", :slug => "presidents", :include_entities => "false"}).
should have_been_made
end
end
context "with a user passed" do
it "should request the correct resource" do
@list.timeline("testcli/presidents")
a_get("/1/lists/statuses.json").
with(:query => {:owner_screen_name => "testcli", :per_page => "20", :slug => "presidents", :include_entities => "false"}).
should have_been_made
end
context "--id" do
before do
@list.options = @list.options.merge("id" => true)
stub_get("/1/lists/statuses.json").
with(:query => {:owner_id => "7505382", :per_page => "20", :slug => "presidents", :include_entities => "false"}).
to_return(:body => fixture("statuses.json"), :headers => {:content_type => "application/json; charset=utf-8"})
end
it "should request the correct resource" do
@list.timeline("7505382/presidents")
a_get("/1/lists/statuses.json").
with(:query => {:owner_id => "7505382", :per_page => "20", :slug => "presidents", :include_entities => "false"}).
should have_been_made
end
end
end
end
end
| 46.596349 | 197 | 0.626023 |
03b980f34008812176d3aa3611436704d25704f9 | 889 | require 'rails_helper'
feature 'Visitor tries to access car categories and' do
after :each do
expect(current_path).to eq(new_user_session_path)
expect(page).to have_content('Para continuar, efetue login ou registre-se.')
end
scenario 'cannot view index unless logged in' do
visit root_path
expect(page).not_to have_link('Categorias de Carros')
end
scenario 'cannot view car category index unless logged in' do
visit car_categories_path
end
scenario 'and must be authenticated to see details' do
car_category = create(:car_category)
visit car_category_path(car_category)
end
scenario 'and must be authenticated to create a new category' do
visit new_car_category_path
end
scenario 'and must be authenticated to edit some category' do
car_category = create(:car_category)
visit edit_car_category_path(car_category)
end
end
| 27.78125 | 80 | 0.755906 |
287cf5abbe7c41245daecc9c5d9dbde3281fc47f | 1,120 | Openlectures::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
end
| 37.333333 | 85 | 0.773214 |
012a4d115a235715292574ad3b8dc3a65aa5e506 | 331 | class ContactSerializer
include JSONAPI::Serializer
attributes :id, :first_name, :last_name, :email, :mobile_phone, :office_phone, :affiliation, :notes, :disabled_at, :created_at, :updated_at
attribute :is_enabled do | object |
object.enabled?
end
attribute :full_name do | object |
object.full_name
end
end
| 23.642857 | 141 | 0.734139 |
6afe98fd57de6cec2b3039c00923d31d580f4a64 | 315 | module Urpi
class Builder
def initialize(pass='')
@pass=pass
end
def superpass(length = 8)
__chars = '$abcdefghijklmñopqrstuvwxyz.ABCDEFGHIJKLMNÑOPQRSTUVWXYZ_1234567890='
length.times {(@pass << __chars[rand(__chars.size)])} * Math.sqrt(__chars.size).to_i
puts @pass
end
end
end
| 24.230769 | 88 | 0.688889 |
ed5daf7457b276e042ee765994bd42b794150690 | 1,313 | class Gofumpt < Formula
desc "Stricter gofmt"
homepage "https://github.com/mvdan/gofumpt"
url "https://github.com/mvdan/gofumpt/archive/v0.1.0.tar.gz"
sha256 "802c92d3df222c9b266d785305107c58a26ea186c4dbb5989b0db97b9bce0367"
license "BSD-3-Clause"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "5bcea30433a93afeef620532b03d9c0d19e4bb864ee8be5e2a132615911f0c77"
sha256 cellar: :any_skip_relocation, big_sur: "85b2788203df32191b839d607d908e43a250d7cd687ad11705afc76f80e0c0d7"
sha256 cellar: :any_skip_relocation, catalina: "b69839b084e95f9339b776d979ebffc1382d93917aeabc6c1e1880fb6a9056c7"
sha256 cellar: :any_skip_relocation, mojave: "b254aafd541d63d411ec6a034cf88c926b5354cf59bbc37fbaf6eb051fe3a14e"
end
depends_on "go"
def install
system "go", "build", *std_go_args, "-ldflags", "-s -w -X main.version=#{version}"
end
test do
assert_match version.to_s, shell_output("#{bin}/gofumpt --version")
(testpath/"test.go").write <<~EOS
package foo
func foo() {
println("bar")
}
EOS
(testpath/"expected.go").write <<~EOS
package foo
func foo() {
println("bar")
}
EOS
assert_match shell_output("#{bin}/gofumpt test.go"), (testpath/"expected.go").read
end
end
| 29.840909 | 122 | 0.714395 |
7a866c385d7b90daf1d0d1ddeec11ea5daecc10a | 419 | Rails.application.routes.draw do
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
namespace :api do
namespace :v1 do
resources :users
resources :artists
resources :albums
resources :favorites
post "/auth", to: "auth#create"
get "current_user", to: "auth#show"
end
end
end
# http://localhost:3000/knock/auth_token
| 20.95 | 101 | 0.675418 |
d557bd66058b6d7cf50af84cf0ea474f9972e7b0 | 11,990 | # frozen_string_literal: true
class Gitlab::Client
# Defines methods related to groups.
# @see https://docs.gitlab.com/ce/api/groups.html
module Groups
# Gets a list of groups.
#
# @example
# Gitlab.groups
# Gitlab.groups({ per_page: 40, page: 2 })
#
# @param [Hash] options A customizable set of options.
# @option options [Integer] :page The page number.
# @option options [Integer] :per_page The number of results per page.
# @return [Array<Gitlab::ObjectifiedHash>]
def groups(options = {})
get('/groups', query: options)
end
# Gets a single group.
#
# @example
# Gitlab.group(42)
#
# @param [Integer] id The ID of a group.
# @param [Hash] options A customizable set of options.
# @option options [Boolean] :with_custom_attributes Include custom attributes in response (admins only)
# @option options [Boolean] :with_projects Include details about group projects (default: true)
# @return [Gitlab::ObjectifiedHash]
def group(id, options = {})
get("/groups/#{url_encode id}", query: options)
end
# Creates a new group.
#
# @example
# Gitlab.create_group('new-group', 'group-path')
# Gitlab.create_group('gitlab', 'gitlab-path', { description: 'New Gitlab project' })
#
# @param [String] name The name of a group.
# @param [String] path The path of a group.
# @return [Gitlab::ObjectifiedHash] Information about created group.
def create_group(name, path, options = {})
body = { name: name, path: path }.merge(options)
post('/groups', body: body)
end
# Deletes a group.
#
# @example
# Gitlab.delete_group(42)
# @param [Integer] id The ID of a group
# @return [Gitlab::ObjectifiedHash] Information about the deleted group.
def delete_group(id)
delete("/groups/#{url_encode id}")
end
# Get a list of group members.
#
# @example
# Gitlab.group_members(1)
# Gitlab.group_members(1, { per_page: 40 })
#
# @param [Integer] id The ID of a group.
# @param [Hash] options A customizable set of options.
# @option options [Integer] :page The page number.
# @option options [Integer] :per_page The number of results per page.
# @return [Array<Gitlab::ObjectifiedHash>]
def group_members(id, options = {})
get("/groups/#{url_encode id}/members", query: options)
end
# Get a list of group members that are billable.
#
# @example
# Gitlab.group_billable_members(1)
# Gitlab.group_billable_members(1, { per_page: 40 })
#
# @param [Integer] id The ID of a group.
# @param [Hash] options A customizable set of options.
# @option options [Integer] :page The page number.
# @option options [Integer] :per_page The number of results per page.
# @return [Array<Gitlab::ObjectifiedHash>]
def group_billable_members(id, options = {})
get("/groups/#{url_encode id}/billable_members", query: options)
end
# Get details of a single group member.
#
# @example
# Gitlab.group_member(1, 10)
#
# @param [Integer] team_id The ID of the group to find a member in.
# @param [Integer] user_id The user id of the member to find.
# @return [Gitlab::ObjectifiedHash] (id, username, name, email, state, access_level ...)
def group_member(team_id, user_id)
get("/groups/#{url_encode team_id}/members/#{user_id}")
end
# Gets a list of merge requests of a group.
#
# @example
# Gitlab.group_merge_requests(5)
#
# @param [Integer, String] group_id The ID or name of a group.
# @param [Hash] options A customizable set of options.
# @return [Array<Gitlab::ObjectifiedHash>]
def group_merge_requests(group, options = {})
get("/groups/#{group}/merge_requests", query: options)
end
# Adds a user to group.
#
# @example
# Gitlab.add_group_member(1, 2, 40)
#
# @param [Integer] team_id The group id to add a member to.
# @param [Integer] user_id The user id of the user to add to the team.
# @param [Integer] access_level Project access level.
# @return [Gitlab::ObjectifiedHash] Information about added team member.
def add_group_member(team_id, user_id, access_level)
post("/groups/#{url_encode team_id}/members", body: { user_id: user_id, access_level: access_level })
end
# Edit a user of a group.
#
# @example
# Gitlab.edit_group_member(1, 2, 40)
#
# @param [Integer] team_id The group id of member to edit.
# @param [Integer] user_id The user id of the user to edit.
# @param [Integer] access_level Project access level.
# @return [Gitlab::ObjectifiedHash] Information about edited team member.
def edit_group_member(team_id, user_id, access_level)
put("/groups/#{url_encode team_id}/members/#{user_id}", body: { access_level: access_level })
end
# Removes user from user group.
#
# @example
# Gitlab.remove_group_member(1, 2)
#
# @param [Integer] team_id The group ID.
# @param [Integer] user_id The ID of a user.
# @return [Gitlab::ObjectifiedHash] Information about removed team member.
def remove_group_member(team_id, user_id)
delete("/groups/#{url_encode team_id}/members/#{user_id}")
end
# Transfers a project to a group
#
# @example
# Gitlab.transfer_project_to_group(3, 50)
#
# @param [Integer] id The ID of a group.
# @param [Integer] project_id The ID of a project.
def transfer_project_to_group(id, project_id)
body = { id: id, project_id: project_id }
post("/groups/#{url_encode id}/projects/#{project_id}", body: body)
end
# Search for groups by name
#
# @example
# Gitlab.group_search('gitlab')
#
# @param [String] search A string to search for in group names and paths.
# @param [Hash] options A customizable set of options.
# @option options [String] :per_page Number of projects to return per page
# @option options [String] :page The page to retrieve
# @return [Array<Gitlab::ObjectifiedHash>]
def group_search(search, options = {})
options[:search] = search
get('/groups', query: options)
end
# Get a list of projects under a group
# @example
# Gitlab.group_projects(1)
#
# @param [Integer] id The ID of a group
# @return [Array<Gitlab::ObjectifiedHash>] List of projects under a group
def group_projects(id, options = {})
get("/groups/#{url_encode id}/projects", query: options)
end
# Get a list of subgroups under a group
# @example
# Gitlab.group_subgroups(1)
#
# @param [Integer] id the ID of a group
# @param [Hash] options A customizable set of options.
# @option options [String] :skip_groups Skip the group IDs passed.
# @option options [String] :all_available Show all the groups you have access to (defaults to false for authenticated users).
# @option options [String] :search Return the list of authorized groups matching the search criteria.
# @option options [String] :order_by Order groups by name or path. Default is name.
# @option options [String] :sort Order groups in asc or desc order. Default is asc.
# @option options [String] :statistics Include group statistics (admins only).
# @option options [String] :owned Limit to groups owned by the current user.
# @return [Array<Gitlab::ObjectifiedHash>] List of subgroups under a group
def group_subgroups(id, options = {})
get("/groups/#{url_encode id}/subgroups", query: options)
end
# Updates an existing group.
#
# @example
# Gitlab.edit_group(42)
# Gitlab.edit_group(42, { name: 'Group Name' })
#
# @param [Integer] group The ID.
# @param [Hash] options A customizable set of options
# @option options [String] :name The name of the group.
# @option options [String] :path The path of the group.
# @option options [String] :description The description of the group.
# @option options [String] :visibility The visibility level of the group. Can be private, internal, or public
# @option options [String] :lfs_enabled Enable/disable Large File Storage (LFS) for the projects in this group.
# @option options [String] :request_access_enabled Allow users to request member access.
# @return [Gitlab::ObjectifiedHash] Information about the edited group.
def edit_group(id, options = {})
put("/groups/#{url_encode id}", body: options)
end
# Gets a list of issues of a group.
#
# @example
# Gitlab.group_issues(5)
#
# @param [Integer, String] group_id The ID or name of a group.
# @param [Hash] options A customizable set of options.
# @return [Array<Gitlab::ObjectifiedHash>]
def group_issues(group, options = {})
get("/groups/#{group}/issues", query: options)
end
# Sync group with LDAP
#
# @example
# Gitlab.sync_ldap_group(1)
#
# @param [Integer] id The ID or name of a group.
# @return [Array<Gitlab::ObjectifiedHash>]
def sync_ldap_group(id)
post("/groups/#{url_encode id}/ldap_sync")
end
# Add LDAP group link
#
# @example
# Gitlab.add_ldap_group_links(1, 'all', 50, 'ldap')
#
# @param [Integer] id The ID of a group
# @param [String] cn The CN of a LDAP group
# @param [Integer] group_access Minimum access level for members of the LDAP group.
# @param [String] provider LDAP provider for the LDAP group
# @return [Gitlab::ObjectifiedHash] Information about added ldap group link
def add_ldap_group_links(id, commonname, group_access, provider)
post("/groups/#{url_encode id}/ldap_group_links", body: { cn: commonname, group_access: group_access, provider: provider })
end
# Delete LDAP group link
#
# @example
# Gitlab.delete_ldap_group_links(1, 'all')
#
# @param [Integer] id The ID of a group
# @param [String] cn The CN of a LDAP group
# @return [Gitlab::ObjectifiedHash] Empty hash
def delete_ldap_group_links(id, commonname, provider)
delete("/groups/#{url_encode id}/ldap_group_links/#{url_encode provider}/#{url_encode commonname}")
end
# Gets group custom_attributes.
#
# @example
# Gitlab.group_custom_attributes(2)
#
# @param [Integer] group_id The ID of a group.
# @return [Gitlab::ObjectifiedHash]
def group_custom_attributes(group_id)
url = "/groups/#{group_id}/custom_attributes"
get(url)
end
# Gets single group custom_attribute.
#
# @example
# Gitlab.group_custom_attribute(key, 2)
#
# @param [String] key The custom_attributes key
# @param [Integer] group_id The ID of a group.
# @return [Gitlab::ObjectifiedHash]
def group_custom_attribute(key, group_id)
url = "/groups/#{group_id}/custom_attributes/#{key}"
get(url)
end
# Creates a new custom_attribute
#
# @example
# Gitlab.add_custom_attribute('some_new_key', 'some_new_value', 2)
#
# @param [String] key The custom_attributes key
# @param [String] value The custom_attributes value
# @param [Integer] group_id The ID of a group.
# @return [Gitlab::ObjectifiedHash]
def add_group_custom_attribute(key, value, group_id)
url = "/groups/#{group_id}/custom_attributes/#{key}"
put(url, body: { value: value })
end
# Delete custom_attribute
# Will delete a custom_attribute
#
# @example
# Gitlab.custom_attribute("somekey", 2)
#
# @param [String] key The custom_attribute key to delete
# @param [Integer] group_id The ID of a group.
# @return [Boolean]
def delete_group_custom_attribute(key, group_id = nil)
url = "/groups/#{group_id}/custom_attributes/#{key}"
delete(url)
end
end
end
| 36.333333 | 129 | 0.652127 |
2633abc635c675c33435ffa5dfa736882625f35d | 403 | # frozen_string_literal: true
title 'rider archives profile'
control 'rider archive' do
impact 1.0
title 'should be installed'
describe file('/etc/default/rider.sh') do
it { should exist }
end
# describe file('/usr/local/jetbrains/rider-*/bin/rider.sh') do
# it { should exist }
# end
describe file('/usr/share/applications/rider.desktop') do
it { should exist }
end
end
| 21.210526 | 65 | 0.682382 |
11684af64fb52a493da2cb0564e7dbd2c5fa4e5d | 167 | Dummy::Application.routes.draw do
match "/home(.:format)", :to => "home#index", :as => :home
match "/another(.:format)", :to => "home#another", :as=> :another
end
| 33.4 | 67 | 0.616766 |
26be618904f1dd0a6f1368e31af6671b0e1bb446 | 467 | module Fog
module Sql
class AzureRM
# Mock class for Sql Request
class Real
def check_database_exists(resource_group, server_name, name)
msg = "Checking SQL Database #{name}"
Fog::Logger.debug msg
# This module needs to be updated to azure sdk
end
end
# Mock class for Sql Request
class Mock
def check_database_exists(*)
true
end
end
end
end
end
| 21.227273 | 68 | 0.584582 |
7afb3b806fc5689fcf739c0facbff41e982a41e6 | 702 | namespace :db do
desc "Create default admin account ([email protected]:aA1aaaa)."
task create_default_admin: :environment do
puts 'email: [email protected]; pwd: aA1aaaa'
admin = User.new(email: "[email protected]",
password: "aA1aaaa",
password_confirmation: "aA1aaaa",
firstname: "admin",
lastname: "admin")
admin.skip_confirmation!
admin.blocked = false
admin.save
admin.toggle!(:admin)
end
desc "drop default admin account ([email protected])."
task drop_default_admin: :environment do
admin = User.find_by_email "[email protected]"
admin.delete
end
end
| 33.428571 | 66 | 0.619658 |
18f169ea2928b87bab1ef11effa31907fc10187f | 1,432 | require File.expand_path(File.dirname(__FILE__) + "/../spec_helper")
require 'capybara/rspec'
# Put your acceptance spec helpers inside /spec/acceptance/support
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each {|f| require f}
# RSpec.configure do |config|
# end
def sign_up_new_user
user = User.make
visit "/"
find('#join_now_link').click
click_link 'Sign up with Email'
fill_in 'Full name', with: user.full_name
fill_in 'Email', with: user.email
find('#user_password').set user.password
find('#user_password_confirmation').set user.password
click_button 'Sign up'
user
end
def login(user)
visit "/"
find('.nav #login_link').click
fill_in 'Email', with: user.email
fill_in 'Password', with: user.password
click_button 'Sign in'
end
def login_normal
normal = User.make!
login(normal)
normal
end
def login_admin
admin = User.make!(:admin)
login(admin)
admin
end
def login_mod
mod = User.make!(:mod)
login(mod)
mod
end
def logout
click_link 'Sign out'
end
def sign_up_with_linkedin
visit "/"
find('#join_now_link').click
click_link 'Sign up with Linkedin'
end
def login_with_linkedin
visit "/"
find('.nav #login_link').click
click_link 'Sign in with Linkedin'
end
def link_to_linkedin
visit "/users/edit"
click_link 'Link to your Linkedin account'
end
def unlink_from_linkedin
visit "/users/edit"
click_link 'Unlink from your Linkedin account'
end
| 19.351351 | 69 | 0.722067 |
bb0cba8ab4cfd97afe50d85184bd9a3557858275 | 1,156 | # coding: utf-8
require 'rails_helper'
RSpec.describe AttachmentsController, :type => :controller do
describe 'POST #create' do
before do
Attachment.destroy_all
@user = create(:user)
sign_in @user
end
it 'saves the new attachment in the database' do
expect {
post :create, params: { attachment: attributes_for(:attachment) }
}.to change(Attachment, :count).by(1)
end
it 'returns the json which has image_name and image_url' do
post :create, params: { attachment: attributes_for(:attachment) }
attachment = Attachment.first
res = JSON.parse response.body
expect(res['image_name']).to eq 'test.jpg'
expect(res['image_url']).to eq "/uploads/attachment/image/#{attachment.id}/test.jpg"
end
it 'returns the json for failure if the new attachment is not saved' do
allow_any_instance_of(Attachment).to receive(:save).and_return(false)
post :create, params: { attachment: attributes_for(:attachment) }
res = JSON.parse response.body
expect(res['image_name']).to eq '画像のアップロードに失敗しました'
expect(res['image_url']).to be_nil
end
end
end
| 34 | 90 | 0.675606 |
3328700b6ae59ef3961eb90d8bb2944c4b40f6b2 | 1,886 | require 'ihasa/lua'
module Ihasa
# Bucket class. The bucket fills up to `burst` tokens at `rate` tokens per
# second. Each accept? or accept! call decrements it by 1.
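# A minimal usage sketch (hypothetical prefix and workload; assumes a reachable Redis):
#
#   bucket = Ihasa::Bucket.create(5, 10, 'api:acme', Redis.new)
#   bucket.accept? { call_backend }  # yields while tokens remain, otherwise returns false
#   bucket.accept!                   # raises EmptyBucket once the bucket is drained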
class Bucket
class << self
def create(*args)
new(*args).tap(&:save)
end
REDIS_VERSION_WITH_REPLICATE_COMMANDS_SUPPORT = 3.2
def legacy_mode?(redis)
redis_version(redis) < REDIS_VERSION_WITH_REPLICATE_COMMANDS_SUPPORT
end
private
def redis_version(redis)
Float(redis.info['redis_version'][/\d+\.\d+/])
end
end
attr_reader :redis, :keys, :rate, :burst, :prefix
def initialize(rate, burst, prefix, redis)
@implementation =
if self.class.legacy_mode?(redis)
require 'ihasa/bucket/legacy_implementation'
LegacyImplementation.instance
else
require 'ihasa/bucket/implementation'
Implementation.instance
end
@prefix = prefix
@keys = Ihasa::OPTIONS.map { |opt| "#{prefix}:#{opt.upcase}" }
@redis = redis
@rate = Float rate
@burst = Float burst
end
SETUP_ADVICE = 'Ensure that the method '\
'Ihasa::Bucket#save was called.'.freeze
SETUP_ERROR = ('Redis raised an error: %{msg}. ' + SETUP_ADVICE).freeze
class RedisNamespaceSetupError < RuntimeError; end
def accept?
result = @implementation.accept?(self) == OK
return yield if result && block_given?
result
rescue Redis::CommandError => e
raise RedisNamespaceSetupError, SETUP_ERROR % { msg: e.message }
end
class EmptyBucket < RuntimeError; end
def accept!
result = (block_given? ? accept?(&Proc.new) : accept?)
raise EmptyBucket, "Bucket #{prefix} throttle limit" unless result
result
end
def save
@implementation.save(self)
end
def delete
redis.del(keys)
end
end
end
| 26.56338 | 76 | 0.634146 |
26b9a88babf92e46d1a3c44548a185fd6472b1d1 | 1,567 | # frozen_string_literal: true
$LOAD_PATH.unshift(File.dirname(__FILE__) + "/lib")
require "version"
Gem::Specification.new do |s|
s.name = %q(cfndsl-pipeline)
s.authors = [
"Cam Maxwell"
]
s.homepage = 'https://github.com/cmaxwellau/cfndsl-pipeline.git'
s.author = 'Cam Maxwell'
s.email = '[email protected]'
s.version = CfnDslPipeline::VERSION
s.date = %q(2019-08-19)
s.summary = %q(Integrated build pipeline for building CloudFormation with CfnDsl)
s.description = %q(Integrated CfnDsl CloudFormation template generation pipeline that integrates cfn_nag, AWS template validation, and AWS template costing (where possible), and generated `aws cloudformation deploy` compatible parameters files)
s.license = 'MIT'
s.files = `git ls-files`.split($INPUT_RECORD_SEPARATOR)
s.require_paths = ["lib"]
s.required_ruby_version = '>= 2.4.1'
s.bindir = 'bin'
s.add_dependency('cfn-nag', '~> 0.4')
s.add_dependency('cfndsl', '~> 0.17')
s.add_dependency('aws-sdk-cloudformation', '~> 1')
s.add_dependency('aws-sdk-s3', '~> 1')
s.add_dependency('uuid', '~> 2.3')
s.add_dependency('colorize', '~> 0.8')
s.executables << 'cfndsl_pipeline'
s.add_development_dependency "bundler", "~> 2.0"
s.add_development_dependency "rake"
s.add_development_dependency "rspec"
s.add_development_dependency "cfndsl"
s.add_development_dependency "rubocop", ">= 0.49.0"
end
| 38.219512 | 256 | 0.641991 |
6abc5c6c29033a13b1e2a84c93657c455c6e4c36 | 300 | module Truepill
module Resources
class Patient < RestfulResource
public :list, :read
def create(data)
put(resource_base, data)
end
def get_prescriptions(data)
path = "#{resource_path(data)}/prescriptions"
get(path)
end
end
end
end
| 16.666667 | 53 | 0.61 |
bfba97fe61db47f9e3ab269a3b3f04e05aebeac8 | 1,538 | require 'spec_helper'
describe DryRunnable do
class Agents::SandboxedAgent < Agent
default_schedule "3pm"
can_dry_run!
def check
log "Logging"
create_event payload: { test: "foo" }
error "Recording error"
create_event payload: { test: "bar" }
self.memory = { last_status: "ok" }
save!
end
end
before do
stub(Agents::SandboxedAgent).valid_type?("Agents::SandboxedAgent") { true }
@agent = Agents::SandboxedAgent.create(name: "some agent") { |agent|
agent.user = users(:bob)
}
end
it "traps logging, event emission and memory updating" do
results = nil
expect {
results = @agent.dry_run!
}.not_to change {
[users(:bob).agents.count, users(:bob).events.count, users(:bob).logs.count]
}
expect(results[:log]).to match(/\AI, .+ INFO -- : Logging\nE, .+ ERROR -- : Recording error\n/)
expect(results[:events]).to eq([{ test: 'foo' }, { test: 'bar' }])
expect(results[:memory]).to eq({ "last_status" => "ok" })
end
it "does not perform dry-run if Agent does not support dry-run" do
stub(@agent).can_dry_run? { false }
results = nil
expect {
results = @agent.dry_run!
}.not_to change {
[users(:bob).agents.count, users(:bob).events.count, users(:bob).logs.count]
}
expect(results[:log]).to match(/\AE, .+ ERROR -- : Exception during dry-run. SandboxedAgent does not support dry-run: /)
expect(results[:events]).to eq([])
expect(results[:memory]).to eq({})
end
end
| 26.982456 | 124 | 0.619636 |
7ab20115b015a85b6d3d4ec3ed3c0838a0f9ef85 | 1,925 | class ProjectsController < ApplicationController
before_action :set_project, only: [:show, :edit, :update, :destroy]
# GET /projects
# GET /projects.json
def index
@projects = Project.all
end
# GET /projects/1
# GET /projects/1.json
def show
end
# GET /projects/new
def new
@project = Project.new
end
# GET /projects/1/edit
def edit
end
# POST /projects
# POST /projects.json
def create
@project = Project.new(project_params)
respond_to do |format|
if @project.save
format.html { redirect_to @project, notice: 'Project was successfully created.' }
format.json { render :show, status: :created, location: @project }
else
format.html { render :new }
format.json { render json: @project.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /projects/1
# PATCH/PUT /projects/1.json
def update
respond_to do |format|
if @project.update(project_params)
format.html { redirect_to @project, notice: 'Project was successfully updated.' }
format.json { render :show, status: :ok, location: @project }
else
format.html { render :edit }
format.json { render json: @project.errors, status: :unprocessable_entity }
end
end
end
# DELETE /projects/1
# DELETE /projects/1.json
def destroy
@project.destroy
respond_to do |format|
format.html { redirect_to projects_url, notice: 'Project was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_project
@project = Project.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def project_params
params.require(:project).permit(:name, :description, :active, :worm_ids)
end
end
| 26.013514 | 93 | 0.661818 |
08a88f0b6f29baf0c4fba9f8119eafed39e9b8e9 | 1,652 | require 'test_helper'
class FollowingTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
def setup
@user = users(:takuya)
@other = users(:kasumi)
log_in_as(@user)
end
test "following page" do
get following_user_path(@user)
assert_not @user.following.empty?
assert_match @user.following.count.to_s, response.body
@user.following.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "followers page" do
get followers_user_path(@user)
assert_not @user.followers.empty?
assert_match @user.followers.count.to_s, response.body
@user.followers.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "should follow a user the standard way" do
assert_difference '@user.following.count', 1 do
post relationships_path, params: { followed_id: @other.id }
end
end
test "should follow a user with Ajax" do
assert_difference '@user.following.count', 1 do
post relationships_path, xhr: true, params: { followed_id: @other.id }
end
end
test "should unfollow a user the standard way" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship)
end
end
test "should unfollow a user with Ajax" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship), xhr: true
end
end
end
| 27.533333 | 77 | 0.697337 |
e83aa8b3f4905cfd29824a5f18bde83ed7ab5a4b | 1,201 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for DatadogAPIClient::V1::PagerDutyServiceName
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe DatadogAPIClient::V1::PagerDutyServiceName do
let(:instance) { DatadogAPIClient::V1::PagerDutyServiceName.new }
describe 'test an instance of PagerDutyServiceName' do
it 'should create an instance of PagerDutyServiceName' do
expect(instance).to be_instance_of(DatadogAPIClient::V1::PagerDutyServiceName)
end
end
describe 'test attribute "service_name"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 31.605263 | 107 | 0.777685 |
0182cf4cf93b831f80e51cf6b4a21909f917ea4f | 96 | # frozen_string_literal: true
module GraphQL
module AnyCable
VERSION = "1.1.3"
end
end
| 12 | 29 | 0.708333 |
d50ea48a59b4661c521e1c0ec899dc07c8c259e5 | 505 | class AddPipelineExecutionStrategyToSamples < ActiveRecord::Migration[5.1]
def change
add_column :samples, :pipeline_execution_strategy, :string, default: "directed_acyclic_graph", comment: "A soft enum (string) describing which pipeline infrastructure to run the sample on."
add_column :pipeline_runs, :pipeline_execution_strategy, :string, default: "directed_acyclic_graph", comment: "A soft enum (string) describing which pipeline infrastructure the pipeline run was performed on."
end
end
| 72.142857 | 212 | 0.80396 |
e8ee75d34166176a9875a02a69ea56b99e114bbe | 3,950 | # frozen_string_literal: true
module ApplicationHelper
def markdown(body, opts = {})
opts[:format] = "markdown"
raw BlueDoc::HTML.render(body, opts)
end
def logo_tag(href: "/")
site_logo = Setting.site_logo
if site_logo&.start_with?("data:")
style = "background: none; padding: 0"
link_to href, class: "navbar-brand", style: style do
image_tag(Setting.site_logo)
end
else
link_to href, class: "navbar-brand" do
yield
end
end
end
def sanitize_html(html)
raw Sanitize.fragment(html, BlueDoc::Sanitize::DEFAULT)
end
def close_button
raw %(<i class="notice-close js-notice-close fas fa-cancel"></i>)
end
def icon_tag(name, opts = {})
icon_html = content_tag(:i, "", class: "fas fa-#{name} #{opts[:class]}")
return icon_html if opts[:label].blank?
raw [icon_html, "<span>#{opts[:label]}</span>"].join(" ")
end
def notice_message
flash_messages = []
flash_messages << flash_block_tag(:success, flash[:notice]) if flash[:notice]
flash_messages << flash_block_tag(:error, flash[:alert]) if flash[:alert]
content_tag(:div, flash_messages.join("\n").html_safe, class: "navbar-notice")
end
def flash_block_tag(type, message)
content_tag(:div, class: "notice notice-#{type}") do
close_button + message
end
end
def timeago(t)
return "" if t.blank?
if t < 2.weeks.ago
return content_tag(:span, class: "time", title: t.iso8601) { l t, format: :short }
end
content_tag(:span, class: "timeago", datetime: t.iso8601, title: t.iso8601) { l t, format: :short }
end
def title_tag(*texts)
text = texts.join(" - ")
content_for :title, h("#{text} - BlueDoc")
end
def action_button_tag(target, action_type, opts = {})
return "" if target.blank?
label = opts[:label]
undo_label = opts[:undo_label]
icon = opts[:icon]
undo = opts[:undo]
with_count = opts[:with_count]
label ||= t("shared.action_button.#{action_type}")
undo_label ||= t("shared.action_button.un#{action_type}")
icon ||= action_type.downcase
action_type_pluralize = action_type.to_s.pluralize
action_count = "#{action_type_pluralize}_count"
url = target.to_path("/action?#{{action_type: action_type}.to_query}")
data = {method: :post, label: label, undo_label: undo_label, remote: true, disable: true}
class_names = opts[:class] || "btn btn-sm"
if with_count
class_names += " btn-with-count"
end
btn_label = label.dup
if undo.nil?
undo = current_user && User.find_action(action_type, target: target, user: current_user)
end
if undo
data[:method] = :delete
btn_label = undo_label.dup
end
out = []
social_count = ""
if with_count && target.respond_to?(action_count)
social_count = %(<i class="social-count" >#{target.send(action_count)}</i>)
end
out << link_to(raw([icon_tag(icon, label: btn_label), social_count].join("")), url, data: data, class: class_names)
content_tag(:span, raw(out.join("")), class: "#{target.class.name.underscore.singularize}-#{target.id}-#{action_type}-button action-button")
end
# Render div.form-group with a block, it including validation error below input
#
# form_group(f, :email) do
# f.email_field :email, class: "form-control"
# end
def form_group(form, field, opts = {}, &block)
has_errors = form.object.errors[field].present?
opts[:class] ||= "form-group"
opts[:class] += " has-error" if has_errors
content_tag :div, class: opts[:class] do
concat form.label field, class: "control-label" if opts[:label] != false
concat capture(&block)
concat errors_for(form, field)
end
end
def errors_for(form, field)
message = form.object.errors.full_messages_for(field)&.first
return nil if message.blank?
content_tag(:div, message, class: "form-error")
end
end
| 28.623188 | 144 | 0.651392 |
11616f5f426da40c9801f05ef3874c23c6f176c8 | 273 | require 'twilio-ruby'
ACCOUNT_SID = ''
AUTH_TOKEN = ''
FROM_NUMBER = ''
def send_sms(phone_number, body_text)
client = Twilio::REST::Client.new(ACCOUNT_SID, AUTH_TOKEN)
client.messages.create(
from: FROM_NUMBER,
to: phone_number,
body: body_text
)
end
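
# Example usage (the phone number and message below are placeholders, not real values):
#   send_sms('+15551234567', 'Your order has shipped')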
| 17.0625 | 60 | 0.703297 |
7902415609139a8864bc6632b74977aae436c672 | 3,059 | class Thrift < Formula
desc "Framework for scalable cross-language services development"
homepage "https://thrift.apache.org/"
license "Apache-2.0"
stable do
url "https://www.apache.org/dyn/closer.lua?path=thrift/0.15.0/thrift-0.15.0.tar.gz"
mirror "https://archive.apache.org/dist/thrift/0.15.0/thrift-0.15.0.tar.gz"
sha256 "d5883566d161f8f6ddd4e21f3a9e3e6b8272799d054820f1c25b11e86718f86b"
# Fix -flat_namespace being used on Big Sur and later.
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/03cf8088210822aa2c1ab544ed58ea04c897d9c4/libtool/configure-big_sur.diff"
sha256 "35acd6aebc19843f1a2b3a63e880baceb0f5278ab1ace661e57a502d9d78c93c"
end
end
bottle do
sha256 cellar: :any, arm64_monterey: "77ea98789bcafe47df87460c0b236c2a4d0883d3a87f189de5fa228f14b920d1"
sha256 cellar: :any, arm64_big_sur: "8352c16aaef3267d863dc31aa6e6562ab59ce5823b61296a0a2dfe62fb747112"
sha256 cellar: :any, monterey: "8fafe39b9a14df03183aadb76da3d2005972de99f9ef99820a760e71bf131f8b"
sha256 cellar: :any, big_sur: "e371159616481c100f45d01660e70ff68d9df907baef025982e8650ff485b3f4"
sha256 cellar: :any, catalina: "77ffd9f8aea765983f90ae45c94afd32502d035bb09f58b601290f65fc2966e0"
sha256 cellar: :any, mojave: "e1f46a93099ec7b01caf380433a05d5014686cc5ea59293286fc55a5b0efd39c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "fa1ac943dc0f963402b71ce1189f19e62aa51510165c4f3607dc45b6b95f273f"
end
head do
url "https://github.com/apache/thrift.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
end
depends_on "bison" => :build
depends_on "boost" => [:build, :test]
depends_on "[email protected]"
def install
system "./bootstrap.sh" unless build.stable?
args = %W[
--disable-debug
--disable-tests
--prefix=#{prefix}
--libdir=#{lib}
--with-openssl=#{Formula["[email protected]"].opt_prefix}
--without-erlang
--without-haskell
--without-java
--without-perl
--without-php
--without-php_extension
--without-python
--without-ruby
--without-swift
]
ENV.cxx11 if ENV.compiler == :clang
# Don't install extensions to /usr:
ENV["PY_PREFIX"] = prefix
ENV["PHP_PREFIX"] = prefix
ENV["JAVA_PREFIX"] = buildpath
system "./configure", *args
ENV.deparallelize
system "make"
system "make", "install"
end
test do
(testpath/"test.thrift").write <<~'EOS'
service MultiplicationService {
i32 multiply(1:i32 x, 2:i32 y),
}
EOS
system "#{bin}/thrift", "-r", "--gen", "cpp", "test.thrift"
system ENV.cxx, "-std=c++11", "gen-cpp/MultiplicationService.cpp",
"gen-cpp/MultiplicationService_server.skeleton.cpp",
"-I#{include}/include",
"-L#{lib}", "-lthrift"
end
end
| 34.370787 | 142 | 0.675057 |
1aacc57f20f17136b11ab3d6c2039cd047300bea | 1,043 | require "matrix"
require "kablammo"
require_relative "./cell"
require_relative "./danger_score"
module Scaredy
class DangerMatrix
include Strategy::Constants
attr_accessor :matrix
def self.score(board, me, opponents)
danger_matrix = DangerMatrix.new()
danger_matrix.score_cells(me, board, opponents)
danger_matrix
end
def self.from_array(array)
danger_matrix = DangerMatrix.new()
matrix = Matrix[*array]
danger_matrix.matrix = matrix
danger_matrix
end
def at(x, y)
return nil if x < 0
return nil if y < 0
@matrix[y, x]
end
def safe?(x, y)
at(x, y) == 0
end
def score_cells(me, board, opponents)
@matrix = map_cells(board.width, board.height) do |cell|
Scaredy::DangerScore.score(cell, me, opponents)
end
end
private
    def map_cells(width, height, &block)
      # Matrix.build yields (row, column); rows span y (height) and columns span x (width),
      # matching the row/column order used by #at (@matrix[y, x]).
      Matrix.build(height, width) do |y, x|
        block.call(Cell.new(x, y))
      end
    end
end
end
| 19.679245 | 62 | 0.623202 |
ac0c373603ba30c2fbdf64b59c3a6fd6745bf2a3 | 2,285 | # frozen_string_literal: true
require "action_policy/testing"
module ActionPolicy
module RSpec
# Authorization matcher `be_authorized_to`.
#
# Verifies that a block of code has been authorized using specific policy.
#
# Example:
#
# # in controller/request specs
# subject { patch :update, id: product.id }
#
# it "is authorized" do
# expect { subject }
# .to be_authorized_to(:manage?, product)
# .with(ProductPolicy)
# end
#
class BeAuthorizedTo < ::RSpec::Matchers::BuiltIn::BaseMatcher
attr_reader :rule, :target, :policy, :actual_calls
def initialize(rule, target)
@rule = rule
@target = target
end
def with(policy)
@policy = policy
self
end
def match(_expected, actual)
raise "This matcher only supports block expectations" unless actual.is_a?(Proc)
@policy ||= ::ActionPolicy.lookup(target)
begin
ActionPolicy::Testing::AuthorizeTracker.tracking { actual.call }
rescue ActionPolicy::Unauthorized
# we don't want to care about authorization result
end
@actual_calls = ActionPolicy::Testing::AuthorizeTracker.calls
actual_calls.any? { _1.matches?(policy, rule, target) }
end
def does_not_match?(*)
raise "This matcher doesn't support negation"
end
def supports_block_expectations?() = true
def failure_message
"expected #{formatted_record} " \
"to be authorized with #{policy}##{rule}, " \
"but #{actual_calls_message}"
end
def actual_calls_message
if actual_calls.empty?
"no authorization calls have been made"
else
"the following calls were encountered:\n" \
"#{formatted_calls}"
end
end
def formatted_calls
actual_calls.map do
" - #{_1.inspect}"
end.join("\n")
end
def formatted_record(record = target) = ::RSpec::Support::ObjectFormatter.format(record)
end
end
end
RSpec.configure do |config|
config.include(Module.new do
def be_authorized_to(rule, target)
ActionPolicy::RSpec::BeAuthorizedTo.new(rule, target)
end
end)
end
| 25.388889 | 94 | 0.61663 |
e9f58701e957faafdc750c7181e6a491ce8bdd2a | 3,481 | module QuickbooksWebConnector
class Job
attr_accessor :response_xml
def initialize(payload)
@payload = payload
end
# Creates a job by placing it on the queue. Expects a request builder class
# name, a response handler class name, and an optional array of arguments to
# pass to the class' `perform` method.
#
# Raises an exception if no class is given.
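    #
    # A minimal usage sketch (the builder/handler class names below are hypothetical,
    # not part of this library):
    #
    #   QuickbooksWebConnector::Job.create(InvoiceRequestBuilder, InvoiceResponseHandler, 42)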
def self.create(request_builder, response_handler, *args)
QuickbooksWebConnector.push(
'request_builder_class' => request_builder.to_s,
'response_handler_class' => response_handler.to_s,
'args' => args
)
end
# Destroys a job on the queue. Expects a request builder class name, a
# response handler class name, and an optional array of arguments to pass
# to the class' `perform` method.
def self.destroy(request_builder, response_handler, *args)
QuickbooksWebConnector.remove(
'request_builder_class' => request_builder.to_s,
'response_handler_class' => response_handler.to_s,
'args' => args
)
end
# Returns an instance of QuickbooksWebConnector::Job
# if any jobs are available. If not, returns nil.
def self.reserve
return unless payload = QuickbooksWebConnector.pop
new(payload)
end
    # Returns an instance of QuickbooksWebConnector::Job if any jobs are
    # available, without removing the job from the queue.
def self.peek
return unless payload = QuickbooksWebConnector.peek
new(payload)
end
# Find jobs from the queue.
#
# Returns the list of jobs queued.
#
# This method can be potentially very slow and memory intensive,
# depending on the size of your queue, as it loads all jobs into
# a Ruby array.
def self.queued
QuickbooksWebConnector.list_range(:queue, 0, -1).map do |item|
new(item)
end
end
# Attempts to perform the work represented by this job instance.
# Calls #perform on the class given in the payload with the
    # QuickBooks response and the arguments given in the payload.
def perform
begin
job = response_handler_class
# Execute the job.
job.perform(response_xml, *job_args)
rescue Object => ex
fail(ex)
end
end
    # Builds and returns the request XML by calling the request builder class with the payload's arguments.
def request_xml
begin
xml = request_builder_class.perform(*job_args)
# Replace non-ascii characters with decimal entities
xml.gsub!(/[^\u{20}-\u{7E}]/) do |char|
"&##{char.codepoints.first};"
end
xml
rescue Object => ex
fail(ex)
:failed
end
end
# Returns the actual class constant for building the request from the job's payload.
def request_builder_class
@request_builder_class ||= @payload['request_builder_class'].constantize
end
# Returns the actual class constant represented in this job's payload.
def response_handler_class
@response_handler_class ||= @payload['response_handler_class'].constantize
end
# Returns an array of args represented in this job's payload.
def args
@payload['args']
end
def job_args
args || []
end
# Given an exception object, hands off the needed parameters to the Failure
# module.
def fail(exception)
Failure.create(
payload: @payload,
exception: exception
)
end
end
end
| 28.532787 | 88 | 0.658719 |
bfa1ab319d65d77a326886b2d8a6705de89c5548 | 58 | module Spree
def self.version
'4.2.0.rc5'
end
end
| 9.666667 | 18 | 0.637931 |
e82d8d1841241581d5515562b5c90eb4162a4fc9 | 823 | module Idv
module DocumentCaptureSessionValidator
extend ActiveSupport::Concern
included do
validates :session_uuid, presence: { message: 'session missing' }
validate :session_exists, if: :session_uuid_present?
validate :session_not_expired, if: :session_uuid_present?
end
private
attr_reader :session_uuid
def session_exists
return if document_capture_session
errors.add(:session_uuid, 'invalid session')
end
def session_not_expired
return unless document_capture_session&.expired?
errors.add(:session_uuid, 'session expired')
end
def session_uuid_present?
session_uuid.present?
end
def document_capture_session
@document_capture_session ||= DocumentCaptureSession.find_by(uuid: session_uuid)
end
end
end
| 24.205882 | 86 | 0.72661 |
18628ae0503817afc0f11ea2cec9edc7a059254b | 4,321 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2020_06_01
module Models
#
# The source user image virtual hard disk. The virtual hard disk will be
# copied before being attached to the virtual machine. If SourceImage is
# provided, the destination virtual hard drive must not exist.
#
class Image < Resource
include MsRestAzure
# @return [SubResource] The source virtual machine from which Image is
# created.
attr_accessor :source_virtual_machine
# @return [ImageStorageProfile] Specifies the storage settings for the
# virtual machine disks.
attr_accessor :storage_profile
# @return [String] The provisioning state.
attr_accessor :provisioning_state
# @return [HyperVGenerationTypes] Gets the HyperVGenerationType of the
# VirtualMachine created from the image. Possible values include: 'V1',
# 'V2'
attr_accessor :hyper_vgeneration
#
# Mapper for Image class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Image',
type: {
name: 'Composite',
class_name: 'Image',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: true,
serialized_name: 'location',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
source_virtual_machine: {
client_side_validation: true,
required: false,
serialized_name: 'properties.sourceVirtualMachine',
type: {
name: 'Composite',
class_name: 'SubResource'
}
},
storage_profile: {
client_side_validation: true,
required: false,
serialized_name: 'properties.storageProfile',
type: {
name: 'Composite',
class_name: 'ImageStorageProfile'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
hyper_vgeneration: {
client_side_validation: true,
required: false,
serialized_name: 'properties.hyperVGeneration',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 30.864286 | 77 | 0.469104 |
5db17438163cc345ca0dbac81e0b3a328fcc77c8 | 1,420 | class TranslateShell < Formula
desc "Command-line translator using Google Translate and more"
homepage "https://www.soimort.org/translate-shell"
url "https://github.com/soimort/translate-shell/archive/v0.9.6.12.tar.gz"
sha256 "4c4843a8c66276190535b8435775ecb5d9c8286083a33cdbe2db608eba93ca97"
license "Unlicense"
head "https://github.com/soimort/translate-shell.git", branch: "develop"
bottle do
sha256 cellar: :any_skip_relocation, catalina: "858d52386202bbcb1313a72b642d9d5f4cbfe2ca35fd9556f6cf5275d7d2b9a1"
sha256 cellar: :any_skip_relocation, mojave: "858d52386202bbcb1313a72b642d9d5f4cbfe2ca35fd9556f6cf5275d7d2b9a1"
sha256 cellar: :any_skip_relocation, high_sierra: "858d52386202bbcb1313a72b642d9d5f4cbfe2ca35fd9556f6cf5275d7d2b9a1"
end
depends_on "fribidi"
depends_on "gawk"
depends_on "rlwrap"
on_linux do
depends_on "util-linux"
end
def install
system "make"
bin.install "build/trans"
man1.install "man/trans.1"
end
def caveats
<<~EOS
By default, text-to-speech functionality is provided by macOS's builtin
`say' command. This functionality may be improved in certain cases by
installing one of mplayer, mpv, or mpg123, all of which are available
through `brew install'.
EOS
end
test do
assert_equal "hello\n",
shell_output("#{bin}/trans -no-init -b -s fr -t en bonjour").downcase
end
end
| 33.023256 | 120 | 0.748592 |
08ce98762798476d33bd89a9b4c497b520234bcf | 2,539 | # -*- encoding: utf-8 -*-
#
require 'spec_helper'
RSpec.describe Brcobranca::Retorno::Cnab400::Bradesco do
before do
@arquivo = File.join(File.dirname(__FILE__), '..', '..', '..', 'arquivos', 'CNAB400BRADESCO.RET')
end
  it 'ignores the first line, which is the header' do
pagamentos = described_class.load_lines(@arquivo)
pagamento = pagamentos.first
expect(pagamento.sequencial).to eql('000002')
end
  it 'transforms the return file into return objects, returning only the payment lines for unregistered titles (títulos sem registro)' do
pagamentos = described_class.load_lines(@arquivo)
    expect(pagamentos.size).to eq(7) # should ignore the first line, which is the header
pagamento = pagamentos.first
expect(pagamento.agencia_com_dv).to eql('01467-2')
expect(pagamento.cedente_com_dv).to eql('0019669P')
expect(pagamento.nosso_numero).to eql('000000000303')
expect(pagamento.carteira).to eql('009')
expect(pagamento.data_vencimento).to eql('250515')
expect(pagamento.valor_titulo).to eql('0000000145000')
expect(pagamento.banco_recebedor).to eql('237')
expect(pagamento.agencia_recebedora_com_dv).to eql('04157')
expect(pagamento.especie_documento).to eql('')
expect(pagamento.valor_tarifa).to eql('0000000000160')
expect(pagamento.iof).to eql('0000000000000')
expect(pagamento.valor_abatimento).to eql('0000000000000')
expect(pagamento.desconto).to eql('0000000000000')
expect(pagamento.valor_recebido).to eql('0000000145000')
expect(pagamento.juros_mora).to eql('0000000000000')
expect(pagamento.outros_recebimento).to eql('0000000000000')
expect(pagamento.data_credito).to eql('150515')
expect(pagamento.sequencial).to eql('000002')
expect(pagamento.cod_de_ocorrencia).to eql('02')
expect(pagamento.data_de_ocorrencia).to eql('150515')
    # Base-class fields whose mapping to CNAB400 could not be found
# parse.field :tipo_cobranca,80..80
# parse.field :tipo_cobranca_anterior,81..81
# parse.field :natureza_recebimento,86..87
# parse.field :convenio,31..37
# parse.field :juros_desconto,201..213
# parse.field :iof_desconto,214..226
# parse.field :desconto_concedito,240..252
# parse.field :outras_despesas,279..291
# parse.field :abatimento_nao_aproveitado,292..304
# parse.field :data_liquidacao,295..300
# parse.field :valor_lancamento,305..317
# parse.field :indicativo_lancamento,318..318
# parse.field :indicador_valor,319..319
# parse.field :valor_ajuste,320..331
end
end
| 43.033898 | 128 | 0.730209 |
5de41f6fbf0f8736f04693b4f384826b99690b7b | 645 | # frozen_string_literal: true
module EE
# GroupProjectsFinder
#
# Extends GroupProjectsFinder
#
# Added arguments:
# params:
# with_security_reports: boolean
module GroupProjectsFinder
extend ::Gitlab::Utils::Override
override :filter_projects
def filter_projects(collection)
collection = super(collection)
by_security_scans_presence(collection)
end
def by_security_scans_presence(collection)
if params[:with_security_reports] && group.licensed_feature_available?(:security_dashboard)
collection.with_security_scans
else
collection
end
end
end
end
| 22.241379 | 97 | 0.717829 |
d5ca520b41cb53133176d2202c0ba72e4cc44d80 | 133 | #coding: utf-8
#同城快运-返款清单
class LocalTownRefundsController < BaseRefundsController
defaults :resource_class => LocalTownRefund
end
| 22.166667 | 56 | 0.827068 |
e8b289fb9bf852a8874aa7a4ef6e9e85eed13af2 | 3,772 | # Quote History routines
#
require 'ib-ruby'
module PryIb
class History
DURATIONS = { sec1: '1 S',
day1: '1 D',
week1: '1 W',
month1: '1 M',
year1: '1 Y',
}
def initialize( ib )
@ib = ib
@market = {}
@request_id = PryIb::next_request_id
@quotes = { @request_id => [] }
log "Quote init. ID #{@request_id}"
end
    # Returns the arithmetic mean of the values in list (0.0 for an empty list).
    def avg(list)
      return 0.0 if list.empty?
      list.inject(0.0) { |sum, value| sum + value } / list.size
    end
###
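    # Example (symbol is a placeholder): quote('AAPL', '1 D', '5 mins')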
def quote(symbol,duration='1 D', bar_size='5 mins',stats_only=false)
@contract = Security.get_contract(symbol)
log("Quote for:#{@contract.inspect} duration:#{duration} bar_size=#{bar_size}, stats_only:#{stats_only}")
@market = { @request_id => @contract }
# Ensure we get alerts
@ib.subscribe(:Alert) { |msg| log "ALERT: #{msg.to_human}" }
# Subscribe to historical quote data
@ib.subscribe(IB::Messages::Incoming::HistoricalData) do |msg|
quote_list = []
log "ID: #{msg.request_id}: #{msg.count} items:"
msg.results.each do |entry|
#log "Request_id:#{msg.request_id}t
quote_list << entry
end
@quotes[msg.request_id] = quote_list
@last_msg_time = Time.now.to_i
#log "Quotes:#{@quotes.inspect}"
end
log "-- After subscribe"
# HistoricalData docs: http://www.interactivebrokers.com/php/apiUsersGuide/apiguide/api/historical_data_limitations.htm#XREF_93621_Historical_Data
target_date = Time.now.to_date
target_trade_time = Date::recent_ib_trading_date
log ">> Target Date: #{target_date.to_s} IB: #{ target_trade_time }"
# Now we actually request historical data for the symbols we're interested in. TWS will
# respond with a HistoricalData message, which will be processed by the code above.
@market.each_pair do |id, contract|
log ">> SEND request id:#{id}"
mess = IB::Messages::Outgoing::RequestHistoricalData.new(
:request_id => id,
:contract => contract,
:end_date_time => target_trade_time,
:duration => duration, #'1 D', # ?
:bar_size => bar_size, #'5 mins', # IB::BAR_SIZES.key(:hour)?
:what_to_show => :trades,
:use_rth => 1,
:format_date => 1)
log ">> Send contract: #{contract.inspect}"
log ">> mess:#{mess.inspect}"
@ib.send_message( mess )
end
log "---- WAIT ....."
# Wait for IB to respond to our request
sleep 0.2 until @last_msg_time && @last_msg_time < Time.now.to_i + 4.7
log "------------------"
log "------------------"
#log "QUOTES: #{@quotes.inspect}"
max_high = 0
max_low = 999999
avg_close = 0
avg_vol = 0
@quotes.each_pair do |id, bars|
log ">>>--------------"
log "ID: #{id} Desc: #{@market[id].description}"
#log "Quotes: #{quotes.inspect}"
bars.each do |bar|
log ">> BAR: #{bar.to_s}" unless stats_only
max_high = bar.high if bar.high > max_high
max_low = bar.low if bar.low < max_low
end
        avg_close = avg( bars.collect{|b| b[:close]} )
avg_vol = avg( bars.collect{|b| b[:volume]} )
end
log "---------------------------"
log "Max High: #{max_high}"
log "Max Low : #{max_low}"
log "Avg Close: #{"%6.2f" % avg_close}"
log "Avg Vol: #{"%6.2f" % avg_vol}"
@quotes
end
end
end
| 31.966102 | 152 | 0.525186 |
1a2a9e7431874f694973da3bdd1949e4f5088f24 | 115 | require 'capistrano/net_storage/s3/base'
class Capistrano::NetStorage::S3
class Error < StandardError
end
end
| 16.428571 | 40 | 0.782609 |
bbe50598f7609a16006a3f88bd046b2736a2aaf7 | 3,758 | #
# params_lookup.rb
#
# This function lookups for a variable value in various locations
# following this order (first match is returned)
# - Hiera backend (if present) for modulename::varname
# - Hiera backend (if present) for modulename_varname
# - Hiera backend (if present) for varname (if second argument is 'global')
# - Top Scope Variable ::modulename::varname
# - Top Scope Variable ::modulename_varname
# - Top Scope Variable ::varname (if second argument is 'global')
# - Module default: ::modulename::params::varname
#
# It's based on a suggestion of Dan Bode on how to better manage
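# Example (hypothetical manifest usage, assuming a module parameter named "port"):
#
#   $port = params_lookup('port', 'global')
#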
# Example42 NextGen modules params lookups.
# Major help has been given by Brice Figureau, Peter Meier
# and Ohad Levy during the Fosdem 2012 days (thanks guys)
#
# Tested and adapted to Puppet 2.6.x and later
#
# Alessandro Franceschi [email protected]
#
module Puppet::Parser::Functions
newfunction(:params_lookup, :type => :rvalue, :doc => <<-EOS
This fuction looks for the given variable name in a set of different sources:
- Hiera, if available ('modulename::varname')
- Hiera, if available ('modulename_varname')
- Hiera, if available (if second argument is 'global')
- ::modulename::varname
- ::modulename_varname
- ::varname (if second argument is 'global')
- ::modulename::params::varname
If no value is found in the defined sources, it returns an empty string ('')
EOS
) do |arguments|
raise(Puppet::ParseError, "params_lookup(): Define at least the variable name " +
"given (#{arguments.size} for 1)") if arguments.size < 1
value = ''
var_name = arguments[0]
module_name = parent_module_name
# Hiera Lookup
if Puppet::Parser::Functions.function('hiera')
value = function_hiera(["#{module_name}::#{var_name}", ''])
return value if (not value.nil?) && (value != :undefined) && (value != '')
value = function_hiera(["#{module_name}_#{var_name}", ''])
return value if (not value.nil?) && (value != :undefined) && (value != '')
value = function_hiera(["#{var_name}", '']) if arguments[1] == 'global'
return value if (not value.nil?) && (value != :undefined) && (value != '')
end
# Top Scope Variable Lookup (::modulename::varname)
value = lookupvar("::#{module_name}::#{var_name}")
return value if (not value.nil?) && (value != :undefined) && (value != '')
# Top Scope Variable Lookup (::modulename_varname)
catch (:undefined_variable) do
begin
value = lookupvar("::#{module_name}_#{var_name}")
rescue Puppet::ParseError => e
raise unless e.to_s =~ /^Undefined variable /
end
end
return value if (not value.nil?) && (value != :undefined) && (value != '')
# Look up ::varname (only if second argument is 'global')
if arguments[1] == 'global'
catch (:undefined_variable) do
begin
value = lookupvar("::#{var_name}")
rescue Puppet::ParseError => e
raise unless e.to_s =~ /^Undefined variable /
end
end
return value if (not value.nil?) && (value != :undefined) && (value != '')
end
# needed for the next two lookups
classname = self.resource.name.downcase
loaded_classes = catalog.classes
# self::params class lookup for default value
if loaded_classes.include?("#{classname}::params")
value = lookupvar("::#{classname}::params::#{var_name}")
return value if (not value.nil?) && (value != :undefined) && (value != '')
end
# Params class lookup for default value
if loaded_classes.include?("#{module_name}::params")
value = lookupvar("::#{module_name}::params::#{var_name}")
return value if (not value.nil?) && (value != :undefined) && (value != '')
end
return ''
end
end
| 37.207921 | 85 | 0.649282 |
795b3a078b9bb89e3d165086cc78d9fb73c959fd | 1,058 | FactoryBot.define do
factory :user do
association :team
slack_id { "U#{generate(:slack_id)}" }
name { Faker::Name.name }
username { Faker::Internet.username }
image_url { Faker::Avatar.image }
deactivated { false }
team_admin { false }
transient do
with_sparkles { 0 }
end
trait :davidcelis do
slack_id { "U02JE49NDNY" }
name { "David Celis" }
username { "davidcelis" }
image_url { "https://secure.gravatar.com/avatar/66b085a6f16864adae78586e92811a73.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0002-512.png" }
team_admin { true }
association :team, :sparkles
end
after(:create) do |user, evaluator|
next unless evaluator.with_sparkles > 0
channel = create(:channel, team: user.team)
create_list(:sparkle, evaluator.with_sparkles, team: user.team, channel: channel, sparkler: user, sparklee: user)
# You may need to reload the record here, depending on your application
user.reload
end
end
end
| 28.594595 | 175 | 0.665406 |
bf8dab245555b32b7cc988430e78df91a968573c | 20,417 | require 'spec_helper'
describe 'monit' do
context 'supported operating systems' do
on_supported_os.each do |os, facts|
context "on #{os}" do
let(:facts) { facts }
case facts[:osfamily]
when 'Debian'
config_file = '/etc/monit/monitrc'
config_dir = '/etc/monit/conf.d'
monit_version = '5'
case facts[:lsbdistcodename]
when 'squeeze', 'lucid'
default_file_content = 'startup=1'
service_hasstatus = false
when 'wheezy', 'jessie', 'stretch', 'precise', 'trusty', 'xenial', 'bionic'
default_file_content = 'START=yes'
service_hasstatus = true
else
            raise 'unsupported lsbdistcodename detected on Debian osfamily'
end
when 'RedHat'
config_dir = '/etc/monit.d'
service_hasstatus = true
case facts[:operatingsystem]
when 'Amazon'
case facts[:operatingsystemmajrelease]
when '4', '2'
monit_version = '5'
config_file = '/etc/monitrc'
else
raise 'unsupported operatingsystemmajrelease detected on Amazon Linux operating system'
end
else
case facts[:operatingsystemmajrelease]
when '5'
monit_version = '4'
config_file = '/etc/monit.conf'
when '6'
monit_version = '5'
config_file = '/etc/monit.conf'
when '7'
monit_version = '5'
config_file = '/etc/monitrc'
else
raise 'unsupported operatingsystemmajrelease detected on RedHat osfamily'
end
end
else
raise 'unsupported osfamily detected'
end
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_class('monit') }
it do
is_expected.to contain_package('monit').with('ensure' => 'present',
'provider' => nil)
end
it do
is_expected.to contain_file('/var/lib/monit').with('ensure' => 'directory',
'owner' => 'root',
'group' => 'root',
'mode' => '0755')
end
it do
is_expected.to contain_file('monit_config_dir').with('ensure' => 'directory',
'path' => config_dir,
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
'purge' => false,
'recurse' => false,
'require' => 'Package[monit]')
end
it do
is_expected.to contain_file('monit_config').with('ensure' => 'file',
'path' => config_file,
'owner' => 'root',
'group' => 'root',
'mode' => '0600',
'require' => 'Package[monit]')
end
monit_config_fixture = if monit_version == '4'
File.read(fixtures("monitrc.4.#{facts[:osfamily]}"))
else
File.read(fixtures("monitrc.#{facts[:osfamily]}"))
end
it { is_expected.to contain_file('monit_config').with_content(monit_config_fixture) }
if facts[:osfamily] == 'Debian'
it do
is_expected.to contain_file('/etc/default/monit').with('notify' => 'Service[monit]')
.with_content(%r{^#{default_file_content}$})
end
else
it { is_expected.not_to contain_file('/etc/default/monit') }
end
it do
is_expected.to contain_service('monit').with('ensure' => 'running',
'name' => 'monit',
'enable' => true,
'hasrestart' => true,
'hasstatus' => service_hasstatus,
'subscribe' => [
'File[/var/lib/monit]',
'File[monit_config_dir]',
'File[monit_config]',
])
end
describe 'parameter functionality' do
context 'when check_interval is set to valid integer <242>' do
let(:params) { { check_interval: 242 } }
it { is_expected.to contain_file('monit_config').with_content(%r{^set daemon 242$}) }
end
context 'when httpd is set to valid bool <true>' do
let(:params) { { httpd: true } }
content = <<-END.gsub(%r{^\s+\|}, '')
|set httpd port 2812 and
| use address localhost
| allow 0.0.0.0/0.0.0.0
| allow admin:monit
END
it { is_expected.to contain_file('monit_config').with_content(%r{#{content}}) }
end
context 'when httpd_* params are set to valid values' do
let(:params) do
{
httpd: true,
httpd_port: 2420,
httpd_address: 'otherhost',
httpd_allow: '0.0.0.0/0.0.0.0',
httpd_user: 'tester',
httpd_password: 'Passw0rd',
}
end
content = <<-END.gsub(%r{^\s+\|}, '')
|set httpd port 2420 and
| use address otherhost
| allow 0.0.0.0/0.0.0.0
| allow tester:Passw0rd
END
it { is_expected.to contain_file('monit_config').with_content(%r{#{content}}) }
end
context 'when manage_firewall and http are set to valid bool <true>' do
let(:pre_condition) { ['include ::firewall'] }
let(:params) do
{
manage_firewall: true,
httpd: true,
}
end
it do
is_expected.to contain_firewall('2812 allow Monit inbound traffic').with('action' => 'accept',
'dport' => '2812',
'proto' => 'tcp')
end
end
context 'when package_ensure is set to valid string <absent>' do
let(:params) { { package_ensure: 'absent' } }
it { is_expected.to contain_package('monit').with_ensure('absent') }
end
context 'when package_name is set to valid string <monit3>' do
let(:params) { { package_name: 'monit3' } }
it { is_expected.to contain_package('monit').with_name('monit3') }
end
context 'when service_enable is set to valid bool <false>' do
let(:params) { { service_enable: false } }
it { is_expected.to contain_service('monit').with_enable(false) }
end
context 'when service_ensure is set to valid string <stopped>' do
let(:params) { { service_ensure: 'stopped' } }
it { is_expected.to contain_service('monit').with_ensure('stopped') }
end
context 'when service_manage is set to valid bool <false>' do
let(:params) { { service_manage: false } }
it { is_expected.not_to contain_service('monit') }
it { is_expected.not_to contain_file('/etc/default/monit') }
end
context 'when service_name is set to valid string <stopped>' do
let(:params) { { service_name: 'monit3' } }
it { is_expected.to contain_service('monit').with_name('monit3') }
end
context 'when logfile is set to valid path </var/log/monit3.log>' do
let(:params) { { logfile: '/var/log/monit3.log' } }
it { is_expected.to contain_file('monit_config').with_content(%r{^set logfile /var/log/monit3.log$}) }
end
context 'when logfile is set to valid string <syslog>' do
let(:params) { { logfile: 'syslog' } }
it { is_expected.to contain_file('monit_config').with_content(%r{^set logfile syslog$}) }
end
context 'when mailserver is set to valid string <mailhost>' do
let(:params) { { mailserver: 'mailhost' } }
it { is_expected.to contain_file('monit_config').with_content(%r{^set mailserver mailhost$}) }
end
context 'when mailformat is set to valid hash' do
let(:params) do
{
mailformat: {
'from' => '[email protected]',
'message' => 'Monit $ACTION $SERVICE at $DATE on $HOST: $DESCRIPTION',
'subject' => 'spectesting',
},
}
end
content = <<-END.gsub(%r{^\s+\|}, '')
|set mail-format \{
| from: monit\@test.local
| message: Monit \$ACTION \$SERVICE at \$DATE on \$HOST: \$DESCRIPTION
| subject: spectesting
|\}
END
it { is_expected.to contain_file('monit_config').with_content(%r{#{Regexp.escape(content)}}) }
end
context 'when alert_emails is set to valid array' do
let(:params) do
{
alert_emails: [
'[email protected]',
'[email protected]',
],
}
end
content = <<-END.gsub(%r{^\s+\|}, '')
|set alert [email protected]
|set alert [email protected]
END
it { is_expected.to contain_file('monit_config').with_content(%r{#{content}}) }
end
context 'when mmonit_address is set to valid string <monit3.test.local>' do
let(:params) { { mmonit_address: 'monit3.test.local' } }
content = 'set mmonit https://monit:[email protected]:8443/collector'
it { is_expected.to contain_file('monit_config').with_content(%r{#{content}}) }
end
context 'when mmonit_without_credential is set to valid bool <true>' do
let(:params) do
{
mmonit_without_credential: true,
mmonit_address: 'monit3.test.local',
}
end
content = ' and register without credentials'
it { is_expected.to contain_file('monit_config').with_content(%r{#{content}}) }
end
context 'when mmonit_* params are set to valid values' do
let(:params) do
{
mmonit_address: 'monit242.test.local',
mmonit_https: false,
mmonit_port: '8242',
mmonit_user: 'monituser',
mmonit_password: 'Pa55w0rd',
}
end
content = 'set mmonit http://monituser:[email protected]:8242/collector'
it { is_expected.to contain_file('monit_config').with_content(%r{#{content}}) }
end
context 'when config_file is set to valid path </etc/monit3.conf>' do
let(:params) { { config_file: '/etc/monit3.conf' } }
it { is_expected.to contain_file('monit_config').with_path('/etc/monit3.conf') }
end
context 'when config_dir is set to valid path </etc/monit3/conf.d>' do
let(:params) { { config_dir: '/etc/monit3/conf.d' } }
it { is_expected.to contain_file('monit_config_dir').with_path('/etc/monit3/conf.d') }
end
context 'when config_dir_purge is set to valid bool <true>' do
let(:params) { { config_dir_purge: true } }
it do
is_expected.to contain_file('monit_config_dir').with('purge' => true,
'recurse' => true)
end
end
end
end
end
end
describe 'failures' do
let(:facts) do
{
osfamily: 'Debian',
lsbdistcodename: 'squeeze',
monit_version: '5',
}
end
[-1, 65_536].each do |value|
context "when httpd_port is set to invalid value <#{value}>" do
let(:params) do
{
httpd: true,
httpd_port: value,
httpd_address: 'otherhost',
httpd_user: 'tester',
httpd_password: 'Passw0rd',
}
end
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{Expected #{value} to be (smaller|greater) or equal to (0|65535)})
end
end
end
context 'when check_interval is set to invalid value <-1>' do
let(:params) { { check_interval: -1 } }
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{to be greater or equal to 0})
end
end
context 'when start_delay is set to invalid value <-1>' do
let(:params) { { start_delay: -1 } }
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{to be greater or equal to 0})
end
end
context 'when major release of Amazon Linux is unsupported' do
let :facts do
{ osfamily: 'RedHat',
operatingsystem: 'Amazon',
operatingsystemmajrelease: '3',
monit_version: '5' }
end
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{monit supports Amazon Linux 2\. Detected operatingsystemmajrelease is <3>})
end
end
context 'when major release of EL is unsupported' do
let :facts do
{ osfamily: 'RedHat',
operatingsystem: 'CentOS',
operatingsystemmajrelease: '4',
monit_version: '5' }
end
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{monit supports EL 5, 6 and 7\. Detected operatingsystemmajrelease is <4>})
end
end
context 'when major release of Debian is unsupported' do
let :facts do
{ osfamily: 'Debian',
operatingsystemmajrelease: '4',
lsbdistcodename: 'etch',
monit_version: '5' }
end
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{monit supports Debian 6 \(squeeze\), 7 \(wheezy\), 8 \(jessie\) and 9 \(stretch\) \
and Ubuntu 10\.04 \(lucid\), 12\.04 \(precise\), 14\.04 \(trusty\), 16\.04 \(xenial\) and 18\.04 \(bionic\)\. \
Detected lsbdistcodename is <etch>\.})
end
end
context 'when major release of Ubuntu is unsupported' do
let :facts do
{ osfamily: 'Debian',
operatingsystemmajrelease: '8',
lsbdistcodename: 'hardy',
monit_version: '5' }
end
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{monit supports Debian 6 \(squeeze\), 7 \(wheezy\), 8 \(jessie\) and 9 \(stretch\) \
and Ubuntu 10\.04 \(lucid\), 12\.04 \(precise\), 14\.04 \(trusty\), 16\.04 \(xenial\) and 18\.04 \(bionic\). \
Detected lsbdistcodename is <hardy>\.})
end
end
context 'when osfamily is unsupported' do
let :facts do
{ osfamily: 'Unsupported',
operatingsystemmajrelease: '9',
monit_version: '5' }
end
it 'fails' do
expect {
is_expected.to contain_class('monit')
}.to raise_error(Puppet::Error, %r{monit supports osfamilies Debian and RedHat\. Detected osfamily is <Unsupported>\.})
end
end
end
describe 'variable type and content validations' do
# set needed custom facts and variables
let(:facts) do
{
osfamily: 'Debian',
operatingsystemrelease: '6.0',
operatingsystemmajrelease: '6',
lsbdistcodename: 'squeeze',
monit_version: '5',
}
end
let(:validation_params) do
{
#:param => 'value',
}
end
validations = {
'absolute_path' => {
name: ['config_file', 'config_dir', 'logfile'],
valid: ['/absolute/filepath', '/absolute/directory/'],
invalid: ['invalid', 3, 2.42, ['array'], { 'ha' => 'sh' }],
message: 'is not an absolute path',
},
'array' => {
name: ['alert_emails'],
valid: [['valid', 'array']],
invalid: ['string', { 'ha' => 'sh' }, 3, 2.42, true],
message: 'is not an Array',
},
'bool_stringified' => {
name: ['httpd', 'manage_firewall', 'service_enable', 'service_manage', 'mmonit_https', 'mmonit_without_credential', 'config_dir_purge'],
valid: [true, 'true', false, 'false'],
invalid: ['invalid', 3, 2.42, ['array'], { 'ha' => 'sh' }, nil],
message: '(is not a boolean|Unknown type of boolean)',
},
'hash' => {
name: ['mailformat'],
valid: [{ 'ha' => 'sh' }],
invalid: ['string', 3, 2.42, ['array'], true, false, nil],
message: 'is not a Hash',
},
'integer_stringified' => {
name: ['check_interval', 'httpd_port', 'start_delay'],
valid: [242, '242'],
invalid: [2.42, 'invalid', ['array'], { 'ha' => 'sh ' }, true, false, nil],
message: 'Expected.*to be an Integer',
},
'string' => {
name: ['httpd_address', 'httpd_allow', 'httpd_user', 'httpd_password',
'package_ensure', 'package_name', 'service_name', 'mailserver',
'mmonit_address', 'mmonit_port', 'mmonit_user', 'mmonit_password'],
valid: ['present'],
invalid: [['array'], { 'ha' => 'sh' }],
message: 'is not a string',
},
'service_ensure_string' => {
name: ['service_ensure'],
valid: ['running'],
invalid: [['array'], { 'ha' => 'sh' }],
message: 'is not a string',
},
}
validations.sort.each do |type, var|
var[:name].each do |var_name|
var[:valid].each do |valid|
context "with #{var_name} (#{type}) set to valid #{valid} (as #{valid.class})" do
let(:params) { validation_params.merge(:"#{var_name}" => valid) }
it { is_expected.to compile }
end
end
var[:invalid].each do |invalid|
context "with #{var_name} (#{type}) set to invalid #{invalid} (as #{invalid.class})" do
let(:params) { validation_params.merge(:"#{var_name}" => invalid) }
it 'fails' do
expect {
catalogue
}.to raise_error(Puppet::Error, %r{#{var[:message]}})
end
end
end
end # var[:name].each
end # validations.sort.each
end # describe 'variable type and content validations'
end
| 37.393773 | 144 | 0.480972 |
9111a80968d93a1d22bac5e5ade41e5d24aeaf8e | 1,976 | require 'spec_helper'
describe DownloadsController do
render_views
before(:each) do
@item_id = 'bpl-dev:h702q6403'
@datastream_id = 'access800'
@first_image_pid = 'bpl-dev:h702q641c'
end
describe "GET 'show'" do
describe 'file object (single item download)' do
it 'should be successful and set the right instance variables' do
xhr :get, :show, :id => @first_image_pid, :datastream_id => @datastream_id
expect(response).to be_success
expect(assigns(:parent_document).id).to eq(@item_id)
expect(assigns(:object_profile).class).to eq(Hash)
end
end
describe 'top-level object (ZIP download)' do
it 'should be successful and set the right instance variables' do
xhr :get, :show, :id => @item_id, :datastream_id => @datastream_id
expect(response).to be_success
expect(assigns(:parent_document)).to eq(assigns(:document))
expect(assigns(:object_profile)).to be_nil
end
end
end
describe "GET 'trigger_download'" do
describe 'file object (single item download)' do
it 'should be successful and set the right headers' do
get :trigger_download, :id => @first_image_pid, :datastream_id => @datastream_id
expect(response).to be_success
expect(response.headers['Content-Type']).to eq('image/jpeg')
expect(response.headers['Content-Disposition']).to eq("attachment; filename=\"#{@item_id}_#{@datastream_id}.jpg\"")
end
end
describe 'top-level object (ZIP download)' do
it 'should be successful and set the right instance variables' do
get :trigger_download, :id => @item_id, :datastream_id => @datastream_id
expect(response).to be_success
expect(response.headers['Content-Type']).to eq('application/zip')
expect(response.headers['Content-Disposition']).to eq("attachment; filename=\"#{@item_id}_#{@datastream_id}.zip\"")
end
end
end
end
| 29.939394 | 123 | 0.668522 |
f736c85f5a45b626a5540dd7f9fd238840c7b87d | 300 | require 'test/unit'
require_relative '../leetcode/905_Sort_Array_By_Parity'
class SortArrayByParityTest < Test::Unit::TestCase
def setup
@arr = [1, 3]
@sort = SortArrayByParity.new
end
def test_solution
sol = @sort.sort_array_by_parity(@arr)
    assert_equal [3, 1], sol
end
end
| 21.428571 | 55 | 0.713333 |
1a201f8c487ce8ec78b4d41cbcdc97cac3e54aa2 | 468 | $:.unshift File.dirname(__FILE__)
if ENV['APP_ENV']!="production"
require 'byebug'
require 'dotenv'
Dotenv.load
end
require 'aws-sdk'
require 'json'
require 'mechanize'
require 'celluloid'
# Classes
require "classes/downloader.rb"
require "classes/web_page.rb"
require "classes/link_extractor.rb"
# Facades
require "classes/facades/queue.rb"
require "classes/facades/sqs/client.rb"
require "classes/facades/storage.rb"
require "classes/facades/s3/client.rb" | 20.347826 | 39 | 0.764957 |
0111cdfff1c29819a5f73c2a55554a6a92b462cb | 872 | # frozen_string_literal: true
class UsersController < ApplicationController
def new
@user = User.new
end
def create
# @user = User.new(username: params[:username], email: params[:email], password: params[:password]) #form tags
@user = User.new(user_params) # form_for
if @user.save
redirect_to new_user_path
else
render 'new'
end
end
def show
@user = User.find(params[:id])
end
def edit
@user = User.find(params[:id])
end
def update
@user = User.find(params[:id])
if @user.update(user_params)
flash[:success] = "You have updated #{@user.username}."
redirect_to user_path
else
flash.now[:error] = "You have not updated #{@user.username}."
      render 'edit'
end
end
private
def user_params
params.require(:user).permit(:username, :email, :password)
end
end
| 20.27907 | 115 | 0.641055 |
26a75d5888cdee6d6a6623c51b135b73cafcc2ee | 1,425 | require 'spec_helper'
module Resync
describe XML do
describe '#element' do
it 'returns an element unchanged' do
elem = REXML::Element.new('foo')
expect(XML.element(elem)).to be(elem)
end
it 'returns the root element of a string document' do
xml_str = '<?xml version="1.0"?><foo><bar/><baz/></foo>'
elem = XML.element(xml_str)
expect(elem).to be_a(REXML::Element)
expect(elem).to be_xml('<foo><bar/><baz/></foo>')
end
it 'returns the root element of a REXML::Document' do
xml_str = '<?xml version="1.0"?><foo><bar/><baz/></foo>'
doc = REXML::Document.new(xml_str)
elem = XML.element(doc)
expect(elem).to be_a(REXML::Element)
expect(elem).to be_xml(doc.root)
end
it 'parses an XML fragment as an element' do
xml_str = '<foo><bar/><baz/></foo>'
elem = XML.element(xml_str)
expect(elem).to be_a(REXML::Element)
expect(elem).to be_xml(xml_str)
end
it 'parses a file as an XML document' do
file = File.new('spec/data/examples/example-1.xml')
elem = XML.element(file)
expect(elem).to be_a(REXML::Element)
expect(elem.name).to eq('urlset')
end
it 'fails when it gets something other than XML' do
data = 12_345
expect { XML.element(data) }.to raise_exception
end
end
end
end
| 29.6875 | 64 | 0.58807 |
1124b98056c9ed4c4df334967f2dfcb5dce1aba3 | 128 | Sequel.migration do
change do
add_column(:versions, :order, Integer)
self[:versions].update(:order => :id)
end
end
| 16 | 42 | 0.671875 |
b91c002624048537f2c5a2b984571dac7f978fe8 | 2,943 | require 'net/http'
require 'net/https'
require 'uri'
module RubySms
module Gateway
class Sms77
class Parameters < Hash
def initialize(options)
merge!(options)
symbolize_keys!
end
        def symbolize_keys!
          # Replace the hash contents in place so string keys become symbol keys.
          replace(inject({}) { |h, (k, v)| h.merge(k.to_sym => v) })
        end
class ParameterError < StandardError ; end
attr_accessor :api_key, :user
SMS77_GATEWAY_URL = 'https://gateway.sms77.io/api/sms'.freeze
SUCCESS_CODE = '100'.freeze
DEFAULT_PIN_SIZE = 4
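
      # A minimal usage sketch (credentials and phone number are placeholders):
      #
      #   gateway = RubySms::Gateway::Sms77.new(user: 'acme', api_key: 'secret')
      #   gateway.send(to: '491701234567', text: 'Hello from RubySms')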
def initialize(options)
raise ParameterError, 'no params given' if options.nil?
params = Parameters.new(options)
raise ParameterError, 'parameter :user is missing' if params[:user].nil?
raise ParameterError, 'parameter :api_key is missing' if params[:api_key].nil?
self.user = params[:user]
self.api_key = params[:api_key]
end
def send(options)
raise ParameterError, 'no params given' if options.nil?
params = Parameters.new(options)
raise ParameterError, 'parameter :text is missing' if params[:text].nil?
response = Sms77Response.new
(response.add_error(:empty_options) and (return response)) if params.nil?
code = post_request(params)
return response if code == SUCCESS_CODE
response.add_error(code)
response
end
# generate a simple random 4 digits pin
def send_pin(options)
raise ParameterError, 'no params given' if options.nil?
params = Parameters.new(options)
raise ParameterError, 'parameter :text is missing' if params[:text].nil?
response = Sms77Response.new
(response.add_error(:empty_options) and (return response)) if params.nil?
# we may use SecureRandom.hex(size) if you need a more secure pin
response.pin = rand.to_s[2..(DEFAULT_PIN_SIZE + 1)]
params[:text] = params[:text].gsub('%PIN%', response.pin)
code = post_request(params)
return response if code == SUCCESS_CODE
response.add_error(code)
response
end
private
def post_request(options)
uri = URI.parse(SMS77_GATEWAY_URL)
request = Net::HTTP::Post.new(uri.to_s)
request.set_form_data(
text: options[:text].force_encoding('utf-8'),
to: options[:to],
delay: options[:delay] || 0,
debug: 0,
utf8: 1,
u: user,
p: api_key
)
response = https(uri).request(request)
response.body
rescue StandardError => e
RubySms.logger.error("Error while sending post request => #{e}")
:connection_error
end
def https(uri)
Net::HTTP.new(uri.host, uri.port).tap do |http|
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
end
end
end
end
end
| 30.340206 | 86 | 0.608902 |
e287001f90109211e34c41b3d5ba64e7a91b94ed | 276 | class AppDelegate
def application(application, didFinishLaunchingWithOptions:launchOptions)
self.window = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)
self.window.backgroundColor = UIColor.whiteColor
self.window.makeKeyAndVisible
true
end
end
| 27.6 | 75 | 0.804348 |
e2a494a13f632af0cb5a67947a5424f9317a9eb3 | 180 | class DeityCharacterSibling < ApplicationRecord
include HasContentLinking
belongs_to :deity
belongs_to :character_sibling, class_name: Character.name
belongs_to :user
end
| 22.5 | 59 | 0.827778 |
4a11fb941618498bad3e918f88de5950b84bf7ef | 572 | require 'rails_helper'
RSpec.describe Web::LocalesController do
describe 'GET #toggle' do
before { request.session[:locale] = current_locale }
context 'when english locale is enabled' do
let(:current_locale) { :en }
it 'changes locale to russian' do
get :toggle
expect(session[:locale]).to eq :ru
end
end
context 'when russian locale is enabled' do
let(:current_locale) { :ru }
it 'changes locale to english' do
get :toggle
expect(session[:locale]).to eq :en
end
end
end
end
| 22 | 56 | 0.627622 |
bf45e4cc68a73f605223d9ac53d4c6c71c864654 | 324 | #!/usr/bin/env ruby
# Copyright (c) 2004-2020 Microchip Technology Inc. and its subsidiaries.
# SPDX-License-Identifier: MIT
require "pp"
require "json"
require_relative 'libeasy/et'
meta = {
"owner" => "anielsen",
"desc" => "Reboot, load new SW, and start mesa-demo",
}
$ts = get_test_setup("mesa_pc_b2b_2x")
| 19.058824 | 73 | 0.685185 |
7a1df7a17920244dd3fe0bacf819bb57173bc7a2 | 263 | require "rarma/server"
class Rarma::CLI::Subcommand::Server
attr_reader :opts
def initialize
@opts = nil
end
def main
@opts = Trollop::options do
opt :port, "Port number", :default => 31337
end
Rarma::Server.start(@opts)
end
end
| 16.4375 | 49 | 0.646388 |
33d37776fabdc29eaab4e08e9770ae558a109e7c | 195 | class Helper
include Singleton
include ActionView::Helpers::TagHelper
include ActionView::Helpers::UrlHelper
include ActionView::Helpers::NumberHelper
#include ActionView::Partials
end
| 24.375 | 43 | 0.805128 |
2671f286d396dcb626166c0b3becb997a6173c87 | 116 | module MiqAeMethodService
class MiqAeServiceConfigurationScript < MiqAeServiceModelBase
expose :run
end
end
| 19.333333 | 63 | 0.827586 |
d56e8a24b13b2c9269166a8024d470210f2a4e24 | 7,565 | # Windows helper functions
#### Windows stuff
require_relative "RubyCommon.rb"
require_relative "Helpers.rb"
# Converts unix path to windows path
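# e.g. convertPathToWindows("C:/tools/app") would yield "C:\tools\app" (illustrative values)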
def convertPathToWindows(path, doubleEscape = false)
if doubleEscape
path.gsub /\//, "\\\\"
else
path.gsub /\//, "\\"
end
end
def runningAsAdmin?
(`reg query HKU\\S-1-5-19 2>&1` =~ /ERROR/).nil?
end
# Makes sure that the wanted value is specified for all targets that match the regex
def verifyVSProjectRuntimeLibrary(projFile, solutionFile, matchRegex, wantedRuntimeLib,
justReturnValue: false)
require 'nokogiri'
  # Verify parameters
onError "Call verifyVSProjectRuntimeLibrary only on windows!" if not OS.windows?
onError "Project file: #{projFile} doesn't exist" if not File.exist? projFile
onError "Project file: #{solutionFile} doesn't exist" if not File.exist? solutionFile
# Load xml with nokogiri
doc = File.open(projFile) { |f| Nokogiri::XML(f) }
doc.css("Project ItemDefinitionGroup").each do |group|
if not matchRegex.match group['Condition']
next
end
info "Checking that project target '#{group['Condition']}' " +
"Has RuntimeLibrary of type #{wantedRuntimeLib}"
libType = group.at_css("ClCompile RuntimeLibrary")
if not libType
warning "Couldn't verify library type. Didn't find RuntimeLibrary node"
next
end
if libType.content != wantedRuntimeLib
puts ""
error "In file '" + File.absolute_path(projFile) +"' target '#{group['Condition']}' " +
"Has RuntimeLibrary of type '#{libType.content}' which is " +
"not '" + wantedRuntimeLib + "' Please open the visual studio solution in the " +
"folder and modify the Runtime Library to be #{wantedRuntimeLib}. " +
"If you don't know how search online: 'visual studio set " +
"project runtime library'. \n" +
"The option should be in properties (of project) > C/C++ > Code Generation > " +
"Runtime Library\n" +
"Also make sure to change both the 'Debug' and 'Release' targets to use the " +
"wanted type. \n" +
"Important: make sure that 'Debug' configuration uses a runtime library that " +
"has 'debug' in its name and 'release' uses one that doesn't have " +
"'debug' in its name."
if justReturnValue
return false
end
openVSSolutionIfAutoOpen solutionFile
puts "Please fix the above configuration issue in visual studio and press something " +
"to continue"
waitForKeyPress
while !verifyVSProjectRuntimeLibrary(projFile, solutionFile, matchRegex,
wantedRuntimeLib, justReturnValue: true)
puts ""
error "The runtime library is still incorrect. Please fix the error to continue."
waitForKeyPress
# todo: cancel
# onError "runtime library is still incorrect"
end
end
end
success "All targets had correct runtime library types"
true
end
# Makes sure that the wanted value is specified for all targets that match the regex
def verifyVSProjectPlatformToolset(projFile, solutionFile, matchRegex, wantedVersion,
justReturnValue: false)
require 'nokogiri'
  # Verify parameters
onError "Call verifyVSProjectPlatformToolset only on windows!" if not OS.windows?
onError "Project file: #{projFile} doesn't exist" if not File.exist? projFile
onError "Project file: #{solutionFile} doesn't exist" if not File.exist? solutionFile
# Load xml with nokogiri
doc = File.open(projFile) { |f| Nokogiri::XML(f) }
doc.css("Project PropertyGroup").each do |group|
if not matchRegex =~ group['Condition']
next
end
info "Checking that project target '#{group['Condition']}' " +
"Has PlatformToolset of type #{wantedVersion}"
platType = group.at_css("PlatformToolset")
if not platType
warning "Couldn't verify platform toolset. Didn't find PlatformToolset node"
next
end
if platType.content != wantedVersion
puts ""
error "In file '" + File.absolute_path(projFile) +"' target '#{group['Condition']}' " +
"Has PlatformToolset of '#{platType.content}' which is " +
"not '" + wantedVersion + "' Please open the visual studio solution in the " +
"folder and right-click the solution and select 'Retarget solution'."
if justReturnValue
return false
end
openVSSolutionIfAutoOpen solutionFile
puts "Please fix the above configuration issue in visual studio and press something " +
"to continue"
waitForKeyPress
while !verifyVSProjectPlatformToolset(projFile, solutionFile, matchRegex, wantedVersion,
justReturnValue: true)
puts ""
error "The platform toolset is still incorrect. Please fix the error to continue."
waitForKeyPress
# todo: cancel
# onError "platform toolset is still incorrect"
end
end
end
success "All targets had correct platform toolset types"
true
end
def runWindowsAdmin(cmd)
require 'win32ole'
shell = WIN32OLE.new('Shell.Application')
shell.ShellExecute("ruby.exe",
"\"#{CurrentDir}/Helpers/WinInstall.rb\" " +
"\"#{cmd.gsub( '"', '\\"')}\"",
"#{Dir.pwd}", 'runas')
# TODO: find a proper way to wait here
info "Please wait while the install script runs and then press any key to continue"
runSystemSafe "pause"
end
def askToRunAdmin(cmd)
puts "."
puts "."
info "You need to open a new cmd window as administrator and run the following command: "
info cmd
info "Sorry, windows is such a pain in the ass"
runSystemSafe "pause"
end
# Run msbuild with specific target and configuration
# Runtimelibrary sets the used library: possible values
# https://docs.microsoft.com/fi-fi/cpp/build/reference/md-mt-ld-use-run-time-library
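# Example invocation (project name is illustrative):
#   runVSCompiler 4, project: "Engine.vcxproj", configuration: "Release", platform: "x64"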
def runVSCompiler(threads, project: "ALL_BUILD.vcxproj", configuration: CMakeBuildType,
platform: "x64", solution: nil, runtimelibrary: nil)
# "Any CPU" might need to be quoted if used for platform
onError "runVSCompiler called with non msvc toolchain (#{TC})" if !TC.is_a?(WindowsMSVC)
  if !File.exist?(project)
    onError "runVSCompiler: target project file doesn't exist: #{project}"
  end
project = File.absolute_path project
targetSelect = []
if solution
# Run project in solution
targetSelect = [solution, "/t:" + project]
else
targetSelect = [project]
end
# TC should have brought vs to path
args = [*TC.VS.bringVSToPath, "&&", "MSBuild.exe", *targetSelect, "/maxcpucount:#{threads}",
"/p:CL_MPCount=#{threads}",
"/p:Configuration=#{configuration}", "/p:Platform=#{platform}"]
if runtimelibrary
args.push "/p:RuntimeLibrary=#{runtimelibrary}"
end
info "Running MSBuild.exe with max cpu count = #{threads} on project #{targetSelect}"
info "with configuration = #{configuration} and platform = #{platform}"
runSystemSafe(*args) == 0
end
def openVSSolutionIfAutoOpen(solutionFile)
if not AutoOpenVS
return
end
puts "Automatically opening Visual Studio solution (NOTE: verify right version of " +
"vs opened): " + solutionFile
runOpen3 "start", solutionFile
waitForKeyPress
end
| 31.65272 | 94 | 0.651818 |
b9b997bf3a704465894a11bdc78c2ca76b92908a | 5,557 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
module Models
#
# Azure Databricks Delta Lake linked service.
#
class AzureDatabricksDeltaLakeLinkedService < LinkedService
include MsRestAzure
def initialize
@type = "AzureDatabricksDeltaLake"
end
attr_accessor :type
# @return <REGION>.azuredatabricks.net, domain name of your Databricks
# deployment. Type: string (or Expression with resultType string).
attr_accessor :domain
# @return [SecretBase] Access token for databricks REST API. Refer to
# https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
# string, SecureString or AzureKeyVaultSecretReference.
attr_accessor :access_token
# @return The id of an existing interactive cluster that will be used for
# all runs of this job. Type: string (or Expression with resultType
# string).
attr_accessor :cluster_id
# @return The encrypted credential used for authentication. Credentials
# are encrypted using the integration runtime credential manager. Type:
# string (or Expression with resultType string).
attr_accessor :encrypted_credential
#
# Mapper for AzureDatabricksDeltaLakeLinkedService class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'AzureDatabricksDeltaLake',
type: {
name: 'Composite',
class_name: 'AzureDatabricksDeltaLakeLinkedService',
model_properties: {
additional_properties: {
client_side_validation: true,
required: false,
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
connect_via: {
client_side_validation: true,
required: false,
serialized_name: 'connectVia',
type: {
name: 'Composite',
class_name: 'IntegrationRuntimeReference'
}
},
description: {
client_side_validation: true,
required: false,
serialized_name: 'description',
type: {
name: 'String'
}
},
parameters: {
client_side_validation: true,
required: false,
serialized_name: 'parameters',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ParameterSpecificationElementType',
type: {
name: 'Composite',
class_name: 'ParameterSpecification'
}
}
}
},
annotations: {
client_side_validation: true,
required: false,
serialized_name: 'annotations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
type: {
client_side_validation: true,
required: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
domain: {
client_side_validation: true,
required: true,
serialized_name: 'typeProperties.domain',
type: {
name: 'Object'
}
},
access_token: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.accessToken',
type: {
name: 'Composite',
polymorphic_discriminator: 'type',
uber_parent: 'SecretBase',
class_name: 'SecretBase'
}
},
cluster_id: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.clusterId',
type: {
name: 'Object'
}
},
encrypted_credential: {
client_side_validation: true,
required: false,
serialized_name: 'typeProperties.encryptedCredential',
type: {
name: 'Object'
}
}
}
}
}
end
end
end
end
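# Hedged usage sketch (not part of the generated SDK file): the domain and
# cluster id below are placeholders.
#
#   linked_service = Azure::DataFactory::Mgmt::V2018_06_01::Models::AzureDatabricksDeltaLakeLinkedService.new
#   linked_service.domain     = 'https://adb-1234567890123456.7.azuredatabricks.net'
#   linked_service.cluster_id = '0123-456789-abcdef'
#   linked_service.class.mapper # serialization schema as a Ruby hash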
| 32.688235 | 79 | 0.476336 |
7a07f76feddfdc2228817a68cb2d42e83064a8a0 | 856 | cask 'qqlive' do
version '2.3.1.43012'
sha256 '503a89359032692d5651c75073c20705e95da6c6b94e86c3e98120d442490f3c'
url "https://dldir1.qq.com/qqtv/mac/TencentVideo_V#{version}.dmg"
appcast 'https://v.qq.com/download.html#mac'
name 'QQLive'
name '腾讯视频'
homepage 'https://v.qq.com/download.html#mac'
auto_updates true
depends_on macos: '>= :yosemite'
app 'QQLive.app'
zap trash: [
'~/Library/Caches/com.tencent.tenvideo',
'~/Library/Containers/com.tencent.tenvideo',
'~/Library/Cookies/com.tencent.tenvideo.binarycookies',
'~/Library/Logs/QQLive',
'~/Library/Preferences/com.tencent.tenvideo.plist',
'~/Library/Saved Application State/com.tencent.tenvideo.savedState',
'~/Library/WebKit/com.tencent.tenvideo',
]
end
| 32.923077 | 83 | 0.642523 |
e9558bfd03d76f5444a14ee33555096a0e779b0b | 3,693 | # frozen_string_literal: true
class ConferenceSerializer < ActiveModel::Serializer
include ApplicationHelper
attributes :short_title, :title, :description, :start_date, :end_date, :picture_url,
:difficulty_levels, :event_types, :rooms, :tracks,
:date_range, :revision
  def difficulty_levels
    object.program.difficulty_levels.map do |difficulty_level|
      {
        id: difficulty_level.id,
        title: difficulty_level.title,
        description: difficulty_level.description
      }
    end
  end

  def event_types
    object.program.event_types.map do |event_type|
      {
        id: event_type.id,
        title: event_type.title,
        length: event_type.length,
        description: event_type.description
      }
    end
  end

  def rooms
    if object.venue
      object.venue.rooms.map do |room|
        {
          id: room.id,
          size: room.size,
          events: room.event_schedules.map do |event_schedule|
            {
              guid: event_schedule.event.title,
              title: event_schedule.event.title,
              subtitle: event_schedule.event.subtitle,
              abstract: event_schedule.event.abstract,
              description: event_schedule.event.description,
              is_highlight: event_schedule.event.is_highlight,
              require_registration: event_schedule.event.require_registration,
              start_time: event_schedule.start_time,
              event_type_id: event_schedule.event.event_type.id,
              difficulty_level_id: event_schedule.event.difficulty_level_id,
              track_id: event_schedule.event.track_id,
              speaker_names: event_schedule.event.speaker_names
            }
          end
        }
      end
    else
      []
    end
  end

  def tracks
    object.program.tracks.map do |track|
      {
        'id' => track.id,
        'name' => track.name,
        'description' => track.description
      }
    end
  end
def revision
object.revision || 0
end
def date_range
if defined? date_string(object.start_date, object.end_date)
date_string(object.start_date, object.end_date).try(:split, ',').try(:first)
end
end
end
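# Hedged usage sketch (not part of the original serializer): assumes a
# persisted Conference record and, depending on the installed
# active_model_serializers version, the default adapter.
#
#   ConferenceSerializer.new(conference).as_json
#   # => { short_title: ..., title: ..., rooms: [...], tracks: [...], revision: 0, ... }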
| 53.521739 | 161 | 0.375846 |
ed6849aa16d61ecf442be9c63237a1fffd9087ef | 450 | cask :v1 => 'font-mountains-of-christmas' do
# version '1.002'
version :latest
sha256 :no_check
url 'https://github.com/w0ng/googlefontdirectory/trunk/fonts/mountainsofchristmas',
:using => :svn,
:revision => '50',
:trust_cert => true
homepage 'http://www.google.com/fonts/specimen/Mountains%20of%20Christmas'
license :apache
font 'MountainsofChristmas-Bold.ttf'
font 'MountainsofChristmas-Regular.ttf'
end
| 28.125 | 85 | 0.697778 |
01edfbec32b067955bd22f983e2de28073494b18 | 1,554 | class Links < Formula
desc "Lynx-like WWW browser that supports tables, menus, etc."
homepage "http://links.twibright.com/"
url "http://links.twibright.com/download/links-2.16.tar.bz2"
sha256 "82f03038d5e050a65681b9888762af41c40fd42dec7e59a8d630bfb0ee134a3f"
bottle do
cellar :any
sha256 "8e70c80765c28eaec7146aaa806d3294e1857f06148c21900f7e4e505c561495" => :high_sierra
sha256 "d26fabd7bc2111c443b5c3169ed5f4c246e5d0a234cb7da98ff040e83ff1e232" => :sierra
sha256 "72b090e5cd35aced4c32e269ce546c4f39184f94b6efa4214e01f520ea666aee" => :el_capitan
sha256 "12f64a7b2adfa01783e66b62a1d33f7eda7ccad11a470515bc171d1509ce8a7d" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "openssl" => :recommended
depends_on "libtiff" => :optional
depends_on "jpeg" => :optional
depends_on "librsvg" => :optional
depends_on :x11 => :optional
depends_on "linuxbrew/xorg/xorg" unless OS.mac?
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--mandir=#{man}
--with-ssl=#{Formula["openssl"].opt_prefix}
--without-lzma
]
args << "--enable-graphics" if build.with? "x11"
args << "--without-libtiff" if build.without? "libtiff"
args << "--without-libjpeg" if build.without? "jpeg"
args << "--without-librsvg" if build.without? "librsvg"
system "./configure", *args
system "make", "install"
doc.install Dir["doc/*"]
end
test do
system bin/"links", "-dump", "https://duckduckgo.com"
end
end
| 33.06383 | 94 | 0.705277 |
262846adffd69b7db33f0ba4e4744557dae1e9b2 | 126 | require 'test_helper'
class SubjectgroupTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.75 | 48 | 0.714286 |
4aa15c5b9a9f4bffc725274c1f08f6d81c9c77dc | 1,958 | $VERBOSE = ENV['VERBOSE'] || false
require 'rubygems'
ENGINE_RAILS_ROOT = File.join(File.dirname(__FILE__), '../') unless defined?(ENGINE_RAILS_ROOT)
# Configure Rails Environment
ENV["RAILS_ENV"] ||= 'test'
if ENV['TRAVIS']
require 'coveralls'
Coveralls.wear!
end
require File.expand_path("../dummy/config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rspec'
require 'rspec/retry'
Rails.backtrace_cleaner.remove_silencers!
RSpec.configure do |config|
config.mock_with :rspec
config.filter_run :focus => true
config.filter_run :js => true if ENV['JS'] == 'true'
config.filter_run :js => nil if ENV['JS'] == 'false'
config.run_all_when_everything_filtered = true
config.include ActionView::TestCase::Behavior, :file_path => %r{spec/presenters}
config.infer_spec_type_from_file_location!
config.before(:each) do
::I18n.default_locale = I18n.locale = Globalize.locale = :en
end
# rspec-retry
config.verbose_retry = true
config.default_sleep_interval = 0.33
config.clear_lets_on_failure = true
config.default_retry_count = 3
unless ENV['FULL_BACKTRACE']
config.backtrace_exclusion_patterns = %w(
rails actionpack railties capybara activesupport rack warden rspec actionview
activerecord dragonfly benchmark quiet_assets rubygems
).map { |noisy| /\b#{noisy}\b/ }
end
end
# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories including factories.
([ENGINE_RAILS_ROOT, Rails.root.to_s].uniq | Refinery::Plugins.registered.pathnames).map{ |p|
Dir[File.join(p, 'spec', 'support', '**', '*.rb').to_s]
}.flatten.sort.each do |support_file|
require support_file
end
Capybara.register_driver :poltergeist_debug do |app|
Capybara::Poltergeist::Driver.new(app, debug: false, js_errors: true, inspector: :open)
end
require 'capybara/poltergeist'
Capybara.javascript_driver = :poltergeist
Capybara.always_include_port = true
| 30.123077 | 95 | 0.74668 |
f85a0b14199bb916c49f642c88624265831f3c11 | 24,543 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
class WorkPackage < ActiveRecord::Base
include WorkPackage::Validations
include WorkPackage::SchedulingRules
include WorkPackage::StatusTransitions
include WorkPackage::AskBeforeDestruction
include WorkPackage::TimeEntries
include WorkPackage::Ancestors
prepend WorkPackage::Parent
include WorkPackage::TypedDagDefaults
include WorkPackage::CustomActions
include OpenProject::Journal::AttachmentHelper
DONE_RATIO_OPTIONS = %w(field status disabled).freeze
ATTRIBS_WITH_VALUES_FROM_CHILDREN =
%w(start_date due_date estimated_hours done_ratio).freeze
belongs_to :project
belongs_to :type
belongs_to :status, class_name: 'Status', foreign_key: 'status_id'
belongs_to :author, class_name: 'User', foreign_key: 'author_id'
belongs_to :assigned_to, class_name: 'Principal', foreign_key: 'assigned_to_id'
belongs_to :responsible, class_name: 'Principal', foreign_key: 'responsible_id'
belongs_to :fixed_version, class_name: 'Version', foreign_key: 'fixed_version_id'
belongs_to :priority, class_name: 'IssuePriority', foreign_key: 'priority_id'
belongs_to :category, class_name: 'Category', foreign_key: 'category_id'
has_many :time_entries, dependent: :delete_all
has_and_belongs_to_many :changesets, -> {
order("#{Changeset.table_name}.committed_on ASC, #{Changeset.table_name}.id ASC")
}
scope :recently_updated, ->() {
order(updated_at: :desc)
}
scope :visible, ->(*args) {
where(project_id: Project.allowed_to(args.first || User.current, :view_work_packages))
}
scope :in_status, ->(*args) do
where(status_id: (args.first.respond_to?(:id) ? args.first.id : args.first))
end
scope :for_projects, ->(projects) {
where(project_id: projects)
}
scope :changed_since, ->(changed_since) {
if changed_since
where(["#{WorkPackage.table_name}.updated_at >= ?", changed_since])
end
}
scope :with_status_open, ->() {
includes(:status)
.where(statuses: { is_closed: false })
}
scope :with_status_closed, ->() {
includes(:status)
.where(statuses: { is_closed: true })
}
scope :with_limit, ->(limit) {
limit(limit)
}
scope :on_active_project, -> {
includes(:status, :project, :type)
.where(projects: { status: Project::STATUS_ACTIVE })
}
scope :without_version, -> {
where(fixed_version_id: nil)
}
scope :with_query, ->(query) {
where(query.statement)
}
scope :with_author, ->(author) {
where(author_id: author.id)
}
acts_as_watchable
before_create :default_assign
before_save :close_duplicates, :update_done_ratio_from_status
acts_as_customizable
acts_as_searchable columns: ['subject',
"#{table_name}.description",
"#{Journal.table_name}.notes"],
include: %i(project journals),
references: %i(projects journals),
date_column: "#{quoted_table_name}.created_at",
# sort by id so that limited eager loading doesn't break with postgresql
order_column: "#{table_name}.id"
##################### WARNING #####################
# Do not change the order of acts_as_attachable #
# and acts_as_journalized! #
# #
# This order ensures that no journal entries are #
# written after a project is destroyed. #
# #
# See test/unit/project_test.rb #
# test_destroying_root_projects_should_clear_data #
# for details. #
###################################################
acts_as_attachable after_remove: :attachments_changed,
order: "#{Attachment.table_name}.filename",
add_on_new_permission: :add_work_packages,
add_on_persisted_permission: :edit_work_packages
after_validation :set_attachments_error_details,
if: lambda { |work_package| work_package.errors.messages.has_key? :attachments }
associated_to_ask_before_destruction TimeEntry,
->(work_packages) {
TimeEntry.on_work_packages(work_packages).count > 0
},
method(:cleanup_time_entries_before_destruction_of)
include WorkPackage::Journalized
def self.done_ratio_disabled?
Setting.work_package_done_ratio == 'disabled'
end
def self.use_status_for_done_ratio?
Setting.work_package_done_ratio == 'status'
end
def self.use_field_for_done_ratio?
Setting.work_package_done_ratio == 'field'
end
# Returns true if usr or current user is allowed to view the work_package
def visible?(usr = nil)
(usr || User.current).allowed_to?(:view_work_packages, project)
end
# ACTS AS JOURNALIZED
def activity_type
'work_packages'
end
# RELATIONS
# Returns true if this work package is blocked by another work package that is still open
def blocked?
blocked_by
.with_status_open
.exists?
end
def relations
Relation.of_work_package(self)
end
def visible_relations(user)
# This duplicates chaining
# .relations.visible
    # The duplication is made necessary to achieve a performant sql query on MySQL.
# Chaining would result in
# WHERE (relations.from_id = [ID] OR relations.to_id = [ID])
# AND relations.from_id IN (SELECT [IDs OF VISIBLE WORK_PACKAGES])
# AND relations.to_id IN (SELECT [IDs OF VISIBLE WORK_PACKAGES])
# This performs OK on postgresql but is very slow on MySQL
# The SQL generated by this method:
# WHERE (relations.from_id = [ID] AND relations.to_id IN (SELECT [IDs OF VISIBLE WORK_PACKAGES])
# OR (relations.to_id = [ID] AND relations.from_id IN (SELECT [IDs OF VISIBLE WORK_PACKAGES]))
    # is arguably easier to read and performs equally well on both DBs.
relations_from = Relation
.where(from: self)
.where(to: WorkPackage.visible(user))
relations_to = Relation
.where(to: self)
.where(from: WorkPackage.visible(user))
relations_from
.or(relations_to)
end
def relation(id)
Relation.of_work_package(self).find(id)
end
def new_relation
relations_to.build
end
def add_time_entry(attributes = {})
attributes.reverse_merge!(
project: project,
work_package: self
)
time_entries.build(attributes)
end
# Users/groups the work_package can be assigned to
extend Forwardable
def_delegator :project, :possible_assignees, :assignable_assignees
# Users the work_package can be assigned to
def_delegator :project, :possible_responsibles, :assignable_responsibles
# Versions that the work_package can be assigned to
# A work_package can be assigned to:
# * any open, shared version of the project the wp belongs to
# * the version it was already assigned to
  # (to make sure that you can still update closed tickets)
def assignable_versions
@assignable_versions ||= begin
current_version = fixed_version_id_changed? ? Version.find_by(id: fixed_version_id_was) : fixed_version
(project.assignable_versions + [current_version]).compact.uniq.sort
end
end
def to_s
"#{type.is_standard ? '' : type.name} ##{id}: #{subject}"
end
# Return true if the work_package is closed, otherwise false
def closed?
status.nil? || status.is_closed?
end
# Returns true if the work_package is overdue
def overdue?
!due_date.nil? && (due_date < Date.today) && !closed?
end
def milestone?
type && type.is_milestone?
end
alias_method :is_milestone?, :milestone?
# Returns an array of status that user is able to apply
def new_statuses_allowed_to(user, include_default = false)
return Status.where('1=0') if status.nil?
current_status = Status.where(id: status_id)
statuses = new_statuses_allowed_by_workflow_to(user)
.or(current_status)
statuses = statuses.or(Status.where(id: Status.default.id)) if include_default
statuses = statuses.where(is_closed: false) if blocked?
statuses.order_by_position
end
# Returns users that should be notified
def recipients
notified = project.notified_users + attribute_users.select { |u| u.notify_about?(self) }
notified.uniq!
# Remove users that can not view the work package
notified & User.allowed(:view_work_packages, project)
end
def notify?(user)
case user.mail_notification
when 'selected', 'only_my_events'
author == user || user.is_or_belongs_to?(assigned_to) || user.is_or_belongs_to?(responsible)
when 'none'
false
when 'only_assigned'
user.is_or_belongs_to?(assigned_to) || user.is_or_belongs_to?(responsible)
when 'only_owner'
author == user
else
false
end
end
def done_ratio
if WorkPackage.use_status_for_done_ratio? && status && status.default_done_ratio
status.default_done_ratio
else
read_attribute(:done_ratio)
end
end
def estimated_hours=(h)
converted_hours = (h.is_a?(String) ? h.to_hours : h)
write_attribute :estimated_hours, !!converted_hours ? converted_hours : h
end
# Overrides Redmine::Acts::Customizable::InstanceMethods#available_custom_fields
def available_custom_fields
WorkPackage::AvailableCustomFields.for(project, type)
end
# aliasing subject to name
# using :alias is not possible as AR will add the subject method later
def name
subject
end
def status_id=(sid)
self.status = nil
write_attribute(:status_id, sid)
end
def priority_id=(pid)
self.priority = nil
write_attribute(:priority_id, pid)
end
def type_id=(tid)
self.type = nil
result = write_attribute(:type_id, tid)
@custom_field_values = nil
result
end
# Overrides attributes= so that type_id gets assigned first
def attributes=(new_attributes)
return if new_attributes.nil?
new_type_id = new_attributes['type_id'] || new_attributes[:type_id]
if new_type_id
self.type_id = new_type_id
end
super
end
# Set the done_ratio using the status if that setting is set. This will keep the done_ratios
# even if the user turns off the setting later
def update_done_ratio_from_status
if WorkPackage.use_status_for_done_ratio? && status && status.default_done_ratio
self.done_ratio = status.default_done_ratio
end
end
  # Is the amount of work done less than it should be for the finish date?
def behind_schedule?
return false if start_date.nil? || due_date.nil?
done_date = start_date + (duration * done_ratio / 100).floor
done_date <= Date.today
end
# check if user is allowed to edit WorkPackage Journals.
# see Redmine::Acts::Journalized::Permissions#journal_editable_by
def editable_by?(user)
project = self.project
user.allowed_to?(:edit_work_package_notes, project, global: project.present?) ||
user.allowed_to?(:edit_own_work_package_notes, project, global: project.present?)
end
# Adds the 'virtual' attribute 'hours' to the result set. Using the
# patch in config/initializers/eager_load_with_hours, the value is
# returned as the #hours attribute on each work package.
def self.include_spent_hours(user)
WorkPackage::SpentTime
.new(user)
.scope
.select('SUM(time_entries.hours) AS hours')
end
# Returns the total number of hours spent on this work package and its descendants.
# The result can be a subset of the actual spent time in cases where the user's permissions
  # are limited, i.e. they lack the view_time_entries and/or view_work_packages permission.
#
# Example:
# spent_hours => 0.0
# spent_hours => 50.2
#
# The value can stem from either eager loading the value via
# WorkPackage.include_spent_hours in which case the work package has an
# #hours attribute or it is loaded on calling the method.
def spent_hours(user = User.current)
if respond_to?(:hours)
hours.to_f
else
compute_spent_hours(user)
end || 0.0
end
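  # Hedged illustration (not from the original file): with the eager-loading
  # patch mentioned above, the aggregate can be read directly.
  #
  #   work_package.spent_hours(User.current) # => 12.5, or 0.0 when nothing is logged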
# Returns a scope for the projects
# the user is allowed to move a work package to
def self.allowed_target_projects_on_move(user)
Project.allowed_to(user, :move_work_packages)
end
# Returns a scope for the projects
  # the user is allowed to create a work package in
def self.allowed_target_projects_on_create(user)
Project.allowed_to(user, :add_work_packages)
end
# Unassigns issues from +version+ if it's no longer shared with issue's project
def self.update_versions_from_sharing_change(version)
# Update issues assigned to the version
update_versions(["#{WorkPackage.table_name}.fixed_version_id = ?", version.id])
end
# Unassigns issues from versions that are no longer shared
# after +project+ was moved
def self.update_versions_from_hierarchy_change(project)
moved_project_ids = project.self_and_descendants.reload.map(&:id)
# Update issues of the moved projects and issues assigned to a version of a moved project
update_versions(
["#{Version.table_name}.project_id IN (?) OR #{WorkPackage.table_name}.project_id IN (?)",
moved_project_ids,
moved_project_ids]
)
end
# Extracted from the ReportsController.
def self.by_type(project)
count_and_group_by project: project,
field: 'type_id',
joins: ::Type.table_name
end
def self.by_version(project)
count_and_group_by project: project,
field: 'fixed_version_id',
joins: Version.table_name
end
def self.by_priority(project)
count_and_group_by project: project,
field: 'priority_id',
joins: IssuePriority.table_name
end
def self.by_category(project)
count_and_group_by project: project,
field: 'category_id',
joins: Category.table_name
end
def self.by_assigned_to(project)
count_and_group_by project: project,
field: 'assigned_to_id',
joins: User.table_name
end
def self.by_responsible(project)
count_and_group_by project: project,
field: 'responsible_id',
joins: User.table_name
end
def self.by_author(project)
count_and_group_by project: project,
field: 'author_id',
joins: User.table_name
end
def self.by_subproject(project)
return unless project.descendants.active.any?
ActiveRecord::Base.connection.select_all(
"select s.id as status_id,
s.is_closed as closed,
i.project_id as project_id,
count(i.id) as total
from
#{WorkPackage.table_name} i, #{Status.table_name} s
where
i.status_id=s.id
and i.project_id IN (#{project.descendants.active.map(&:id).join(',')})
group by s.id, s.is_closed, i.project_id"
).to_a
end
def self.relateable_to(wp)
# can't relate to itself and not to a descendant (see relations)
relateable_shared(wp)
.not_having_relations_from(wp) # can't relate to wp that relates to us (direct or transitively)
.not_having_direct_relation_to(wp) # can't relate to wp we relate to directly
end
def self.relateable_from(wp)
# can't relate to itself and not to a descendant (see relations)
relateable_shared(wp)
.not_having_relations_to(wp) # can't relate to wp that relates to us (direct or transitively)
.not_having_direct_relation_from(wp) # can't relate to wp we relate to directly
end
def self.relateable_shared(wp)
visible
.not_self(wp) # can't relate to itself
.not_being_descendant_of(wp) # can't relate to a descendant (see relations)
.satisfying_cross_project_setting(wp)
end
private_class_method :relateable_shared
def self.satisfying_cross_project_setting(wp)
if Setting.cross_project_work_package_relations?
all
else
where(project_id: wp.project_id)
end
end
def self.not_self(wp)
where.not(id: wp.id)
end
def self.not_having_direct_relation_to(wp)
where.not(id: wp.relations_to.direct.select(:to_id))
end
def self.not_having_direct_relation_from(wp)
where.not(id: wp.relations_from.direct.select(:from_id))
end
def self.not_having_relations_from(wp)
where.not(id: wp.relations_from.select(:from_id))
end
def self.not_having_relations_to(wp)
where.not(id: wp.relations_to.select(:to_id))
end
def self.not_being_descendant_of(wp)
where.not(id: wp.descendants.select(:to_id))
end
def self.order_by_ancestors(direction)
max_relation_depth = Relation
.hierarchy
.group(:to_id)
.select(:to_id,
"MAX(hierarchy) AS depth")
joins("LEFT OUTER JOIN (#{max_relation_depth.to_sql}) AS max_depth ON max_depth.to_id = work_packages.id")
.reorder("COALESCE(max_depth.depth, 0) #{direction}")
.select("#{table_name}.*, COALESCE(max_depth.depth, 0)")
end
def self.self_and_descendants_of_condition(work_package)
relation_subquery = Relation
.with_type_columns_not(hierarchy: 0)
.select(:to_id)
.where(from_id: work_package.id)
"#{table_name}.id IN (#{relation_subquery.to_sql}) OR #{table_name}.id = #{work_package.id}"
end
def self.hierarchy_tree_following(work_packages)
following = Relation
.where(to: work_packages)
.hierarchy_or_follows
following_from_hierarchy = Relation
.hierarchy
.where(from_id: following.select(:from_id))
.select("to_id common_id")
following_from_self = following.select("from_id common_id")
# Using a union here for performance.
    # Using or would yield the same results and be less complicated
    # but it would require two orders of magnitude more time.
sub_query = [following_from_hierarchy, following_from_self].map(&:to_sql).join(" UNION ")
where("id IN (SELECT common_id FROM (#{sub_query}) following_relations)")
end
protected
def <=>(other)
other.id <=> id
end
private
def add_time_entry_for(user, attributes)
return if time_entry_blank?(attributes)
attributes.reverse_merge!(user: user,
spent_on: Date.today)
time_entries.build(attributes)
end
def new_statuses_allowed_by_workflow_to(user)
status.new_statuses_allowed_to(
user.roles_for_project(project),
type,
author == user,
assigned_to_id_changed? ? assigned_to_id_was == user.id : assigned_to_id == user.id
)
end
##
# Checks if the time entry defined by the given attributes is blank.
# A time entry counts as blank despite a selected activity if that activity
# is simply the default activity and all other attributes are blank.
def time_entry_blank?(attributes)
return true if attributes.nil?
key = 'activity_id'
id = attributes[key]
default_id = if id && !id.blank?
Enumeration.exists? id: id, is_default: true, type: 'TimeEntryActivity'
else
true
end
default_id && attributes.except(key).values.all?(&:blank?)
end
def self.having_fixed_version_from_other_project
where(
"#{WorkPackage.table_name}.fixed_version_id IS NOT NULL" +
" AND #{WorkPackage.table_name}.project_id <> #{Version.table_name}.project_id" +
" AND #{Version.table_name}.sharing <> 'system'"
)
end
private_class_method :having_fixed_version_from_other_project
# Update issues so their versions are not pointing to a
# fixed_version that is not shared with the issue's project
def self.update_versions(conditions = nil)
# Only need to update issues with a fixed_version from
# a different project and that is not systemwide shared
having_fixed_version_from_other_project
.where(conditions)
.includes(:project, :fixed_version)
.references(:versions).each do |issue|
next if issue.project.nil? || issue.fixed_version.nil?
unless issue.project.shared_versions.include?(issue.fixed_version)
issue.fixed_version = nil
issue.save
end
end
end
private_class_method :update_versions
# Default assignment based on category
def default_assign
if assigned_to.nil? && category && category.assigned_to
self.assigned_to = category.assigned_to
end
end
# Closes duplicates if the issue is being closed
def close_duplicates
return unless closing?
duplicates.each do |duplicate|
# Reload is needed in case the duplicate was updated by a previous duplicate
duplicate.reload
# Don't re-close it if it's already closed
next if duplicate.closed?
# Implicitly creates a new journal
duplicate.update_attribute :status, status
override_last_journal_notes_and_user_of!(duplicate)
end
end
def override_last_journal_notes_and_user_of!(other_work_package)
journal = other_work_package.journals.last
# Same user and notes
journal.user = current_journal.user
journal.notes = current_journal.notes
journal.save
end
# Query generator for selecting groups of issue counts for a project
# based on specific criteria.
# DANGER: :field and :joins MUST never come from user input, because
# they are not SQL-escaped.
#
# Options
# * project - Project to search in.
# * field - String. Issue field to key off of in the grouping.
# * joins - String. The table name to join against.
def self.count_and_group_by(options)
project = options.delete(:project)
select_field = options.delete(:field)
joins = options.delete(:joins)
where = "i.#{select_field}=j.id"
ActiveRecord::Base.connection.select_all(
"select s.id as status_id,
s.is_closed as closed,
j.id as #{select_field},
count(i.id) as total
from
#{WorkPackage.table_name} i, #{Status.table_name} s, #{joins} j
where
i.status_id=s.id
and #{where}
and i.project_id=#{project.id}
group by s.id, s.is_closed, j.id"
).to_a
end
private_class_method :count_and_group_by
def set_attachments_error_details
if invalid_attachment = attachments.detect { |a| !a.valid? }
errors.messages[:attachments].first << " - #{invalid_attachment.errors.full_messages.first}"
end
end
def compute_spent_hours(user)
WorkPackage::SpentTime
.new(user, self)
.scope
.where(id: id)
.pluck('SUM(hours)')
.first
end
def attribute_users
related = [author]
[responsible, assigned_to].each do |user|
if user.is_a?(Group)
related += user.users
else
related << user
end
end
related.select(&:present?)
end
end
| 32.166448 | 110 | 0.675997 |
e934c486c7534e5166f243707d5026e788897bf6 | 11,540 | # NotificationService class
#
# Used for notifying users with emails about different events
#
# Ex.
# NotificationService.new.new_issue(issue, current_user)
#
class NotificationService
# Always notify user about ssh key added
# only if ssh key is not deploy key
#
# This is security email so it will be sent
# even if user disabled notifications
def new_key(key)
if key.user
mailer.new_ssh_key_email(key.id)
end
end
# Always notify user about email added to profile
def new_email(email)
if email.user
mailer.new_email_email(email.id)
end
end
  # When we create an issue we should send the following emails:
  #
  #  * issue assignee if their notification level is not Disabled
  #  * project team members with notification level higher than Participating
#
def new_issue(issue, current_user)
new_resource_email(issue, issue.project, 'new_issue_email')
end
  # When we close an issue we should send the following emails:
  #
  #  * issue author if their notification level is not Disabled
  #  * issue assignee if their notification level is not Disabled
  #  * project team members with notification level higher than Participating
#
def close_issue(issue, current_user)
close_resource_email(issue, issue.project, current_user, 'closed_issue_email')
end
  # When we reassign an issue we should send the following emails:
#
# * issue old assignee if their notification level is not Disabled
# * issue new assignee if their notification level is not Disabled
#
def reassigned_issue(issue, current_user)
reassign_resource_email(issue, issue.project, current_user, 'reassigned_issue_email')
end
  # When we create a merge request we should send the following emails:
#
# * mr assignee if their notification level is not Disabled
#
def new_merge_request(merge_request, current_user)
new_resource_email(merge_request, merge_request.target_project, 'new_merge_request_email')
end
  # When we reassign a merge_request we should send the following emails:
#
# * merge_request old assignee if their notification level is not Disabled
# * merge_request assignee if their notification level is not Disabled
#
def reassigned_merge_request(merge_request, current_user)
reassign_resource_email(merge_request, merge_request.target_project, current_user, 'reassigned_merge_request_email')
end
  # When we close a merge request we should send the following emails:
  #
  #  * merge_request author if their notification level is not Disabled
  #  * merge_request assignee if their notification level is not Disabled
  #  * project team members with notification level higher than Participating
#
def close_mr(merge_request, current_user)
close_resource_email(merge_request, merge_request.target_project, current_user, 'closed_merge_request_email')
end
def reopen_issue(issue, current_user)
reopen_resource_email(issue, issue.project, current_user, 'issue_status_changed_email', 'reopened')
end
  # When we merge a merge request we should send the following emails:
  #
  #  * merge_request author if their notification level is not Disabled
  #  * merge_request assignee if their notification level is not Disabled
  #  * project team members with notification level higher than Participating
#
def merge_mr(merge_request, current_user)
recipients = reject_muted_users([merge_request.author, merge_request.assignee], merge_request.target_project)
recipients = recipients.concat(project_watchers(merge_request.target_project)).uniq
recipients.delete(current_user)
recipients.each do |recipient|
mailer.merged_merge_request_email(recipient.id, merge_request.id, current_user.id)
end
end
def reopen_mr(merge_request, current_user)
reopen_resource_email(merge_request, merge_request.target_project, current_user, 'merge_request_status_email', 'reopened')
end
# Notify new user with email after creation
def new_user(user)
# Don't email omniauth created users
mailer.new_user_email(user.id, user.password) unless user.extern_uid?
end
# Notify users on new note in system
#
  # TODO: split into methods and refactor
#
def new_note(note)
return true unless note.noteable_type.present?
# ignore gitlab service messages
return true if note.note =~ /\A_Status changed to closed_/
return true if note.note =~ /\A_mentioned in / && note.system == true
opts = { noteable_type: note.noteable_type, project_id: note.project_id }
target = note.noteable
if target.respond_to?(:participants)
recipients = target.participants
else
recipients = note.mentioned_users
end
if note.commit_id.present?
opts.merge!(commit_id: note.commit_id)
recipients << note.commit_author
else
opts.merge!(noteable_id: note.noteable_id)
end
# Get users who left comment in thread
recipients = recipients.concat(User.where(id: Note.where(opts).pluck(:author_id)))
# Merge project watchers
recipients = recipients.concat(project_watchers(note.project)).compact.uniq
    # Reject muted users
recipients = reject_muted_users(recipients, note.project)
# Reject author
recipients.delete(note.author)
# build notify method like 'note_commit_email'
notify_method = "note_#{note.noteable_type.underscore}_email".to_sym
recipients.each do |recipient|
mailer.send(notify_method, recipient.id, note.id)
end
end
def new_team_member(users_project)
mailer.project_access_granted_email(users_project.id)
end
def update_team_member(users_project)
mailer.project_access_granted_email(users_project.id)
end
def new_group_member(users_group)
mailer.group_access_granted_email(users_group.id)
end
def update_group_member(users_group)
mailer.group_access_granted_email(users_group.id)
end
def project_was_moved(project)
recipients = project.team.members
recipients = reject_muted_users(recipients, project)
recipients.each do |recipient|
mailer.project_was_moved_email(project.id, recipient.id)
end
end
protected
# Get project users with WATCH notification level
def project_watchers(project)
project_members = users_project_notification(project)
users_with_project_level_global = users_project_notification(project, Notification::N_GLOBAL)
users_with_group_level_global = users_group_notification(project, Notification::N_GLOBAL)
users = users_with_global_level_watch([users_with_project_level_global, users_with_group_level_global].flatten.uniq)
users_with_project_setting = select_users_project_setting(project, users_with_project_level_global, users)
users_with_group_setting = select_users_group_setting(project, project_members, users_with_group_level_global, users)
User.where(id: users_with_project_setting.concat(users_with_group_setting).uniq).to_a
end
def users_project_notification(project, notification_level=nil)
project_members = project.users_projects
if notification_level
project_members.where(notification_level: notification_level).pluck(:user_id)
else
project_members.pluck(:user_id)
end
end
def users_group_notification(project, notification_level)
if project.group
project.group.users_groups.where(notification_level: notification_level).pluck(:user_id)
else
[]
end
end
def users_with_global_level_watch(ids)
User.where(
id: ids,
notification_level: Notification::N_WATCH
).pluck(:id)
end
  # Build a list of users based on project notification settings
def select_users_project_setting(project, global_setting, users_global_level_watch)
users = users_project_notification(project, Notification::N_WATCH)
# If project setting is global, add to watch list if global setting is watch
global_setting.each do |user_id|
if users_global_level_watch.include?(user_id)
users << user_id
end
end
users
end
  # Build a list of users based on group notification settings
def select_users_group_setting(project, project_members, global_setting, users_global_level_watch)
uids = users_group_notification(project, Notification::N_WATCH)
# Group setting is watch, add to users list if user is not project member
users = []
uids.each do |user_id|
if project_members.exclude?(user_id)
users << user_id
end
end
# Group setting is global, add to users list if global setting is watch
global_setting.each do |user_id|
if project_members.exclude?(user_id) && users_global_level_watch.include?(user_id)
users << user_id
end
end
users
end
# Remove users with disabled notifications from array
# Also remove duplications and nil recipients
def reject_muted_users(users, project = nil)
users = users.to_a.compact.uniq
users.reject do |user|
next user.notification.disabled? unless project
tm = project.users_projects.find_by(user_id: user.id)
if !tm && project.group
tm = project.group.users_groups.find_by(user_id: user.id)
end
      # reject users who globally disabled notification and have no membership
next user.notification.disabled? unless tm
# reject users who disabled notification in project
next true if tm.notification.disabled?
# reject users who have N_GLOBAL in project and disabled in global settings
tm.notification.global? && user.notification.disabled?
end
end
def new_resource_email(target, project, method)
if target.respond_to?(:participants)
recipients = target.participants
else
recipients = []
end
recipients = reject_muted_users(recipients, project)
recipients = recipients.concat(project_watchers(project)).uniq
recipients.delete(target.author)
recipients.each do |recipient|
mailer.send(method, recipient.id, target.id)
end
end
def close_resource_email(target, project, current_user, method)
recipients = reject_muted_users([target.author, target.assignee], project)
recipients = recipients.concat(project_watchers(project)).uniq
recipients.delete(current_user)
recipients.each do |recipient|
mailer.send(method, recipient.id, target.id, current_user.id)
end
end
def reassign_resource_email(target, project, current_user, method)
assignee_id_was = previous_record(target, "assignee_id")
recipients = User.where(id: [target.assignee_id, assignee_id_was])
# Add watchers to email list
recipients = recipients.concat(project_watchers(project))
# reject users with disabled notifications
recipients = reject_muted_users(recipients, project)
# Reject me from recipients if I reassign an item
recipients.delete(current_user)
recipients.each do |recipient|
mailer.send(method, recipient.id, target.id, assignee_id_was, current_user.id)
end
end
def reopen_resource_email(target, project, current_user, method, status)
recipients = reject_muted_users([target.author, target.assignee], project)
recipients = recipients.concat(project_watchers(project)).uniq
recipients.delete(current_user)
recipients.each do |recipient|
mailer.send(method, recipient.id, target.id, status, current_user.id)
end
end
def mailer
Notify.delay
end
def previous_record(object, attribute)
if object && attribute
if object.previous_changes.include?(attribute)
object.previous_changes[attribute].first
end
end
end
end
| 32.691218 | 126 | 0.749133 |
7af223310969ee077e6e56c5a550070423acd85d | 5,432 | #!/opt/puppetlabs/puppet/bin/ruby
require 'json'
require 'puppet'
require 'openssl'
def delete_apps_v1_collection_namespaced_replica_set(*args)
header_params = {}
params=args[0][1..-1].split(',')
arg_hash={}
params.each { |param|
mapValues= param.split(':',2)
if mapValues[1].include?(';')
mapValues[1].gsub! ';',','
end
arg_hash[mapValues[0][1..-2]]=mapValues[1][1..-2]
}
# Remove task name from arguments - should contain all necessary parameters for URI
arg_hash.delete('_task')
operation_verb = 'Delete'
query_params, body_params, path_params = format_params(arg_hash)
uri_string = "#{arg_hash['kube_api']}/apis/apps/v1/namespaces/%{namespace}/replicasets" % path_params
if query_params
uri_string = uri_string + '?' + to_query(query_params)
end
header_params['Content-Type'] = 'application/json' # first of #{parent_consumes}
if arg_hash['token']
header_params['Authentication'] = 'Bearer ' + arg_hash['token']
end
uri = URI(uri_string)
verify_mode= OpenSSL::SSL::VERIFY_NONE
if arg_hash['ca_file']
verify_mode=OpenSSL::SSL::VERIFY_PEER
end
Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https', verify_mode: verify_mode, ca_file: arg_hash['ca_file']) do |http|
if operation_verb == 'Get'
req = Net::HTTP::Get.new(uri)
elsif operation_verb == 'Put'
req = Net::HTTP::Put.new(uri)
elsif operation_verb == 'Delete'
req = Net::HTTP::Delete.new(uri)
elsif operation_verb == 'Post'
req = Net::HTTP::Post.new(uri)
end
header_params.each { |x, v| req[x] = v } unless header_params.empty?
unless body_params.empty?
if body_params.key?('file_content')
req.body = body_params['file_content']
else
req.body = body_params.to_json
end
end
Puppet.debug("URI is (#{operation_verb}) #{uri} headers are #{header_params}")
response = http.request req # Net::HTTPResponse object
Puppet.debug("response code is #{response.code} and body is #{response.body}")
success = response.is_a? Net::HTTPSuccess
Puppet.debug("Called (#{operation_verb}) endpoint at #{uri}, success was #{success}")
response
end
end
def to_query(hash)
if hash
return_value = hash.map { |x, v| "#{x}=#{v}" }.reduce { |x, v| "#{x}&#{v}" }
if !return_value.nil?
return return_value
end
end
return ''
end
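# Hedged illustration (not part of the generated task): to_query flattens a
# parameter hash into a query string (values are not URI-escaped) and returns
# '' for nil or empty input.
#
#   to_query('labelSelector' => 'app=web', 'limit' => 5)
#   # => "labelSelector=app=web&limit=5"
#   to_query(nil) # => ""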
def op_param(name, inquery, paramalias, namesnake)
{ :name => name, :location => inquery, :paramalias => paramalias, :namesnake => namesnake }
end
def format_params(key_values)
query_params = {}
body_params = {}
path_params = {}
key_values.each { | key, value |
if value.include?("=>")
Puppet.debug("Running hash from string on #{value}")
value.gsub!("=>",":")
value.gsub!("'","\"")
key_values[key] = JSON.parse(value)
Puppet.debug("Obtained hash #{key_values[key].inspect}")
end
}
if key_values.key?('body')
if File.file?(key_values['body'])
if key_values['body'].include?('json')
body_params['file_content'] = File.read(key_values['body'])
else
body_params['file_content'] =JSON.pretty_generate(YAML.load_file(key_values['body']))
end
end
end
op_params = [
op_param('apiversion', 'body', 'apiversion', 'apiversion'),
op_param('continue', 'query', 'continue', 'continue'),
op_param('fieldSelector', 'query', 'field_selector', 'field_selector'),
op_param('kind', 'body', 'kind', 'kind'),
op_param('labelSelector', 'query', 'label_selector', 'label_selector'),
op_param('limit', 'query', 'limit', 'limit'),
op_param('metadata', 'body', 'metadata', 'metadata'),
op_param('namespace', 'path', 'namespace', 'namespace'),
op_param('pretty', 'query', 'pretty', 'pretty'),
op_param('resourceVersion', 'query', 'resource_version', 'resource_version'),
op_param('spec', 'body', 'spec', 'spec'),
op_param('status', 'body', 'status', 'status'),
op_param('timeoutSeconds', 'query', 'timeout_seconds', 'timeout_seconds'),
op_param('watch', 'query', 'watch', 'watch'),
]
op_params.each do |i|
location = i[:location]
name = i[:name]
paramalias = i[:paramalias]
name_snake = i[:namesnake]
if location == 'query'
query_params[name] = key_values[name_snake] unless key_values[name_snake].nil?
query_params[name] = ENV["azure__#{name_snake}"] unless ENV["<no value>_#{name_snake}"].nil?
elsif location == 'body'
body_params[name] = key_values[name_snake] unless key_values[name_snake].nil?
body_params[name] = ENV["azure_#{name_snake}"] unless ENV["<no value>_#{name_snake}"].nil?
else
path_params[name_snake.to_sym] = key_values[name_snake] unless key_values[name_snake].nil?
path_params[name_snake.to_sym] = ENV["azure__#{name_snake}"] unless ENV["<no value>_#{name_snake}"].nil?
end
end
return query_params,body_params,path_params
end
def task
# Get operation parameters from an input JSON
params = STDIN.read
result = delete_apps_v1_collection_namespaced_replica_set(params)
if result.is_a? Net::HTTPSuccess
puts result.body
else
raise result.body
end
rescue StandardError => e
result = {}
result[:_error] = {
msg: e.message,
kind: 'puppetlabs-kubernetes/error',
details: { class: e.class.to_s },
}
puts result
exit 1
end
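# Hedged invocation sketch (not part of the original task): Puppet/Bolt hands
# the parameters to this script as a single JSON object on STDIN; the API URL,
# namespace and token below are placeholders.
#
#   {"kube_api":"https://k8s.example:6443","namespace":"default","token":"<bearer token>"}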
task | 31.581395 | 135 | 0.652982 |
2801ec94dc0df8e569583bae59a84e13f0f8580b | 2,853 |
###
# This Ruby source file was generated by test-to-ruby.xsl
# and is a derived work from the source document.
# The source document contained the following notice:
=begin
Copyright (c) 2001-2004 World Wide Web Consortium,
(Massachusetts Institute of Technology, Institut National de
Recherche en Informatique et en Automatique, Keio University). All
Rights Reserved. This program is distributed under the W3C's Software
Intellectual Property License. This program is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See W3C License http://www.w3.org/Consortium/Legal/ for more details.
=end
#
require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', '..', 'helper'))
###
# The "getNodeType()" method for a CDATASection Node
# returns the constant value 4.
#
# Retrieve the CDATASection node contained inside the
# second child of the second employee and invoke the
# "getNodeType()" method. The method should return 4.
# @author NIST
# @author Mary Brady
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#ID-111237558]
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#ID-667469212]
##
DOMTestCase('nodecdatasectionnodetype') do
###
# Constructor.
# @param factory document factory, may not be null
# @throws org.w3c.domts.DOMTestIncompatibleException Thrown if test is not compatible with parser configuration
##
def setup
=begin
org.w3c.domts.DocumentBuilderSetting[] settings =
new org.w3c.domts.DocumentBuilderSetting[] {
org.w3c.domts.DocumentBuilderSetting.notCoalescing
};
DOMTestDocumentBuilderFactory testFactory = factory.newInstance(settings)
setFactory(testFactory)
=end
##
## check if loaded documents are supported for content type
##
contentType = getContentType()
preload(contentType, "staff", false)
end
###
# Runs the test case.
# @throws Throwable Any uncaught exception causes test to fail
#
def test_nodecdatasectionnodetype
doc = nil
elementList = nil
testName = nil
cdataNode = nil
nodeType = nil
doc = load_document("staff", false)
elementList = doc.getElementsByTagName("name")
testName = elementList.item(1)
cdataNode = testName.lastChild()
nodeType = cdataNode.nodeType()
if (equals(3, nodeType))
cdataNode = doc.createCDATASection("")
nodeType = cdataNode.nodeType()
end
assert_equal(4, nodeType, "nodeTypeCDATA")
end
###
# Gets URI that identifies the test.
# @return uri identifier of test
#
def targetURI
"http://www.w3.org/2001/DOM-Test-Suite/tests/Level-1/nodecdatasectionnodetype"
end
end
| 30.677419 | 113 | 0.706625 |
91ebe133a13b1af4bddf6b43299be966c98b03a3 | 47 | require "booqmail/engine"
module Booqmail
end
| 9.4 | 25 | 0.808511 |
1dd7547a7fc41498896579b619c7f1b7e66340bd | 10,644 | require 'spec_helper'
feature 'Expand and collapse diffs', :js do
let(:branch) { 'expand-collapse-diffs' }
let(:project) { create(:project, :repository) }
before do
# Set the limits to those when these specs were written, to avoid having to
# update the test repo every time we change them.
allow(Gitlab::Git::Diff).to receive(:size_limit).and_return(100.kilobytes)
allow(Gitlab::Git::Diff).to receive(:collapse_limit).and_return(10.kilobytes)
sign_in(create(:admin))
# Ensure that undiffable.md is in .gitattributes
project.repository.copy_gitattributes(branch)
visit project_commit_path(project, project.commit(branch))
execute_script('window.ajaxUris = []; $(document).ajaxSend(function(event, xhr, settings) { ajaxUris.push(settings.url) });')
end
def file_container(filename)
find("[data-blob-diff-path*='#{filename}']")
end
# Use define_method instead of let (which is memoized) so that this just works across a
# reload.
#
files = [
'small_diff.md', 'large_diff.md', 'large_diff_renamed.md', 'undiffable.md',
'too_large.md', 'too_large_image.jpg'
]
files.each do |file|
define_method(file.split('.').first) { file_container(file) }
end
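  # The loop above defines one helper per file (e.g. `small_diff`, `too_large`),
  # each simply returning file_container for its filename.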
it 'should show the diff content with a highlighted line when linking to line' do
expect(large_diff).not_to have_selector('.code')
expect(large_diff).to have_selector('.nothing-here-block')
visit project_commit_path(project, project.commit(branch), anchor: "#{large_diff[:id]}_0_1")
execute_script('window.location.reload()')
wait_for_requests
expect(large_diff).to have_selector('.code')
expect(large_diff).not_to have_selector('.nothing-here-block')
expect(large_diff).to have_selector('.hll')
end
it 'should show the diff content when linking to file' do
expect(large_diff).not_to have_selector('.code')
expect(large_diff).to have_selector('.nothing-here-block')
visit project_commit_path(project, project.commit(branch), anchor: large_diff[:id])
execute_script('window.location.reload()')
wait_for_requests
expect(large_diff).to have_selector('.code')
expect(large_diff).not_to have_selector('.nothing-here-block')
end
context 'visiting a commit with collapsed diffs' do
it 'shows small diffs immediately' do
expect(small_diff).to have_selector('.code')
expect(small_diff).not_to have_selector('.nothing-here-block')
end
it 'shows non-renderable diffs as such immediately, regardless of their size' do
expect(undiffable).not_to have_selector('.code')
expect(undiffable).to have_selector('.nothing-here-block')
expect(undiffable).to have_content('gitattributes')
end
it 'does not allow diffs that are larger than the maximum size to be expanded' do
expect(too_large).not_to have_selector('.code')
expect(too_large).to have_selector('.nothing-here-block')
expect(too_large).to have_content('too large')
end
it 'shows image diffs immediately, regardless of their size' do
expect(too_large_image).not_to have_selector('.nothing-here-block')
expect(too_large_image).to have_selector('.image')
end
context 'expanding a diff for a renamed file' do
before do
large_diff_renamed.find('.click-to-expand').click
wait_for_requests
end
it 'shows the old content' do
old_line = large_diff_renamed.find('.line_content.old')
expect(old_line).to have_content('two copies')
end
it 'shows the new content' do
new_line = large_diff_renamed.find('.line_content.new', match: :prefer_exact)
expect(new_line).to have_content('three copies')
end
end
context 'expanding a large diff' do
before do
# Wait for diffs
find('.js-file-title', match: :first)
# Click `large_diff.md` title
all('.diff-toggle-caret')[1].click
wait_for_requests
end
it 'makes a request to get the content' do
ajax_uris = evaluate_script('ajaxUris')
expect(ajax_uris).not_to be_empty
expect(ajax_uris.first).to include('large_diff.md')
end
it 'shows the diff content' do
expect(large_diff).to have_selector('.code')
expect(large_diff).not_to have_selector('.nothing-here-block')
end
context 'adding a comment to the expanded diff' do
let(:comment_text) { 'A comment' }
before do
large_diff.find('.diff-line-num', match: :prefer_exact).hover
large_diff.find('.add-diff-note', match: :prefer_exact).click
large_diff.find('.note-textarea').send_keys comment_text
large_diff.find_button('Comment').click
wait_for_requests
end
it 'adds the comment' do
expect(large_diff.find('.notes')).to have_content comment_text
end
context 'reloading the page' do
before do
refresh
end
it 'collapses the large diff by default' do
expect(large_diff).not_to have_selector('.code')
expect(large_diff).to have_selector('.nothing-here-block')
end
context 'expanding the diff' do
before do
# Wait for diffs
find('.js-file-title', match: :first)
# Click `large_diff.md` title
all('.diff-toggle-caret')[1].click
wait_for_requests
end
it 'shows the diff content' do
expect(large_diff).to have_selector('.code')
expect(large_diff).not_to have_selector('.nothing-here-block')
end
it 'shows the diff comment' do
expect(large_diff.find('.notes')).to have_content comment_text
end
end
end
end
end
context 'collapsing an expanded diff' do
before do
# Wait for diffs
find('.js-file-title', match: :first)
# Click `small_diff.md` title
all('.diff-toggle-caret')[3].click
end
it 'hides the diff content' do
expect(small_diff).not_to have_selector('.code')
expect(small_diff).to have_selector('.nothing-here-block')
end
context 're-expanding the same diff' do
before do
# Wait for diffs
find('.js-file-title', match: :first)
# Click `small_diff.md` title
all('.diff-toggle-caret')[3].click
end
it 'shows the diff content' do
expect(small_diff).to have_selector('.code')
expect(small_diff).not_to have_selector('.nothing-here-block')
end
it 'does not make a new HTTP request' do
expect(evaluate_script('ajaxUris')).not_to include(a_string_matching('small_diff.md'))
end
end
end
context 'expanding a diff when symlink was converted to a regular file' do
let(:branch) { 'symlink-expand-diff' }
it 'shows the content of the regular file' do
expect(page).to have_content('This diff is collapsed')
expect(page).to have_no_content('No longer a symlink')
find('.click-to-expand').click
wait_for_requests
expect(page).to have_content('No longer a symlink')
end
end
end
context 'visiting a commit without collapsed diffs' do
let(:branch) { 'feature' }
it 'does not show Expand all button' do
expect(page).not_to have_link('Expand all')
end
end
context 'visiting a commit with more than safe files' do
let(:branch) { 'expand-collapse-files' }
# safe-files -> 100 | safe-lines -> 5000 | commit-files -> 105
it 'does collapsing from the safe number of files to the end on small files' do
expect(page).to have_link('Expand all')
expect(page).to have_selector('.diff-content', count: 105)
expect(page).to have_selector('.diff-collapsed', count: 5)
%w(file-95.txt file-96.txt file-97.txt file-98.txt file-99.txt).each do |filename|
expect(find("[data-blob-diff-path*='#{filename}']")).to have_selector('.diff-collapsed')
end
end
end
context 'visiting a commit with more than safe lines' do
let(:branch) { 'expand-collapse-lines' }
# safe-files -> 100 | safe-lines -> 5000 | commit_files -> 8 (each 1250 lines)
it 'does collapsing from the safe number of lines to the end' do
expect(page).to have_link('Expand all')
expect(page).to have_selector('.diff-content', count: 6)
expect(page).to have_selector('.diff-collapsed', count: 2)
%w(file-4.txt file-5.txt).each do |filename|
expect(find("[data-blob-diff-path*='#{filename}']")).to have_selector('.diff-collapsed')
end
end
end
context 'expanding all diffs' do
before do
click_link('Expand all')
# Wait for elements to appear to ensure full page reload
expect(page).to have_content('This diff was suppressed by a .gitattributes entry')
expect(page).to have_content('This source diff could not be displayed because it is too large.')
expect(page).to have_content('too_large_image.jpg')
find('.note-textarea')
wait_for_requests
execute_script('window.ajaxUris = []; $(document).ajaxSend(function(event, xhr, settings) { ajaxUris.push(settings.url) });')
end
it 'reloads the page with all diffs expanded' do
expect(small_diff).to have_selector('.code')
expect(small_diff).not_to have_selector('.nothing-here-block')
expect(large_diff).to have_selector('.code')
expect(large_diff).not_to have_selector('.nothing-here-block')
end
context 'collapsing an expanded diff' do
before do
# Wait for diffs
find('.js-file-title', match: :first)
# Click `small_diff.md` title
all('.diff-toggle-caret')[3].click
end
it 'hides the diff content' do
expect(small_diff).not_to have_selector('.code')
expect(small_diff).to have_selector('.nothing-here-block')
end
context 're-expanding the same diff' do
before do
# Wait for diffs
find('.js-file-title', match: :first)
# Click `small_diff.md` title
all('.diff-toggle-caret')[3].click
end
it 'shows the diff content' do
expect(small_diff).to have_selector('.code')
expect(small_diff).not_to have_selector('.nothing-here-block')
end
it 'does not make a new HTTP request' do
expect(evaluate_script('ajaxUris')).not_to include(a_string_matching('small_diff.md'))
end
end
end
end
end
| 33.683544 | 131 | 0.649004 |
87474892684e82d1e0bdc8b8f5881c9edf8dc376 | 413 | module XRBP
module WebSocket
# MultiConnection strategy where connections are tried
# sequentially until one succeeds
class Prioritized < MultiConnection
def next_connection(prev=nil)
return nil if prev == connections.last
return super if prev.nil?
connections[connections.index(prev)+1]
end
end # class Prioritized
end # module WebSocket
end # module XRBP
| 29.5 | 58 | 0.7046 |
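# --- Hypothetical usage sketch (not part of the XRBP source above) ---
# It assumes a MultiConnection-like object exposing an ordered #connections
# array and mirrors the Prioritized#next_connection walk shown above; the
# FakeConn struct, the endpoint URLs and the loop are illustrative assumptions.
FakeConn = Struct.new(:url)

connections = [FakeConn.new("wss://s1.ripple.com"), FakeConn.new("wss://s2.ripple.com")]

next_connection = lambda do |prev|
  next connections.first if prev.nil?        # first attempt: highest-priority endpoint
  next nil if prev == connections.last       # every endpoint tried: give up
  connections[connections.index(prev) + 1]   # otherwise fall back to the next one
end

current = nil
while (current = next_connection.call(current))
  puts "trying #{current.url}"               # tries s1 first, then falls back to s2
end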
21b6ff652acc1c32d04cd92c99ddd6684704e058 | 1,316 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2018_06_01_preview
module Models
#
# Properties specific to the Backend Type.
#
class BackendProperties
include MsRestAzure
# @return [BackendServiceFabricClusterProperties] Backend Service Fabric
# Cluster Properties
attr_accessor :service_fabric_cluster
#
# Mapper for BackendProperties class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'BackendProperties',
type: {
name: 'Composite',
class_name: 'BackendProperties',
model_properties: {
service_fabric_cluster: {
client_side_validation: true,
required: false,
serialized_name: 'serviceFabricCluster',
type: {
name: 'Composite',
class_name: 'BackendServiceFabricClusterProperties'
}
}
}
}
}
end
end
end
end
| 26.857143 | 78 | 0.588906 |
f7ad1ae2e8d2cddf7b12e7b651b5b64b34c59770 | 3,479 | require "logstash/event"
require "insist"
describe LogStash::Event do
before :each do
@event = LogStash::Event.new
@event.timestamp = "2013-01-01T00:00:00.000Z"
@event.type = "sprintf"
@event.message = "hello world"
@event.tags = [ "tag1" ]
@event.source = "/home/foo"
end
subject { @event }
context "#sprintf" do
it "should report a time with %{+format} syntax" do
insist { @event.sprintf("%{+YYYY}") } == "2013"
insist { @event.sprintf("%{+MM}") } == "01"
insist { @event.sprintf("%{+HH}") } == "00"
end
it "should report fields with %{field} syntax" do
insist { @event.sprintf("%{@type}") } == "sprintf"
insist { @event.sprintf("%{@message}") } == subject["@message"]
end
end
context "#append" do
it "should append message with \\n" do
subject.append(LogStash::Event.new("@message" => "hello world"))
insist { subject.message } == "hello world\nhello world"
end
it "should concatenate tags" do
subject.append(LogStash::Event.new("@tags" => [ "tag2" ]))
insist { subject.tags } == [ "tag1", "tag2" ]
end
context "when event field is nil" do
it "should add single value as string" do
subject.append(LogStash::Event.new("@fields" => {"field1" => "append1"}))
insist { subject[ "field1" ] } == "append1"
end
it "should add multi values as array" do
subject.append(LogStash::Event.new("@fields" => {"field1" => [ "append1","append2" ]}))
insist { subject[ "field1" ] } == [ "append1","append2" ]
end
end
context "when event field is a string" do
before { subject[ "field1" ] = "original1" }
it "should append string to values, if different from current" do
subject.append(LogStash::Event.new("@fields" => {"field1" => "append1"}))
insist { subject[ "field1" ] } == [ "original1", "append1" ]
end
it "should not change value, if appended value is equal current" do
subject.append(LogStash::Event.new("@fields" => {"field1" => "original1"}))
insist { subject[ "field1" ] } == [ "original1" ]
end
it "should concatenate values in an array" do
subject.append(LogStash::Event.new("@fields" => {"field1" => [ "append1" ]}))
insist { subject[ "field1" ] } == [ "original1", "append1" ]
end
it "should join array, removing duplicates" do
subject.append(LogStash::Event.new("@fields" => {"field1" => [ "append1","original1" ]}))
insist { subject[ "field1" ] } == [ "original1", "append1" ]
end
end
context "when event field is an array" do
before { subject[ "field1" ] = [ "original1", "original2" ] }
it "should append string values to array, if not present in array" do
subject.append(LogStash::Event.new("@fields" => {"field1" => "append1"}))
insist { subject[ "field1" ] } == [ "original1", "original2", "append1" ]
end
it "should not append string values, if the array already contains it" do
subject.append(LogStash::Event.new("@fields" => {"field1" => "original1"}))
insist { subject[ "field1" ] } == [ "original1", "original2" ]
end
it "should join array, removing duplicates" do
subject.append(LogStash::Event.new("@fields" => {"field1" => [ "append1","original1" ]}))
insist { subject[ "field1" ] } == [ "original1", "original2", "append1" ]
end
end
end
end
| 39.089888 | 97 | 0.584363 |
3314baea64a62fac5620240085258ec2aa7f3a36 | 2,954 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
ruby_version_is "1.9" do
describe "Method#source_location" do
before(:each) do
@method = MethodSpecs::SourceLocation.method(:location)
end
it "returns nil for built-in methods" do
[].method(:size).source_location.should be_nil
end
it "returns an Array" do
@method.source_location.should be_an_instance_of(Array)
end
it "sets the first value to the path of the file in which the method was defined" do
file = @method.source_location.first
file.should be_an_instance_of(String)
file.should == File.dirname(__FILE__) + '/fixtures/classes.rb'
end
it "sets the last value to a Fixnum representing the line on which the method was defined" do
line = @method.source_location.last
line.should be_an_instance_of(Fixnum)
line.should == 5
end
it "returns the last place the method was defined" do
MethodSpecs::SourceLocation.method(:redefined).source_location.last.should == 13
end
it "returns the location of the original method even if it was aliased" do
MethodSpecs::SourceLocation.new.method(:aka).source_location.last.should == 17
end
it "works for methods defined with a block" do
line = nil
klass = Class.new do
line = __LINE__ + 1
define_method(:f) { }
end
method = klass.new.method(:f)
method.source_location[0].should =~ /#{__FILE__}/
method.source_location[1].should == line
end
it "works for methods defined with a Method" do
line = nil
klass = Class.new do
line = __LINE__ + 1
def f
end
define_method :g, new.method(:f)
end
method = klass.new.method(:g)
method.source_location[0].should =~ /#{__FILE__}/
method.source_location[1].should == line
end
it "works for methods defined with an UnboundMethod" do
line = nil
klass = Class.new do
line = __LINE__ + 1
def f
end
define_method :g, instance_method(:f)
end
method = klass.new.method(:g)
method.source_location[0].should =~ /#{__FILE__}/
method.source_location[1].should == line
end
it "works for methods whose visibility has been overridden in a subclass" do
line = nil
superclass = Class.new do
line = __LINE__ + 1
def f
end
end
subclass = Class.new(superclass) do
private :f
end
method = subclass.new.method(:f)
method.source_location[0].should =~ /#{__FILE__}/
method.source_location[1].should == line
end
describe "for a Method generated by respond_to_missing?" do
it "it returns nil" do
m = MethodSpecs::Methods.new
m.method(:handled_via_method_missing).source_location.should be_nil
end
end
end
end
| 28.960784 | 97 | 0.64218 |
abfce78c6d6e3e74b1a984f80d8c0e4bf31eadb4 | 374 | cask 'healthi' do
version '3.1.1'
sha256 '03b4187a432fb8f7d24c0286403f75f243fa1653eda3362fe459ea4d54229f15'
url "https://github.com/pablopunk/healthi-app/releases/download/#{version}/healthi.app.zip"
appcast 'https://github.com/pablopunk/healthi-app/releases.atom'
name 'healthi'
homepage 'https://github.com/pablopunk/healthi-app'
app 'healthi-app.app'
end
| 31.166667 | 93 | 0.770053 |
ff61a80a2e69034206fb04b36ece847f22b9ab58 | 170 | FactoryBot.define do
klass = Struct.new(:stock_available)
factory :stock_level, class: klass do
stock_available { Faker::Number.between(1, 1000).to_i }
end
end
| 24.285714 | 59 | 0.735294 |
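# --- Hypothetical usage sketch (assumes factory_bot and faker are loaded and
# the :stock_level factory above has been registered) ---
stock = FactoryBot.build(:stock_level)
stock.stock_available # => an Integer between 1 and 1000, e.g. 537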
03adfef695b5f6040e73ba21db6bbb09e65d8ed5 | 420 | require 'bundler'
begin
Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
$stderr.puts e.message
$stderr.puts "Run `bundle install` to install missing gems"
exit e.status_code
end
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'fun_with_testing'
class FunWith::Testing::MyTestCase < FunWith::Testing::TestCase
end
| 22.105263 | 66 | 0.752381 |
f73a0b9c147e7a304131c0de41d5e744b04d3ffa | 286 | # frozen_string_literal: true
# == Schema Information
#
# Table name: pages_tags
#
# id :integer not null, primary key
# page_id :integer
# tag_id :integer
#
class PagesTag < ApplicationRecord
belongs_to :tag, optional: true
belongs_to :page, optional: true
end
| 17.875 | 50 | 0.692308 |
1d47c708f4000e9c92a6a3f87435c22cc8fcab7e | 581 | cask 'trash-it' do
if MacOS.version <= :snow_leopard
version '5.2'
sha256 '2428fcf10f8641488e1a43f8d64dfa7256129aab9813c1f841c72bf75f11c821'
url "http://nonamescriptware.com/wp-content/uploads/TrashIt#{version.no_dots}.zip"
app "TrashIt #{version}/Drag content to Desktop/Trash It!.app"
else
version :latest
sha256 :no_check
url 'http://nonamescriptware.com/wp-content/uploads/Trashit.zip'
app "Trash It! #{version}/Drag content to Desktop/Trash It!.app"
end
name 'Trash It!'
homepage 'http://nonamescriptware.com/'
license :gratis
end
| 32.277778 | 86 | 0.726334 |
013a241447b87eeee397aebbb722f7dd43f06369 | 2,852 | class LessonsController < ApplicationController
before_action :set_lesson, only: [:show, :edit, :update, :destroy, :delete_video]
def delete_video
authorize @lesson, :edit?
@lesson.video.purge
@lesson.video_thumbnail.purge
redirect_to edit_course_lesson_path(@course, @lesson), notice: 'Video successfully deleted!'
end
def sort
@course = Course.friendly.find(params[:course_id])
lesson = Lesson.friendly.find(params[:lesson_id])
authorize lesson, :edit?
lesson.update(lesson_params)
render body: nil
end
# GET /lessons
# GET /lessons.json
def index
@lessons = Lesson.all
end
# GET /lessons/1
# GET /lessons/1.json
def show
authorize @lesson
current_user.view_lesson(@lesson)
@lessons = @course.lessons.rank(:row_order)
@comment = Comment.new
@comments = @lesson.comments.order(created_at: :desc)
end
# GET /lessons/new
def new
@lesson = Lesson.new
@course = Course.friendly.find(params[:course_id])
end
# GET /lessons/1/edit
def edit
authorize @lesson
end
# POST /lessons
# POST /lessons.json
def create
@lesson = Lesson.new(lesson_params)
@course = Course.friendly.find(params[:course_id])
@lesson.course_id = @course.id
authorize @lesson
respond_to do |format|
if @lesson.save
format.html { redirect_to course_lesson_path(@course, @lesson), notice: 'Lesson was successfully created.' }
format.json { render :show, status: :created, location: @lesson }
else
format.html { render :new }
format.json { render json: @lesson.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /lessons/1
# PATCH/PUT /lessons/1.json
def update
authorize @lesson
respond_to do |format|
if @lesson.update(lesson_params)
format.html { redirect_to course_lesson_path(@course, @lesson), notice: 'Lesson was successfully updated.' }
format.json { render :show, status: :ok, location: @lesson }
else
format.html { render :edit }
format.json { render json: @lesson.errors, status: :unprocessable_entity }
end
end
end
# DELETE /lessons/1
# DELETE /lessons/1.json
def destroy
authorize @lesson
@lesson.destroy
respond_to do |format|
format.html { redirect_to course_path(@course), notice: 'Lesson was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_lesson
@course = Course.friendly.find(params[:course_id])
@lesson = Lesson.friendly.find(params[:id])
end
# Only allow a list of trusted parameters through.
def lesson_params
params.require(:lesson).permit(:title, :content, :row_order_position, :video, :video_thumbnail)
end
end | 27.960784 | 116 | 0.678822 |
392dc940510903a94d4a6e48f3abf584bfda87e7 | 1,670 | module Leveret
# Handles the acknowledgement or rejection of messages after execution
class ResultHandler
extend Forwardable
attr_accessor :incoming_message
def_delegators :Leveret, :log, :delay_queue
# @param [Message] incoming_message Contains delivery information such as the delivery_tag
def initialize(incoming_message)
self.incoming_message = incoming_message
end
# Call the appropriate handling method for the result
#
# @param [Symbol] result Result returned from running the job, one of +:success+, +:reject+, +:requeue+ or +:delay+
def handle(result)
log.info "[#{delivery_tag}] Job returned #{result}"
send(result) if [:success, :reject, :requeue, :delay].include?(result)
end
# Mark the message as acknowledged
def success
log.debug "[#{delivery_tag}] Acknowledging message"
channel.acknowledge(delivery_tag)
end
# Mark the message as rejected (failure)
def reject
log.debug "[#{delivery_tag}] Rejecting message"
channel.reject(delivery_tag)
end
# Reject the message and reinsert it onto it's queue
def requeue
log.debug "[#{delivery_tag}] Requeueing message"
channel.reject(delivery_tag, true)
end
# Acknowledge the message, but publish it onto the delay queue for execution later
def delay
log.debug "[#{delivery_tag}] Delaying message"
channel.acknowledge(delivery_tag)
delay_queue.republish(incoming_message)
end
private
def channel
incoming_message.delivery_info.channel
end
def delivery_tag
incoming_message.delivery_info.delivery_tag
end
end
end
| 28.305085 | 109 | 0.702994 |
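# --- Hypothetical usage sketch (not part of the gem source above) ---
# It assumes the leveret gem is loaded and configured (Leveret.log and
# Leveret.delay_queue exist); FakeChannel/FakeDeliveryInfo/FakeMessage are
# illustrative stand-ins for the AMQP objects the handler expects, i.e.
# something responding to delivery_info.channel and delivery_info.delivery_tag.
FakeChannel = Struct.new(:last_ack) do
  def acknowledge(tag, *_) ; self.last_ack = tag ; end
  def reject(tag, requeue = false) ; end
end
FakeDeliveryInfo = Struct.new(:channel, :delivery_tag)
FakeMessage      = Struct.new(:delivery_info)

message = FakeMessage.new(FakeDeliveryInfo.new(FakeChannel.new, "tag-1"))
handler = Leveret::ResultHandler.new(message)

handler.handle(:success)  # acks the message
handler.handle(:reject)   # rejects it outright
handler.handle(:requeue)  # rejects it back onto its queue
handler.handle(:other)    # results outside the known set are ignored by #handle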
6abc635302a39e047d8b10c25ac1ef1dae45790f | 851 | require "#{File.join(File.dirname(__FILE__),'..','spec_helper.rb')}"
describe 'ssmtp::revalias' do
let(:title) { 'ssmtp::revalias' }
let(:node) { 'rspec.example42.com' }
let(:facts) { { :ipaddress => '10.42.42.42',
:revaliases_template => 'ssmtp/revaliases.erb',
:revaliases_file => '/etc/ssmtp/revaliases',
:concat_basedir => '/var/lib/puppet/concat'} }
describe 'Test basic revaliases is created' do
let(:params) { { :name => 'sample1',
:from => '[email protected]',
:mailhub => 'some_mail_relay',
:enable => true, } }
it { should contain_concat__fragment('ssmtp_revaliases_sample1').with_target('/etc/ssmtp/revaliases').with_content("sample1:[email protected]:some_mail_relay\n") }
end
end
| 38.681818 | 169 | 0.587544 |
080a3bf29cc3cffa28a03a50fda186e7aca5adda | 1,143 | require 'veewee/config/component'
require 'veewee/definition'
require 'ostruct'
module Veewee
class Config
class Definition
attr_accessor :components
attr_reader :env
def initialize(config)
@env=config.env
@components=Hash.new
end
# Currently not used, this is in case we will specify the a definition in the Veeweefile
# This is for future needs
def define(name)
# Depending on type, we create a variable of that type
definition_stub=OpenStruct.new
begin
# Get a real definition object
real_definition=::Veewee::Definition.new(name,env)
rescue Error => e
env.ui.error("Error loading provider with #{name},#{$!}",:prefix => false)
end
definition_stub.definition=real_definition
env.logger.debug("config definition"){ "Start defining definition"}
yield definition_stub
env.logger.debug("config definition"){ "End defining definition #{definition_stub.definition.name}"}
components[name.to_s]=definition_stub.definition
end
end
end
end #Module Veewee
| 25.4 | 108 | 0.663167 |
7961637aa58258146c02f92dc92e64a5bcf85e1f | 5,115 | require_relative 'test_helper'
describe "Set Command" do
include TestDsl
describe "setting to on" do
temporary_change_hash_value(Debugger::Command.settings, :autolist, 0)
it "must set a setting to on" do
enter 'set autolist on'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 1
end
it "must set a setting to on by 1" do
enter 'set autolist 1'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 1
end
it "must set a setting to on by default" do
enter 'set autolist'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 1
end
it "must set a setting using shortcut" do
enter 'set autol'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 1
end
end
describe "setting to off" do
temporary_change_hash_value(Debugger::Command.settings, :autolist, 1)
it "must set a setting to off" do
enter 'set autolist off'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 0
end
it "must set a setting to off by 0" do
enter 'set autolist 0'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 0
end
it "must set a setting to off by 'no' suffix" do
enter 'set noautolist'
debug_file 'set'
Debugger::Command.settings[:autolist].must_equal 0
end
end
describe "messages" do
temporary_change_hash_value(Debugger::Command.settings, :autolist, 0)
it "must show a message after setting" do
enter 'set autolist on'
debug_file 'set'
check_output_includes "autolist is on"
end
end
describe "debuggertesting" do
temporary_change_hash_value(Debugger::Command.settings, :debuggertesting, false)
before { $rdebug_state = nil }
after { $rdebug_state = nil }
it "must set $rdebug_context if debuggersetting is on" do
enter 'set debuggertesting', 'break 3', 'cont'
debug_file('set') { state.must_be_kind_of Debugger::CommandProcessor::State }
end
it "must set basename on too" do
temporary_change_hash_value(Debugger::Command.settings, :basename, false) do
enter 'set debuggertesting', 'show basename'
debug_file('set')
check_output_includes "basename is on"
end
end
it "must not set $rdebug_context if debuggersetting is off" do
enter 'set nodebuggertesting', 'break 3', 'cont'
debug_file('set') { state.must_be_nil }
end
end
describe "history" do
describe "save" do
it "must set history save to on" do
enter 'set history save on'
debug_file 'set'
interface.history_save.must_equal true
end
it "must show a message" do
enter 'set history save on'
debug_file 'set'
check_output_includes "Saving of history save is on"
end
it "must set history save to off" do
enter 'set history save off'
debug_file 'set'
interface.history_save.must_equal false
end
end
describe "size" do
it "must set history size" do
enter 'set history size 250'
debug_file 'set'
interface.history_length.must_equal 250
end
it "must show a message" do
enter 'set history size 250'
debug_file 'set'
check_output_includes "Debugger history size is 250"
end
end
describe "filename" do
it "must set history filename" do
enter 'set history filename .debugger-hist'
debug_file 'set'
interface.histfile.must_equal File.join(ENV["HOME"] || ENV["HOMEPATH"] || ".", '.debugger-hist')
end
it "must show a message" do
enter 'set history filename .debugger-hist'
debug_file 'set'
history_filename = File.join(ENV["HOME"] || ENV["HOMEPATH"] || ".", ".debugger-hist")
check_output_includes "The filename in which to record the command history is '#{history_filename}'"
end
end
it "must show an error message if used wrong subcommand" do
enter 'set history bla 2'
debug_file 'set'
check_output_includes "Invalid history parameter bla. Should be 'filename', 'save' or 'size'"
end
it "must show an error message if provided only one argument" do
enter 'set history save'
debug_file 'set'
check_output_includes "Need two parameters for 'set history'; got 1"
end
end
describe "width" do
temporary_change_hash_value(Debugger::Command.settings, :width, 20)
it "must set ENV['COLUMNS'] by the 'set width' command" do
old_columns = ENV["COLUMNS"]
begin
enter 'set width 10'
debug_file 'set'
ENV["COLUMNS"].must_equal '10'
ensure
ENV["COLUMNS"] = old_columns
end
end
end
describe "Post Mortem" do
temporary_change_hash_value(Debugger::Command.settings, :autolist, 0)
it "must work in post-mortem mode" do
enter 'cont', "set autolist on"
debug_file 'post_mortem'
check_output_includes "autolist is on"
end
end
end
| 28.575419 | 108 | 0.653177 |
fffe85480cab8536f640bd4b47cc9b204b0fa588 | 321 | TensorStream::OpMaker.define_operation :sigmoid do |op|
op.what_it_does "Computes sigmoid of x element-wise."
op.parameter :input_a, "tensor X", validate: 'FLOATING_POINT_TYPES'
op.option :name, "Optional name", :nil
op.define_gradient do |grad, _node, params|
i_op(:sigmoid_grad, params[0], grad)
end
end | 32.1 | 69 | 0.735202 |
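# --- Illustrative check of the gradient rule encoded above (plain Ruby, no
# TensorStream needed): a sigmoid_grad kernel is expected to compute
# sigmoid(x) * (1 - sigmoid(x)) elementwise, which the define_gradient block
# above presumably combines with the incoming grad (chain rule). ---
sigmoid      = ->(x) { 1.0 / (1.0 + Math.exp(-x)) }
sigmoid_grad = ->(x) { s = sigmoid.call(x); s * (1.0 - s) }

x = 0.7
numeric = (sigmoid.call(x + 1e-6) - sigmoid.call(x - 1e-6)) / 2e-6
(numeric - sigmoid_grad.call(x)).abs < 1e-8 # => true (finite difference agrees)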
5dc4bb902d5c500d33c35034e2b640f560b8143b | 10,248 | require 'spec_helper'
module RubySpeech
describe SSML do
describe "#draw" do
let(:doc) { Nokogiri::XML::Document.new }
it "should create an SSML document" do
expected_doc = SSML::Speak.new doc
SSML.draw.should == expected_doc
SSML.draw.document.xpath('ns:speak', ns: 'http://www.w3.org/2001/10/synthesis').size.should == 1
end
it "can draw with a language" do
expected_doc = SSML::Speak.new doc, language: 'pt-BR'
SSML.draw(language: 'pt-BR').should == expected_doc
end
describe "when the return value of the block is a string" do
it "should be inserted into the document" do
expected_doc = SSML::Speak.new(doc, :content => "Hi, I'm Fred")
SSML.draw { "Hi, I'm Fred" }.should == expected_doc
end
end
describe "when the return value of the block is not a string" do
it "should not be inserted into the document" do
expected_doc = SSML::Speak.new doc
SSML.draw { :foo }.should == expected_doc
end
end
describe "when inserting a string" do
it "should work" do
expected_doc = SSML::Speak.new(doc, :content => "Hi, I'm Fred")
SSML.draw { string "Hi, I'm Fred" }.should == expected_doc
end
end
it "should allow other SSML elements to be inserted in the document" do
expected_doc = SSML::Speak.new doc
expected_doc << SSML::Voice.new(doc, :gender => :male, :name => 'fred')
SSML.draw { voice :gender => :male, :name => 'fred' }.should == expected_doc
end
it "should allow nested block return values" do
expected_doc = SSML::Speak.new doc
expected_doc << SSML::Voice.new(doc, :gender => :male, :name => 'fred', :content => "Hi, I'm Fred.")
doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
"Hi, I'm Fred."
end
end
doc.should == expected_doc
end
it "should allow nested SSML elements" do
drawn_doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
string "Hi, I'm Fred. The time is currently "
say_as :interpret_as => 'date', :format => 'dmy' do
"01/02/1960"
end
end
end
voice = SSML::Voice.new(doc, :gender => :male, :name => 'fred', :content => "Hi, I'm Fred. The time is currently ")
voice << SSML::SayAs.new(doc, :interpret_as => 'date', :format => 'dmy', :content => "01/02/1960")
expected_doc = SSML::Speak.new doc
expected_doc << voice
drawn_doc.should == expected_doc
end
it "should allow accessing methods defined outside the block" do
def foo
'bar'
end
expected_doc = SSML::Speak.new doc, :content => foo
SSML.draw { string foo }.should == expected_doc
end
describe 'cloning' do
context 'SSML documents' do
let :original do
RubySpeech::SSML.draw do
string "Hi, I'm Fred."
end
end
subject { original.clone }
it 'should match the contents of the original document' do
subject.to_s.should == original.to_s
end
it 'should match the class of the original document' do
subject.class.should == original.class
end
it 'should be equal to the original document' do
subject.should == original
end
end
end
describe "embedding" do
it "SSML documents" do
doc1 = RubySpeech::SSML.draw do
string "Hi, I'm Fred. The time is currently "
say_as :interpret_as => 'date', :format => 'dmy' do
"01/02/1960"
end
end
doc2 = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
embed doc1
end
end
expected_doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
string "Hi, I'm Fred. The time is currently "
say_as :interpret_as => 'date', :format => 'dmy' do
"01/02/1960"
end
end
end
doc2.should == expected_doc
end
it "SSML elements" do
element = SSML::Emphasis.new(doc, :content => "HELLO?")
doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
embed element
end
end
expected_doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
emphasis do
"HELLO?"
end
end
end
doc.should == expected_doc
end
it "strings" do
string = "How now, brown cow?"
doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
embed string
end
end
expected_doc = RubySpeech::SSML.draw do
voice :gender => :male, :name => 'fred' do
string "How now, brown cow?"
end
end
doc.should == expected_doc
end
end
it "should properly escape string input" do
drawn_doc = RubySpeech::SSML.draw do
voice { string "I <3 nachos." }
voice { "I <3 nachos." }
end
expected_doc = SSML::Speak.new doc
2.times do
expected_doc << SSML::Voice.new(doc, :native_content => "I <3 nachos.")
end
drawn_doc.should == expected_doc
end
it "should allow all permutations of possible nested SSML elements" do
drawn_doc = RubySpeech::SSML.draw do
string "Hello world."
ssml_break
audio :src => "hello" do
string "HELLO?"
ssml_break
audio :src => "hello"
emphasis
prosody
say_as :interpret_as => 'date'
voice
end
emphasis do
string "HELLO?"
ssml_break
audio :src => "hello"
emphasis
prosody
say_as :interpret_as => 'date'
voice
end
prosody :rate => :slow do
string "H...E...L...L...O?"
ssml_break
audio :src => "hello"
emphasis
prosody
say_as :interpret_as => 'date'
voice
end
say_as :interpret_as => 'date', :format => 'dmy' do
"01/02/1960"
end
voice :gender => :male, :name => 'fred' do
string "Hi, I'm Fred. The time is currently "
say_as :interpret_as => 'date', :format => 'dmy' do
"01/02/1960"
end
ssml_break
audio :src => "hello"
emphasis do
"I'm so old"
end
prosody :rate => :fast do
"And yet so spritely!"
end
voice :age => 12 do
"And I'm young Fred"
end
end
end
expected_doc = SSML::Speak.new(doc, :content => "Hello world.")
expected_doc << SSML::Break.new(doc)
audio = SSML::Audio.new(doc, :src => "hello", :content => "HELLO?")
audio << SSML::Break.new(doc)
audio << SSML::Audio.new(doc, :src => "hello")
audio << SSML::Emphasis.new(doc)
audio << SSML::Prosody.new(doc)
audio << SSML::SayAs.new(doc, :interpret_as => 'date')
audio << SSML::Voice.new(doc)
expected_doc << audio
emphasis = SSML::Emphasis.new(doc, :content => "HELLO?")
emphasis << SSML::Break.new(doc)
emphasis << SSML::Audio.new(doc, :src => "hello")
emphasis << SSML::Emphasis.new(doc)
emphasis << SSML::Prosody.new(doc)
emphasis << SSML::SayAs.new(doc, :interpret_as => 'date')
emphasis << SSML::Voice.new(doc)
expected_doc << emphasis
prosody = SSML::Prosody.new(doc, :rate => :slow, :content => "H...E...L...L...O?")
prosody << SSML::Break.new(doc)
prosody << SSML::Audio.new(doc, :src => "hello")
prosody << SSML::Emphasis.new(doc)
prosody << SSML::Prosody.new(doc)
prosody << SSML::SayAs.new(doc, :interpret_as => 'date')
prosody << SSML::Voice.new(doc)
expected_doc << prosody
expected_doc << SSML::SayAs.new(doc, :interpret_as => 'date', :format => 'dmy', :content => "01/02/1960")
voice = SSML::Voice.new(doc, :gender => :male, :name => 'fred', :content => "Hi, I'm Fred. The time is currently ")
voice << SSML::SayAs.new(doc, :interpret_as => 'date', :format => 'dmy', :content => "01/02/1960")
voice << SSML::Break.new(doc)
voice << SSML::Audio.new(doc, :src => "hello")
voice << SSML::Emphasis.new(doc, :content => "I'm so old")
voice << SSML::Prosody.new(doc, :rate => :fast, :content => "And yet so spritely!")
voice << SSML::Voice.new(doc, :age => 12, :content => "And I'm young Fred")
expected_doc << voice
drawn_doc.should == expected_doc
end
end
describe "importing nested tags" do
let(:doc) { Nokogiri::XML::Document.new }
let(:say_as) { SSML::SayAs.new doc, :interpret_as => 'date', :format => 'dmy', :content => "01/02/1960" }
let(:string) { "Hi, I'm Fred. The time is currently " }
let :voice do
SSML::Voice.new(doc, :gender => :male, :name => 'fred', :content => string).tap do |voice|
voice << say_as
end
end
let :document do
SSML::Speak.new(doc).tap { |speak| speak << voice }.to_s
end
let(:import) { SSML.import document }
subject { import }
it "should work" do
lambda { subject }.should_not raise_error
end
it { should be_a SSML::Speak }
its(:children) { should == [voice] }
describe "voice" do
subject { import.children.first }
its(:children) { should == [string, say_as] }
end
end
end
end
| 32.951768 | 123 | 0.523322 |
384b01bf8a2df1ef01371f95f3995388180c6574 | 1,674 | require 'rails_helper'
RSpec.describe 'User signup API', type: :request do
describe 'POST /api/signup' do
context 'Valid request' do
let(:valid_user_attributes) {
{
user: {
username: Faker::Internet.user_name,
email: Faker::Internet.email,
password: "password",
password_confirmation: "password"
}
}
}
before {post '/api/signup', params: valid_user_attributes}
it 'returns a status code of 201' do
expect(response).to have_http_status(201)
end
it 'creates a user and returns it in JSON' do
json = JSON.parse(response.body, symbolize_names: true)
expect(json).not_to be_empty
expect(json[:username]).to eq(valid_user_attributes[:user][:username])
expect(json[:password_digest]).not_to eq(nil)
end
end
context 'Invalid request' do
let(:invalid_user_attributes) {
{
user: {
username: "",
email: "",
password: "",
password_confirmation: ""
}
}
}
before {post '/api/signup', params: invalid_user_attributes}
it "returns status code of 422" do
expect(response).to have_http_status(422)
end
it "returns validation errors in json" do
json = JSON.parse(response.body, symbolize_names: true)
expect(json[:errors][:messages]).to eq(
{:password=>["can't be blank"],
:username=>["can't be blank"],
:email=>["can't be blank"],
:password_confirmation=>["can't be blank"]
})
end
end
end
end
| 27.9 | 78 | 0.566308 |
39a08b255aab1857511fb58f6b08321857b7c173 | 326 | class CreatePhLabels < ActiveRecord::Migration[4.2]
def change
create_table :ph_labels do |t|
t.references :piece_head, index: true, foreign_key: true, null: false
t.references :company, index: true, foreign_key: true, null: false
t.string :catalog_no
t.timestamps null: false
end
end
end
| 27.166667 | 75 | 0.693252 |
01f3f2cb561e4bb4f49ebddfa8f28bcfd7e934a7 | 822 | require 'test_helper'
class MicropostsControllerTest < ActionDispatch::IntegrationTest
def setup
@micropost = microposts(:orange)
end
test "should redirect create when not logged in" do
assert_no_difference 'Micropost.count' do
post microposts_path, params: { micropost: { content: "lorem ipsum" } }
end
assert_redirected_to login_url
end
test "should redirect destroy when not logged in" do
assert_no_difference 'Micropost.count' do
delete micropost_path(@micropost)
end
assert_redirected_to login_url
end
test "should redirect destroy for wrong micropost" do
log_in_as(users(:michael))
micropost = microposts(:ants)
assert_no_difference 'Micropost.count' do
delete micropost_path(micropost)
end
assert_redirected_to root_url
end
end
| 25.6875 | 77 | 0.738443 |
ff802329da7dfd5e6a84bb395806aff6ac78d670 | 2,460 | class Seexpr < Formula
desc "Embeddable expression evaluation engine"
homepage "https://wdas.github.io/SeExpr/"
url "https://github.com/wdas/SeExpr/archive/v3.0.1.tar.gz"
sha256 "1e4cd35e6d63bd3443e1bffe723dbae91334c2c94a84cc590ea8f1886f96f84e"
license "Apache-2.0"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
cellar :any
sha256 "8045ec68c468b5db8a118006756e34c54425bbcc3e29306c16a52e86ccefdbb7" => :big_sur
sha256 "2a55400ad86255b300843f7cde1dbed4130d0ba26ffc4c8725fec83b50e7f9e3" => :catalina
sha256 "e5ba2fcca24837fc43d11524fdeff04d9f4429f6c66421dec6c1925b60893f82" => :mojave
sha256 "b5a3d64c08f692d25d3eb12dd9409c414939303b0b9f19396c95a13d07b46fa9" => :high_sierra
end
depends_on "cmake" => :build
depends_on "doxygen" => :build
depends_on "libpng"
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args, "-DUSE_PYTHON=FALSE"
system "make", "doc"
system "make", "install"
end
end
test do
actual_output = shell_output("#{bin}/asciigraph2").lines.map(&:rstrip).join("\n")
expected_output = <<~EOS
|
|
|
|
###
# |#
## |##
# | #
## | ##
# | #
## | ##
# | #
# | ##
#### # | # ####
#######-----##--###-----#-----|----##-----##--###-----######
###### ## # | # # ######
## ## | ## ##
### | ###
|
|
|
|
|
|
|
|
|
|
|
|
EOS
assert_equal actual_output, expected_output.rstrip
end
end
| 34.647887 | 93 | 0.355691 |
337c28d12ad27056d969f899b8ea15406a686d0f | 1,230 | require "language/node"
class FirebaseCli < Formula
desc "Firebase command-line tools"
homepage "https://firebase.google.com/docs/cli/"
url "https://registry.npmjs.org/firebase-tools/-/firebase-tools-3.18.2.tgz"
sha256 "58942556c82c600cfa1dee340ae221550e01c8a968352c6a49eeb015ee150b1a"
head "https://github.com/firebase/firebase-tools.git"
bottle do
cellar :any_skip_relocation
sha256 "1657b6f67c61cd1847dcc0201644dc4df6d1e79280d05184bcf81c008f70f7f1" => :high_sierra
sha256 "93db5dccf277e005c0d54e535e2ac48fe03f009c9fb6088e880dc005c6b3d16c" => :sierra
sha256 "3fa05141d3be1d2dcc932f3644f34ca08e18580339ee5f1049cc41e75555adab" => :el_capitan
sha256 "d925b36030c14e417724f8c034aacfc1318d14ea8c34278f08f73fa5ba8c8c4f" => :x86_64_linux
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
# The following test requires /usr/bin/expect.
return unless OS.mac?
(testpath/"test.exp").write <<~EOS
spawn #{bin}/firebase login:ci --no-localhost
expect "Paste"
EOS
assert_match "authorization code", shell_output("expect -f test.exp")
end
end
| 35.142857 | 94 | 0.760163 |
d5b00cd60aeba0f14ad918ec8c57cd9d9f0b7254 | 143 | class AddArchivedOntoCurrentlyInked < ActiveRecord::Migration[5.1]
def change
add_column :currently_inked, :archived_on, :date
end
end
| 23.833333 | 66 | 0.783217 |
915112222d71370ef1b8a8406c781d5952f3dcd3 | 336 | # See http://doc.doggohub.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for DoggoHub.
class AddIndexOnRunnersLocked < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
def change
add_concurrent_index :ci_runners, :locked
end
end
| 25.846154 | 71 | 0.803571 |
398d4db6d50fa6a7fc3f95abe5f83a9154eae579 | 1,483 | require 'spec_helper'
describe RSpec::Mocks::AnyInstance::MessageChains do
let(:chains) { RSpec::Mocks::AnyInstance::MessageChains.new }
let(:stub_chain) { RSpec::Mocks::AnyInstance::StubChain.new }
let(:expectation_chain) { RSpec::Mocks::AnyInstance::PositiveExpectationChain.new }
it "knows if a method does not have an expectation set on it" do
chains.add(:method_name, stub_chain)
expect(chains.has_expectation?(:method_name)).to be_false
end
it "knows if a method has an expectation set on it" do
chains.add(:method_name, stub_chain)
chains.add(:method_name, expectation_chain)
expect(chains.has_expectation?(:method_name)).to be_true
end
it "can remove all stub chains" do
chains.add(:method_name, stub_chain)
chains.add(:method_name, expectation_chain)
chains.add(:method_name, RSpec::Mocks::AnyInstance::StubChain.new)
chains.remove_stub_chains_for!(:method_name)
expect(chains[:method_name]).to eq([expectation_chain])
end
context "creating stub chains" do
it "understands how to add a stub chain for a method" do
chains.add(:method_name, stub_chain)
expect(chains[:method_name]).to eq([stub_chain])
end
it "allows multiple stub chains for a method" do
chains.add(:method_name, stub_chain)
chains.add(:method_name, another_stub_chain = RSpec::Mocks::AnyInstance::StubChain.new)
expect(chains[:method_name]).to eq([stub_chain, another_stub_chain])
end
end
end
| 36.170732 | 93 | 0.731625 |
39a57055c17e2fe3f72ee0c0258b04e2113efe34 | 71 | module Jekyll
module Archives
VERSION = "2.1.0".freeze
end
end
| 11.833333 | 28 | 0.676056 |
7a06596b35eb33a3344c056daa6c02a1095a6a48 | 1,016 | module VzaarApi
class Video
class Subtitle < Abstract
ENDPOINT = Proc.new do |video_id, subtitle_id|
File.join 'videos', video_id.to_s, 'subtitles', subtitle_id.to_s
end
ATTR_READERS = [:id,
:code,
:title,
:language,
:created_at,
:updated_at
].freeze
ATTR_ACCESSORS = [:code].freeze
prepend Lib::HasAttributes
include Lib::HasResourceUrl
include Lib::ActiveObject::Create
include Lib::ActiveObject::Save
include Lib::ActiveObject::Delete
include Lib::WillPaginate
def initialize(attrs = {})
@scope_id = attrs.delete(:scope_id)
end
def self.paginate(video_id, query = {})
super query.merge(scope_id: video_id)
end
def self.create(video_id, attrs = {})
VzaarApi::Strategy::Subtitle::Create.new(video_id, attrs, self).execute
end
end
end
end
| 26.051282 | 79 | 0.562008 |
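# --- Hypothetical usage sketch (assumes the vzaar_api gem is loaded and API
# credentials are configured); the attribute passed to .create is the :code
# accessor declared above, everything else here is illustrative only. ---
VzaarApi::Video::Subtitle::ENDPOINT.call(42, 7) # => "videos/42/subtitles/7"

# subtitles = VzaarApi::Video::Subtitle.paginate(42)           # subtitles scoped to video 42
# subtitle  = VzaarApi::Video::Subtitle.create(42, code: 'en')  # create under video 42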
f7d63863dc9583832d070b22506570b78766e94d | 66 |
module FifaRankings
end
require_relative '../config/environment' | 13.2 | 40 | 0.818182 |