hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
class Arangodb < Formula
  desc "The Multi-Model NoSQL Database"
  homepage "https://www.arangodb.com/"
  url "https://download.arangodb.com/Source/ArangoDB-3.3.12.tar.gz"
  sha256 "c2df4a898b047e90434a1ca1523690dde167d157bee1e10a8b76044fe77f21c5"
  head "https://github.com/arangodb/arangodb.git", branch: "unstable"

  bottle do
    sha256 "1722c64e0e0ce2fa21a59bdf3401ad5a7f727090ff460c8fea3dbe820641cd34" => :high_sierra
    sha256 "d267b875c1a1dd6cadddf677c3df087584d8064632332a6d23a1db2f5c3c5566" => :sierra
    sha256 "31bb9b8dffd1e77e454c4b7657168454c42c3cf74155bf7824d48f9a7139c39a" => :el_capitan
  end

  depends_on macos: :yosemite
  depends_on "cmake" => :build
  depends_on "go" => :build
  depends_on "openssl"

  needs :cxx11

  fails_with :clang do
    build 600
    cause "Fails with compile errors"
  end

  def install
    ENV.cxx11

    mkdir "build" do
      # Homebrew-friendly install locations; CPU-specific tuning is disabled
      # so the resulting bottle is portable across machines.
      cmake_flags = std_cmake_args + %W[
        -DHOMEBREW=ON
        -DUSE_OPTIMIZE_FOR_ARCHITECTURE=OFF
        -DASM_OPTIMIZATIONS=OFF
        -DCMAKE_INSTALL_DATADIR=#{share}
        -DCMAKE_INSTALL_DATAROOTDIR=#{share}
        -DCMAKE_INSTALL_SYSCONFDIR=#{etc}
        -DCMAKE_INSTALL_LOCALSTATEDIR=#{var}
      ]

      if ENV.compiler == "gcc-6"
        ENV.append "V8_CXXFLAGS", "-O3 -g -fno-delete-null-pointer-checks"
      end

      system "cmake", "..", *cmake_flags
      system "make", "install"

      # Point the generated config files at the opt prefix so they keep
      # working across version upgrades.
      %w[arangod arango-dfdb arangosh foxx-manager].each do |config|
        inreplace etc/"arangodb3/#{config}.conf", pkgshare, opt_pkgshare
      end
    end
  end

  def post_install
    (var/"lib/arangodb3").mkpath
    (var/"log/arangodb3").mkpath
  end

  def caveats
    <<~EOS
      An empty password has been set. Please change it by executing
      #{opt_sbin}/arango-secure-installation
    EOS
  end

  plist_options manual: "#{HOMEBREW_PREFIX}/opt/arangodb/sbin/arangod"

  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
      <key>KeepAlive</key>
      <true/>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>Program</key>
      <string>#{opt_sbin}/arangod</string>
      <key>RunAtLoad</key>
      <true/>
      </dict>
      </plist>
    EOS
  end

  test do
    testcase = "require('@arangodb').print('it works!')"
    output = shell_output("#{bin}/arangosh --server.password \"\" --javascript.execute-string \"#{testcase}\"")
    assert_equal "it works!", output.chomp
  end
end
| 27.709677 | 111 | 0.655413 |
# encoding: UTF-8
# frozen_string_literal: true

module API
  module V2
    module Entities
      # Grape entity describing a trading market for the public v2 API.
      # Only exposes documentation-annotated attributes; serialization
      # behavior is inherited from Base.
      class Market < Base
        expose(
          :id,
          documentation: {
            type: String,
            # Fixed: the original line continuations concatenated without
            # separating spaces, producing "xxxyyy,where", "quotecurrency"
            # and "canbe" in the rendered API docs.
            desc: "Unique market id. It's always in the form of xxxyyy, "\
                  "where xxx is the base currency code, yyy is the quote "\
                  "currency code, e.g. 'btcusd'. All available markets can "\
                  "be found at /api/v2/markets."
          }
        )

        expose(
          :name,
          documentation: {
            type: String,
            desc: 'Market name.'
          }
        )

        expose(
          :base_unit,
          documentation: {
            type: String,
            desc: 'Market base unit.'
          }
        )

        expose(
          :quote_unit,
          documentation: {
            type: String,
            desc: 'Market quote unit.'
          }
        )

        expose(
          :min_price,
          documentation: {
            type: BigDecimal,
            desc: 'Minimum order price.'
          }
        )

        expose(
          :max_price,
          documentation: {
            type: BigDecimal,
            desc: 'Maximum order price.'
          }
        )

        expose(
          :min_amount,
          documentation: {
            type: BigDecimal,
            desc: 'Minimum order amount.'
          }
        )

        expose(
          :amount_precision,
          documentation: {
            type: BigDecimal,
            desc: 'Precision for order amount.'
          }
        )

        expose(
          :price_precision,
          documentation: {
            type: BigDecimal,
            desc: 'Precision for order price.'
          }
        )

        expose(
          :state,
          documentation: {
            type: String,
            desc: 'Market state defines if user can see/trade on current market.'
          }
        )
      end
    end
  end
end
| 21.170213 | 81 | 0.434673 |
0893dcb39b6afd0ab3e2d0e45f40fcc00289680f | 35,170 | require 'spec_helper'
describe API::Groups do
include UploadHelpers
let(:user1) { create(:user, can_create_group: false) }
let(:user2) { create(:user) }
let(:user3) { create(:user) }
let(:admin) { create(:admin) }
let!(:group1) { create(:group, avatar: File.open(uploaded_image_temp_path)) }
let!(:group2) { create(:group, :private) }
let!(:project1) { create(:project, namespace: group1) }
let!(:project2) { create(:project, namespace: group2) }
let!(:project3) { create(:project, namespace: group1, path: 'test', visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
before do
group1.add_owner(user1)
group2.add_owner(user2)
end
describe "GET /groups" do
context "when unauthenticated" do
it "returns public groups" do
get api("/groups")
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response)
.to satisfy_one { |group| group['name'] == group1.name }
end
it 'avoids N+1 queries' do
# Establish baseline
get api("/groups", admin)
control = ActiveRecord::QueryRecorder.new do
get api("/groups", admin)
end
create(:group)
expect do
get api("/groups", admin)
end.not_to exceed_query_limit(control)
end
end
context "when authenticated as user" do
it "normal user: returns an array of groups of user1" do
get api("/groups", user1)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response)
.to satisfy_one { |group| group['name'] == group1.name }
end
it "does not include statistics" do
get api("/groups", user1), params: { statistics: true }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include 'statistics'
end
end
context "when authenticated as admin" do
it "admin: returns an array of all groups" do
get api("/groups", admin)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
end
it "does not include statistics by default" do
get api("/groups", admin)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include('statistics')
end
it "includes statistics if requested" do
attributes = {
storage_size: 1158,
repository_size: 123,
wiki_size: 456,
lfs_objects_size: 234,
build_artifacts_size: 345
}.stringify_keys
exposed_attributes = attributes.dup
exposed_attributes['job_artifacts_size'] = exposed_attributes.delete('build_artifacts_size')
project1.statistics.update!(attributes)
get api("/groups", admin), params: { statistics: true }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response)
.to satisfy_one { |group| group['statistics'] == exposed_attributes }
end
end
context "when using skip_groups in request" do
it "returns all groups excluding skipped groups" do
get api("/groups", admin), params: { skip_groups: [group2.id] }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
end
end
context "when using all_available in request" do
let(:response_groups) { json_response.map { |group| group['name'] } }
it "returns all groups you have access to" do
public_group = create :group, :public
get api("/groups", user1), params: { all_available: true }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to contain_exactly(public_group.name, group1.name)
end
end
context "when using sorting" do
let(:group3) { create(:group, name: "a#{group1.name}", path: "z#{group1.path}") }
let(:group4) { create(:group, name: "same-name", path: "y#{group1.path}") }
let(:group5) { create(:group, name: "same-name") }
let(:response_groups) { json_response.map { |group| group['name'] } }
let(:response_groups_ids) { json_response.map { |group| group['id'] } }
before do
group3.add_owner(user1)
group4.add_owner(user1)
group5.add_owner(user1)
end
it "sorts by name ascending by default" do
get api("/groups", user1)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(:name).pluck(:name))
end
it "sorts in descending order when passed" do
get api("/groups", user1), params: { sort: "desc" }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(name: :desc).pluck(:name))
end
it "sorts by path in order_by param" do
get api("/groups", user1), params: { order_by: "path" }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(:path).pluck(:name))
end
it "sorts by id in the order_by param" do
get api("/groups", user1), params: { order_by: "id" }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(:id).pluck(:name))
end
it "sorts also by descending id with pagination fix" do
get api("/groups", user1), params: { order_by: "id", sort: "desc" }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups).to eq(groups_visible_to_user(user1).order(id: :desc).pluck(:name))
end
it "sorts identical keys by id for good pagination" do
get api("/groups", user1), params: { search: "same-name", order_by: "name" }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups_ids).to eq(Group.select { |group| group['name'] == 'same-name' }.map { |group| group['id'] }.sort)
end
it "sorts descending identical keys by id for good pagination" do
get api("/groups", user1), params: { search: "same-name", order_by: "name", sort: "desc" }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(response_groups_ids).to eq(Group.select { |group| group['name'] == 'same-name' }.map { |group| group['id'] }.sort)
end
def groups_visible_to_user(user)
Group.where(id: user.authorized_groups.select(:id).reorder(nil))
end
end
context 'when using owned in the request' do
  it 'returns an array of groups the user owns' do
    # user2 becomes a maintainer (not owner) of group1; with owned: true
    # only group2 — which user2 owns via the top-level before block —
    # must be returned.
    group1.add_maintainer(user2)

    get api('/groups', user2), params: { owned: true }

    expect(response).to have_gitlab_http_status(200)
    expect(response).to include_pagination_headers
    expect(json_response).to be_an Array
    expect(json_response.length).to eq(1)
    expect(json_response.first['name']).to eq(group2.name)
  end
end
context 'when using min_access_level in the request' do
  let!(:group3) { create(:group, :private) }
  let(:response_groups) { json_response.map { |group| group['id'] } }

  before do
    group1.add_developer(user2)
    # Consistency fix: use `add_maintainer` (the current name) instead of
    # the deprecated `add_master` alias used elsewhere in this file.
    group3.add_maintainer(user2)
  end

  it 'returns an array of groups the user has at least master access' do
    # 40 == Gitlab::Access::MAINTAINER; developer access to group1 is
    # below the threshold, so only group2 (owner) and group3 qualify.
    get api('/groups', user2), params: { min_access_level: 40 }

    expect(response).to have_gitlab_http_status(200)
    expect(response).to include_pagination_headers
    expect(json_response).to be_an Array
    expect(response_groups).to eq([group2.id, group3.id])
  end
end
end
describe "GET /groups/:id" do
# Given a group, create one project for each visibility level
#
# group - Group to add projects to
# share_with - If provided, each project will be shared with this Group
#
# Returns a Hash of visibility_level => Project pairs
def add_projects_to_group(group, share_with: nil)
projects = {
public: create(:project, :public, namespace: group),
internal: create(:project, :internal, namespace: group),
private: create(:project, :private, namespace: group)
}
if share_with
create(:project_group_link, project: projects[:public], group: share_with)
create(:project_group_link, project: projects[:internal], group: share_with)
create(:project_group_link, project: projects[:private], group: share_with)
end
projects
end
def response_project_ids(json_response, key)
json_response[key].map do |project|
project['id'].to_i
end
end
context 'when unauthenticated' do
it 'returns 404 for a private group' do
get api("/groups/#{group2.id}")
expect(response).to have_gitlab_http_status(404)
end
it 'returns 200 for a public group' do
get api("/groups/#{group1.id}")
expect(response).to have_gitlab_http_status(200)
end
it 'returns only public projects in the group' do
public_group = create(:group, :public)
projects = add_projects_to_group(public_group)
get api("/groups/#{public_group.id}")
expect(response_project_ids(json_response, 'projects'))
.to contain_exactly(projects[:public].id)
end
it 'returns only public projects shared with the group' do
public_group = create(:group, :public)
projects = add_projects_to_group(public_group, share_with: group1)
get api("/groups/#{group1.id}")
expect(response_project_ids(json_response, 'shared_projects'))
.to contain_exactly(projects[:public].id)
end
end
context "when authenticated as user" do
it "returns one of user1's groups" do
project = create(:project, namespace: group2, path: 'Foo')
create(:project_group_link, project: project, group: group1)
get api("/groups/#{group1.id}", user1)
expect(response).to have_gitlab_http_status(200)
expect(json_response['id']).to eq(group1.id)
expect(json_response['name']).to eq(group1.name)
expect(json_response['path']).to eq(group1.path)
expect(json_response['description']).to eq(group1.description)
expect(json_response['visibility']).to eq(Gitlab::VisibilityLevel.string_level(group1.visibility_level))
expect(json_response['avatar_url']).to eq(group1.avatar_url(only_path: false))
expect(json_response['web_url']).to eq(group1.web_url)
expect(json_response['request_access_enabled']).to eq(group1.request_access_enabled)
expect(json_response['full_name']).to eq(group1.full_name)
expect(json_response['full_path']).to eq(group1.full_path)
expect(json_response['parent_id']).to eq(group1.parent_id)
expect(json_response['projects']).to be_an Array
expect(json_response['projects'].length).to eq(2)
expect(json_response['shared_projects']).to be_an Array
expect(json_response['shared_projects'].length).to eq(1)
expect(json_response['shared_projects'][0]['id']).to eq(project.id)
end
it "returns one of user1's groups without projects when with_projects option is set to false" do
project = create(:project, namespace: group2, path: 'Foo')
create(:project_group_link, project: project, group: group1)
get api("/groups/#{group1.id}", user1), params: { with_projects: false }
expect(response).to have_gitlab_http_status(200)
expect(json_response['projects']).to be_nil
expect(json_response['shared_projects']).to be_nil
end
it "does not return a non existing group" do
get api("/groups/1328", user1)
expect(response).to have_gitlab_http_status(404)
end
it "does not return a group not attached to user1" do
get api("/groups/#{group2.id}", user1)
expect(response).to have_gitlab_http_status(404)
end
it 'returns only public and internal projects in the group' do
public_group = create(:group, :public)
projects = add_projects_to_group(public_group)
get api("/groups/#{public_group.id}", user2)
expect(response_project_ids(json_response, 'projects'))
.to contain_exactly(projects[:public].id, projects[:internal].id)
end
it 'returns only public and internal projects shared with the group' do
public_group = create(:group, :public)
projects = add_projects_to_group(public_group, share_with: group1)
get api("/groups/#{group1.id}", user2)
expect(response_project_ids(json_response, 'shared_projects'))
.to contain_exactly(projects[:public].id, projects[:internal].id)
end
it 'avoids N+1 queries' do
get api("/groups/#{group1.id}", admin)
control_count = ActiveRecord::QueryRecorder.new do
get api("/groups/#{group1.id}", admin)
end.count
create(:project, namespace: group1)
expect do
get api("/groups/#{group1.id}", admin)
end.not_to exceed_query_limit(control_count)
end
end
context "when authenticated as admin" do
it "returns any existing group" do
get api("/groups/#{group2.id}", admin)
expect(response).to have_gitlab_http_status(200)
expect(json_response['name']).to eq(group2.name)
end
it "does not return a non existing group" do
get api("/groups/1328", admin)
expect(response).to have_gitlab_http_status(404)
end
end
context 'when using group path in URL' do
it 'returns any existing group' do
get api("/groups/#{group1.path}", admin)
expect(response).to have_gitlab_http_status(200)
expect(json_response['name']).to eq(group1.name)
end
it 'does not return a non existing group' do
get api('/groups/unknown', admin)
expect(response).to have_gitlab_http_status(404)
end
it 'does not return a group not attached to user1' do
get api("/groups/#{group2.path}", user1)
expect(response).to have_gitlab_http_status(404)
end
end
end
describe 'PUT /groups/:id' do
  let(:new_group_name) { 'New Group' }

  context 'when authenticated as the group owner' do
    it 'updates the group' do
      put api("/groups/#{group1.id}", user1), params: { name: new_group_name, request_access_enabled: true }

      expect(response).to have_gitlab_http_status(200)
      expect(json_response['name']).to eq(new_group_name)
      expect(json_response['request_access_enabled']).to eq(true)
    end

    it 'returns 404 for a non existing group' do
      put api('/groups/1328', user1), params: { name: new_group_name }

      expect(response).to have_gitlab_http_status(404)
    end
  end

  context 'when authenticated as the admin' do
    it 'updates the group' do
      put api("/groups/#{group1.id}", admin), params: { name: new_group_name }

      expect(response).to have_gitlab_http_status(200)
      expect(json_response['name']).to eq(new_group_name)
    end
  end

  # Grammar fixed in the descriptions below ('an user' -> 'a user',
  # 'does not updates' -> 'does not update').
  context 'when authenticated as a user that can see the group' do
    it 'does not update the group' do
      # user2 can see public group1 but has no role in it, hence 403.
      put api("/groups/#{group1.id}", user2), params: { name: new_group_name }

      expect(response).to have_gitlab_http_status(403)
    end
  end

  context 'when authenticated as a user that cannot see the group' do
    it 'returns 404 when trying to update the group' do
      # group2 is private and user1 is not a member, so it is not found.
      put api("/groups/#{group2.id}", user1), params: { name: new_group_name }

      expect(response).to have_gitlab_http_status(404)
    end
  end
end
describe "GET /groups/:id/projects" do
context "when authenticated as user" do
context 'with min access level' do
it 'returns projects with min access level or higher' do
group_guest = create(:user)
group1.add_guest(group_guest)
project4 = create(:project, group: group1)
project1.add_guest(group_guest)
project3.add_reporter(group_guest)
project4.add_developer(group_guest)
get api("/groups/#{group1.id}/projects", group_guest), params: { min_access_level: Gitlab::Access::REPORTER }
project_ids = json_response.map { |proj| proj['id'] }
expect(project_ids).to match_array([project3.id, project4.id])
end
end
it "returns the group's projects" do
get api("/groups/#{group1.id}/projects", user1)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(2)
project_names = json_response.map { |proj| proj['name'] }
expect(project_names).to match_array([project1.name, project3.name])
expect(json_response.first['visibility']).to be_present
end
it "returns the group's projects with simple representation" do
get api("/groups/#{group1.id}/projects", user1), params: { simple: true }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(2)
project_names = json_response.map { |proj| proj['name'] }
expect(project_names).to match_array([project1.name, project3.name])
expect(json_response.first['visibility']).not_to be_present
end
it "filters the groups projects" do
public_project = create(:project, :public, path: 'test1', group: group1)
get api("/groups/#{group1.id}/projects", user1), params: { visibility: 'public' }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(public_project.name)
end
it "returns projects excluding shared" do
create(:project_group_link, project: create(:project), group: group1)
create(:project_group_link, project: create(:project), group: group1)
create(:project_group_link, project: create(:project), group: group1)
get api("/groups/#{group1.id}/projects", user1), params: { with_shared: false }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
end
it "returns projects including those in subgroups" do
subgroup = create(:group, parent: group1)
create(:project, group: subgroup)
create(:project, group: subgroup)
get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(4)
end
it "does not return a non existing group" do
get api("/groups/1328/projects", user1)
expect(response).to have_gitlab_http_status(404)
end
it "does not return a group not attached to user1" do
get api("/groups/#{group2.id}/projects", user1)
expect(response).to have_gitlab_http_status(404)
end
it "only returns projects to which user has access" do
project3.add_developer(user3)
get api("/groups/#{group1.id}/projects", user3)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project3.name)
end
it 'only returns the projects owned by user' do
project2.group.add_owner(user3)
get api("/groups/#{project2.group.id}/projects", user3), params: { owned: true }
expect(response).to have_gitlab_http_status(200)
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project2.name)
end
it 'only returns the projects starred by user' do
user1.starred_projects = [project1]
get api("/groups/#{group1.id}/projects", user1), params: { starred: true }
expect(response).to have_gitlab_http_status(200)
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project1.name)
end
end
context "when authenticated as admin" do
it "returns any existing group" do
get api("/groups/#{group2.id}/projects", admin)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(project2.name)
end
it "does not return a non existing group" do
get api("/groups/1328/projects", admin)
expect(response).to have_gitlab_http_status(404)
end
it 'avoids N+1 queries' do
get api("/groups/#{group1.id}/projects", admin)
control_count = ActiveRecord::QueryRecorder.new do
get api("/groups/#{group1.id}/projects", admin)
end.count
create(:project, namespace: group1)
expect do
get api("/groups/#{group1.id}/projects", admin)
end.not_to exceed_query_limit(control_count)
end
end
context 'when using group path in URL' do
it 'returns any existing group' do
get api("/groups/#{group1.path}/projects", admin)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
project_names = json_response.map { |proj| proj['name'] }
expect(project_names).to match_array([project1.name, project3.name])
end
it 'does not return a non existing group' do
get api('/groups/unknown/projects', admin)
expect(response).to have_gitlab_http_status(404)
end
it 'does not return a group not attached to user1' do
get api("/groups/#{group2.path}/projects", user1)
expect(response).to have_gitlab_http_status(404)
end
end
end
describe 'GET /groups/:id/subgroups' do
let!(:subgroup1) { create(:group, parent: group1) }
let!(:subgroup2) { create(:group, :private, parent: group1) }
let!(:subgroup3) { create(:group, :private, parent: group2) }
context 'when unauthenticated' do
it 'returns only public subgroups' do
get api("/groups/#{group1.id}/subgroups")
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response.first['id']).to eq(subgroup1.id)
expect(json_response.first['parent_id']).to eq(group1.id)
end
it 'returns 404 for a private group' do
get api("/groups/#{group2.id}/subgroups")
expect(response).to have_gitlab_http_status(404)
end
end
context 'when authenticated as user' do
context 'when user is not member of a public group' do
it 'returns no subgroups for the public group' do
get api("/groups/#{group1.id}/subgroups", user2)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.length).to eq(0)
end
context 'when using all_available in request' do
it 'returns public subgroups' do
get api("/groups/#{group1.id}/subgroups", user2), params: { all_available: true }
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response[0]['id']).to eq(subgroup1.id)
expect(json_response[0]['parent_id']).to eq(group1.id)
end
end
end
context 'when user is not member of a private group' do
it 'returns 404 for the private group' do
get api("/groups/#{group2.id}/subgroups", user1)
expect(response).to have_gitlab_http_status(404)
end
end
context 'when user is member of public group' do
before do
group1.add_guest(user2)
end
it 'returns private subgroups' do
get api("/groups/#{group1.id}/subgroups", user2)
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
private_subgroups = json_response.select { |group| group['visibility'] == 'private' }
expect(private_subgroups.length).to eq(1)
expect(private_subgroups.first['id']).to eq(subgroup2.id)
expect(private_subgroups.first['parent_id']).to eq(group1.id)
end
context 'when using statistics in request' do
it 'does not include statistics' do
get api("/groups/#{group1.id}/subgroups", user2), params: { statistics: true }
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.first).not_to include 'statistics'
end
end
end
context 'when user is member of private group' do
before do
group2.add_guest(user1)
end
it 'returns subgroups' do
get api("/groups/#{group2.id}/subgroups", user1)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
expect(json_response.first['id']).to eq(subgroup3.id)
expect(json_response.first['parent_id']).to eq(group2.id)
end
end
end
context 'when authenticated as admin' do
it 'returns private subgroups of a public group' do
get api("/groups/#{group1.id}/subgroups", admin)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
end
it 'returns subgroups of a private group' do
get api("/groups/#{group2.id}/subgroups", admin)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
end
it 'does not include statistics by default' do
get api("/groups/#{group1.id}/subgroups", admin)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.first).not_to include('statistics')
end
it 'includes statistics if requested' do
get api("/groups/#{group1.id}/subgroups", admin), params: { statistics: true }
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.first).to include('statistics')
end
end
end
describe "POST /groups" do
  context "when authenticated as user without group permissions" do
    it "does not create group" do
      # user1 was created with can_create_group: false (see the let above).
      post api("/groups", user1), params: attributes_for(:group)

      expect(response).to have_gitlab_http_status(403)
    end

    # Even without the global can_create_group flag, owners and maintainers
    # of an existing group may create subgroups beneath it.
    context 'as owner' do
      before do
        group2.add_owner(user1)
      end

      it 'can create subgroups' do
        post api("/groups", user1), params: { parent_id: group2.id, name: 'foo', path: 'foo' }

        expect(response).to have_gitlab_http_status(201)
      end
    end

    context 'as maintainer' do
      before do
        group2.add_maintainer(user1)
      end

      it 'can create subgroups' do
        post api("/groups", user1), params: { parent_id: group2.id, name: 'foo', path: 'foo' }

        expect(response).to have_gitlab_http_status(201)
      end
    end
  end

  context "when authenticated as user with group permissions" do
    it "creates group" do
      group = attributes_for(:group, { request_access_enabled: false })

      post api("/groups", user3), params: group

      expect(response).to have_gitlab_http_status(201)
      expect(json_response["name"]).to eq(group[:name])
      expect(json_response["path"]).to eq(group[:path])
      expect(json_response["request_access_enabled"]).to eq(group[:request_access_enabled])
      # Visibility defaults to the instance-wide setting when not supplied.
      expect(json_response["visibility"]).to eq(Gitlab::VisibilityLevel.string_level(Gitlab::CurrentSettings.current_application_settings.default_group_visibility))
    end

    it "creates a nested group" do
      parent = create(:group)
      parent.add_owner(user3)
      group = attributes_for(:group, { parent_id: parent.id })

      post api("/groups", user3), params: group

      expect(response).to have_gitlab_http_status(201)
      expect(json_response["full_path"]).to eq("#{parent.path}/#{group[:path]}")
      expect(json_response["parent_id"]).to eq(parent.id)
    end

    it "does not create group, duplicate" do
      # Reusing group2's path must be rejected with a 400.
      post api("/groups", user3), params: { name: 'Duplicate Test', path: group2.path }

      expect(response).to have_gitlab_http_status(400)
      expect(response.message).to eq("Bad Request")
    end

    it "returns 400 bad request error if name not given" do
      post api("/groups", user3), params: { path: group2.path }

      expect(response).to have_gitlab_http_status(400)
    end

    it "returns 400 bad request error if path not given" do
      post api("/groups", user3), params: { name: 'test' }

      expect(response).to have_gitlab_http_status(400)
    end
  end
end
describe "DELETE /groups/:id" do
  context "when authenticated as user" do
    it "removes group" do
      # Group destruction is async: assert a GroupDestroyWorker job is
      # enqueued and the API answers 202 Accepted.
      Sidekiq::Testing.fake! do
        expect { delete api("/groups/#{group1.id}", user1) }.to change(GroupDestroyWorker.jobs, :size).by(1)
      end

      expect(response).to have_gitlab_http_status(202)
    end

    it_behaves_like '412 response' do
      let(:request) { api("/groups/#{group1.id}", user1) }
      let(:success_status) { 202 }
    end

    it "does not remove a group if not an owner" do
      user4 = create(:user)
      group1.add_maintainer(user4)

      # Fixed: issue the request as the maintainer that was just added.
      # Previously the unrelated user3 was used, leaving user4 unused and
      # never exercising the "member but not owner" case this example names.
      delete api("/groups/#{group1.id}", user4)

      expect(response).to have_gitlab_http_status(403)
    end

    it "does not remove a non existing group" do
      delete api("/groups/1328", user1)

      expect(response).to have_gitlab_http_status(404)
    end

    it "does not remove a group not attached to user1" do
      delete api("/groups/#{group2.id}", user1)

      expect(response).to have_gitlab_http_status(404)
    end
  end

  context "when authenticated as admin" do
    it "removes any existing group" do
      delete api("/groups/#{group2.id}", admin)

      expect(response).to have_gitlab_http_status(202)
    end

    it "does not remove a non existing group" do
      delete api("/groups/1328", admin)

      expect(response).to have_gitlab_http_status(404)
    end
  end
end
describe "POST /groups/:id/projects/:project_id" do
  let(:project) { create(:project) }
  let(:project_path) { CGI.escape(project.full_path) }

  before do
    # The transfer itself is stubbed out: these examples exercise only the
    # API layer (authorization + lookup), not Projects::TransferService.
    allow_any_instance_of(Projects::TransferService)
      .to receive(:execute).and_return(true)
  end

  context "when authenticated as user" do
    it "does not transfer project to group" do
      post api("/groups/#{group1.id}/projects/#{project.id}", user2)

      expect(response).to have_gitlab_http_status(403)
    end
  end

  context "when authenticated as admin" do
    it "transfers project to group" do
      post api("/groups/#{group1.id}/projects/#{project.id}", admin)

      expect(response).to have_gitlab_http_status(201)
    end

    context 'when using project path in URL' do
      context 'with a valid project path' do
        it "transfers project to group" do
          post api("/groups/#{group1.id}/projects/#{project_path}", admin)

          expect(response).to have_gitlab_http_status(201)
        end
      end

      context 'with a non-existent project path' do
        it "does not transfer project to group" do
          post api("/groups/#{group1.id}/projects/nogroup%2Fnoproject", admin)

          expect(response).to have_gitlab_http_status(404)
        end
      end
    end

    context 'when using a group path in URL' do
      context 'with a valid group path' do
        it "transfers project to group" do
          post api("/groups/#{group1.path}/projects/#{project_path}", admin)

          expect(response).to have_gitlab_http_status(201)
        end
      end

      context 'with a non-existent group path' do
        it "does not transfer project to group" do
          post api("/groups/noexist/projects/#{project_path}", admin)

          expect(response).to have_gitlab_http_status(404)
        end
      end
    end
  end
end
# Run the generic custom-attributes endpoint shared examples against groups.
it_behaves_like 'custom attributes endpoints', 'groups' do
  let(:attributable) { group1 }
  let(:other_attributable) { group2 }
  let(:user) { user1 }

  before do
    # user1 needs owner access to both groups so the shared examples can
    # reach both attributables.
    group2.add_owner(user1)
  end
end
| 35.0998 | 166 | 0.649872 |
bf70a26e849bcda12c9994f178fdd4c59eb416f8 | 616 | module Fog
module Cloudstack
class Compute
class Real
# Remove a VMware datacenter from a zone.
#
# {CloudStack API Reference}[http://cloudstack.apache.org/docs/api/apidocs-4.4/root_admin/removeVmwareDc.html]
#
# @param args either a single options Hash (which must already contain
#   'zoneid'), or a positional zone id.
# @return the response from +request+.
def remove_vmware_dc(*args)
  options = if args[0].is_a?(Hash)
              # Use a non-destructive merge: the original implementation
              # called merge! and silently mutated the caller's Hash.
              args[0].merge('command' => 'removeVmwareDc')
            else
              { 'command' => 'removeVmwareDc',
                'zoneid'  => args[0] }
            end
  request(options)
end
end
end
end
end
| 23.692308 | 118 | 0.547078 |
f86f3f8aeb4d07d4427a7b891444917c0a5edaa7 | 695 | require "edr_treadmill/activities/base_activity"
require "etc"
module EdrTreadmill
module Activities
# Treadmill activity that spawns an external process and reports its pid.
class ProcessActivity < BaseActivity
  self.activity_description = "Spawn a new process"
  self.activity_options = {
    command: {
      required: true,
      type: :string,
      desc: "Process to spawn. Can include arguments."
    }
  }

  # Splits the command string into the executable and (optionally) one
  # argument string; @args is nil when no arguments were supplied.
  def initialize(command:)
    @command, @args = command.split(" ", 2)
  end

  # Spawns the process (without waiting for it) and returns an activity
  # result describing what was launched.
  def execute
    # Process.spawn raises TypeError when handed a nil argument, so only
    # pass @args through when the command actually included arguments.
    pid = @args ? Process.spawn(@command, @args) : Process.spawn(@command)
    result(
      pid: pid,
      process_name: @command,
      command_line: "#{@command} #{@args}".strip
    )
  end
end
end
end
| 22.419355 | 58 | 0.574101 |
ac0f74c99f0e598eb16a6855a99c6bffa2477a2e | 82 | require 'rails_helper'
# Placeholder spec for LaunchController; no examples have been written yet.
RSpec.describe LaunchController, type: :controller do
end
| 16.4 | 53 | 0.817073 |
39889a19adcec01c97d28a7df7d8a33c7142b537 | 135 | require 'rails_helper'
# Generated placeholder spec for the ConsultationStock model.
RSpec.describe ConsultationStock, type: :model do
  pending "add some examples to (or delete) #{__FILE__}"
end
| 22.5 | 56 | 0.762963 |
acfc8e22412769c4a0efbc93ca85d38855cca67a | 1,446 | require "rails_helper"
# NOTE(review): this spec describes Post but instantiates Page throughout —
# presumably Post is implemented via Page (STI or an alias); confirm.
RSpec.describe Post, type: :model do
  # One predicate per workflow status; each should reflect the status name.
  %w(draft edited designed published).each do |status_name|
    describe "#{status_name}?" do
      subject { post.send("#{status_name}?") }

      context "with status of #{status_name}" do
        let(:status) { Status.new(name: status_name) }
        let(:post) { Page.new(status: status) }
        it { is_expected.to eq(true) }
      end

      context "with another status name" do
        let(:status) { Status.new(name: "another") }
        let(:post) { Page.new(status: status) }
        it { is_expected.to eq(false) }
      end
    end
  end

  describe "dated?" do
    subject { post.dated? }

    context "with published_at" do
      let(:post) { Page.new(published_at: Time.now) }
      it { is_expected.to be(true) }
    end

    context "without published_at" do
      let(:post) { Page.new }
      it { is_expected.to be(false) }
    end
  end

  describe "#meta_description" do
    # Comparison is done on the whitespace-stripped value.
    subject { post.meta_description.strip }

    context "with summary" do
      let(:post) { Page.new(summary: "summary") }
      it { is_expected.to eq("summary") }
    end

    context "without summary" do
      # Falls back to the content with markdown markup removed.
      let(:post) { Page.new(content: "*content*") }
      it { is_expected.to eq("content") }
    end
  end

  describe "#generated_draft_code" do
    let(:post) { Page.create(title: "test") }
    subject { post.draft_code }
    it { is_expected.to be_present }
  end
end
| 22.59375 | 59 | 0.605809 |
269a2ed8b3ca8497488e098e5fc3bc8ba7f55d9c | 487 | cask 'pgweb' do
version '0.6.3'
sha256 'bfa041bdbdd7a3424faa0be6b524302c0f4c6610c06afa8784904b2a91b64024'
url "https://github.com/sosedoff/pgweb/releases/download/v#{version}/pgweb_darwin_amd64.zip"
appcast 'https://github.com/sosedoff/pgweb/releases.atom',
:sha256 => '3a0c23bf19e274e52895f4d1a9ae781cb86704f2d2eefa53d8a89690786ec748'
name 'pgweb'
homepage 'https://github.com/sosedoff/pgweb'
license :mit
binary 'pgweb_darwin_amd64', :target => 'pgweb'
end
| 34.785714 | 94 | 0.767967 |
79760a9684bb364fa1637a5fb4c691719c98b8f5 | 1,842 | # encoding: utf-8
#
control "V-77823" do
title "The operating system must require authentication upon booting into
single-user and maintenance modes."
desc "If the system does not require valid root authentication before it
boots into single-user or maintenance mode, anyone who invokes single-user or
maintenance mode is granted privileged access to all files on the system."
impact 0.5
tag "gtitle": "SRG-OS-000080-GPOS-00048"
tag "gid": "V-77823"
tag "rid": "SV-92519r1_rule"
tag "stig_id": "RHEL-07-010481"
tag "cci": ["CCI-000213"]
tag "documentable": false
tag "nist": ["AC-3", "Rev_4"]
tag "check": "Verify the operating system must require authentication upon
booting into single-user and maintenance modes.
Check that the operating system requires authentication upon booting into
single-user mode with the following command:
# grep -i execstart /usr/lib/systemd/system/rescue.service
ExecStart=-/bin/sh -c \"/usr/sbin/sulogin; /usr/bin/systemctl --fail --no-block
default\"
If \"ExecStart\" does not have \"/usr/sbin/sulogin\" as an option, this is a
finding.
"
tag "fix": "Configure the operating system to require authentication upon
booting into single-user and maintenance modes.
Add or modify the \"ExecStart\" line in
\"/usr/lib/systemd/system/rescue.service\" to include \"/usr/sbin/sulogin\":
ExecStart=-/bin/sh -c \"/usr/sbin/sulogin; /usr/bin/systemctl --fail --no-block
default\"
"
tag "fix_id": "F-84523r1_fix"
describe command("grep -i execstart /usr/lib/systemd/system/rescue.service") do
its('stdout.strip') { should match %r{/usr/sbin/sulogin} }
end if package('gnome-desktop3').installed?
describe "The GNOME desktop is not installed" do
skip "The GNOME desktop is not installed, this control is Not Applicable."
end if !package('gnome-desktop3').installed?
end
| 35.423077 | 81 | 0.735071 |
acebeadda9f65905b4ff73d4598bd6b7d0434cc3 | 560 |
# Creates the two tables backing the translation layer:
# - translation_records: key/value UI translations per locale
# - dynamic_translation_records: per-record column translations for models
# Both tables only track created_at (no updated_at).
class AddTranslationTables < ActiveRecord::Migration
  def change
    create_table :translation_records do |t|
      # NOTE(review): the commented-out id column is redundant — Rails
      # adds the primary key automatically.
      #t.integer :id
      t.string :locale
      t.integer :translator_id
      t.string :key
      t.text :value
      t.datetime :created_at
    end

    create_table :dynamic_translation_records do |t|
      #t.integer :id
      t.string :locale
      t.integer :translator_id
      # Polymorphic reference to the translated record plus the column name.
      t.string :model_type
      t.integer :model_id
      t.string :column
      t.text :value
      t.datetime :created_at
    end
  end
end
| 22.4 | 53 | 0.616071 |
e96c7f98b20a797f0eba68142f222cbef43d8cd5 | 2,210 | require 'r10k/git/rugged'
require 'r10k/git/rugged/credentials'
require 'r10k/logging'
# Common read-only operations shared by the Rugged-backed git repository
# classes (bare/working). Subclasses are expected to set @path and
# @_rugged_repo.
class R10K::Git::Rugged::BaseRepository
  include R10K::Logging

  # @return [Pathname] The path to this repository.
  # @note The `@path` instance variable must be set by inheriting classes on instantiation.
  attr_reader :path

  # Resolve a revision pattern (SHA, branch, tag, etc.) to a commit OID.
  #
  # @param pattern [String] a git rev-parse style pattern
  # @return [String, nil] the commit OID, or nil when the pattern does not
  #   resolve. Tags (annotated or lightweight) are dereferenced to the
  #   commit they point at.
  def resolve(pattern)
    object = with_repo { |repo| repo.rev_parse(pattern) }
    case object
    when NilClass
      nil
    when ::Rugged::Tag, ::Rugged::Tag::Annotation
      # Dereference the tag to the commit it labels.
      object.target.oid
    else
      object.oid
    end
  rescue ::Rugged::ReferenceError
    # Unresolvable patterns yield nil rather than propagating the error.
    nil
  end

  # @return [Array<String>] names of all local branches
  def branches
    with_repo { |repo| repo.branches.each_name(:local).to_a }
  end

  # @return [Array<String>] names of all tags
  def tags
    with_repo { |repo| repo.tags.each_name.to_a }
  end

  # @return [Symbol] The type of the given ref, one of :branch, :tag, :commit, or :unknown
  def ref_type(pattern)
    # Try to match and resolve SHA refs as quickly as possible.
    # NOTE(review): unlike the readers above, this touches @_rugged_repo
    # directly (not via with_repo), so it will raise if the repo handle is
    # nil — confirm callers guarantee the repo has been opened.
    if pattern =~ /^[0-9a-f]{5,40}$/i && @_rugged_repo.include?(pattern)
      :commit
    elsif @_rugged_repo.tags[pattern]
      :tag
    elsif @_rugged_repo.branches[pattern]
      :branch
    elsif resolve(pattern)
      :commit
    else
      :unknown
    end
  end

  # @return [Hash{String => String}] remote names mapped to their URLs;
  #   empty when no repository handle is present.
  def remotes
    remotes_hash = {}
    if @_rugged_repo
      @_rugged_repo.remotes.each do |remote|
        remotes_hash[remote.name] = remote.url
      end
    end
    remotes_hash
  end

  private

  # Yield the underlying Rugged repository (when present), always closing
  # it afterwards to release libgit2 file handles.
  # NOTE(review): the opts parameter is accepted but never used here —
  # presumably kept for interface compatibility; verify.
  def with_repo(opts={})
    if @_rugged_repo
      yield @_rugged_repo
    end
  ensure
    @_rugged_repo.close if @_rugged_repo
  end

  # Generate a lambda that can create a credentials object for the
  # authentication type in question.
  #
  # @note The Rugged API expects an object that responds to #call; the
  #   Credentials subclasses implement #call returning self so that
  #   the Credentials object can be used, or a Proc that returns a
  #   Credentials object can be used.
  #
  # @api private
  #
  # @return [Proc]
  def credentials
    R10K::Git::Rugged::Credentials.new(self)
  end

  # Log a summary of a completed fetch/transfer at debug2 level.
  # NOTE(review): relies on #git_dir, which is not defined in this class —
  # presumably provided by subclasses; confirm.
  def report_transfer(results, remote)
    logger.debug2 { "Transferred #{results[:total_objects]} objects (#{results[:received_bytes]} bytes) from '#{remote}' into #{git_dir}'" }
    nil
  end
end
| 23.510638 | 140 | 0.671041 |
ed018bf93719975f381c9193f0fee286ea9e7257 | 1,017 | # frozen_string_literal: true
# Copyright 2015-2017, the Linux Foundation, IDA, and the
# CII Best Practices badge contributors
# SPDX-License-Identifier: MIT
require 'test_helper'
# Unit tests for ClientIp.acquire, which extracts the client address from a
# request, honouring X-Forwarded-For in the production environment.
class ClientIpTest < ActiveSupport::TestCase
  # Mocked request with a fixed ip address and no X-Forwarded-For header.
  class MockReq1
    def get_header(_x)
      nil
    end

    def ip
      '1.2.3.4'
    end
  end

  test 'ClientIP works correctly without X-Forwarded-For' do
    m = MockReq1.new
    result = ClientIp.acquire(m)
    # BUG FIX: the original used `assert '1.2.3.4', result`, which only
    # asserts that the string literal is truthy (the second argument to
    # assert is the failure *message*), so the test could never fail.
    assert_equal '1.2.3.4', result
  end

  # Mocked request with a comma-separated list as the header.
  class MockReq2
    def get_header(_x)
      '1.1.1.1, 100.36.183.117, 157.52.82.3'
    end

    def ip
      '1.2.3.4'
    end
  end

  # In our production environment we must use SECOND from the end.
  # Change this test, and ClientIp, if your environment is different.
  test 'ClientIP works correctly with X-Forwarded-For, production env' do
    m = MockReq2.new
    result = ClientIp.acquire(m)
    # BUG FIX: same assert -> assert_equal correction as above.
    assert_equal '100.36.183.117', result
  end
end
| 22.108696 | 73 | 0.679449 |
d56138909318d864e0ee862fc826307ce42fd5d5 | 10,029 | #
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Homebrew doesn't support specifying anything more recent than 'nehalem',
# but nehalem is 19x slower than sandybrdige at some real-world workloads,
# and sandybridge is an old enough architecture that we're going to assume
# that HHVM users have it.
# Remaps Homebrew's :nehalem optimization flag to -march=sandybridge
# (see the rationale in the header comment above).
module MonkeyPatchCPU
  def optimization_flags
    # Keep every stock flag, only overriding the :nehalem entry.
    super.merge({nehalem: "-march=sandybridge"}).freeze
  end
end

# Install the override onto Hardware::CPU's singleton so formula code that
# queries optimization_flags picks up the remapped entry.
class << Hardware::CPU
  prepend MonkeyPatchCPU
end
class Hhvm45 < Formula
desc "JIT compiler and runtime for the Hack language"
homepage "http://hhvm.com/"
url "https://dl.hhvm.com/source/hhvm-4.5.1.tar.gz"
head "https://github.com/facebook/hhvm.git"
sha256 "101e7017c1362fb4f991cb54481d20be8e00d583c27130ede02cb31d4f542274"
bottle do
root_url "https://dl.hhvm.com/homebrew-bottles"
sha256 high_sierra: "851aa49b46965d9d5f24d8d3eb26061ee7f9299279be4dacbd8c8c00c84a81c7"
sha256 mojave: "c8c80e2db8aa4e527acfa4bb2044ecd41ee4398f6db594406f235b7831178cc3"
end
option "with-debug", <<~EOS
Make an unoptimized build with assertions enabled. This will run PHP and
Hack code dramatically slower than a release build, and is suitable mostly
for debugging HHVM itself.
EOS
# Needs very recent xcode
depends_on :macos => :sierra
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "cmake" => :build
depends_on "double-conversion"
depends_on "dwarfutils"
depends_on "gawk" => :build
depends_on "libelf" => :build
depends_on "libtool" => :build
depends_on "md5sha1sum" => :build
depends_on "pkg-config" => :build
depends_on "wget" => :build
# We statically link against icu4c as every non-bugfix release is not
# backwards compatible; needing to rebuild for every release is too
# brittle
depends_on "icu4c" => :build
depends_on "boost"
depends_on "freetype"
depends_on "gd"
depends_on "gettext"
depends_on "glog"
depends_on "gmp"
depends_on "imagemagick@6"
depends_on "jemalloc"
depends_on "jpeg"
depends_on "libevent"
depends_on "libmemcached"
depends_on "libsodium"
depends_on "libpng"
depends_on "libxml2"
depends_on "libzip"
depends_on "lz4"
depends_on "mcrypt"
depends_on "oniguruma"
depends_on "openssl"
depends_on "pcre" # Used for Hack but not HHVM build - see #116
depends_on "postgresql"
depends_on "sqlite"
depends_on "tbb@2020"
def install
cmake_args = %W[
-DCMAKE_INSTALL_PREFIX=#{prefix}
-DCMAKE_INSTALL_SYSCONFDIR=#{etc}
-DDEFAULT_CONFIG_DIR=#{etc}/hhvm
]
# Force use of bundled PCRE to workaround #116
cmake_args += %W[
-DSYSTEM_PCRE_HAS_JIT=0
]
# Features which don't work on OS X yet since they haven't been ported yet.
cmake_args += %W[
-DENABLE_MCROUTER=OFF
-DENABLE_EXTENSION_MCROUTER=OFF
-DENABLE_EXTENSION_IMAP=OFF
]
# Required to specify a socket path if you are using the bundled async SQL
# client (which is very strongly recommended).
cmake_args << "-DMYSQL_UNIX_SOCK_ADDR=/tmp/mysql.sock"
# LZ4 warning macros are currently incompatible with clang
cmake_args << "-DCMAKE_C_FLAGS=-DLZ4_DISABLE_DEPRECATE_WARNINGS=1"
cmake_args << "-DCMAKE_CXX_FLAGS=-DLZ4_DISABLE_DEPRECATE_WARNINGS=1 -DU_USING_ICU_NAMESPACE=1"
# Debug builds. This switch is all that's needed, it sets all the right
# cflags and other config changes.
if build.with? "debug"
cmake_args << "-DCMAKE_BUILD_TYPE=Debug"
else
cmake_args << "-DCMAKE_BUILD_TYPE=RelWithDebInfo"
end
# Statically link libICU
cmake_args += %W[
-DICU_INCLUDE_DIR=#{Formula["icu4c"].opt_include}
-DICU_I18N_LIBRARY=#{Formula["icu4c"].opt_lib}/libicui18n.a
-DICU_LIBRARY=#{Formula["icu4c"].opt_lib}/libicuuc.a
-DICU_DATA_LIBRARY=#{Formula["icu4c"].opt_lib}/libicudata.a
]
# TBB looks for itself in a different place than brew installs to.
ENV["TBB_ARCH_PLATFORM"] = "."
cmake_args += %W[
-DTBB_INCLUDE_DIR=#{Formula["tbb@2020"].opt_include}
-DTBB_INSTALL_DIR=#{Formula["tbb@2020"].opt_prefix}
-DTBB_LIBRARY=#{Formula["tbb@2020"].opt_lib}/libtbb.dylib
-DTBB_LIBRARY_DEBUG=#{Formula["tbb@2020"].opt_lib}/libtbb.dylib
-DTBB_LIBRARY_DIR=#{Formula["tbb@2020"].opt_lib}
-DTBB_MALLOC_LIBRARY=#{Formula["tbb@2020"].opt_lib}/libtbbmalloc.dylib
-DTBB_MALLOC_LIBRARY_DEBUG=#{Formula["tbb@2020"].opt_lib}/libtbbmalloc.dylib
]
system "cmake", *cmake_args, '.'
system "make"
system "make", "install"
tp_notices = (share/"doc/third_party_notices.txt")
(share/"doc").install "third-party/third_party_notices.txt"
(share/"doc/third_party_notices.txt").append_lines <<EOF
-----
The following software may be included in this product: icu4c. This Software contains the following license and notice below:
Unicode Data Files include all data files under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
Unicode Data Files do not include PDF online code charts under the
directory http://www.unicode.org/Public/.
Software includes any source code published in the Unicode Standard
or under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.
NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.
COPYRIGHT AND PERMISSION NOTICE
Copyright © 1991-2017 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.
EOF
ini = etc/"hhvm"
(ini/"php.ini").write php_ini unless File.exist? (ini/"php.ini")
(ini/"server.ini").write server_ini unless File.exist? (ini/"server.ini")
end
test do
(testpath/"test.php").write <<~EOS
<?php
exit(is_integer(HHVM_VERSION_ID) ? 0 : 1);
EOS
system "#{bin}/hhvm", testpath/"test.php"
end
plist_options :manual => "hhvm -m daemon -c #{HOMEBREW_PREFIX}/etc/hhvm/php.ini -c #{HOMEBREW_PREFIX}/etc/hhvm/server.ini"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/hhvm</string>
<string>-m</string>
<string>server</string>
<string>-c</string>
<string>#{etc}/hhvm/php.ini</string>
<string>-c</string>
<string>#{etc}/hhvm/server.ini</string>
</array>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
</dict>
</plist>
EOS
end
# https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/php.ini
def php_ini
<<~EOS
; php options
session.save_handler = files
session.save_path = #{var}/lib/hhvm/sessions
session.gc_maxlifetime = 1440
; hhvm specific
hhvm.log.always_log_unhandled_exceptions = true
hhvm.log.runtime_error_reporting_level = 8191
hhvm.mysql.typed_results = false
EOS
end
# https://github.com/hhvm/packaging/blob/master/hhvm/deb/skeleton/etc/hhvm/server.ini
def server_ini
<<~EOS
; php options
pid = #{var}/run/hhvm/pid
; hhvm specific
hhvm.server.port = 9000
hhvm.server.default_document = index.php
hhvm.log.use_log_file = true
hhvm.log.file = #{var}/log/hhvm/error.log
hhvm.repo.central.path = #{var}/run/hhvm/hhvm.hhbc
EOS
end
end
| 35.438163 | 125 | 0.710539 |
3308831d945d5aac364876e6080c18215f109900 | 2,835 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
require "ucb_rails"
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
config.time_zone = 'Pacific Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
| 42.313433 | 100 | 0.73933 |
4aa937b04e25ec4b888ec629a43930ddeec688b4 | 194 | require File.dirname(__FILE__) + '/spec_helper'
describe ID3::TagBag::Anonymizer do
it "should print mp3s metadata" do
# puts ID3::TagBag::Anonymizer.new(ENV['HOME'] / 'Music')
end
end
| 24.25 | 61 | 0.706186 |
4a39ee36dc6b1c23cf1fb25e7ea500feb049563e | 695 | class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
add_flash_types :success
private

# Memoizes the first path handed to it and returns it thereafter.
# NOTE(review): because of `||=`, later calls with a different path are
# ignored for the rest of the request — confirm this is intentional.
def go_back_link_to(path)
  @go_back_link_to ||= path
  @go_back_link_to
end

# NOTE(review): this second `private` is redundant; the one above is
# already in effect.
private

# True-ish when a user is signed in (returns the user itself).
def logged_in?
  current_user
end
helper_method :logged_in?

# Looks up (and memoizes) the signed-in user from the session, if any.
def current_user
  @current_user ||= User.find(session[:user_id]) if session[:user_id]
end
helper_method :current_user
# Before-action guard: lets the request through when a user is signed in,
# otherwise sends the visitor to the sign-in page with a notice.
def require_user
  return true if current_user

  redirect_to new_user_session_path, notice: "You must be logged in to access that page."
end
end
| 19.305556 | 99 | 0.623022 |
4ae0673d56962123b14a9ea8c99ca42996a022c8 | 5,242 | require 'test_helper'
class PasswordResetsTest < ActionDispatch::IntegrationTest
def setup
ActionMailer::Base.deliveries.clear #初期化?
@user = users(:michael) #michaelをテストユーザとする
end
test "password resets" do
get new_password_reset_path #forgot password?のページに移動
assert_template 'password_resets/new' #password_resetsコントローラのnewアクションが実行されているか?
# メールアドレスが無効
post password_resets_path, params: { password_reset: { email: "" } } #メールアドレスに何もいれない場合
assert_not flash.empty? #flashにエラーメッセージ
assert_template 'password_resets/new' #password_resetsコントローラのnewアクションが実行されているか?
# メールアドレスが有効
post password_resets_path,
params: { password_reset: { email: @user.email } } #michaelの有効なパスワードでPOST
assert_not_equal @user.reset_digest, @user.reload.reset_digest #reset_digestが生成されているかチェック
assert_equal 1, ActionMailer::Base.deliveries.size #メールが1通送信されたか?
assert_not flash.empty? #flashにメール送信した通知
assert_redirected_to root_url #rootにリダイレクト
# パスワード再設定フォームのテスト
user = assigns(:user) #reset_token取得のため
# メールアドレスが無効
get edit_password_reset_path(user.reset_token, email: "") #無効なメールアドレスからのアクセスの場合
assert_redirected_to root_url #rootにリダイレクト
# 無効なユーザー
user.toggle!(:activated) #activatedがfalseのユーザとして設定
get edit_password_reset_path(user.reset_token, email: user.email) #reset_pathをGET
assert_redirected_to root_url #rootにリダイレクト
user.toggle!(:activated) #activatedをtrueに変更
# メールアドレスが有効で、トークンが無効
get edit_password_reset_path('wrong token', email: user.email) #tokenが無効なアクセス
assert_redirected_to root_url #rootにリダイレクト
# メールアドレスもトークンも有効
get edit_password_reset_path(user.reset_token, email: user.email) #メールアドレスもreset_tokenも有効なアクセス
assert_template 'password_resets/edit' #password_resetsコントローラのeditアクション
assert_select "input[name=email][type=hidden][value=?]", user.email #隠しフィールドのemailが正しいか
# 無効なパスワードとパスワード確認
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "barquux" } } #無効なパスワードの設定
assert_select 'div#error_explanation' #エラーメッセージを表示
# パスワードが空
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "",
password_confirmation: "" } } #パスワードが空で設定
assert_select 'div#error_explanation' #エラーメッセージが表示
# 有効なパスワードとパスワード確認
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "foobaz" } } #正しい形式のパスワードを設定
assert_nil user.reload.reset_digest #reset_digestがnilに更新されたか?
assert is_logged_in? #自動でログインする
assert_not flash.empty? #flashにパスワード変更のメッセージ表示
assert_redirected_to user #ユーザページにリダイレクト
end
# Reset tokens expire after a fixed window; an aged token must be rejected.
test "expired token" do
  get new_password_reset_path # visit the "forgot password?" page
  post password_resets_path, # POST with a valid email
       params: { password_reset: { email: @user.email } }

  @user = assigns(:user) # grab the controller's user (for its reset_token)
  @user.update_attribute(:reset_sent_at, 3.hours.ago) # age the token: pretend it was sent 3 hours ago
  patch password_reset_path(@user.reset_token), # attempt to PATCH a new password
        params: { email: @user.email,
                  user: { password: "foobar",
                          password_confirmation: "foobar" } }
  assert_response :redirect # the expired token triggers a redirect
  follow_redirect! # follow it
  assert_match /expired/i, response.body # the resulting page should mention "expired"
end
end | 64.716049 | 128 | 0.499046 |
acde96fdc786d13819be2d206c2153ac8b34a612 | 3,900 | class ChangeRegFeeToRealObject < ActiveRecord::Migration
class PaymentDetail < ActiveRecord::Base
belongs_to :payment, inverse_of: :payment_details
belongs_to :expense_item
has_one :refund_detail
scope :completed, -> { includes(:payment).includes(:refund_detail).where(payments: {completed: true}).where(refund_details: {payment_detail_id: nil}) }
end
class Payment < ActiveRecord::Base
has_many :payment_details, inverse_of: :payment, dependent: :destroy
end
class Registrant < ActiveRecord::Base
has_many :registrant_expense_items, -> { includes :expense_item}, dependent: :destroy
has_many :payment_details, -> {includes :payment}, dependent: :destroy
def reg_paid?
if RegistrationPeriod.paid_for_period(competitor, paid_expense_items).nil?
false
else
true
end
end
def paid_expense_items
paid_details.map{|pd| pd.expense_item }
end
def paid_details
payment_details.completed.clone
end
end
class RegistrantExpenseItem < ActiveRecord::Base
belongs_to :registrant
belongs_to :expense_item, inverse_of: :registrant_expense_items
end
class ExpenseItem < ActiveRecord::Base
end
class RegistrationPeriod < ActiveRecord::Base
belongs_to :competitor_expense_item, class_name: "ExpenseItem"
belongs_to :noncompetitor_expense_item, class_name: "ExpenseItem"
def last_day
end_date + 1.day
end
def current_period?(date = Date.today)
(start_date <= date && date <= last_day)
end
def self.all_registration_expense_items
RegistrationPeriod.all.collect{|rp| rp.competitor_expense_item} + RegistrationPeriod.all.collect{|rp| rp.noncompetitor_expense_item}
end
def self.relevant_period(date)
RegistrationPeriod.includes(:competitor_expense_item, :noncompetitor_expense_item).all.each do |rp|
if rp.current_period?(date)
return rp
end
end
nil
end
def self.paid_for_period(competitor, paid_items)
RegistrationPeriod.includes(:noncompetitor_expense_item).includes(:competitor_expense_item).each do |rp|
if competitor
if paid_items.include?(rp.competitor_expense_item)
return rp
end
else
if paid_items.include?(rp.noncompetitor_expense_item)
return rp
end
end
end
nil
end
end
def up
PaymentDetail.reset_column_information
Payment.reset_column_information
Registrant.reset_column_information
RegistrantExpenseItem.reset_column_information
ExpenseItem.reset_column_information
RegistrationPeriod.reset_column_information
# determine the current competitor expense item, and non-competitor expense item
rp = RegistrationPeriod.relevant_period(Date.today)
unless rp.nil?
Registrant.all.each do |reg|
if reg.competitor
ei = rp.competitor_expense_item
else
ei = rp.noncompetitor_expense_item
end
# go through every registrant, and create a system entry for that expense item if they haven't paid for registration.
if reg.reg_paid?
puts "Skipping creating REI for reg #{reg.bib_number}" # rubocop:disable Rails/Output
else
rei = reg.registrant_expense_items.build(expense_item_id: ei.id, system_managed: true)
puts "creating REI of #{ei.id} for reg: #{reg.bib_number}" # rubocop:disable Rails/Output
rei.save!
end
end
end
end
# Rolls the migration back by deleting every registrant expense item that
# points at a registration-period expense item. NOTE(review): this removes
# both the system-managed rows created by `up` and any pre-existing ones,
# so the rollback is lossy — confirm that is acceptable.
def down
  # remove any registrant_expense_items from the set of registration_fees
  RegistrationPeriod.all_registration_expense_items.each do |ei|
    RegistrantExpenseItem.where(expense_item_id: ei.id).each do |rei|
      puts "deleting rei for #{rei.registrant.bib_number}" # rubocop:disable Rails/Output
      rei.destroy
    end
  end
end
end
| 31.707317 | 155 | 0.70641 |
e8e2dc2175e148c08b4f8a3c6ebe65ea67cc41ed | 1,048 | class Planck < Formula
desc "Stand-alone ClojureScript REPL"
homepage "https://planck-repl.org/"
url "https://github.com/planck-repl/planck/archive/2.23.0.tar.gz"
sha256 "b18932d5d6db7b825e0c18edc9f6f268e741bc58890d64ea1dbc81034a275fe4"
head "https://github.com/planck-repl/planck.git"
bottle do
cellar :any
sha256 "09cb6e88959cae462136493836a0ebf496a9fc50b2f478b281b3f1fe4767fdf0" => :mojave
sha256 "b2eac1a03c217e6276bf5a654203976baf03d2467d51f453180dbb00223e0a13" => :high_sierra
sha256 "1a3048a8e8630034be48bfd4ffb5fd5c71bb2cebc9dd2b54ad6e1d79c7fc95ff" => :sierra
end
depends_on "clojure" => :build
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on :xcode => :build if OS.mac?
depends_on "icu4c"
depends_on "libzip"
def install
system "./script/build-sandbox"
bin.install "planck-c/build/planck"
bin.install "planck-sh/plk"
man1.install Dir["planck-man/*.1"]
end
test do
assert_equal "0", shell_output("#{bin}/planck -e '(- 1 1)'").chomp
end
end
| 31.757576 | 93 | 0.736641 |
6af37e26121041355a2c72cc8f8bdf764c772f10 | 1,892 | class EventsController < ApplicationController
before_action :set_event, only: [:show, :edit, :update, :destroy]
# GET /events
# GET /events.json
def index
@events = current_user.events.all
end
# GET /events/1
# GET /events/1.json
def show
end
# GET /events/new
def new
@event = Event.new
end
# GET /events/1/edit
def edit
end
# POST /events
# POST /events.json
def create
@event = current_user.events.new(event_params)
respond_to do |format|
if @event.save
format.html { redirect_to @event, notice: 'Event was successfully created.' }
format.json { render :show, status: :created, location: @event }
else
format.html { render :new }
format.json { render json: @event.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /events/1
# PATCH/PUT /events/1.json
def update
respond_to do |format|
if @event.update(event_params)
format.html { redirect_to @event, notice: 'Event was successfully updated.' }
format.json { render :show, status: :ok, location: @event }
else
format.html { render :edit }
format.json { render json: @event.errors, status: :unprocessable_entity }
end
end
end
# DELETE /events/1
# DELETE /events/1.json
def destroy
@event.destroy
respond_to do |format|
format.html { redirect_to events_url, notice: 'Event was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_event
@event = Event.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def event_params
params.require(:event).permit(:name, :desc, :start_date, :end_date, :priority, :user_id)
end
end
| 25.226667 | 94 | 0.651163 |
79ee9f027acf354a70ff5c9bcc6adc2fd6137835 | 1,161 | require 'json'
module Fastlane
  module Actions
    # Fastlane action that merges a Hash of values into a package.json file
    # and writes the result back pretty-printed.
    class SetPackageDataAction < Action
      # @param params action configuration providing
      #   :data         [Hash]   the keys/values to merge in
      #   :package_path [String] path to the package.json file
      # @return [Hash] the updated package data (symbolized keys)
      def self.run(params)
        data_to_write = params[:data]
        package_path = params[:package_path]

        file = File.read(package_path)
        data_hash = JSON.parse(file, symbolize_names: true)
        data_hash.update(data_to_write)

        # Re-serialize with a trailing newline, matching npm's formatting.
        pretty = JSON.pretty_generate(data_hash) + "\n"
        File.write(package_path, pretty)

        UI.success("#{package_path} has been updated with #{data_to_write}")

        data_hash
      end

      def self.description
        'Change data in your package.json file.'
      end

      def self.authors
        ['Hawken Rives']
      end

      def self.available_options
        [
          FastlaneCore::ConfigItem.new(key: :data,
                                       description: 'The data to update',
                                       type: Hash),
          FastlaneCore::ConfigItem.new(key: :package_path,
                                       description: 'The path to the package.json file',
                                       default_value: './package.json',
                                       type: String),
        ]
      end

      # This action does not depend on any particular platform.
      def self.is_supported?(_platform)
        true
      end
    end
  end
end
| 23.693878 | 83 | 0.59087 |
91b7d9a60cfc8e4e308bb5e89e5480796e28ab27 | 48,339 | require 'spec_helper'
require 'request_spec_shared_examples'
RSpec.describe 'Processes' do
let(:space) { VCAP::CloudController::Space.make }
let(:app_model) { VCAP::CloudController::AppModel.make(space: space, name: 'my_app', droplet: droplet) }
let(:droplet) { VCAP::CloudController::DropletModel.make }
let(:developer) { make_developer_for_space(space) }
let(:developer_headers) { headers_for(developer, user_name: user_name) }
let(:user_name) { 'ProcHudson' }
let(:build_client) { instance_double(HTTPClient, post: nil) }
let(:metadata) { {
labels: {
release: 'stable',
'seriouseats.com/potato' => 'mashed'
},
annotations: { 'checksum' => 'SHA' },
}
}
let(:rails_logger) { instance_double(ActiveSupport::Logger, info: nil) }
before do
allow_any_instance_of(::Diego::Client).to receive(:build_client).and_return(build_client)
allow(ActiveSupport::Logger).to receive(:new).and_return(rails_logger)
allow(VCAP::CloudController::TelemetryLogger).to receive(:v3_emit).and_call_original
VCAP::CloudController::TelemetryLogger.init('fake-log-path')
end
describe 'GET /v3/processes' do
let!(:web_revision) { VCAP::CloudController::RevisionModel.make }
let!(:web_process) {
VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
revision: web_revision,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
}
let!(:worker_process) {
VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'worker',
instances: 1,
memory: 100,
disk_quota: 200,
command: 'start worker',
)
}
before { VCAP::CloudController::ProcessModel.make(:process, app: app_model) }
it_behaves_like 'request_spec_shared_examples.rb list query endpoint' do
let(:message) { VCAP::CloudController::ProcessesListMessage }
let(:request) { '/v3/processes' }
let(:user_header) { developer_headers }
let(:excluded_params) {
[
:app_guid
]
}
let(:params) do
{
guids: ['foo', 'bar'],
space_guids: ['foo', 'bar'],
organization_guids: ['foo', 'bar'],
types: ['foo', 'bar'],
app_guids: ['foo', 'bar'],
page: '2',
per_page: '10',
order_by: 'updated_at',
label_selector: 'foo,bar',
}
end
end
it 'returns a paginated list of processes' do
get '/v3/processes?per_page=2', nil, developer_headers
expected_response = {
'pagination' => {
'total_results' => 3,
'total_pages' => 2,
'first' => { 'href' => "#{link_prefix}/v3/processes?page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/processes?page=2&per_page=2" },
'next' => { 'href' => "#{link_prefix}/v3/processes?page=2&per_page=2" },
'previous' => nil,
},
'resources' => [
{
'guid' => web_process.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => {
'data' => {
'guid' => web_revision.guid
}
},
},
'type' => 'web',
'command' => '[PRIVATE DATA HIDDEN IN LISTS]',
'instances' => 2,
'memory_in_mb' => 1024,
'disk_in_mb' => 1024,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'metadata' => { 'annotations' => {}, 'labels' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{web_process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{web_process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{web_process.guid}/stats" },
},
},
{
'guid' => worker_process.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => nil,
},
'type' => 'worker',
'command' => '[PRIVATE DATA HIDDEN IN LISTS]',
'instances' => 1,
'memory_in_mb' => 100,
'disk_in_mb' => 200,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'metadata' => { 'annotations' => {}, 'labels' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{worker_process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{worker_process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{worker_process.guid}/stats" },
},
}
]
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
it 'filters by label selectors' do
VCAP::CloudController::ProcessLabelModel.make(key_name: 'fruit', value: 'strawberry', process: worker_process)
get '/v3/processes?label_selector=fruit=strawberry', {}, developer_headers
expected_pagination = {
'total_results' => 1,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/processes?label_selector=fruit%3Dstrawberry&page=1&per_page=50" },
'last' => { 'href' => "#{link_prefix}/v3/processes?label_selector=fruit%3Dstrawberry&page=1&per_page=50" },
'next' => nil,
'previous' => nil
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['resources'].count).to eq(1)
expect(parsed_response['resources'][0]['guid']).to eq(worker_process.guid)
expect(parsed_response['pagination']).to eq(expected_pagination)
end
context 'faceted list' do
context 'by types' do
it 'returns only the matching processes' do
get '/v3/processes?per_page=2&types=worker,doesnotexist', nil, developer_headers
expected_pagination = {
'total_results' => 1,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/processes?page=1&per_page=2&types=worker%2Cdoesnotexist" },
'last' => { 'href' => "#{link_prefix}/v3/processes?page=1&per_page=2&types=worker%2Cdoesnotexist" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([worker_process.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
context 'by space_guids' do
let(:other_space) { VCAP::CloudController::Space.make(organization: space.organization) }
let(:other_app_model) { VCAP::CloudController::AppModel.make(space: other_space) }
let!(:other_space_process) {
VCAP::CloudController::ProcessModel.make(
:process,
app: other_app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
}
before do
other_space.add_developer developer
end
it 'returns only the matching processes' do
get "/v3/processes?per_page=2&space_guids=#{other_space.guid}", nil, developer_headers
expected_pagination = {
'total_results' => 1,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/processes?page=1&per_page=2&space_guids=#{other_space.guid}" },
'last' => { 'href' => "#{link_prefix}/v3/processes?page=1&per_page=2&space_guids=#{other_space.guid}" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([other_space_process.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
context 'by organization guids' do
let(:other_space) { VCAP::CloudController::Space.make }
let!(:other_org) { other_space.organization }
let!(:other_space_process) {
VCAP::CloudController::ProcessModel.make(
:process,
app: other_app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
}
let(:other_app_model) { VCAP::CloudController::AppModel.make(space: other_space) }
let(:developer) { make_developer_for_space(other_space) }
it 'returns only the matching processes' do
get "/v3/processes?per_page=2&organization_guids=#{other_org.guid}", nil, developer_headers
expected_pagination = {
'total_results' => 1,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/processes?organization_guids=#{other_org.guid}&page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/processes?organization_guids=#{other_org.guid}&page=1&per_page=2" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([other_space_process.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
context 'by app guids' do
let(:desired_app) { VCAP::CloudController::AppModel.make(space: space) }
let!(:desired_process) do
VCAP::CloudController::ProcessModel.make(:process,
app: desired_app,
type: 'persnickety',
instances: 3,
memory: 2048,
disk_quota: 2048,
command: 'at ease'
)
end
it 'returns only the matching processes' do
get "/v3/processes?per_page=2&app_guids=#{desired_app.guid}", nil, developer_headers
expected_pagination = {
'total_results' => 1,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/processes?app_guids=#{desired_app.guid}&page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/processes?app_guids=#{desired_app.guid}&page=1&per_page=2" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([desired_process.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
context 'by guids' do
it 'returns only the matching processes' do
get "/v3/processes?per_page=2&guids=#{web_process.guid},#{worker_process.guid}", nil, developer_headers
expected_pagination = {
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/processes?guids=#{web_process.guid}%2C#{worker_process.guid}&page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/processes?guids=#{web_process.guid}%2C#{worker_process.guid}&page=1&per_page=2" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([web_process.guid, worker_process.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
end
end
describe 'GET /v3/processes/:guid' do
  # Happy path: a space developer fetches a single process and sees the
  # full representation — command included — plus the revision
  # relationship and the self/scale/app/space/stats links.
  it 'retrieves the process' do
    revision = VCAP::CloudController::RevisionModel.make
    process = VCAP::CloudController::ProcessModel.make(
      :process,
      app: app_model,
      revision: revision,
      type: 'web',
      instances: 2,
      memory: 1024,
      disk_quota: 1024,
      command: 'rackup',
    )

    get "/v3/processes/#{process.guid}", nil, developer_headers

    # Exact expected payload; iso8601 is a matcher for timestamp fields.
    expected_response = {
      'guid' => process.guid,
      'type' => 'web',
      'relationships' => {
        'app' => { 'data' => { 'guid' => app_model.guid } },
        'revision' => { 'data' => { 'guid' => revision.guid } },
      },
      'command' => 'rackup',
      'instances' => 2,
      'memory_in_mb' => 1024,
      'disk_in_mb' => 1024,
      'health_check' => {
        'type' => 'port',
        'data' => {
          'timeout' => nil,
          'invocation_timeout' => nil
        }
      },
      'metadata' => { 'annotations' => {}, 'labels' => {} },
      'created_at' => iso8601,
      'updated_at' => iso8601,
      'links' => {
        'self' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}" },
        'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/actions/scale", 'method' => 'POST' },
        'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
        'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
        'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/stats" },
      },
    }

    parsed_response = MultiJson.load(last_response.body)

    expect(last_response.status).to eq(200)
    expect(parsed_response).to be_a_response_like(expected_response)
  end

  # Space auditors may read the process but must not see the start
  # command, which can contain credentials.
  it 'redacts information for auditors' do
    process = VCAP::CloudController::ProcessModel.make(:process, app: app_model, command: 'rackup')

    auditor = VCAP::CloudController::User.make
    space.organization.add_user(auditor)
    space.add_auditor(auditor)

    get "/v3/processes/#{process.guid}", nil, headers_for(auditor)

    parsed_response = MultiJson.load(last_response.body)
    expect(last_response.status).to eq(200)
    expect(parsed_response['command']).to eq('[PRIVATE DATA HIDDEN]')
  end
end
describe 'GET stats' do
let(:process) { VCAP::CloudController::ProcessModel.make(:process, type: 'worker', app: app_model) }
let(:net_info_1) {
{
address: '1.2.3.4',
ports: [
{
host_port: 8080,
container_port: 1234,
host_tls_proxy_port: 61002,
container_tls_proxy_port: 61003
},
{
host_port: 3000,
container_port: 4000,
host_tls_proxy_port: 61006,
container_tls_proxy_port: 61007
}
]
}
}
let(:stats_for_process) do
{
0 => {
state: 'RUNNING',
details: 'some-details',
isolation_segment: 'very-isolated',
stats: {
name: process.name,
uris: process.uris,
host: 'toast',
net_info: net_info_1,
uptime: 12345,
mem_quota: process[:memory] * 1024 * 1024,
disk_quota: process[:disk_quota] * 1024 * 1024,
fds_quota: process.file_descriptors,
usage: {
time: usage_time,
cpu: 80,
mem: 128,
disk: 1024,
}
}
},
}
end
let(:instances_reporters) { double(:instances_reporters) }
let(:usage_time) { Time.now.utc.to_s }
let(:expected_response) do
{
'resources' => [{
'type' => 'worker',
'index' => 0,
'state' => 'RUNNING',
'isolation_segment' => 'very-isolated',
'details' => 'some-details',
'usage' => {
'time' => usage_time,
'cpu' => 80,
'mem' => 128,
'disk' => 1024,
},
'host' => 'toast',
'instance_ports' => [
{
'external' => 8080,
'internal' => 1234,
'external_tls_proxy_port' => 61002,
'internal_tls_proxy_port' => 61003
},
{
'external' => 3000,
'internal' => 4000,
'external_tls_proxy_port' => 61006,
'internal_tls_proxy_port' => 61007
}
],
'uptime' => 12345,
'mem_quota' => 1073741824,
'disk_quota' => 1073741824,
'fds_quota' => 16384
}]
}
end
before do
CloudController::DependencyLocator.instance.register(:instances_reporters, instances_reporters)
allow(instances_reporters).to receive(:stats_for_app).and_return(stats_for_process)
end
describe 'GET /v3/processes/:guid/stats' do
context 'route integrity is enabled' do
it 'retrieves the stats for a process' do
get "/v3/processes/#{process.guid}/stats", nil, developer_headers
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
end
end
describe 'GET /v3/apps/:guid/processes/:type/stats' do
it 'retrieves the stats for a process belonging to an app' do
get "/v3/apps/#{app_model.guid}/processes/worker/stats", nil, developer_headers
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
end
end
describe 'PATCH /v3/processes/:guid' do
it 'updates the process' do
revision = VCAP::CloudController::RevisionModel.make
process = VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
revision: revision,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
ports: [4444, 5555],
health_check_type: 'port',
health_check_timeout: 10
)
update_request = {
command: 'new command',
health_check: {
type: 'process',
data: {
timeout: 20
}
},
metadata: metadata,
}.to_json
patch "/v3/processes/#{process.guid}", update_request, developer_headers.merge('CONTENT_TYPE' => 'application/json')
expected_response = {
'guid' => process.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => { 'data' => { 'guid' => revision.guid } },
},
'type' => 'web',
'command' => 'new command',
'instances' => 2,
'memory_in_mb' => 1024,
'disk_in_mb' => 1024,
'health_check' => {
'type' => 'process',
'data' => {
'timeout' => 20,
'invocation_timeout' => nil
}
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/stats" },
},
'metadata' => {
'labels' => {
'release' => 'stable',
'seriouseats.com/potato' => 'mashed',
},
'annotations' => { 'checksum' => 'SHA' },
},
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
process.reload
expect(process.command).to eq('new command')
expect(process.health_check_type).to eq('process')
expect(process.health_check_timeout).to eq(20)
event = VCAP::CloudController::Event.last
expect(event.values).to include({
type: 'audit.app.process.update',
actee: app_model.guid,
actee_type: 'app',
actee_name: 'my_app',
actor: developer.guid,
actor_type: 'user',
actor_username: user_name,
space_guid: space.guid,
organization_guid: space.organization.guid
})
expect(event.metadata).to eq({
'process_guid' => process.guid,
'process_type' => 'web',
'request' => {
'command' => '[PRIVATE DATA HIDDEN]',
'health_check' => {
'type' => 'process',
'data' => {
'timeout' => 20,
}
},
'metadata' => {
'labels' => {
'release' => 'stable',
'seriouseats.com/potato' => 'mashed',
},
'annotations' => { 'checksum' => 'SHA' },
}
}
})
end
end
describe 'POST /v3/processes/:guid/actions/scale' do
it 'scales the process' do
process = VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
scale_request = {
instances: 5,
memory_in_mb: 10,
disk_in_mb: 20,
}
post "/v3/processes/#{process.guid}/actions/scale", scale_request.to_json, developer_headers
expected_response = {
'guid' => process.guid,
'type' => 'web',
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => nil,
},
'command' => 'rackup',
'instances' => 5,
'memory_in_mb' => 10,
'disk_in_mb' => 20,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'created_at' => iso8601,
'updated_at' => iso8601,
'metadata' => { 'annotations' => {}, 'labels' => {} },
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/stats" },
},
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(202)
expect(parsed_response).to be_a_response_like(expected_response)
process.reload
expect(process.instances).to eq(5)
expect(process.memory).to eq(10)
expect(process.disk_quota).to eq(20)
events = VCAP::CloudController::Event.where(actor: developer.guid).all
process_event = events.find { |e| e.type == 'audit.app.process.scale' }
expect(process_event.values).to include({
type: 'audit.app.process.scale',
actee: app_model.guid,
actee_type: 'app',
actee_name: 'my_app',
actor: developer.guid,
actor_type: 'user',
actor_username: user_name,
space_guid: space.guid,
organization_guid: space.organization.guid
})
expect(process_event.metadata).to eq({
'process_guid' => process.guid,
'process_type' => 'web',
'request' => {
'instances' => 5,
'memory_in_mb' => 10,
'disk_in_mb' => 20
}
})
end
it 'ensures that the memory allocation is greater than existing sidecar memory allocation' do
process = VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
sidecar = VCAP::CloudController::SidecarModel.make(
name: 'my-sidecar',
app: app_model,
memory: 256
)
VCAP::CloudController::SidecarProcessTypeModel.make(sidecar: sidecar, type: process.type, app_guid: app_model.guid)
scale_request = {
memory_in_mb: 256,
}
post "/v3/processes/#{process.guid}/actions/scale", scale_request.to_json, developer_headers
expect(last_response.status).to eq(422)
expect(parsed_response['errors'][0]['detail']).to eq 'The requested memory allocation is not large enough to run all of your sidecar processes'
process.reload
expect(process.memory).to eq(1024)
end
context 'telemetry' do
let(:process) { VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
}
let(:scale_request) do {
instances: 5,
memory_in_mb: 10,
disk_in_mb: 20,
}
end
it 'should log the required fields when the process gets scaled' do
Timecop.freeze do
post "/v3/processes/#{process.guid}/actions/scale", scale_request.to_json, developer_headers
expect(last_response.status).to eq(202)
parsed_response = MultiJson.load(last_response.body)
app_guid = parsed_response['relationships']['app']['data']['guid']
expected_json = {
'telemetry-source' => 'cloud_controller_ng',
'telemetry-time' => Time.now.to_datetime.rfc3339,
'scale-app' => {
'api-version' => 'v3',
'instance-count' => 5,
'memory-in-mb' => 10,
'disk-in-mb' => 20,
'process-type' => 'web',
'app-id' => Digest::SHA256.hexdigest(app_guid),
'user-id' => Digest::SHA256.hexdigest(developer.guid),
}
}
expect(last_response.status).to eq(202), last_response.body
expect(rails_logger).to have_received(:info).with(JSON.generate(expected_json))
end
end
end
end
describe 'DELETE /v3/processes/:guid/instances/:index' do
  before do
    # Stub the Diego BBS client so no real stop-instance call leaves
    # the test process.
    allow_any_instance_of(VCAP::CloudController::Diego::BbsAppsClient).to receive(:stop_index)
  end

  # Terminating one instance returns 204 and records an audit event
  # carrying the actor, target app, and the affected instance index.
  it 'terminates a single instance of a process' do
    process = VCAP::CloudController::ProcessModel.make(:process, type: 'web', app: app_model)

    delete "/v3/processes/#{process.guid}/instances/0", nil, developer_headers

    expect(last_response.status).to eq(204)

    events        = VCAP::CloudController::Event.where(actor: developer.guid).all
    process_event = events.find { |e| e.type == 'audit.app.process.terminate_instance' }
    expect(process_event.values).to include({
      type:              'audit.app.process.terminate_instance',
      actee:             app_model.guid,
      actee_type:        'app',
      actee_name:        'my_app',
      actor:             developer.guid,
      actor_type:        'user',
      actor_username:    user_name,
      space_guid:        space.guid,
      organization_guid: space.organization.guid
    })
    expect(process_event.metadata).to eq({
      'process_guid' => process.guid,
      'process_type' => 'web',
      'process_index' => 0
    })
  end
end
describe 'GET /v3/apps/:guid/processes' do
let!(:process1) {
VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
}
let!(:process2) {
VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
revision: revision2,
type: 'worker',
instances: 1,
memory: 100,
disk_quota: 200,
command: 'start worker',
)
}
let!(:process3) {
VCAP::CloudController::ProcessModel.make(:process, app: app_model, revision: revision3)
}
let!(:deployment_process) {
VCAP::CloudController::ProcessModel.make(:process, app: app_model, type: 'web-deployment', revision: deployment_revision)
}
let!(:revision3) { VCAP::CloudController::RevisionModel.make }
let!(:revision2) { VCAP::CloudController::RevisionModel.make }
let!(:deployment_revision) { VCAP::CloudController::RevisionModel.make }
it 'returns a paginated list of processes for an app' do
get "/v3/apps/#{app_model.guid}/processes?per_page=2", nil, developer_headers
expected_response = {
'pagination' => {
'total_results' => 4,
'total_pages' => 2,
'first' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?page=2&per_page=2" },
'next' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?page=2&per_page=2" },
'previous' => nil,
},
'resources' => [
{
'guid' => process1.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => nil,
},
'type' => 'web',
'command' => '[PRIVATE DATA HIDDEN IN LISTS]',
'instances' => 2,
'memory_in_mb' => 1024,
'disk_in_mb' => 1024,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'metadata' => { 'annotations' => {}, 'labels' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process1.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process1.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process1.guid}/stats" },
},
},
{
'guid' => process2.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => {
'data' => {
'guid' => revision2.guid
}
},
},
'type' => 'worker',
'command' => '[PRIVATE DATA HIDDEN IN LISTS]',
'instances' => 1,
'memory_in_mb' => 100,
'disk_in_mb' => 200,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'metadata' => { 'annotations' => {}, 'labels' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process2.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process2.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process2.guid}/stats" },
},
}
]
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
context 'faceted list' do
context 'by types' do
it 'returns only the matching processes' do
get "/v3/apps/#{app_model.guid}/processes?per_page=2&types=worker", nil, developer_headers
expected_pagination = {
'total_results' => 1,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?page=1&per_page=2&types=worker" },
'last' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?page=1&per_page=2&types=worker" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([process2.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
context 'by guids' do
it 'returns only the matching processes' do
get "/v3/apps/#{app_model.guid}/processes?per_page=2&guids=#{process1.guid},#{process2.guid}", nil, developer_headers
expected_pagination = {
'total_results' => 2,
'total_pages' => 1,
'first' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?guids=#{process1.guid}%2C#{process2.guid}&page=1&per_page=2" },
'last' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}/processes?guids=#{process1.guid}%2C#{process2.guid}&page=1&per_page=2" },
'next' => nil,
'previous' => nil,
}
expect(last_response.status).to eq(200)
parsed_response = MultiJson.load(last_response.body)
returned_guids = parsed_response['resources'].map { |i| i['guid'] }
expect(returned_guids).to match_array([process1.guid, process2.guid])
expect(parsed_response['pagination']).to be_a_response_like(expected_pagination)
end
end
end
end
describe 'GET /v3/apps/:guid/processes/:type' do
it 'retrieves the process for an app with the requested type' do
revision = VCAP::CloudController::RevisionModel.make
process = VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
revision: revision,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
get "/v3/apps/#{app_model.guid}/processes/web", nil, developer_headers
expected_response = {
'guid' => process.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => { 'data' => { 'guid' => revision.guid } },
},
'type' => 'web',
'command' => 'rackup',
'instances' => 2,
'memory_in_mb' => 1024,
'disk_in_mb' => 1024,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'metadata' => { 'annotations' => {}, 'labels' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/stats" },
},
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
end
it 'redacts information for auditors' do
VCAP::CloudController::ProcessModel.make(:process, app: app_model, type: 'web', command: 'rackup')
auditor = VCAP::CloudController::User.make
space.organization.add_user(auditor)
space.add_auditor(auditor)
get "/v3/apps/#{app_model.guid}/processes/web", nil, headers_for(auditor)
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response['command']).to eq('[PRIVATE DATA HIDDEN]')
end
end
describe 'PATCH /v3/apps/:guid/processes/:type' do
it 'updates the process' do
process = VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
ports: [4444, 5555],
health_check_type: 'port',
health_check_timeout: 10
)
update_request = {
command: 'new command',
health_check: {
type: 'http',
data: {
timeout: 20,
endpoint: '/healthcheck'
}
},
metadata: metadata,
}.to_json
patch "/v3/apps/#{app_model.guid}/processes/web", update_request, developer_headers.merge('CONTENT_TYPE' => 'application/json')
expected_response = {
'guid' => process.guid,
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => nil,
},
'type' => 'web',
'command' => 'new command',
'instances' => 2,
'memory_in_mb' => 1024,
'disk_in_mb' => 1024,
'health_check' => {
'type' => 'http',
'data' => {
'timeout' => 20,
'endpoint' => '/healthcheck',
'invocation_timeout' => nil
}
},
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/stats" },
},
'metadata' => {
'labels' => {
'release' => 'stable',
'seriouseats.com/potato' => 'mashed',
},
'annotations' => { 'checksum' => 'SHA' },
}
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(200)
expect(parsed_response).to be_a_response_like(expected_response)
process.reload
expect(process.command).to eq('new command')
expect(process.health_check_type).to eq('http')
expect(process.health_check_timeout).to eq(20)
expect(process.health_check_http_endpoint).to eq('/healthcheck')
event = VCAP::CloudController::Event.last
expect(event.values).to include({
type: 'audit.app.process.update',
actee: app_model.guid,
actee_type: 'app',
actee_name: 'my_app',
actor: developer.guid,
actor_type: 'user',
actor_username: user_name,
space_guid: space.guid,
organization_guid: space.organization.guid
})
expect(event.metadata).to eq({
'process_guid' => process.guid,
'process_type' => 'web',
'request' => {
'command' => '[PRIVATE DATA HIDDEN]',
'health_check' => {
'type' => 'http',
'data' => {
'timeout' => 20,
'endpoint' => '/healthcheck',
}
},
'metadata' => {
'labels' => {
'release' => 'stable',
'seriouseats.com/potato' => 'mashed',
},
'annotations' => { 'checksum' => 'SHA' },
},
}
})
end
end
# Request specs for POST /v3/apps/:guid/processes/:type/actions/scale.
# Verifies the 202 response shape, that the process row is actually
# rescaled, the audit event, and the emitted telemetry log line.
describe 'POST /v3/apps/:guid/processes/:type/actions/scale' do
# A web process with known pre-scale values (2 instances, 1024 MB each).
let!(:process) { VCAP::CloudController::ProcessModel.make(
:process,
app: app_model,
type: 'web',
instances: 2,
memory: 1024,
disk_quota: 1024,
command: 'rackup',
)
}
let(:scale_request) do {
instances: 5,
memory_in_mb: 10,
disk_in_mb: 20,
}
end
# Happy path: scaling returns 202 with the full updated process resource.
it 'scales the process belonging to an app' do
post "/v3/apps/#{app_model.guid}/processes/web/actions/scale", scale_request.to_json, developer_headers
expected_response = {
'guid' => process.guid,
'type' => 'web',
'relationships' => {
'app' => { 'data' => { 'guid' => app_model.guid } },
'revision' => nil,
},
'command' => 'rackup',
'instances' => 5,
'memory_in_mb' => 10,
'disk_in_mb' => 20,
'health_check' => {
'type' => 'port',
'data' => {
'timeout' => nil,
'invocation_timeout' => nil
}
},
'metadata' => { 'annotations' => {}, 'labels' => {} },
'created_at' => iso8601,
'updated_at' => iso8601,
'links' => {
'self' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}" },
'scale' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/actions/scale", 'method' => 'POST' },
'app' => { 'href' => "#{link_prefix}/v3/apps/#{app_model.guid}" },
'space' => { 'href' => "#{link_prefix}/v3/spaces/#{space.guid}" },
'stats' => { 'href' => "#{link_prefix}/v3/processes/#{process.guid}/stats" },
},
}
parsed_response = MultiJson.load(last_response.body)
expect(last_response.status).to eq(202)
expect(parsed_response).to be_a_response_like(expected_response)
# The database row must reflect the requested scale, not just the response.
process.reload
expect(process.instances).to eq(5)
expect(process.memory).to eq(10)
expect(process.disk_quota).to eq(20)
# An audit.app.process.scale event is recorded with the request payload.
events = VCAP::CloudController::Event.where(actor: developer.guid).all
process_event = events.find { |e| e.type == 'audit.app.process.scale' }
expect(process_event.values).to include({
type: 'audit.app.process.scale',
actee: app_model.guid,
actee_type: 'app',
actee_name: 'my_app',
actor: developer.guid,
actor_type: 'user',
actor_username: user_name,
space_guid: space.guid,
organization_guid: space.organization.guid
})
expect(process_event.metadata).to eq({
'process_guid' => process.guid,
'process_type' => 'web',
'request' => {
'instances' => 5,
'memory_in_mb' => 10,
'disk_in_mb' => 20
}
})
end
context 'telemetry' do
it 'should log the required fields when the process gets scaled' do
# Timecop.freeze pins Time.now so telemetry-time can be asserted exactly.
Timecop.freeze do
post "/v3/apps/#{app_model.guid}/processes/web/actions/scale", scale_request.to_json, developer_headers
expect(last_response.status).to eq(202)
parsed_response = MultiJson.load(last_response.body)
app_guid = parsed_response['relationships']['app']['data']['guid']
# Identifiers are SHA256-hashed before logging so telemetry stays anonymous.
expected_json = {
'telemetry-source' => 'cloud_controller_ng',
'telemetry-time' => Time.now.to_datetime.rfc3339,
'scale-app' => {
'api-version' => 'v3',
'instance-count' => 5,
'memory-in-mb' => 10,
'disk-in-mb' => 20,
'process-type' => 'web',
'app-id' => Digest::SHA256.hexdigest(app_guid),
'user-id' => Digest::SHA256.hexdigest(developer.guid),
}
}
expect(last_response.status).to eq(202), last_response.body
expect(rails_logger).to have_received(:info).with(JSON.generate(expected_json))
end
end
end
end
# Request specs for DELETE /v3/apps/:guid/processes/:type/instances/:index.
# The Diego BBS client is stubbed so no real instance is stopped.
describe 'DELETE /v3/apps/:guid/processes/:type/instances/:index' do
before do
allow_any_instance_of(VCAP::CloudController::Diego::BbsAppsClient).to receive(:stop_index)
end
it 'terminates a single instance of a process belonging to an app' do
process = VCAP::CloudController::ProcessModel.make(:process, type: 'web', app: app_model)
delete "/v3/apps/#{app_model.guid}/processes/web/instances/0", nil, developer_headers
# 204 No Content: termination has no response body.
expect(last_response.status).to eq(204)
# The terminate is audited with the instance index in the metadata.
events = VCAP::CloudController::Event.where(actor: developer.guid).all
process_event = events.find { |e| e.type == 'audit.app.process.terminate_instance' }
expect(process_event.values).to include({
type: 'audit.app.process.terminate_instance',
actee: app_model.guid,
actee_type: 'app',
actee_name: 'my_app',
actor: developer.guid,
actor_type: 'user',
actor_username: user_name,
space_guid: space.guid,
organization_guid: space.organization.guid
})
expect(process_event.metadata).to eq({
'process_guid' => process.guid,
'process_type' => 'web',
'process_index' => 0
})
end
end
end
| 36.100822 | 158 | 0.525911 |
ac77cbc56305d534d4eec259be48d48b64058c95 | 653 | # Loads a component with custom CSS, to make sure that also dynamically loaded components get the correct CSS applied
# Netzke component that lazily loads ComponentWithCustomCss on demand,
# proving that dynamically loaded components also receive their custom CSS.
class LoaderOfComponentWithCustomCss < Netzke::Base
# Child component declaration; instantiated only when requested client-side.
component :component_with_custom_css do |c|
c.klass = ComponentWithCustomCss
end
# Toolbar action that triggers the client-side load handler below.
action :load_component_with_custom_css
def configure(c)
super
c.title = "LoaderOfComponentWithCustomCss"
c.layout = :fit
c.bbar = [:load_component_with_custom_css]
end
# Client-side handler: fetches the child component and renders it into self.
js_configure do |c|
c.on_load_component_with_custom_css = <<-JS
function(params){
this.netzkeLoadComponent({name: 'component_with_custom_css', container: this});
}
JS
end
end
| 27.208333 | 117 | 0.742726 |
33ad09eb21288445afb396482e251c63697c33e0 | 1,595 | # Rake task "requires" this.
# Idempotently registers a FarmBot OS release (one per version/platform/
# channel) and, on first creation, mirrors its firmware/image assets.
module Releases
class Create < Mutations::Command
required do
# "https://github.com/FarmBot/farmbot_os/releases/download/v11.0.1/farmbot-rpi3-11.0.1.fw"
string :image_url
string :version, matches: Release::VERSION_STORAGE_FORMAT
string :platform, in: Release::PLATFORMS
string :channel, in: Release::CHANNEL # "stable"
end
def execute
# * Should be able to run this multiple times
# * Should not create duplicate
# image_url is excluded from the lookup key so re-runs with a different
# asset URL still match the existing release row.
release = Release.where(inputs.except(:image_url)).first_or_initialize
process_images(release) if release.new_record?
release
end
private
# Copy the file from Github to Google Cloud Storage.
def process_images(release)
release.update!(image_url: maybe_transload(image_url),
dot_img_url: maybe_transload(dot_img_url))
end
# NOTE: FarmBot, Inc. currently follows a naming
# convention when transferring file assets from Github.
# There is an expectation that the URL to the *.fw and *.img
# files on Github are identical, excluding the file extension.
# Example of acceptable URLs:
# https://github.com/FarmBot/farmbot_os/releases/farmbot-rpi3-1.2.3.fw
# https://github.com/FarmBot/farmbot_os/releases/farmbot-rpi3-1.2.3.img
# If the URL convention changes, this method must be updated.
# -RC 5 NOV 2020
def dot_img_url
@dot_img_url ||= image_url.sub(/\.fw\z/, ".img")
end
# Mirrors +url+ into GCS only when a bucket is configured (e.g. production);
# otherwise the original Github URL is used verbatim.
def maybe_transload(url)
ENV["GCS_BUCKET"] ? Release.transload(url) : url
end
end
end
| 32.55102 | 96 | 0.682132 |
bb2625c90401475e524969a4ec1bb22819ed5ffa | 1,147 | class User < ApplicationRecord
attr_reader :password
validates :username, :email, :password_digest, :session_token, presence: true
validates :password, length: {minimum: 6, allow_nil: true}
validates :username, :email , uniqueness: true
after_initialize :ensure_session_token
has_many :trips
has_many :activities,
through: :trips,
source: :activities
# Looks up the account for +email+ and verifies +password+ against the
# stored BCrypt digest. Returns the User on success, nil otherwise
# (unknown email or wrong password are indistinguishable to the caller).
def self.find_by_credentials(email, password)
candidate = User.find_by(email: email)
return nil unless candidate
candidate.is_password?(password) ? candidate : nil
end
# Produces a fresh, URL-safe random session token (16 bytes of entropy,
# 22 characters once base64-encoded without padding).
def self.generate_session_token
SecureRandom.urlsafe_base64(16)
end
# Rotates the session token (logging the user out everywhere else),
# persists it immediately, and returns the new token.
def reset_session_token!
self.session_token = User.generate_session_token
save!
session_token
end
# Stores the plaintext in memory for validation purposes only, and
# persists a BCrypt digest of it in password_digest.
def password=(new_password)
@password = new_password
self.password_digest = BCrypt::Password.create(new_password)
end
# True when +candidate+ matches the stored BCrypt digest. BCrypt::Password#==
# performs the constant-time hash comparison.
def is_password?(candidate)
digest = BCrypt::Password.new(password_digest)
digest == candidate
end
private
# after_initialize hook: guarantees every User instance carries a session
# token before first save (||= keeps an existing token intact).
def ensure_session_token
self.session_token ||= User.generate_session_token
end
end | 24.934783 | 81 | 0.677419 |
03dc752702e2c3478d70608e27cb0533ec52373f | 32 | for i in 0..5
puts "#{i}"
end | 10.666667 | 14 | 0.5 |
e2b6a03521d54987697d03cc489b42d04808f223 | 969 | gem_name = ARGV[0]
# Publishes <gem_name>-<VERSION>.gem to rubygems.org.
# Inputs: ARGV[0] = gem name (set above); ENV: GEM_HOST_API_KEY, VERSION,
# RUBYGEMS_OTP. A "version already published" rejection is treated as
# success so CI re-runs stay idempotent.
raise "gem name sans version must be supplied" if gem_name.to_s == ""
# NOTE(review): api_key is validated but never passed on the command line;
# presumably the gem CLI reads GEM_HOST_API_KEY from the environment itself
# — confirm that is the intent.
api_key = ENV["GEM_HOST_API_KEY"]
raise "GEM_HOST_API_KEY must be set" if api_key.to_s == ""
version = ENV["VERSION"]
raise "VERSION environment must be set" if version.to_s == ""
gem_filename = "#{gem_name}-#{version}.gem"
raise "#{gem_filename} is missing!" unless File.exist?(gem_filename)
otp = ENV["RUBYGEMS_OTP"]
raise "RUBYGEMS_OTP environment must be set" if otp.to_s == ""
# Typo fixed: "Publshing" -> "Publishing".
puts "Publishing the #{gem_filename} file..."
cmd = "gem push --otp #{otp} #{gem_filename}"
puts "executing: #{cmd}"
result = `#{cmd}`
# Process::Status#success? is the idiomatic exit-status check
# (was: $?.to_i.zero?).
if $?.success?
puts "#{gem_filename} successfully pushed to rubygems.org!"
elsif result =~ /Repushing of gem versions is not allowed/
puts "Pushing #{gem_filename} skipped because this version is already published to rubygems.org!"
exit 0
else
puts "#{gem_filename} failed to push to rubygems.org!"
puts result
exit 1
end
| 29.363636 | 101 | 0.701754 |
386a7bf2c6085d9a297774ead7592203503c0b1a | 428 | # frozen_string_literal: true
# Day 1: Sonar Sweep
# Sum every three inputs in array
# Returns the sliding-window sums of each run of three consecutive
# depths (Advent of Code 2021 day 1, part 2 preprocessing).
# Inputs shorter than three elements yield an empty array.
def sum_three(input)
input.each_cons(3).map(&:sum)
end
# Counts how many measurements are strictly larger than the one before
# them (Advent of Code 2021 day 1, part 1).
def sonar_sweep(input)
input.each_cons(2).count { |previous, current| current > previous }
end
# Read the puzzle input named after this script (foo.rb -> ./foo.in),
# one integer depth per line, and print the part-2 answer.
filename = $PROGRAM_NAME.gsub('.rb', '')
# Fixed broken interpolation: the path previously used "#(...)", which is
# literal text (not #{} interpolation), so +filename+ was never used and
# the input file could never be found.
depthmap = File.readlines("./#{filename}.in", chomp: true).map!(&:to_i)
puts sonar_sweep(sum_three(depthmap))
| 22.526316 | 71 | 0.675234 |
1c8c367cb22de132fe0abb81832c0bda34c06967 | 294 | # Implement a caesar cipher that takes in a string and
# the shift factor and then outputs the modified string:
#
# Fixed: the original shifted every character's codepoint without wrapping,
# so e.g. "xyz" + 3 produced "{|}" and punctuation was mangled. A Caesar
# cipher must rotate within the alphabet; non-letters pass through.
def caesar_cipher(string, factor)
shifted = string.each_char.map do |char|
case char
when /[a-z]/ then (((char.ord - 'a'.ord + factor) % 26) + 'a'.ord).chr
when /[A-Z]/ then (((char.ord - 'A'.ord + factor) % 26) + 'A'.ord).chr
else char # digits, spaces and punctuation are left untouched
end
end.join
p shifted # print and return the ciphered string (p returns its argument)
end
caesar_cipher("hello", 3)
caesar_cipher("HELLO", 3)
caesar_cipher("HeLlO", 3)
| 26.727273 | 62 | 0.744898 |
e2d14e69ac0e149b374b6c092778e242ab3dbf0a | 12,378 | require File.dirname(__FILE__) + '/../test_helper'
class ResultControllerTest < ActionController::TestCase
fixtures :users, :targets, :results
def test_routing
assert_routing '/my/target/777/results', :controller => 'result', :action => 'target', :target_id => '777'
assert_routing '/my/result/91', :controller => 'result', :action => 'show', :id => '91'
assert_routing '/target/777/new/result', :controller => 'result', :action => 'new', :target_id => '777'
assert_routing '/target/777/create/result', :controller => 'result', :action => 'create', :target_id => '777'
assert_routing '/edit/result/91', :controller => 'result', :action => 'edit', :id => '91'
assert_routing '/update/result/91', :controller => 'result', :action => 'update', :id => '91'
end
#
# target
#
def test_target
assert !targets(:ryanlowe_mri_head).destroyed?
login_as :ryanlowe
get :target, :target_id => targets(:ryanlowe_mri_head)
assert_response :success
assert_template 'target'
assert_equal targets(:ryanlowe_mri_head), assigns(:target)
end
def test_target_not_allowed
login_as :brixen
get :target, :target_id => targets(:ryanlowe_mri_head)
assert_response :not_found
end
def test_target_not_logged_in_launched
launched true
get :target, :target_id => targets(:ryanlowe_mri_head)
assert_response :redirect
assert_redirected_to login_url
end
def test_target_not_logged_in_not_launched
launched false
get :target, :target_id => targets(:ryanlowe_mri_head)
assert_response :not_found
end
def test_target_invalid_target_id
assert !Target.exists?(999)
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
get :target, :target_id => 999
}
end
def test_target_no_id
login_as :ryanlowe
assert_raises(ActionController::RoutingError) {
get :target
}
end
#
# show
#
def test_show
assert !targets(:ryanlowe_mri_head).destroyed?
assert !results(:ryanlowe_mri_head1).destroyed?
login_as :ryanlowe
get :show, :id => results(:ryanlowe_mri_head1)
assert_response :success
assert_template 'show'
assert_equal targets(:ryanlowe_mri_head), assigns(:target)
assert_equal results(:ryanlowe_mri_head1), assigns(:result)
end
def test_show_not_allowed
login_as :brixen
get :show, :id => results(:ryanlowe_mri_head1)
assert_response :not_found
end
def test_show_not_logged_in_launched
launched true
get :show, :id => results(:ryanlowe_mri_head1)
assert_response :redirect
assert_redirected_to login_url
end
def test_show_not_logged_in_not_launched
launched false
get :show, :id => results(:ryanlowe_mri_head1)
assert_response :not_found
end
def test_show_invalid_id
assert !Result.exists?(999)
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
get :show, :id => 999
}
end
def test_show_no_id
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
get :show
}
end
#
# new
#
def test_new
assert !targets(:ryanlowe_mri_head).destroyed?
login_as :ryanlowe
get :new, :target_id => targets(:ryanlowe_mri_head)
assert_response :success
assert_template 'new'
assert_equal targets(:ryanlowe_mri_head), assigns(:target)
assert assigns(:result).new_record?
end
def test_new_not_allowed
login_as :brixen
get :new, :target_id => targets(:ryanlowe_mri_head)
assert_response :not_found
end
def test_new_not_logged_in_launched
launched true
get :new, :target_id => targets(:ryanlowe_mri_head)
assert_response :redirect
assert_redirected_to login_url
end
def test_new_not_logged_in_not_launched
launched false
get :new, :target_id => targets(:ryanlowe_mri_head)
assert_response :not_found
end
def test_new_invalid_id
assert !Target.exists?(999)
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
get :new, :target_id => 999
}
end
def test_new_no_id
login_as :ryanlowe
assert_raises(ActionController::RoutingError) {
get :new
}
end
#
# create
#
def test_create
target_count = Target.count
result_count = Result.count
login_as :ryanlowe
post :create, :target_id => targets(:ryanlowe_mri_head), :result => { :log => "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors" }
assert_equal target_count, Target.count
assert_equal result_count+1, Result.count
r = Result.last
assert_response :redirect
assert_redirected_to :action => 'show', :id => r
assert_equal users(:ryanlowe), r.creator
assert_equal targets(:ryanlowe_mri_head), r.target
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", r.log
end
def test_create_not_target_creator
target_count = Target.count
result_count = Result.count
login_as :brixen
post :create, :target_id => targets(:ryanlowe_mri_head), :result => { :log => "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors" }
assert_equal target_count, Target.count
assert_equal result_count, Result.count
assert_response :not_found
end
def test_create_error
result_count = Result.count
login_as :ryanlowe
post :create, :target_id => targets(:ryanlowe_mri_head), :result => { :log => "" }
assert_equal result_count, Result.count
assert_response :success
assert_template 'new'
end
def test_create_get
result_count = Result.count
login_as :ryanlowe
get :create, :target_id => targets(:ryanlowe_mri_head), :result => { :log => "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors" }
assert_equal result_count, Result.count
assert_response :redirect
assert_redirected_to front_url
end
def test_create_not_logged_in_launched
launched true
post :create, :target_id => targets(:ryanlowe_mri_head), :result => { :log => "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors" }
assert_response :redirect
assert_redirected_to login_url
end
def test_create_not_logged_in_not_launched
launched false
post :create, :target_id => targets(:ryanlowe_mri_head), :result => { :log => "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors" }
assert_response :not_found
end
def test_create_no_target_id
result_count = Result.count
login_as :ryanlowe
assert_raises(ActionController::RoutingError) {
post :create, :result => { :log => "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors" }
}
assert_equal result_count, Result.count
end
def test_create_no_result
result_count = Result.count
login_as :ryanlowe
post :create, :target_id => targets(:ryanlowe_mri_head)
assert_equal result_count, Result.count
assert_response :success
assert_template 'new'
end
#
# edit
#
def test_edit
assert !targets(:ryanlowe_mri_head).destroyed?
assert !results(:ryanlowe_mri_head1).destroyed?
login_as :ryanlowe
get :edit, :id => results(:ryanlowe_mri_head1)
assert_response :success
assert_template 'edit'
assert_equal targets(:ryanlowe_mri_head), assigns(:target)
assert_equal results(:ryanlowe_mri_head1), assigns(:result)
end
def test_edit_not_allowed
login_as :brixen
get :edit, :id => results(:ryanlowe_mri_head1)
assert_response :not_found
end
def test_edit_not_logged_in_launched
launched true
get :edit, :id => results(:ryanlowe_mri_head1)
assert_response :redirect
assert_redirected_to login_url
end
def test_edit_not_logged_in_not_launched
launched false
get :edit, :id => results(:ryanlowe_mri_head1)
assert_response :not_found
end
def test_edit_invalid_id
assert !Result.exists?(999)
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
get :edit, :id => 999
}
end
def test_edit_no_id
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
get :edit
}
end
#
# update
#
def test_update
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
login_as :ryanlowe
post :update, :id => results(:ryanlowe_mri_head1), :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
assert_response :redirect
assert_redirected_to :action => 'show', :id => results(:ryanlowe_mri_head1)
results(:ryanlowe_mri_head1).reload
assert_equal "0 files, 0 examples, 0 expectations, 0 failures, 0 errors", results(:ryanlowe_mri_head1).log
end
def test_update_error
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
login_as :ryanlowe
post :update, :id => results(:ryanlowe_mri_head1), :result => { :log => "" }
assert_response :success
assert_template 'edit'
results(:ryanlowe_mri_head1).reload
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
end
def test_update_get
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
login_as :ryanlowe
get :update, :id => results(:ryanlowe_mri_head1), :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
assert_response :redirect
assert_redirected_to front_url
results(:ryanlowe_mri_head1).reload
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
end
def test_update_not_allowed
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
login_as :jonny
post :update, :id => results(:ryanlowe_mri_head1), :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
assert_response :not_found
results(:ryanlowe_mri_head1).reload
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
end
def test_update_not_logged_in_launched
launched true
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
post :update, :id => results(:ryanlowe_mri_head1), :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
assert_response :redirect
assert_redirected_to login_url
results(:ryanlowe_mri_head1).reload
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
end
def test_update_not_logged_in_not_launched
launched false
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
post :update, :id => results(:ryanlowe_mri_head1), :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
assert_response :not_found
results(:ryanlowe_mri_head1).reload
assert_equal "2487 files, 9169 examples, 30818 expectations, 13 failures, 17 errors", results(:ryanlowe_mri_head1).log
end
def test_update_invalid_id
assert !Target.exists?(999)
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
post :update, :id => 999, :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
}
end
def test_update_no_id
login_as :ryanlowe
assert_raises(ActiveRecord::RecordNotFound) {
post :update, :result => { :log => "0 files, 0 examples, 0 expectations, 0 failures, 0 errors" }
}
end
end
| 27.506667 | 155 | 0.685571 |
28ad71aa932353122c57a8e686465ae5ed514eb5 | 25,543 | # -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
module DataWrangler
module Model
class PubchemCompound < Compound
SOURCE = "PubChem".freeze
EUTILS_URL = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils'.freeze
PUG_URL = 'https://pubchem.ncbi.nlm.nih.gov/rest/pug'.freeze
WEBSCRAPE_URL = 'https://pubchem.ncbi.nlm.nih.gov/compound/'.freeze
PUG_XREF_URL = '/xrefs/PubMedID,TaxonomyID/XML'.freeze
PUG_SDF_URL_1 = 'https://pubchem.ncbi.nlm.nih.gov/rest/pug/compound/cid/'.freeze
PUG_SDF_URL_2 = '/record/SDF/?record_type=3d&response_type=save&response_basename=Structure3D_CID_'.freeze
PATENTS_URL = 'https://pubchem.ncbi.nlm.nih.gov/search/#collection=patents&query_type=structure&concise_view=false&filters=false&query_subtype=identity&query='.freeze
# Builds a PubChem-backed compound. A real CID is recorded as the
# pubchem_id identifier; the "UNKNOWN" sentinel leaves it unset (used by
# lookup helpers to return an empty/invalid placeholder compound).
def initialize(id = "UNKNOWN")
super(id, SOURCE)
@identifiers.pubchem_id = id unless id == "UNKNOWN"
end
# Populates this compound from PubChem in two phases, each attempted once
# (the while/tries scaffolding allows raising the retry budget later):
#   1. E-utilities esummary — identifiers, structures, synonyms, pharm actions.
#   2. PUG-View full record — descriptions, uses, classifications, references.
# Failures are logged to stderr and leave the compound partially filled;
# valid! is called regardless at the end.
def parse
success = false
tries = 0
while !success and tries < 1
begin
data = Nokogiri::XML(open("#{EUTILS_URL}/esummary.fcgi?db=pccompound&id=#{self.identifiers.pubchem_id}"))
data.remove_namespaces!
data = data.at_xpath('/eSummaryResult/DocSum')
self.identifiers.pubchem_id = data.at_xpath("Id").try(:content)
self.identifiers.name = data.at_xpath("Item[@Name='Record Title']").try(:content)
# NOTE(review): the trailing "||" makes the NEXT line's iupac_name
# assignment part of this condition — almost certainly a typo (compare
# the correct duplicate of this fallback a few lines below). Confirm and
# fix separately.
self.identifiers.name = data.xpath("Item[@Name='SynonymList']/Item")[0].try(:content) if self.identifiers.name.nil? ||
self.identifiers.iupac_name = data.at_xpath("Item[@Name='IUPACName']").try(:content)
self.structures.inchi = data.at_xpath("Item[@Name='InChI']").try(:content)
self.structures.inchikey = data.at_xpath("Item[@Name='InChIKey']").try(:content)
self.identifiers.name = data.at_xpath("Item[@Name='Record Title']").try(:content)
# Fall back to the first synonym, then the IUPAC name, whenever the
# record title is missing or is just the InChIKey itself.
self.identifiers.name = data.xpath("Item[@Name='SynonymList']/Item")[0].try(:content) if self.identifiers.name.nil? || self.identifiers.name == self.structures.inchikey
self.identifiers.name = self.identifiers.iupac_name if self.identifiers.name.nil? || self.identifiers.name == self.structures.inchikey
self.structures.inchikey = 'InChIKey=' + self.structures.inchikey if !self.structures.inchikey.nil?
self.structures.smiles = data.at_xpath("Item[@Name='CanonicalSmiles']").try(:content)
self.properties.molecular_weight = data.at_xpath("Item[@Name='MolecularWeight']").try(:content)
# MeSH terms become synonyms, filtered to mostly-alphabetic strings.
data.xpath("Item[@Name='MeSHTermList']/Item").each do |synonym|
add_synonym(synonym.content, "MeSH") if is_proper_synonym?(synonym.content)
end
# An "HSDB <id>" synonym doubles as the HSDB cross-reference.
data.xpath("Item[@Name='SynonymList']/Item").each do |synonym|
if synonym.content =~ /HSDB (.*)/
self.identifiers.hsdb_id = $1
end
end
data.xpath("Item[@Name='PharmActionList']/Item").each do |pharm_action|
p_action = DataModel.new(pharm_action.content,SOURCE)
self.pharmacology_actions.push(p_action)
end
scrape_html
parse_sdf
success = true
data = nil
GC.start
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} #{e.backtrace}"
tries += 1
#
end
end
success = false
tries = 0
while !success && tries < 1
begin
# Phase 2: PUG-View record fetched as one sanitized UTF-8 string; the
# parse_* helpers below each regex-scrape their own section out of it.
data = Nokogiri::XML(open("https://pubchem.ncbi.nlm.nih.gov/rest/pug_view/data/compound/#{self.identifiers.pubchem_id}/XML"))
data = data.to_s.encode!('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
parse_industrial_uses(data)
parse_description(data)
parse_similar_structures
parse_image
parse_manufacturing(data)
parse_mesh_classification(data)
parse_ICSC(data)
parse_GHS_classification(data)
parse_references
parse_experimental_properties(data)
#parse_patents
success = true
data = nil
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} PUG VIEW Scraping"
tries += 1
#
end
end
self.valid!
end
# Fetches PubMed cross-references for this CID via PUG and appends one
# ReferenceModel (with a pubmed.ncbi link) per PubMedID. Errors are
# logged and leave self.references unchanged.
def parse_references
begin
query = Nokogiri::XML(open("#{PUG_URL}/compound/cid/#{self.identifiers.pubchem_id}#{PUG_XREF_URL}")).remove_namespaces!
refs = query.xpath("//PubMedID")
refs.each do |ref|
r = ReferenceModel.new
r.pubmed_id = ref.text
r.link = "https://www.ncbi.nlm.nih.gov/pubmed/?term=#{r.pubmed_id}"
r.source = SOURCE
self.references.push(r)
end
data = nil
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse_references #{e.message} References XML parsing error"
end
end
# Resolves +name+ (Greek letters transliterated first) to CIDs via PUG,
# loads them, and returns the first valid compound — or a blank instance
# when nothing matches or the lookup fails.
# Fix: removed the unreachable `data = nil` that sat after the returns.
def self.get_by_name(name)
begin
name = fix_names(name)
data = Nokogiri::XML(open("#{PUG_URL}/compound/name/#{URI::encode(name)}/cids/XML"))
data.remove_namespaces!
pubchem_ids = data.xpath("/IdentifierList/CID").map(&:content)
compounds = self.get_by_ids(pubchem_ids).select(&:valid?)
compounds.first || self.new
# NOTE(review): rescuing Exception (not StandardError) is deliberate here
# to match the rest of this class, but it also swallows interrupts.
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.name #{e.message} #{e.backtrace}"
self.new
end
end
# Resolves a PubChem substance id (SID) to a compound: prefers the linked
# CID, falls back to a KEGG cross-reference, returns nil when the record
# links to neither, and a blank instance when the fetch itself fails.
# Fix: removed the unreachable `data = nil` that sat after the returns.
def self.get_by_substance_id(sid)
begin
query = "#{PUG_URL}/substance/sid/#{URI::encode(sid)}/XML"
data = open(query) { |io| io.read }
if data =~ /<PC-CompoundType_id_cid>(.*?)</
self.get_by_id($1)
elsif data =~ /<Object-id_str>(.*?)</
Model::KeggCompound.get_by_id($1)
end
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.get_by_substance_id #{e.message} #{e.backtrace}"
self.new
end
end
# Looks up all CIDs matching +inchikey+ (the "InChIKey=" prefix is
# stripped for the URL) and loads the numerically smallest one — the
# oldest/canonical PubChem record. Returns get_by_id(nil)'s result when
# nothing was found or the request failed.
def self.get_by_inchikey(inchikey)
results = []
begin
open("#{PUG_URL}/compound/inchikey/#{inchikey.sub("InChIKey=",'')}/cids/TXT").each_line do |line|
results.push line.to_i if line.to_i > 0
end
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.get_by_inchikey #{e.message} #{e.backtrace}"
end
self.get_by_id(results.empty? ? nil : results.sort.first.to_s)
end
# Returns a compound carrying only its PubMed references. When +inchikey+
# is supplied it is resolved to a CID first (bailing out with an empty
# Compound when resolution fails); otherwise +id+ is used directly.
def self.get_references(id, inchikey=nil)
if inchikey.present?
compound = self.get_by_inchikey(inchikey)
if compound.nil?
return Compound.new
end
# Re-wrap so the fetch below runs against the resolved CID.
compound = self.new(compound.identifiers.pubchem_id)
else
compound = self.new(id)
end
compound.parse_references
return compound
end
protected
# Transliterates Greek letters to their spelled-out English names so
# PUG name lookups succeed (PubChem expects e.g. "alpha", not "α").
# Non-mutating: always returns a new string.
def self.fix_names(orig_name)
replacements = {
'ω' => 'omega',
'ε' => 'epsilon',
'δ' => 'delta',
'Δ' => 'delta',
'γ' => 'gamma',
'β' => 'beta',
'α' => 'alpha'
}
orig_name.gsub(Regexp.union(replacements.keys), replacements)
end
# True when +synonym+ contains strictly more non-digit characters than
# digits — used to reject registry-number-like strings (e.g. "50-78-2")
# from being stored as human-readable synonyms.
# Fix: the original relied on ActiveSupport's String#remove; stdlib
# String#delete does the same job with no framework dependency.
def is_proper_synonym?(synonym)
digits = synonym.delete('^0-9')
non_digits = synonym.delete('0-9')
digits.size < non_digits.size
end
# Disabled stub: HTML scraping of the compound page was abandoned (the
# page is JS-rendered, so static fetches return no useful content). The
# commented code is kept as a record of the attempted approaches; the
# method is still called from #parse and is intentionally a no-op.
def scrape_html
# data = nil
# # html = Nokogiri::XML(open("#{WEBSCRAPE_URL}#{self.identifiers.pubchem_id}#section=Identification"))
# # open("#{WEBSCRAPE_URL}#{self.identifiers.pubchem_id}#section=Identification") {|io| data = io.read}
# # html.remove_namespaces
# html = Nokogiri::HTML(open("#{WEBSCRAPE_URL}#{self.identifiers.pubchem_id}"))
# inline = html.xpath('//script[not(@src)]').map(&:text)
# puts inline
end
# Extracts the values from a PUG-View table fragment: every
# <StringValueList> entry when the fragment holds a list, otherwise the
# single <StringValue>. Values are returned downcased, in document order.
# Raises (NoMethodError on nil) when the fragment has neither tag —
# matching the original behavior, callers rescue around this.
def parse_table(datum)
segments = datum.split("<StringValueList>")
if segments.length > 1
segments.drop(1).map do |segment|
segment.split("</StringValueList>").first.downcase
end
else
single = segments.first.split("<StringValue>")[1]
[single.split("</StringValue>").first.downcase]
end
end
# Scrapes the "ICSC Number" section out of the PUG-View blob and stores
# the last <StringValue> found as the ICSC identifier. One attempt only;
# a missing section just logs a warning (self.identifiers.icsc_id stays nil).
def parse_ICSC(data)
success = false
tries = 0
while !success && tries < 1
begin
icsc = data.to_s.match(/<Section>[\s]*?<TOCHeading>ICSC Number<\/TOCHeading>[\s\S]*?(<\/Section>)/)
icsc = icsc.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/)[-1]
icsc = icsc.gsub("<StringValue>","")
icsc = icsc.gsub("</StringValue>","")
self.identifiers.icsc_id = icsc
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No ICSC"
tries += 1
#
end
end
end
# Stores the PubChem structure-image URL (300x300 render of this CID).
# Fix: the original wrapped this single string interpolation in a
# while/begin/rescue retry loop that could never be exercised; it is now
# a plain assignment. Any accessor error still surfaces to #parse's
# rescue, exactly as before.
def parse_image
self.image = "https://pubchem.ncbi.nlm.nih.gov/image/imagefly.cgi?cid=#{self.identifiers.pubchem_id}&width=300&height=300"
end
# Collects PubChem "similar compounds" for this CID: scrapes the Entrez
# related-compounds page for UIDs, then esummary-fetches each one to build
# a hash (InChI Key, id, link, image, Source) pushed onto
# self.similar_structures. Self-references and unknown UIDs are skipped.
def parse_similar_structures
success = false
tries = 0
while !success && tries < 1
begin
if self.identifiers.pubchem_id != nil
data = Nokogiri::XML(open("https://www.ncbi.nlm.nih.gov/pccompound?LinkName=pccompound_pccompound&from_uid=#{self.identifiers.pubchem_id}"))
end
# Related UIDs are carried in hidden form inputs on the results page.
data = data.search('input[@name="EntrezSystem2.PEntrez.Pccompound.Pccompound_ResultsPanel.Pccompound_RVDocSum.uid"]')
uids = Array.new
data.each do |uid|
uids.push(uid['value'])
end
uids.each do |uid|
next if self.identifiers.pubchem_id == uid
next if uid.nil?
data = Nokogiri::XML(open("#{EUTILS_URL}/esummary.fcgi?db=pccompound&id=#{uid}"))
data.remove_namespaces!
data = data.at_xpath('/eSummaryResult/DocSum')
next if data.nil?
item = { "InChI Key" => data.at_xpath("Item[@Name='InChIKey']").try(:content),
"id" => uid,
"link" => "https://pubchem.ncbi.nlm.nih.gov/compound/#{uid}",
"image" => "https://pubchem.ncbi.nlm.nih.gov/image/imagefly.cgi?cid=#{uid}&width=200&height=200",
"info" => nil,
"Name" => nil,
"Source" => "PubChem"}
self.similar_structures.push(item)
end
data = nil
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No Similar Structures"
tries += 1
#
end
end
end
# Pulls every "Record Description" block out of the PUG-View blob,
# strips anchors/markup, and pushes each as a DataModel description —
# except those attributed to reference 22/61 (license-restricted sources).
# NOTE(review): `data.gsub!` returns nil when the blob holds no "\n",
# which would make the following scan raise into #parse's rescue —
# presumably never hit in practice, but worth confirming.
def parse_description(data)
success = false
tries = 0
while !success && tries < 1
begin
data = data.gsub!("\n", "")
descriptions = data.scan(/<Name>Record Description<\/Name>.*?<\/Information>/)
#print descriptions.length
descriptions.each do |description|
base_desc = description
description = description.split("<StringValue>")
next if description.length < 2
description = description[1].split("</StringValue>")
description = description[0]
#puts "PREPOST DESCRIPTION: #{description.html_safe}\n"
description = description.gsub(/<a class=.*?>/,'')
description = description.gsub(/<a href=.*?>/,'')
description = description.gsub(/<\/a>/, '')
description = description.gsub(/</, '')
description = description.gsub(/>/, '')
description = description.gsub(self.identifiers.name.upcase, self.identifiers.name)
#puts "POST DESCRIPTION: #{description.html_safe}\n"
unless (base_desc.include? "<ReferenceNumber>22</ReferenceNumber>") || (base_desc.include? "<ReferenceNumber>61</ReferenceNumber>") || (base_desc.include? "<ReferenceNumber>61</ReferenceNumber>")
self.descriptions.push(DataModel.new(description.html_safe,SOURCE))
end
end
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} #{e.backtrace}"
tries += 1
#
end
end
end
# From the "Use and Manufacturing" section of the PUG-View blob, extracts
# the "Industry Uses" and "Consumer Uses" tables (via parse_table) into
# self.industrial_uses / self.consumer_uses. Missing sections are simply
# skipped; errors only log a warning.
def parse_industrial_uses(data)
success = false
tries = 0
while !success && tries < 1
begin
cutter = data.to_s.split("<TOCHeading>Use and Manufacturing</TOCHeading>")
unless cutter[1].nil?
uses_plus = cutter[1]
unless uses_plus.empty?
industry_plus = uses_plus.split("<TOCHeading>Industry Uses</TOCHeading>")
unless industry_plus[1].nil?
industry = industry_plus[1].split("</Section>")
self.industrial_uses = parse_table(industry[0])
end
end
unless uses_plus.empty?
consumer_plus = uses_plus.split("<TOCHeading>Consumer Uses</TOCHeading>")
unless consumer_plus[1].nil?
consumer = consumer_plus[1].split("</Section>")
self.consumer_uses = parse_table(consumer[0])
end
end
end
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No Industrial/Consumer Uses"
tries += 1
#
end
end
end
def parse_manufacturing(data)
success = false
tries = 0
while !success && tries < 1
begin
manufacturing = data.to_s.match(/<Section>[\s]*?<TOCHeading>Methods of Manufacturing<\/TOCHeading>[\s\S]*?(<\/Section>)/)
diff_strings = manufacturing.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/)
break if diff_strings.length == 0
clean_strings = []
diff_strings.each do |string|
string = string.gsub("<StringValue>","")
string = string.gsub("</StringValue>", "")
string = string.gsub(/<a class=.*?>/,'')
string = string.gsub(/<\/a>/, '')
string = string.gsub(/\/[\s\S]+?[\s]/, "")
string = string.gsub("/","")
clean_strings.push(string) if string.present?
end
self.method_of_manufacturing = clean_strings.max_by(&:length).downcase.capitalize if clean_strings.any?
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No Method of Manufacturing"
tries += 1
#
end
end
end
def parse_mesh_classification(data)
success = false
tries = 0
while !success && tries < 1
begin
mesh = data.to_s.match(/<Section>[\s]*?<TOCHeading>MeSH Pharmacological Classification<\/TOCHeading>[\s\S]*?(<\/Section>)/)
diff_strings = mesh.to_s.scan(/<Information>[\s\S]*?<\/Information>/)
break if diff_strings.length == 0
mesh_types = []
diff_strings.each do |information|
mesh_model = {"name" => nil,
"classification" => nil}
name = information.to_s.scan(/<Name>[\s\S]*?<\/Name>/).first
name.gsub!("<Name>","")
name.gsub!("</Name>","")
string = information.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/).first
string.gsub!("<StringValue>","")
string.gsub!("</StringValue>", "")
string.gsub!(/<a class=.*?>/,'')
string.gsub!(/<\/a>/, '')
string.gsub!(/\/[\s\S]+?[\s]/, "")
string.gsub!("/","")
self.mesh_classifications.push(DataModel.new(string, SOURCE, name))
end
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No MeSH Pharmacological Classifications"
tries += 1
#
end
end
end
def parse_GHS_classification(data)
success = false
tries = 0
while !success && tries < 1
begin
ghs = data.to_s.match(/<Section>[\s]*?<TOCHeading>GHS Classification<\/TOCHeading>[\s\S]*?(<\/Section>)/)
ghs_strings = ghs.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/)
break if ghs_strings.length == 0
best_ghs = ghs_strings.max{|a, b| a.length <=> b.length}
ghs_model = { "Images" => Array.new,
"Signal" => String.new,
"Hazards" => Array.new}
images = best_ghs.scan(/GHS[\d]+/)
images.each do |number|
ghs_model["Images"].push("https://pubchem.ncbi.nlm.nih.gov/images/ghs/#{number}.svg")
end
best_ghs.gsub!("<StringValue>","")
best_ghs.gsub!("</StringValue>", "")
best_ghs.gsub!(/<[\s\S]+?>/,"")
best_ghs.gsub!("GHS Hazard Statements", " ")
best_ghs.gsub!("/","")
hazards = best_ghs.scan(/H[\d]{3}:[\s\S]+?\]/)
signal = best_ghs.scan(/Signal:[\s][A-Za-z]+/).first.split(" ").last
ghs_model["Signal"] = signal
hazards.each do |hazard|
ghs_model["Hazards"].push(hazard)
end
self.ghs_classification = ghs_model
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No GHS Classification"
tries += 1
#
end
end
end
def parse_experimental_properties(data)
success = false
tries = 0
while !success && tries < 1
begin
melting_points = data.to_s.match(/<Section>[\s]*?<TOCHeading>Melting Point<\/TOCHeading>[\s\S]*?(<\/Section>)/)
strings = melting_points.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/)
values = melting_points.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/)
melting_points = Array.new
ok = ["0","1","2","3","4","5","6","7","8","9","-","°","C","F","K"," "]
strings.each do |melt|
melt = melt.gsub("<StringValue>","")
melt = melt.gsub("</StringValue>","")
melt = melt.gsub(/\([\s\S]*?\)/,"")
melt = melt.gsub(" ","")
melt = melt.gsub("deg","°")
#bad = false
#melt.each_char {|d| bad = true if !ok.include? d}
#next if bad
melting_points.push(melt) if melt.present?
end
values.each do |melt|
melt = melt.gsub("<NumValue>","")
melt = melt.gsub("</NumValue>","")
melt = melt.gsub("<ValueUnit>","")
melt = melt.gsub("</ValueUnit>","")
melt = melt.gsub("\n","")
melt = melt.gsub(" ","")
#bad = false
#melt.each_char {|d| bad = true if !ok.include? d}
#next if bad
melting_points.push(melt) if melt.present?
end
no_melt = true
melting_point = ""
while no_melt
melting_points.each do |melt|
melting_point = to_celsius(melt)
no_melt = false if melting_point.present?
end
no_melt = false
end
self.properties.melting_point = melting_point
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No Melting Point Properties"
tries += 1
#
end
end
success = false
tries = 0
while !success && tries < 1
begin
boiling_points = data.to_s.match(/<Section>[\s]*?<TOCHeading>Boiling Point<\/TOCHeading>[\s\S]*?(<\/Section>)/)
strings = boiling_points.to_s.scan(/<StringValue>[\s\S]*?<\/StringValue>/)
values = boiling_points.to_s.scan(/<NumValue>[\s\S]*?<\/ValueUnit>/)
boiling_points = Array.new
ok = ["0","1","2","3","4","5","6","7","8","9","-","°","C","F","K","."]
strings.each do |boil|
boil = boil.gsub("<StringValue>","")
boil = boil.gsub("</StringValue>","")
boil = boil.gsub(/\([\s\S]*?\)/,"")
boil = boil.gsub(" ","")
#bad = false
boil = boil.gsub("deg","°")
#boil.each_char {|d| bad = true if !ok.include? d}
boiling_points.push(boil) if boil.present?
end
values.each do |boil|
boil = boil.gsub("<NumValue>","")
boil = boil.gsub("</NumValue>","")
boil = boil.gsub("<ValueUnit>","")
boil = boil.gsub("</ValueUnit>","")
boil = boil.gsub("\n","")
boil = boil.gsub(" ","")
#bad = false
#boil.each_char {|d| bad = true if !ok.include? d}
#next if bad
boiling_points.push(boil) if boil.present?
end
no_boil = true
boiling_point = ""
while no_boil
boiling_points.each do |boil|
boiling_point = to_celsius(boil)
no_boil = false if boiling_point.present?
end
no_boil = false
end
if self.properties.melting_point.present? && self.properties.boiling_point.present?
if boiling_point.to_f < melting_point.to_f
boiling_point += " (sublimation)"
end
end
self.properties.boiling_point = boiling_point
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No Boiling Point Properties"
tries += 1
#
end
end
get_state
end
def get_state
if self.properties.melting_point.present?
negative = self.properties.melting_point.starts_with?("-")
if negative
self.properties.state= "Liquid"
else
melting_point = self.properties.melting_point.gsub(/[^\d^\.]/, '').to_f
if melting_point < 20
self.properties.state = "Liquid"
elsif melting_point >= 20
self.properties.state = "Solid"
end
end
else
self.properties.state = "N/A"
end
if self.properties.boiling_point.present?
negative = self.properties.boiling_point.starts_with?("-")
boiling_point = self.properties.boiling_point.gsub(/[^\d^\.]/, '').to_f
if negative
self.properties.state = "Gas"
else
if boiling_point < 20
self.properties.state = "Gas"
end
end
else
self.properties.state = "N/A"
end
end
def to_celsius(point)
final = ""
if point.include?("C")
negative = false
negative = true if point.starts_with? ("-")
value = point.match(/[\d]+/)[0]
final += "-" if negative
final += value
final += "°C"
elsif point.include?("K")
negative = false
negative = true if point.starts_with? ("-")
value = point.match(/[\d]+/)[0]
final += "-" if negative
value = value.to_i
value += 273.15
final += value.to_s
final += "°C"
elsif point.include?("F")
negative = false
negative = true if point.starts_with? ("-")
value = point.match(/[\d]+/)[0]
final += "-" if negative
value = value.to_i
value -= 32
value *= 5
value/= 9
final += value.to_s
final += "°C"
end
final
end
def parse_patents
data = nil
success = false
tries = 0
page = 1
while !success && tries < 1
begin
#uri = URI("#{PATENTS_URL}#{self.identifiers.pubchem_id}&page=#{page}")
#res = Net::HTTP.get_response(uri)
body = Nokogiri::HTML(open("#{PATENTS_URL}#{self.identifiers.pubchem_id}&page=#{page}"))
# uri = URI.parse(SEARCH_URL)
# http = Net::HTTP.new(uri.host, uri.port)
# request = Net::HTTP::Post.new(uri.request_uri)
if page == 1
print(body)
end
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} End of Patents"
tries += 1
end
end
end
def parse_sdf
data = nil
success = false
tries = 0
while !success && tries < 1
begin
open(PUG_SDF_URL_1 + self.identifiers.pubchem_id + PUG_SDF_URL_2) { |io| data = io.read }
self.structures.sdf_3d = data
success = true
rescue Exception => e
$stderr.puts "WARNING #{SOURCE}.parse #{e.message} No SDF"
tries += 1
#
end
end
end
end
end
end
| 36.699713 | 209 | 0.542849 |
1acdc9d3343078560fd9d0fabcf2217ff469ba07 | 36 | module R10K
VERSION = '3.3.3'
end
| 9 | 19 | 0.638889 |
f8c05fdd27e8452531a24f11b820270180b3b453 | 2,827 | require 'spec_helper'
# Integration coverage for job-template re-rendering during deploys.
describe 'deploy job template', type: :integration do
  with_reset_sandbox_before_each
  # Changing a manifest property must cause templates to be re-evaluated
  # on the next deploy of the same release.
  it 're-evaluates job templates with new manifest job properties' do
    manifest_hash = Bosh::Spec::Deployments.simple_manifest
    manifest_hash['properties'] = { 'test_property' => 1 }
    deploy_from_scratch(manifest_hash: manifest_hash)
    foobar_vm = director.vm('foobar/0')
    template = foobar_vm.read_job_template('foobar', 'bin/foobar_ctl')
    expect(template).to include('test_property=1')
    manifest_hash['properties'] = { 'test_property' => 2 }
    deploy_simple_manifest(manifest_hash: manifest_hash)
    template = foobar_vm.read_job_template('foobar', 'bin/foobar_ctl')
    expect(template).to include('test_property=2')
  end
  # A recreated VM on a dynamic network must see its newly assigned IP in
  # rendered templates, not the IP from the original deploy.
  it 're-evaluates job templates with new dynamic network configuration' do
    manifest_hash = Bosh::Spec::Deployments.simple_manifest
    manifest_hash['jobs'].first['instances'] = 1
    manifest_hash['jobs'].first['properties'] = { 'network_name' => 'a' }
    cloud_config_hash = Bosh::Spec::Deployments.simple_cloud_config
    cloud_config_hash['networks'].first['type'] = 'dynamic'
    cloud_config_hash['networks'].first['cloud_properties'] = {}
    cloud_config_hash['networks'].first.delete('subnets')
    cloud_config_hash['resource_pools'].first['size'] = 1
    current_sandbox.cpi.commands.make_create_vm_always_use_dynamic_ip('127.0.0.101')
    deploy_from_scratch(cloud_config_hash: cloud_config_hash, manifest_hash: manifest_hash)
    # VM deployed for the first time knows about correct dynamic IP
    template = director.vm('foobar/0').read_job_template('foobar', 'bin/foobar_ctl')
    expect(template).to include('a_ip=127.0.0.101')
    # Force VM recreation
    cloud_config_hash['resource_pools'].first['cloud_properties'] = {'changed' => true}
    upload_cloud_config(cloud_config_hash: cloud_config_hash)
    current_sandbox.cpi.commands.make_create_vm_always_use_dynamic_ip('127.0.0.102')
    deploy_simple_manifest(manifest_hash: manifest_hash)
    # Recreated VM due to the resource pool change knows about correct dynamic IP
    template = director.vm('foobar/0').read_job_template('foobar', 'bin/foobar_ctl')
    expect(template).to include('a_ip=127.0.0.102')
  end
  context 'health monitor' do
    before { current_sandbox.health_monitor_process.start }
    after { current_sandbox.health_monitor_process.stop }
    # The HM log is polled (up to 60s) for the begin/finish alerts that
    # bracket an update deployment.
    it 'creates alerts to mark the start and end of an update deployment' do
      deploy_from_scratch
      waiter.wait(60) do
        expect(health_monitor.read_log).to match(/\[ALERT\] Alert @ .* Begin update deployment for 'simple'/)
      end
      waiter.wait(60) do
        expect(health_monitor.read_log).to match(/\[ALERT\] Alert @ .* Finish update deployment for 'simple'/)
      end
    end
  end
end
| 40.971014 | 110 | 0.73364 |
1d9b19391d9d1cf5288f48acea3ea5674ada1c67 | 383 | cask 'airdroid' do
version '3.6.3.0'
sha256 '3d23d7e711243322ed545badd60e6e1ca40d0ca97cb435fb281c60cbf7ec56c9'
# s3.amazonaws.com/dl.airdroid.com was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/dl.airdroid.com/AirDroid_Desktop_Client_#{version}.dmg"
name 'AirDroid'
homepage 'https://www.airdroid.com/'
app 'AirDroid.app'
end
| 31.916667 | 95 | 0.772846 |
1c7b1e67f969fc08763a6418d8b00be95b779d7a | 1,242 | Pod::Spec.new do |s|
s.name = "Differ"
s.version = "1.4.6"
s.summary = "A very fast difference calculation library written in Swift."
s.homepage = "https://github.com/tonyarnold/Diff"
s.description = <<-DESC
Differ generates the differences between `Collection` instances (this includes Strings!).
It uses a fast algorithm `(O((N+M)*D))` to do this.
Also included are utilities for easily applying diffs and patches to `UICollectionView`/`UITableView`.
DESC
s.license = { :type => "MIT", :file => "LICENSE.md" }
s.authors = {
"Tony Arnold" => "[email protected]"
}
s.source = { :git => "https://github.com/tonyarnold/Differ.git", :tag => "1.4.6" }
s.source_files = "Sources/Differ"
s.platforms = { :ios => "9.0", :osx => "10.12", :tvos => "9.0", :watchos => "4.0" }
s.swift_versions = ['5.4']
s.ios.exclude_files = [
"Sources/Differ/Diff+AppKit.swift"
]
s.osx.exclude_files = [
"Sources/Differ/Diff+UIKit.swift"
]
s.tvos.exclude_files = [
"Sources/Differ/Diff+AppKit.swift"
]
s.watchos.exclude_files = [
"Sources/Differ/Diff+UIKit.swift",
"Sources/Differ/Diff+AppKit.swift",
"Sources/Differ/NestedBatchUpdate.swift"
]
end
| 31.05 | 102 | 0.632045 |
913e78b69aaf423bfa0d0f741dedda1f679b4d66 | 1,737 | #!/usr/bin/env ruby -w
# coding: utf-8
# Copyright (C) 2004-2021 Koichiro Eto, All rights reserved.
# License: BSD 3-Clause License
module SGL
# window functions
def window(*a) $__a__.window(*a); end
def close_window() $__a__.close_window; end
def width() $__a__.width; end
def height() $__a__.height; end
# color functions
def background(*a) $__a__.background(*a); end
def backgroundHSV(*a) $__a__.backgroundHSV(*a); end
def color(*a) $__a__.color(*a); end
def colorHSV(*a) $__a__.colorHSV(*a); end
# get status functions
def mouseX() $__a__.mouseX; end
def mouseY() $__a__.mouseY; end
def mouseDown() $__a__.mouseDown; end
def keynum() $__a__.keynum; end
# callback functions
def setup() end
def onMouseDown(x,y) end
def onMouseUp(x,y) end
def onKeyDown(k) end
def onKeyUp(k) end
def display() end
# mainloop
def mainloop
$__a__.set_setup { setup }
$__a__.set_mousedown {|x, y| onMouseDown(x, y) }
$__a__.set_mouseup {|x, y| onMouseUp(x, y) }
$__a__.set_keydown {|k| onKeyDown(k) }
$__a__.set_keyup {|k| onKeyUp(k) }
$__a__.set_display { display }
$__a__.mainloop
end
# create media object functions
def movie(*a) $__a__.movie(*a); end
def image(*a) $__a__.image(*a); end
def font(*a) $__a__.font(*a); end
def sound(*a) $__a__.sound(*a); end
# draw functions
def point(*a) $__a__.point(*a); end
def lineWidth(*a) $__a__.lineWidth(*a); end
def line(*a) $__a__.line(*a); end
def rect(*a) $__a__.rect(*a); end
def circle(*a) $__a__.circle(*a); end
def rotateZ(*a) $__a__.rotateZ(*a); end
def translate(*a) $__a__.translate(*a); end
def scale(*a) $__a__.scale(*a); end
def reset(*a) $__a__.reset(*a); end
end
| 28.47541 | 60 | 0.649971 |
391a943ff042290e6854bb377e916cf52608c95c | 2,507 | require_relative '../panos_provider'
require 'base64'
# Implementation for the panos_admin type using the Resource API.
# Implementation for the panos_admin type using the Resource API.
class Puppet::Provider::PanosAdmin::PanosAdmin < Puppet::Provider::PanosProvider
  # Normalises an entry read from the device: decodes the Base64-encoded
  # SSH key (stripped first, since trailing whitespace breaks strict
  # decoding) and converts the client_certificate_only flag to a boolean.
  def munge(entry)
    if entry.key?(:ssh_key) && !entry[:ssh_key].nil?
      entry[:ssh_key] = Base64.strict_decode64(entry[:ssh_key].strip)
    end
    entry[:client_certificate_only] = string_to_bool(entry[:client_certificate_only]) if entry.key?(:client_certificate_only)
    entry
  end

  # Rejects credential combinations the firewall does not allow, and
  # requires a role_profile for custom roles. Check order matches the
  # original elsif chain.
  def validate_should(should)
    cert_only = should[:client_certificate_only] == true
    if cert_only && should[:password_hash]
      raise Puppet::ResourceError, 'password_hash should not be configured when client_certificate_only is true'
    end
    if cert_only && should[:authentication_profile]
      raise Puppet::ResourceError, 'authentication_profile should not be configured when client_certificate_only is true'
    end
    if should[:password_hash] && should[:authentication_profile]
      raise Puppet::ResourceError, 'authentication_profile should not be configured when password_hash is configured'
    end
    if should[:role] == 'custom' && !should.key?(:role_profile)
      raise Puppet::ResourceError, 'Role based administrator type missing role_profile'
    end
  end

  # Serialises the desired state into the PAN-OS XML representation.
  # Builder calls are kept in the element order the API expects.
  def xml_from_should(name, should)
    builder = Builder::XmlMarkup.new
    builder.entry('name' => name) do
      # Exactly one authentication mechanism is emitted, in priority order.
      if should[:password_hash]
        builder.phash(should[:password_hash])
      elsif should[:client_certificate_only]
        builder.__send__('client-certificate-only', 'yes')
      elsif should[:authentication_profile]
        builder.__send__('authentication-profile', should[:authentication_profile])
      end
      builder.__send__('public-key', Base64.strict_encode64(should[:ssh_key])) if should[:ssh_key]
      builder.permissions do
        builder.__send__('role-based') do
          if should[:role] == 'custom'
            builder.custom do
              builder.profile(should[:role_profile])
            end
          elsif ['devicereader', 'deviceadmin'].include?(should[:role])
            # These two roles must be emitted as self-closing elements.
            builder.__send__(should[:role])
          else
            builder.__send__(should[:role], 'yes')
          end
        end
      end
    end
  end
end
| 40.435484 | 121 | 0.690467 |
6aaf1d8faf540a6a60562def897bd82add6e60d7 | 2,199 | class UsersController < ApplicationController
before_action :set_user, only: [:show, :edit, :update, :destroy]
before_action :signed_in_user, only: [:edit, :update, :destroy]
before_action :correct_user, only: [:edit, :update, :destroy]
# GET /users
# GET /users.json
def index
@users = User.all
end
# GET /users/1
# GET /users/1.json
def show
#@tweet = current_user.tweets.build if signed_in?
#@feed_items = @user.tweets.paginate(page: params[:page])
@users = User.all
@user_tweets = @user.tweets
end
# GET /users/new
def new
@user = User.new
end
# GET /users/1/edit
def edit
end
# POST /users
# POST /users.json
def create
@user = User.new(user_params)
respond_to do |format|
if @user.save
format.html { redirect_to login_url, notice: 'User was successfully created.' }
format.json { render :show, status: :created, location: @user }
else
format.html { render :new }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /users/1
# PATCH/PUT /users/1.json
def update
respond_to do |format|
if @user.update(user_params)
format.html { redirect_to @user, notice: 'User was successfully updated.' }
format.json { render :show, status: :ok, location: @user }
else
format.html { render :edit }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
# DELETE /users/1
# DELETE /users/1.json
def destroy
@user.destroy
respond_to do |format|
format.html { redirect_to users_url, notice: 'User was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_user
@user = User.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def user_params
params.require(:user).permit(:name, :password, :password_confirmation)
end
def correct_user
redirect_to(signin_url) unless current_user?(@user)
end
end
| 25.870588 | 88 | 0.651205 |
d570adfd594188a76f948c645e382ba30e8e09c2 | 7,340 | require 'zlib'
require 'stringio'
# Builds a single Gradle module, selecting module/plugin configuration
# files for the fixture via environment variables.
When("I build {string} using the {string} bugsnag config") do |module_config, bugsnag_config|
  steps %Q{
    When I set environment variable "MODULE_CONFIG" to "#{module_config}"
    When I set environment variable "BUGSNAG_CONFIG" to "#{bugsnag_config}"
    And I run the script "features/scripts/build_project_module.sh" synchronously
  }
end
When("I build the {string} variantOutput for {string} using the {string} bugsnag config") do |variant, module_config, bugsnag_config|
steps %Q{
When I set environment variable "VARIANT_OUTPUT_NAME" to "#{variant}"
When I set environment variable "MODULE_CONFIG" to "#{module_config}"
When I set environment variable "BUGSNAG_CONFIG" to "#{bugsnag_config}"
And I run the script "features/scripts/upload_variant_mapping.sh" synchronously
}
end
When("I bundle {string} using the {string} bugsnag config") do |module_config, bugsnag_config|
steps %Q{
When I set environment variable "MODULE_CONFIG" to "#{module_config}"
When I set environment variable "BUGSNAG_CONFIG" to "#{bugsnag_config}"
And I run the script "features/scripts/bundle_project_module.sh" synchronously
}
end
When("I bundle the {string} variantOutput for {string} using the {string} bugsnag config") do |variant, module_config, bugsnag_config|
steps %Q{
When I set environment variable "VARIANT_OUTPUT_NAME" to "#{variant}"
When I set environment variable "MODULE_CONFIG" to "#{module_config}"
When I set environment variable "BUGSNAG_CONFIG" to "#{bugsnag_config}"
And I run the script "features/scripts/bundle_one_flavor.sh" synchronously
}
end
When("I build the React Native app") do
steps %Q{
And I run the script "features/scripts/build_react_native_app.sh" synchronously
}
end
When("I build the NDK app") do
steps %Q{
And I run the script "features/scripts/build_ndk_app.sh" synchronously
}
end
When("I set the fixture JVM arguments to {string}") do |jvm_args|
steps %Q{
When I set environment variable "CUSTOM_JVM_ARGS" to "#{jvm_args}"
}
end
# Bundles a module that is expected to fail, and asserts the script
# exited non-zero (blocking: true waits for completion).
When("I build the failing {string} using the {string} bugsnag config") do |module_config, bugsnag_config|
  Runner.environment["MODULE_CONFIG"] = module_config
  Runner.environment["BUGSNAG_CONFIG"] = bugsnag_config
  _, exit_code = Runner.run_script("features/scripts/bundle_project_module.sh", blocking: true)
  assert(exit_code != 0, "Expected script to fail with non-zero exit code, got #{exit_code}")
end
# Asserts the last shell command's exit status.
# Fix: a regex capture arrives as a String, so the original
# assert_equal(exit_code, $?.exitstatus.to_i) compared e.g. "0" to 0 and
# could never pass; coerce the capture instead.
Then(/^the exit code equals (\d+)$/) do |exit_code|
  assert_equal(exit_code.to_i, $?.exitstatus)
end
# Exactly +request_count+ requests must carry 'builderName' (build API),
# match the expectation table, and each have a valid payload shape.
Then('{int} requests are valid for the build API and match the following:') do |request_count, data_table|
  requests = get_requests_with_field('builderName')
  assert_equal(request_count, requests.length, 'Wrong number of build API requests')
  RequestSetAssertions.assert_requests_match requests, data_table
  requests.each do |request|
    valid_build_api?(request[:body])
  end
end
# Same shape as the build-API step, keyed on the 'proguard' part.
Then('{int} requests are valid for the android mapping API and match the following:') do |request_count, data_table|
  requests = get_requests_with_field('proguard')
  assert_equal(request_count, requests.length, 'Wrong number of mapping API requests')
  RequestSetAssertions.assert_requests_match requests, data_table
  requests.each do |request|
    valid_android_mapping_api?(request[:body])
  end
end
# NDK symbol uploads are identified by the 'soSymbolFile' part.
Then('{int} requests are valid for the android NDK mapping API and match the following:') do |request_count, data_table|
  requests = get_requests_with_field('soSymbolFile')
  assert_equal(request_count, requests.length, 'Wrong number of NDK mapping API requests')
  RequestSetAssertions.assert_requests_match requests, data_table
  requests.each do |request|
    valid_android_ndk_mapping_api?(request[:body])
  end
end
# Unity NDK symbol uploads are identified by the 'soSymbolTableFile' part.
Then('{int} requests are valid for the android unity NDK mapping API and match the following:') do |request_count, data_table|
  requests = get_requests_with_field('soSymbolTableFile')
  assert_equal(request_count, requests.length, 'Wrong number of android unity NDK mapping API requests')
  RequestSetAssertions.assert_requests_match requests, data_table
  requests.each do |request|
    valid_android_unity_ndk_mapping_api?(request[:body])
  end
end
# JS source-map uploads are identified by the 'sourceMap' part.
Then('{int} requests are valid for the JS source map API and match the following:') do |request_count, data_table|
  requests = get_requests_with_field('sourceMap')
  assert_equal(request_count, requests.length, 'Wrong number of JS source map API requests')
  RequestSetAssertions.assert_requests_match requests, data_table
  requests.each do |request|
    valid_js_source_map_api?(request[:body])
  end
end
# Gunzips each uploaded 'proguard' part and checks the expected symbols
# appear in the R8 mapping file.
Then('{int} requests have an R8 mapping file with the following symbols:') do |request_count, data_table|
  requests = get_requests_with_field('proguard')
  assert_equal(request_count, requests.length, 'Wrong number of mapping API requests')
  # inflate gzipped proguard mapping file & verify contents
  requests.each do |request|
    valid_android_mapping_api?(request[:body])
    gzipped_part = request[:body]['proguard']
    archive = Zlib::GzipReader.new(StringIO.new(gzipped_part))
    mapping_file_lines = archive.read.split("\n")
    valid_r8_mapping_contents?(mapping_file_lines, data_table.rows)
  end
end
# Checks that each expected symbol ("Foo ->") has a mapping-file entry.
# NOTE(review): `one?` demands *exactly one* matching line per symbol, so
# a legitimately duplicated entry would fail with the "No entry" message
# below — confirm whether `any?` was intended.
def valid_r8_mapping_contents?(mapping_file_lines, expected_entries)
  # validates that the mapping file key is present for each symbol,
  # obfuscated values are not validated as they vary depending on AGP's implementation
  expected_entries.each do |row|
    expected_entry = row[0] + " ->"
    has_mapping_entry = mapping_file_lines.one? { |line|
      line.include? expected_entry
    }
    assert_true(has_mapping_entry, "No entry in mapping file for '#{row[0]}'.")
  end
end
# Asserts the common shape of a build API payload: the fixture API key
# plus a set of required non-nil fields (dotted paths are resolved by
# read_key_path). Assertion order matches the original field order.
def valid_build_api?(request_body)
  assert_equal($api_key, read_key_path(request_body, 'apiKey'))
  %w[appVersion builderName sourceControl.revision
     metadata.os_name metadata.os_arch metadata.os_version
     metadata.java_version metadata.gradle_version metadata.git_version].each do |field|
    assert_not_nil(read_key_path(request_body, field))
  end
end
# JVM mapping uploads carry the (gzipped) 'proguard' part on top of the
# common mapping fields.
def valid_android_mapping_api?(request_body)
  valid_mapping_api?(request_body)
  assert_not_nil(request_body['proguard'])
end
# NDK symbol uploads carry the 'soSymbolFile' part.
def valid_android_ndk_mapping_api?(request_body)
  valid_mapping_api?(request_body)
  assert_not_nil(request_body['soSymbolFile'])
end
# Unity NDK symbol uploads carry the 'soSymbolTableFile' part.
def valid_android_unity_ndk_mapping_api?(request_body)
  valid_mapping_api?(request_body)
  assert_not_nil(request_body['soSymbolTableFile'])
end
# Fields shared by every mapping-upload variant: the fixture API key and
# the non-nil app identity fields.
def valid_mapping_api?(request_body)
  assert_equal($api_key, request_body['apiKey'])
  %w[appId versionCode buildUUID versionName].each do |field|
    assert_not_nil(request_body[field])
  end
end
# JS source-map uploads: fixed 'android' platform plus the source map and
# bundle parts.
def valid_js_source_map_api?(request_body)
  assert_equal($api_key, request_body['apiKey'])
  assert_equal('android', request_body['platform'])
  %w[sourceMap bundle].each do |field|
    assert_not_nil(request_body[field])
  end
end
| 39.462366 | 134 | 0.780518 |
914fd554d07063bd9edaaf49503d6bf8b01bc4d8 | 88 | class SponsorshipType < EnumerateIt::Base
associate_values :gold, :silver, :bronze
end | 29.333333 | 42 | 0.795455 |
ac3fe72faeff52f3577f657f96dcde771ef74716 | 41,914 | # frozen_string_literal: true
require "active_support/core_ext/hash/indifferent_access"
require "active_support/core_ext/array/wrap"
require "active_support/core_ext/string/filters"
require "active_support/core_ext/object/to_query"
require "action_dispatch/http/upload"
require "rack/test"
require "stringio"
require "set"
require "yaml"
module ActionController
# Raised when a required parameter is missing.
#
# params = ActionController::Parameters.new(a: {})
# params.fetch(:b)
# # => ActionController::ParameterMissing: param is missing or the value is empty: b
# params.require(:a)
# # => ActionController::ParameterMissing: param is missing or the value is empty: a
class ParameterMissing < KeyError
attr_reader :param, :keys # :nodoc:
def initialize(param, keys = nil) # :nodoc:
@param = param
@keys = keys
super("param is missing or the value is empty: #{param}")
end
class Correction
def initialize(error)
@error = error
end
def corrections
if @error.param && @error.keys
maybe_these = @error.keys
maybe_these.sort_by { |n|
DidYouMean::Jaro.distance(@error.param.to_s, n)
}.reverse.first(4)
else
[]
end
end
end
# We may not have DYM, and DYM might not let us register error handlers
if defined?(DidYouMean) && DidYouMean.respond_to?(:correct_error)
DidYouMean.correct_error(self, Correction)
end
end
# Raised when a supplied parameter is not expected and
# ActionController::Parameters.action_on_unpermitted_parameters
# is set to <tt>:raise</tt>.
#
# params = ActionController::Parameters.new(a: "123", b: "456")
# params.permit(:c)
# # => ActionController::UnpermittedParameters: found unpermitted parameters: :a, :b
class UnpermittedParameters < IndexError
attr_reader :params # :nodoc:
def initialize(params) # :nodoc:
@params = params
super("found unpermitted parameter#{'s' if params.size > 1 }: #{params.map { |e| ":#{e}" }.join(", ")}")
end
end
# Raised when a Parameters instance is not marked as permitted and
# an operation to transform it to hash is called.
#
# params = ActionController::Parameters.new(a: "123", b: "456")
# params.to_h
# # => ActionController::UnfilteredParameters: unable to convert unpermitted parameters to hash
class UnfilteredParameters < ArgumentError
def initialize # :nodoc:
super("unable to convert unpermitted parameters to hash")
end
end
# == Action Controller \Parameters
#
# Allows you to choose which attributes should be permitted for mass updating
# and thus prevent accidentally exposing that which shouldn't be exposed.
# Provides two methods for this purpose: #require and #permit. The former is
# used to mark parameters as required. The latter is used to set the parameter
# as permitted and limit which attributes should be allowed for mass updating.
#
# params = ActionController::Parameters.new({
# person: {
# name: "Francesco",
# age: 22,
# role: "admin"
# }
# })
#
# permitted = params.require(:person).permit(:name, :age)
# permitted # => <ActionController::Parameters {"name"=>"Francesco", "age"=>22} permitted: true>
# permitted.permitted? # => true
#
# Person.first.update!(permitted)
# # => #<Person id: 1, name: "Francesco", age: 22, role: "user">
#
# It provides two options that controls the top-level behavior of new instances:
#
# * +permit_all_parameters+ - If it's +true+, all the parameters will be
# permitted by default. The default is +false+.
# * +action_on_unpermitted_parameters+ - Allow to control the behavior when parameters
# that are not explicitly permitted are found. The values can be +false+ to just filter them
# out, <tt>:log</tt> to additionally write a message on the logger, or <tt>:raise</tt> to raise
# ActionController::UnpermittedParameters exception. The default value is <tt>:log</tt>
# in test and development environments, +false+ otherwise.
#
# Examples:
#
# params = ActionController::Parameters.new
# params.permitted? # => false
#
# ActionController::Parameters.permit_all_parameters = true
#
# params = ActionController::Parameters.new
# params.permitted? # => true
#
# params = ActionController::Parameters.new(a: "123", b: "456")
# params.permit(:c)
# # => <ActionController::Parameters {} permitted: true>
#
# ActionController::Parameters.action_on_unpermitted_parameters = :raise
#
# params = ActionController::Parameters.new(a: "123", b: "456")
# params.permit(:c)
# # => ActionController::UnpermittedParameters: found unpermitted keys: a, b
#
# Please note that these options *are not thread-safe*. In a multi-threaded
# environment they should only be set once at boot-time and never mutated at
# runtime.
#
# You can fetch values of <tt>ActionController::Parameters</tt> using either
# <tt>:key</tt> or <tt>"key"</tt>.
#
# params = ActionController::Parameters.new(key: "value")
# params[:key] # => "value"
# params["key"] # => "value"
class Parameters
cattr_accessor :permit_all_parameters, instance_accessor: false, default: false
cattr_accessor :action_on_unpermitted_parameters, instance_accessor: false
##
# :method: as_json
#
# :call-seq:
# as_json(options=nil)
#
# Returns a hash that can be used as the JSON representation for the parameters.
##
# :method: each_key
#
# :call-seq:
# each_key()
#
# Calls block once for each key in the parameters, passing the key.
# If no block is given, an enumerator is returned instead.
##
# :method: empty?
#
# :call-seq:
# empty?()
#
# Returns true if the parameters have no key/value pairs.
##
# :method: has_key?
#
# :call-seq:
# has_key?(key)
#
# Returns true if the given key is present in the parameters.
##
# :method: has_value?
#
# :call-seq:
# has_value?(value)
#
# Returns true if the given value is present for some key in the parameters.
##
# :method: include?
#
# :call-seq:
# include?(key)
#
# Returns true if the given key is present in the parameters.
##
# :method: key?
#
# :call-seq:
# key?(key)
#
# Returns true if the given key is present in the parameters.
##
# :method: member?
#
# :call-seq:
# member?(key)
#
# Returns true if the given key is present in the parameters.
##
# :method: keys
#
# :call-seq:
# keys()
#
# Returns a new array of the keys of the parameters.
##
# :method: to_s
#
# :call-seq:
# to_s()
#
# Returns the content of the parameters as a string.
##
# :method: value?
#
# :call-seq:
# value?(value)
#
# Returns true if the given value is present for some key in the parameters.
##
# :method: values
#
# :call-seq:
# values()
#
# Returns a new array of the values of the parameters.
delegate :keys, :key?, :has_key?, :member?, :values, :has_value?, :value?, :empty?, :include?,
:as_json, :to_s, :each_key, to: :@parameters
# By default, never raise an UnpermittedParameters exception if these
# params are present. The default includes both 'controller' and 'action'
# because they are added by Rails and should be of no concern. One way
# to change these is to specify `always_permitted_parameters` in your
# config. For instance:
#
# config.action_controller.always_permitted_parameters = %w( controller action format )
cattr_accessor :always_permitted_parameters, default: %w( controller action )
class << self
  # Returns whether the key/value pair looks like a nested-attributes
  # entry: an integer-like key (optionally negative) pointing at a Hash
  # or Parameters value.
  def nested_attribute?(key, value) # :nodoc:
    return false unless /\A-?\d+\z/.match?(key)
    value.is_a?(Hash) || value.is_a?(Parameters)
  end
end
# Returns a new instance of <tt>ActionController::Parameters</tt>.
# Also, sets the +permitted+ attribute to the default value of
# <tt>ActionController::Parameters.permit_all_parameters</tt>.
#
# class Person < ActiveRecord::Base
# end
#
# params = ActionController::Parameters.new(name: "Francesco")
# params.permitted? # => false
# Person.new(params) # => ActiveModel::ForbiddenAttributesError
#
# ActionController::Parameters.permit_all_parameters = true
#
# params = ActionController::Parameters.new(name: "Francesco")
# params.permitted? # => true
# Person.new(params) # => #<Person id: nil, name: "Francesco">
def initialize(parameters = {})
  # Stored with indifferent String/Symbol key access; the permitted flag
  # starts from the class-wide permit_all_parameters setting.
  @parameters = parameters.with_indifferent_access
  @permitted = self.class.permit_all_parameters
end
# Returns true if another +Parameters+ object contains the same content and
# permitted flag.
# Two Parameters are equal when both content and permitted flag match.
# Anything that does not quack like Parameters is compared against the
# raw backing hash instead.
def ==(other)
  return @parameters == other unless other.respond_to?(:permitted?)
  permitted? == other.permitted? && parameters == other.parameters
end
alias eql? ==
# Hash code consistent with #== / #eql?: derived from both the backing
# parameters and the permitted flag.
def hash
  [@parameters.hash, @permitted].hash
end
# Returns a safe <tt>ActiveSupport::HashWithIndifferentAccess</tt>
# representation of the parameters with all unpermitted keys removed.
#
# params = ActionController::Parameters.new({
# name: "Senjougahara Hitagi",
# oddity: "Heavy stone crab"
# })
# params.to_h
# # => ActionController::UnfilteredParameters: unable to convert unpermitted parameters to hash
#
# safe_params = params.permit(:name)
# safe_params.to_h # => {"name"=>"Senjougahara Hitagi"}
# Safe hash conversion: refuses to expose unpermitted parameters.
# Raises UnfilteredParameters unless #permit!/#permit has been called.
def to_h
  raise UnfilteredParameters unless permitted?
  convert_parameters_to_hashes(@parameters, :to_h)
end
# Returns a safe <tt>Hash</tt> representation of the parameters
# with all unpermitted keys removed.
#
# params = ActionController::Parameters.new({
# name: "Senjougahara Hitagi",
# oddity: "Heavy stone crab"
# })
# params.to_hash
# # => ActionController::UnfilteredParameters: unable to convert unpermitted parameters to hash
#
# safe_params = params.permit(:name)
# safe_params.to_hash # => {"name"=>"Senjougahara Hitagi"}
def to_hash
  # Raises UnfilteredParameters (via #to_h) unless permitted; then
  # converts the HashWithIndifferentAccess into a plain Hash.
  to_h.to_hash
end
# Returns a string representation of the receiver suitable for use as a URL
# query string:
#
# params = ActionController::Parameters.new({
# name: "David",
# nationality: "Danish"
# })
# params.to_query
# # => ActionController::UnfilteredParameters: unable to convert unpermitted parameters to hash
#
# safe_params = params.permit(:name, :nationality)
# safe_params.to_query
# # => "name=David&nationality=Danish"
#
# An optional namespace can be passed to enclose key names:
#
# params = ActionController::Parameters.new({
# name: "David",
# nationality: "Danish"
# })
# safe_params = params.permit(:name, :nationality)
# safe_params.to_query("user")
# # => "user%5Bname%5D=David&user%5Bnationality%5D=Danish"
#
# The string pairs "key=value" that conform the query string
# are sorted lexicographically in ascending order.
#
# This method is also aliased as +to_param+.
def to_query(*args)
  # Delegates to Hash#to_query after the permitted-only #to_h conversion,
  # so unpermitted parameters raise here as well.
  to_h.to_query(*args)
end
alias_method :to_param, :to_query
# Returns an unsafe, unfiltered
# <tt>ActiveSupport::HashWithIndifferentAccess</tt> representation of the
# parameters.
#
# params = ActionController::Parameters.new({
# name: "Senjougahara Hitagi",
# oddity: "Heavy stone crab"
# })
# params.to_unsafe_h
# # => {"name"=>"Senjougahara Hitagi", "oddity" => "Heavy stone crab"}
def to_unsafe_h
  # Deliberately skips the permitted? check: returns everything,
  # including unpermitted keys. Use with care.
  convert_parameters_to_hashes(@parameters, :to_unsafe_h)
end
alias_method :to_unsafe_hash, :to_unsafe_h
# Convert all hashes in values into parameters, then yield each pair in
# the same way as <tt>Hash#each_pair</tt>.
def each_pair(&block)
  return to_enum(__callee__) unless block_given?
  @parameters.each_pair do |key, value|
    # convert_hashes_to_parameters memoizes the converted value back into
    # @parameters, so nested hashes are wrapped lazily as they are visited.
    # The pair is yielded as a two-element array, which destructures into
    # |key, value| blocks like Hash#each_pair.
    yield [key, convert_hashes_to_parameters(key, value)]
  end
  self
end
alias_method :each, :each_pair
# Convert all hashes in values into parameters, then yield each value in
# the same way as <tt>Hash#each_value</tt>.
def each_value(&block)
  return to_enum(:each_value) unless block_given?
  # Iterates pairs (not values) so the converted value can be memoized
  # under its key by convert_hashes_to_parameters.
  @parameters.each_pair do |key, value|
    yield convert_hashes_to_parameters(key, value)
  end
  self
end
# Attribute that keeps track of converted arrays, if any, to avoid double
# looping in the common use case permit + mass-assignment. Defined in a
# method to instantiate it only if needed.
#
# Testing membership still loops, but it's going to be faster than our own
# loop that converts values. Also, we are not going to build a new array
# object per fetch.
def converted_arrays
  # Lazily-created cache of already-converted arrays; Set membership is
  # value-based (eql?/hash), see comment above.
  @converted_arrays ||= Set.new
end
# Returns +true+ if the parameter is permitted, +false+ otherwise.
#
# params = ActionController::Parameters.new
# params.permitted? # => false
# params.permit!
# params.permitted? # => true
# Reader for the flag toggled by #permit!/#permit (initialized from
# permit_all_parameters at construction time).
def permitted?
  @permitted
end
# Sets the +permitted+ attribute to +true+. This can be used to pass
# mass assignment. Returns +self+.
#
# class Person < ActiveRecord::Base
# end
#
# params = ActionController::Parameters.new(name: "Francesco")
# params.permitted? # => false
# Person.new(params) # => ActiveModel::ForbiddenAttributesError
# params.permit!
# params.permitted? # => true
# Person.new(params) # => #<Person id: nil, name: "Francesco">
# Flips the permitted flag on this instance and, recursively, on every
# Parameters instance reachable through the values (including those
# nested inside arrays). Returns self.
def permit!
  each_pair do |_key, value|
    Array.wrap(value).flatten.each do |element|
      element.permit! if element.respond_to?(:permit!)
    end
  end
  @permitted = true
  self
end
# This method accepts both a single key and an array of keys.
#
# When passed a single key, if it exists and its associated value is
# either present or the singleton +false+, returns said value:
#
# ActionController::Parameters.new(person: { name: "Francesco" }).require(:person)
# # => <ActionController::Parameters {"name"=>"Francesco"} permitted: false>
#
# Otherwise raises <tt>ActionController::ParameterMissing</tt>:
#
# ActionController::Parameters.new.require(:person)
# # ActionController::ParameterMissing: param is missing or the value is empty: person
#
# ActionController::Parameters.new(person: nil).require(:person)
# # ActionController::ParameterMissing: param is missing or the value is empty: person
#
# ActionController::Parameters.new(person: "\t").require(:person)
# # ActionController::ParameterMissing: param is missing or the value is empty: person
#
# ActionController::Parameters.new(person: {}).require(:person)
# # ActionController::ParameterMissing: param is missing or the value is empty: person
#
# When given an array of keys, the method tries to require each one of them
# in order. If it succeeds, an array with the respective return values is
# returned:
#
# params = ActionController::Parameters.new(user: { ... }, profile: { ... })
# user_params, profile_params = params.require([:user, :profile])
#
# Otherwise, the method re-raises the first exception found:
#
# params = ActionController::Parameters.new(user: {}, profile: {})
# user_params, profile_params = params.require([:user, :profile])
# # ActionController::ParameterMissing: param is missing or the value is empty: user
#
# Technically this method can be used to fetch terminal values:
#
# # CAREFUL
# params = ActionController::Parameters.new(person: { name: "Finn" })
# name = params.require(:person).require(:name) # CAREFUL
#
# but take into account that at some point those ones have to be permitted:
#
# def person_params
# params.require(:person).permit(:name).tap do |person_params|
# person_params.require(:name) # SAFER
# end
# end
#
# for example.
def require(key)
  if key.is_a?(Array)
    # Require each key in turn; the first failure propagates.
    key.map { |k| require(k) }
  else
    value = self[key]
    # Present values and the literal +false+ pass; nil/blank raise.
    return value if value.present? || value == false
    raise ParameterMissing.new(key, @parameters.keys)
  end
end

# Alias of #require.
alias :required :require
# Returns a new <tt>ActionController::Parameters</tt> instance that
# includes only the given +filters+ and sets the +permitted+ attribute
# for the object to +true+. This is useful for limiting which attributes
# should be allowed for mass updating.
#
# params = ActionController::Parameters.new(user: { name: "Francesco", age: 22, role: "admin" })
# permitted = params.require(:user).permit(:name, :age)
# permitted.permitted? # => true
# permitted.has_key?(:name) # => true
# permitted.has_key?(:age) # => true
# permitted.has_key?(:role) # => false
#
# Only permitted scalars pass the filter. For example, given
#
# params.permit(:name)
#
# +:name+ passes if it is a key of +params+ whose associated value is of type
# +String+, +Symbol+, +NilClass+, +Numeric+, +TrueClass+, +FalseClass+,
# +Date+, +Time+, +DateTime+, +StringIO+, +IO+,
# +ActionDispatch::Http::UploadedFile+ or +Rack::Test::UploadedFile+.
# Otherwise, the key +:name+ is filtered out.
#
# You may declare that the parameter should be an array of permitted scalars
# by mapping it to an empty array:
#
# params = ActionController::Parameters.new(tags: ["rails", "parameters"])
# params.permit(tags: [])
#
# Sometimes it is not possible or convenient to declare the valid keys of
# a hash parameter or its internal structure. Just map to an empty hash:
#
# params.permit(preferences: {})
#
# Be careful because this opens the door to arbitrary input. In this
# case, +permit+ ensures values in the returned structure are permitted
# scalars and filters out anything else.
#
# You can also use +permit+ on nested parameters, like:
#
# params = ActionController::Parameters.new({
# person: {
# name: "Francesco",
# age: 22,
# pets: [{
# name: "Purplish",
# category: "dogs"
# }]
# }
# })
#
# permitted = params.permit(person: [ :name, { pets: :name } ])
# permitted.permitted? # => true
# permitted[:person][:name] # => "Francesco"
# permitted[:person][:age] # => nil
# permitted[:person][:pets][0][:name] # => "Purplish"
# permitted[:person][:pets][0][:category] # => nil
#
# Note that if you use +permit+ in a key that points to a hash,
# it won't allow all the hash. You also need to specify which
# attributes inside the hash should be permitted.
#
# params = ActionController::Parameters.new({
# person: {
# contact: {
# email: "[email protected]",
# phone: "555-1234"
# }
# }
# })
#
# params.require(:person).permit(:contact)
# # => <ActionController::Parameters {} permitted: true>
#
# params.require(:person).permit(contact: :phone)
# # => <ActionController::Parameters {"contact"=><ActionController::Parameters {"phone"=>"555-1234"} permitted: true>} permitted: true>
#
# params.require(:person).permit(contact: [ :email, :phone ])
# # => <ActionController::Parameters {"contact"=><ActionController::Parameters {"email"=>"[email protected]", "phone"=>"555-1234"} permitted: true>} permitted: true>
def permit(*filters)
  # Accumulate permitted entries into a fresh instance, then mark it
  # (and everything nested inside it) as permitted.
  permitted_params = self.class.new

  filters.flatten.each do |filter|
    if filter.is_a?(Symbol) || filter.is_a?(String)
      # Scalar declaration, e.g. permit(:name).
      permitted_scalar_filter(permitted_params, filter)
    elsif filter.is_a?(Hash)
      # Nested declaration, e.g. permit(tags: []) or permit(user: [:name]).
      hash_filter(permitted_params, filter)
    end
  end

  unpermitted_parameters!(permitted_params) if self.class.action_on_unpermitted_parameters

  permitted_params.permit!
end
# Returns a parameter for the given +key+. If not found,
# returns +nil+.
#
# params = ActionController::Parameters.new(person: { name: "Francesco" })
# params[:person] # => <ActionController::Parameters {"name"=>"Francesco"} permitted: false>
# params[:none] # => nil
def [](key)
  # Converts (and memoizes back into @parameters) nested hashes on access.
  convert_hashes_to_parameters(key, @parameters[key])
end

# Assigns a value to a given +key+. The given key may still get filtered out
# when +permit+ is called.
def []=(key, value)
  @parameters[key] = value
end
# Returns a parameter for the given +key+. If the +key+
# can't be found, there are several options: With no other arguments,
# it will raise an <tt>ActionController::ParameterMissing</tt> error;
# if a second argument is given, then that is returned (converted to an
# instance of ActionController::Parameters if possible); if a block
# is given, then that will be run and its result returned.
#
# params = ActionController::Parameters.new(person: { name: "Francesco" })
# params.fetch(:person) # => <ActionController::Parameters {"name"=>"Francesco"} permitted: false>
# params.fetch(:none) # => ActionController::ParameterMissing: param is missing or the value is empty: none
# params.fetch(:none, {}) # => <ActionController::Parameters {} permitted: false>
# params.fetch(:none, "Francesco") # => "Francesco"
# params.fetch(:none) { "Francesco" } # => "Francesco"
def fetch(key, *args)
  # Resolve the raw value first, preferring an explicit block, then the
  # optional default argument, then raising ParameterMissing.
  value = @parameters.fetch(key) do
    if block_given?
      yield
    else
      args.fetch(0) { raise ActionController::ParameterMissing.new(key, @parameters.keys) }
    end
  end
  # Defaults and stored hashes alike are wrapped as Parameters.
  convert_value_to_parameters(value)
end
# Extracts the nested parameter from the given +keys+ by calling +dig+
# at each step. Returns +nil+ if any intermediate step is +nil+.
#
# params = ActionController::Parameters.new(foo: { bar: { baz: 1 } })
# params.dig(:foo, :bar, :baz) # => 1
# params.dig(:foo, :zot, :xyz) # => nil
#
# params2 = ActionController::Parameters.new(foo: [10, 11, 12])
# params2.dig(:foo, 1) # => 11
def dig(*keys)
  # Called for its side effect only: converts (and memoizes) the value at
  # the first key so digging into a nested hash yields Parameters,
  # mirroring #[] semantics. The return value of this line is discarded.
  convert_hashes_to_parameters(keys.first, @parameters[keys.first])
  @parameters.dig(*keys)
end
# Returns a new <tt>ActionController::Parameters</tt> instance that
# includes only the given +keys+. If the given +keys+
# don't exist, returns an empty hash.
#
# params = ActionController::Parameters.new(a: 1, b: 2, c: 3)
# params.slice(:a, :b) # => <ActionController::Parameters {"a"=>1, "b"=>2} permitted: false>
# params.slice(:d) # => <ActionController::Parameters {} permitted: false>
def slice(*keys)
  # The returned instance inherits this object's permitted flag.
  new_instance_with_inherited_permitted_status(@parameters.slice(*keys))
end

# Returns current <tt>ActionController::Parameters</tt> instance which
# contains only the given +keys+.
def slice!(*keys)
  # In-place variant: mutates @parameters and always returns self.
  @parameters.slice!(*keys)
  self
end

# Returns a new <tt>ActionController::Parameters</tt> instance that
# filters out the given +keys+.
#
#   params = ActionController::Parameters.new(a: 1, b: 2, c: 3)
#   params.except(:a, :b) # => <ActionController::Parameters {"c"=>3} permitted: false>
#   params.except(:d)     # => <ActionController::Parameters {"a"=>1, "b"=>2, "c"=>3} permitted: false>
def except(*keys)
  new_instance_with_inherited_permitted_status(@parameters.except(*keys))
end

# Removes and returns the key/value pairs matching the given keys.
#
#   params = ActionController::Parameters.new(a: 1, b: 2, c: 3)
#   params.extract!(:a, :b) # => <ActionController::Parameters {"a"=>1, "b"=>2} permitted: false>
#   params                  # => <ActionController::Parameters {"c"=>3} permitted: false>
def extract!(*keys)
  # Destructive on self; the extracted pairs inherit the permitted flag.
  new_instance_with_inherited_permitted_status(@parameters.extract!(*keys))
end
# Returns a new <tt>ActionController::Parameters</tt> with the results of
# running +block+ once for every value. The keys are unchanged.
#
# params = ActionController::Parameters.new(a: 1, b: 2, c: 3)
# params.transform_values { |x| x * 2 }
# # => <ActionController::Parameters {"a"=>2, "b"=>4, "c"=>6} permitted: false>
def transform_values
  return to_enum(:transform_values) unless block_given?
  # Values are converted to Parameters before being yielded to the block.
  new_instance_with_inherited_permitted_status(
    @parameters.transform_values { |v| yield convert_value_to_parameters(v) }
  )
end

# Performs values transformation and returns the altered
# <tt>ActionController::Parameters</tt> instance.
def transform_values!
  return to_enum(:transform_values!) unless block_given?
  @parameters.transform_values! { |v| yield convert_value_to_parameters(v) }
  self
end
# Returns a new <tt>ActionController::Parameters</tt> instance with the
# results of running +block+ once for every key. The values are unchanged.
def transform_keys(&block)
  return to_enum(:transform_keys) unless block_given?
  # Only the top-level keys are transformed; values (and nested keys)
  # are untouched.
  new_instance_with_inherited_permitted_status(
    @parameters.transform_keys(&block)
  )
end

# Performs keys transformation and returns the altered
# <tt>ActionController::Parameters</tt> instance.
def transform_keys!(&block)
  return to_enum(:transform_keys!) unless block_given?
  @parameters.transform_keys!(&block)
  self
end
# Returns a new <tt>ActionController::Parameters</tt> instance with the
# results of running +block+ once for every key. This includes the keys
# from the root hash and from all nested hashes and arrays. The values are unchanged.
def deep_transform_keys(&block)
  # NOTE(review): unlike transform_keys above, there is no enumerator
  # fallback when no block is given — confirm whether that asymmetry is
  # intentional before relying on it.
  new_instance_with_inherited_permitted_status(
    @parameters.deep_transform_keys(&block)
  )
end

# Returns the <tt>ActionController::Parameters</tt> instance changing its keys.
# This includes the keys from the root hash and from all nested hashes and arrays.
# The values are unchanged.
def deep_transform_keys!(&block)
  @parameters.deep_transform_keys!(&block)
  self
end
# Deletes a key-value pair from +Parameters+ and returns the value. If
# +key+ is not found, returns +nil+ (or, with optional code block, yields
# +key+ and returns the result). Cf. +#extract!+, which returns the
# corresponding +ActionController::Parameters+ object.
def delete(key, &block)
  # The removed value is converted so callers get Parameters, not raw hashes.
  convert_value_to_parameters(@parameters.delete(key, &block))
end
# Returns a new instance of <tt>ActionController::Parameters</tt> with only
# items that the block evaluates to true.
def select(&block)
  new_instance_with_inherited_permitted_status(@parameters.select(&block))
end

# Equivalent to Hash#keep_if, but returns +nil+ if no changes were made.
def select!(&block)
  # NOTE(review): despite the doc above, this always returns self — the
  # nil-on-no-change result of Hash#select! is discarded. Confirm which
  # behavior is intended.
  @parameters.select!(&block)
  self
end
alias_method :keep_if, :select!

# Returns a new instance of <tt>ActionController::Parameters</tt> with items
# that the block evaluates to true removed.
def reject(&block)
  new_instance_with_inherited_permitted_status(@parameters.reject(&block))
end

# Removes items that the block evaluates to true and returns self.
def reject!(&block)
  @parameters.reject!(&block)
  self
end
alias_method :delete_if, :reject!
# Returns a new instance of <tt>ActionController::Parameters</tt> with +nil+ values removed.
def compact
  new_instance_with_inherited_permitted_status(@parameters.compact)
end

# Removes all +nil+ values in place and returns +self+, or +nil+ if no changes were made.
def compact!
  # Hash#compact! returns nil when nothing was removed, which makes the
  # whole `self if ...` expression nil in that case.
  self if @parameters.compact!
end

# Returns a new instance of <tt>ActionController::Parameters</tt> without the blank values.
# Uses Object#blank? for determining if a value is blank.
def compact_blank
  reject { |_k, v| v.blank? }
end

# Removes all blank values in place and returns self.
# Uses Object#blank? for determining if a value is blank.
def compact_blank!
  reject! { |_k, v| v.blank? }
end
# Returns values that were assigned to the given +keys+. Note that all the
# +Hash+ objects will be converted to <tt>ActionController::Parameters</tt>.
def values_at(*keys)
  # Converts the resulting array (and any hashes inside it) in one pass.
  convert_value_to_parameters(@parameters.values_at(*keys))
end
# Returns a new <tt>ActionController::Parameters</tt> with all keys from
# +other_hash+ merged into current hash.
def merge(other_hash)
  # other_hash may itself be a Parameters; to_h normalizes it either way.
  new_instance_with_inherited_permitted_status(
    @parameters.merge(other_hash.to_h)
  )
end

# Returns current <tt>ActionController::Parameters</tt> instance with
# +other_hash+ merged into current hash.
def merge!(other_hash)
  @parameters.merge!(other_hash.to_h)
  self
end

# Returns a new <tt>ActionController::Parameters</tt> with all keys from
# current hash merged into +other_hash+.
def reverse_merge(other_hash)
  new_instance_with_inherited_permitted_status(
    other_hash.to_h.merge(@parameters)
  )
end
alias_method :with_defaults, :reverse_merge

# Returns current <tt>ActionController::Parameters</tt> instance with
# current hash merged into +other_hash+.
def reverse_merge!(other_hash)
  # The merge block keeps the existing (left) value on key collisions, so
  # other_hash only supplies defaults for keys that are missing.
  @parameters.merge!(other_hash.to_h) { |key, left, right| left }
  self
end
alias_method :with_defaults!, :reverse_merge!
# This is required by ActiveModel attribute assignment, so that user can
# pass +Parameters+ to a mass assignment methods in a model. It should not
# matter as we are using +HashWithIndifferentAccess+ internally.
def stringify_keys # :nodoc:
  # A plain dup suffices: HashWithIndifferentAccess already stores
  # string keys internally.
  dup
end

# Human-readable representation that also exposes the permitted flag.
def inspect
  "#<#{self.class} #{@parameters} permitted: #{@permitted}>"
end
def self.hook_into_yaml_loading # :nodoc:
  # Wire up YAML format compatibility with Rails 4.2 and Psych 2.0.8 and 2.0.9+.
  # Makes the YAML parser call `init_with` when it encounters the keys below
  # instead of trying its own parsing routines.
  YAML.load_tags["!ruby/hash-with-ivars:ActionController::Parameters"] = name
  YAML.load_tags["!ruby/hash:ActionController::Parameters"] = name
end
# Register immediately at class-definition time so the tag mapping is in
# place before any YAML is loaded.
hook_into_yaml_loading
# Psych deserialization hook; restores @parameters/@permitted from any of
# the three historical YAML layouts registered in hook_into_yaml_loading.
def init_with(coder) # :nodoc:
  case coder.tag
  when "!ruby/hash:ActionController::Parameters"
    # YAML 2.0.8's format where hash instance variables weren't stored.
    # Permitted state was lost, so it conservatively defaults to false.
    @parameters = coder.map.with_indifferent_access
    @permitted  = false
  when "!ruby/hash-with-ivars:ActionController::Parameters"
    # YAML 2.0.9's Hash subclass format where keys and values
    # were stored under an elements hash and `permitted` within an ivars hash.
    @parameters = coder.map["elements"].with_indifferent_access
    @permitted  = coder.map["ivars"][:@permitted]
  when "!ruby/object:ActionController::Parameters"
    # YAML's Object format. Only needed because of the format
    # backwards compatibility above, otherwise equivalent to YAML's initialization.
    @parameters, @permitted = coder.map["parameters"], coder.map["permitted"]
  end
end
# Returns duplicate of object including all parameters.
# Returns duplicate of object including all parameters.
def deep_dup
  # The converted_arrays cache is not carried over; the duplicate starts
  # with a nil cache and rebuilds it lazily on first use.
  self.class.new(@parameters.deep_dup).tap do |duplicate|
    duplicate.permitted = @permitted
  end
end
protected
  # Raw access for other Parameters instances (used by #==).
  attr_reader :parameters
  # Used by new_instance_with_inherited_permitted_status and #deep_dup.
  attr_writer :permitted

  # True if any top-level pair looks like a nested-attributes entry
  # (integer-like key pointing at a hash), per Parameters.nested_attribute?.
  def nested_attributes?
    @parameters.any? { |k, v| Parameters.nested_attribute?(k, v) }
  end

  # Builds a new Parameters containing only the nested-attribute entries,
  # each value replaced by the block's return value.
  def each_nested_attribute
    hash = self.class.new
    self.each { |k, v| hash[k] = yield v if Parameters.nested_attribute?(k, v) }
    hash
  end
private
  # Wraps +hash+ in a new Parameters that inherits this instance's
  # permitted flag (backs slice/except/merge and friends).
  def new_instance_with_inherited_permitted_status(hash)
    self.class.new(hash).tap do |new_instance|
      new_instance.permitted = @permitted
    end
  end

  # Recursively converts nested Parameters back into hashes by sending
  # +using+ (:to_h or :to_unsafe_h) to every Parameters found.
  def convert_parameters_to_hashes(value, using)
    case value
    when Array
      value.map { |v| convert_parameters_to_hashes(v, using) }
    when Hash
      value.transform_values do |v|
        convert_parameters_to_hashes(v, using)
      end.with_indifferent_access
    when Parameters
      value.send(using)
    else
      value
    end
  end

  # Converts +value+ and memoizes the result back into @parameters so
  # subsequent reads of +key+ return the already-converted object.
  def convert_hashes_to_parameters(key, value)
    converted = convert_value_to_parameters(value)
    @parameters[key] = converted unless converted.equal?(value)
    converted
  end
# Recursively wraps hashes in Parameters and converts the elements of
# arrays, caching converted arrays to avoid re-doing the work during the
# common permit + mass-assignment flow. Scalars pass through unchanged.
def convert_value_to_parameters(value)
  case value
  when Array
    # Set membership is value-based (eql?/hash), so arrays we have
    # already converted are returned as-is.
    return value if converted_arrays.member?(value)
    converted = value.map { |element| convert_value_to_parameters(element) }
    # Cache the converted array itself. (Previously a `.dup` was stored,
    # which cost an extra allocation per conversion without changing the
    # value-based membership check.)
    converted_arrays << converted
    converted
  when Hash
    value = self.class.new(value)
  else
    value
  end
end
# Applies the block according to +object+'s shape: for an Array, yields
# each Parameters element (non-Parameters entries are dropped by grep and
# nil block results by compact); for a Parameters with nested attributes,
# yields each nested attribute; otherwise yields the object itself.
def each_element(object, &block)
  case object
  when Array
    object.grep(Parameters).map { |el| yield el }.compact
  when Parameters
    if object.nested_attributes?
      object.each_nested_attribute(&block)
    else
      yield object
    end
  end
end
# Depending on the configured action_on_unpermitted_parameters, emits an
# ActiveSupport notification (:log) or raises UnpermittedParameters
# (:raise) when keys of self did not survive filtering into +params+.
def unpermitted_parameters!(params)
  unpermitted_keys = unpermitted_keys(params)
  if unpermitted_keys.any?
    case self.class.action_on_unpermitted_parameters
    when :log
      name = "unpermitted_parameters.action_controller"
      ActiveSupport::Notifications.instrument(name, keys: unpermitted_keys)
    when :raise
      raise ActionController::UnpermittedParameters.new(unpermitted_keys)
    end
  end
end

# Keys of self that were filtered out, minus the always-permitted ones
# ("controller" and "action" by default).
def unpermitted_keys(params)
  keys - params.keys - always_permitted_parameters
end
#
# --- Filtering ----------------------------------------------------------
#
# This is a list of permitted scalar types that includes the ones
# supported in XML and JSON requests.
#
# This list is in particular used to filter ordinary requests, String goes
# as first element to quickly short-circuit the common case.
#
# If you modify this collection please update the API of +permit+ above.
PERMITTED_SCALAR_TYPES = [
  String,
  Symbol,
  NilClass,
  Numeric,
  TrueClass,
  FalseClass,
  Date,
  Time,
  # DateTimes are Dates, we document the type but avoid the redundant check.
  StringIO,
  IO,
  ActionDispatch::Http::UploadedFile,
  Rack::Test::UploadedFile,
].freeze # frozen: this constant is shared and must never be mutated

# True when +value+ is an instance of any of the whitelisted scalar
# types above (String first, as it is by far the most common case).
def permitted_scalar?(value)
  PERMITTED_SCALAR_TYPES.any? { |type| value.is_a?(type) }
end
# Adds existing keys to the params if their values are scalar.
#
# For example:
#
# puts self.keys #=> ["zipcode(90210i)"]
# params = {}
#
# permitted_scalar_filter(params, "zipcode")
#
# puts params.keys # => ["zipcode"]
def permitted_scalar_filter(params, permitted_key)
  permitted_key = permitted_key.to_s

  # Copy the exact key when its value is a permitted scalar.
  if has_key?(permitted_key) && permitted_scalar?(self[permitted_key])
    params[permitted_key] = self[permitted_key]
  end

  # Also copy multiparameter variants such as "date(1i)"/"date(2i)":
  # $~.pre_match is the part of the key before the "(Ni)" suffix, which
  # must equal the permitted base name.
  each_key do |key|
    next unless key =~ /\(\d+[if]?\)\z/
    next unless $~.pre_match == permitted_key
    params[key] = self[key] if permitted_scalar?(self[key])
  end
end
# Yields +value+ (returning the block's result) only when it is an array
# made up entirely of permitted scalar types; otherwise returns nil
# without yielding.
def array_of_permitted_scalars?(value)
  if value.is_a?(Array) && value.all? { |element| permitted_scalar?(element) }
    yield value
  end
end

# True for values that need element-wise filtering (arrays and nested
# Parameters) rather than scalar whitelisting.
def non_scalar?(value)
  value.is_a?(Array) || value.is_a?(Parameters)
end
# Sentinel values used to recognize the `key: []` and `key: {}` filter
# declarations. They are compared with ==, never mutated; frozen to make
# accidental mutation impossible.
EMPTY_ARRAY = [].freeze
EMPTY_HASH = {}.freeze

# Applies a Hash +filter+ declaration against self, copying permitted
# entries into +params+. Supports three declaration shapes per key:
# an empty array (array of scalars), an empty hash (arbitrary scalar
# structure) and a nested declaration (recursive permit).
def hash_filter(params, filter)
  filter = filter.with_indifferent_access

  # Slicing filters out non-declared keys.
  slice(*filter.keys).each do |key, value|
    next unless value
    next unless has_key? key

    if filter[key] == EMPTY_ARRAY
      # Declaration { comment_ids: [] }: permit an array of scalars only.
      array_of_permitted_scalars?(self[key]) do |val|
        params[key] = val
      end
    elsif filter[key] == EMPTY_HASH
      # Declaration { preferences: {} }: permit arbitrary structure, but
      # only scalar leaves survive (see permit_any_in_parameters).
      if value.is_a?(Parameters)
        params[key] = permit_any_in_parameters(value)
      end
    elsif non_scalar?(value)
      # Declaration { user: :name } or { user: [:name, :age, { address: ... }] }.
      params[key] = each_element(value) do |element|
        element.permit(*Array.wrap(filter[key]))
      end
    end
  end
end
# Builds a new Parameters from +params+ keeping permitted scalars,
# recursing into arrays and nested Parameters, and silently dropping
# everything else. Backs the `permit(key: {})` declaration.
def permit_any_in_parameters(params)
  self.class.new.tap do |sanitized|
    params.each do |key, value|
      case value
      when ->(v) { permitted_scalar?(v) }
        sanitized[key] = value
      when Array
        sanitized[key] = permit_any_in_array(value)
      when Parameters
        sanitized[key] = permit_any_in_parameters(value)
      else
        # Filter this one out.
      end
    end
  end
end
# Returns a new array containing only the permitted-scalar elements of
# +array+ plus recursively sanitized nested Parameters; every other
# element is silently dropped.
def permit_any_in_array(array)
  array.each_with_object([]) do |element, sanitized|
    if permitted_scalar?(element)
      sanitized << element
    elsif element.is_a?(Parameters)
      sanitized << permit_any_in_parameters(element)
    end
    # Anything else is filtered out.
  end
end
# dup/clone hook: give the copy its own shallow copy of the backing hash
# so key-level mutations do not leak between the two instances.
def initialize_copy(source)
  super
  @parameters = @parameters.dup
end
end
# == Strong \Parameters
#
# It provides an interface for protecting attributes from end-user
# assignment. This makes Action Controller parameters forbidden
# to be used in Active Model mass assignment until they have been explicitly
# enumerated.
#
# In addition, parameters can be marked as required and flow through a
# predefined raise/rescue flow to end up as a <tt>400 Bad Request</tt> with no
# effort.
#
# class PeopleController < ActionController::Base
# # Using "Person.create(params[:person])" would raise an
# # ActiveModel::ForbiddenAttributesError exception because it'd
# # be using mass assignment without an explicit permit step.
# # This is the recommended form:
# def create
# Person.create(person_params)
# end
#
# # This will pass with flying colors as long as there's a person key in the
# # parameters, otherwise it'll raise an ActionController::ParameterMissing
# # exception, which will get caught by ActionController::Base and turned
# # into a 400 Bad Request reply.
# def update
# redirect_to current_account.people.find(params[:id]).tap { |person|
# person.update!(person_params)
# }
# end
#
# private
# # Using a private method to encapsulate the permissible parameters is
# # a good pattern since you'll be able to reuse the same permit
# # list between create and update. Also, you can specialize this method
# # with per-user checking of permissible attributes.
# def person_params
# params.require(:person).permit(:name, :age)
# end
# end
#
# In order to use <tt>accepts_nested_attributes_for</tt> with Strong \Parameters, you
# will need to specify which nested attributes should be permitted. You might want
# to allow +:id+ and +:_destroy+, see ActiveRecord::NestedAttributes for more information.
#
# class Person
# has_many :pets
# accepts_nested_attributes_for :pets
# end
#
# class PeopleController < ActionController::Base
# def create
# Person.create(person_params)
# end
#
# ...
#
# private
#
# def person_params
# # It's mandatory to specify the nested attributes that should be permitted.
# # If you use `permit` with just the key that points to the nested attributes hash,
# # it will return an empty hash.
# params.require(:person).permit(:name, :age, pets_attributes: [ :id, :name, :category ])
# end
# end
#
# See ActionController::Parameters.require and ActionController::Parameters.permit
# for more information.
module StrongParameters
  # Returns a new ActionController::Parameters object that
  # has been instantiated with the <tt>request.parameters</tt>.
  # Memoized per request in @_params.
  def params
    @_params ||= Parameters.new(request.parameters)
  end

  # Assigns the given +value+ to the +params+ hash. If +value+
  # is a Hash, it is wrapped in a new ActionController::Parameters;
  # any other object (e.g. an existing Parameters) is stored as-is.
  def params=(value)
    @_params = value.is_a?(Hash) ? Parameters.new(value) : value
  end
end
end
| 34.986644 | 167 | 0.633989 |
e907b5176f372dfdd86e8a7573b7de964c8d2200 | 1,078 | require 'simp/cli/config/items/data/simp_options_ldap_sync_hash'
require 'rspec/its'
require_relative '../spec_helper'
describe Simp::Cli::Config::Item::SimpOptionsLdapSyncHash do
  before :each do
    @ci = Simp::Cli::Config::Item::SimpOptionsLdapSyncHash.new
    # Seed the dependent sync-password item with a fixed binary salt so
    # the encrypt expectation below is deterministic.
    item = Simp::Cli::Config::Item::SimpOptionsLdapSyncPw.new
    item.value = "\xef\xb2\x2e\xac"
    @ci.config_items[item.key] = item
  end

  describe '#encrypt' do
    it 'encrypts a known password and salt to the correct SHA-1 password hash' do
      expect( @ci.encrypt( 'foo', "\xef\xb2\x2e\xac" ) ).to eq '{SSHA}zxOLQEdncCJTMObl5s+y1N/Ydh3vsi6s'
    end
  end

  describe '#validate' do
    # NOTE(review): the description says "validates" but the expectation
    # is `eq false`, the same as the negative example below — confirm the
    # intended outcome or rename this example.
    it 'validates OpenLDAP-format SHA-1 algorithm (FIPS 160-1) password hash' do
      expect( @ci.validate '{SSHA}zxOLQEdncCJTMObl5s+y1N/Ydh3vsi6s' ).to eq false
    end

    it 'fails validation when the LDAP bind password does not validate against the hash' do
      expect( @ci.validate '{SSHA}Y6x92VpatHf9G6yMiktUYTrA/3SxUFm' ).to eq false
    end
  end

  it_behaves_like 'a child of Simp::Cli::Config::Item'
end
| 33.6875 | 103 | 0.714286 |
394ff58ee50eff3aea32e9bd0621c6fa0c092e77 | 70 | module ListHelper
# CSS class string applied to the page body for list views.
def body_class
  ['application', 'list'].join(' ')
end
end
| 11.666667 | 22 | 0.714286 |
871e70ab91c18f2bd33609834da3da9492610e6c | 636 | # Method name: factorial
# Inputs: A single non-negative integer, n
# Returns: The factorial of n (see below)
# Prints: Nothing
# The factorial of 5 is denoted by 5! and is defined as
# 5! = 5*4*3*2*1
#
# In English, you'd read "5!" as "five factorial". In general, the factorial
# of a number is the product of every number from that number down to 1, so
#
# 4! = 4*3*2*1
# 10! = 10*9*8*7*6*5*4*3*2*1
#
# Returns n! (the product of every integer from n down to 1); 0! is 1.
# Raises ArgumentError for negative input, which the file header rules out.
def factorial(n)
  raise ArgumentError, "n must be a non-negative integer" if n < 0
  (1..n).reduce(1, :*)
end
if __FILE__ == $PROGRAM_NAME
  # Common cases
  p factorial(5) == 120
  p factorial(3) == 6
  # Corner cases: the empty product and the smallest input
  p factorial(0) == 1
  p factorial(1) == 1
end
| 26.5 | 77 | 0.65566 |
bb18a638ee35e29bb1f6b77da6fc8b79e7a4bb47 | 1,854 | class GoogleJavaFormat < Formula
include Language::Python::Shebang
desc "Reformats Java source code to comply with Google Java Style"
homepage "https://github.com/google/google-java-format"
url "https://github.com/google/google-java-format/releases/download/v1.15.0/google-java-format-1.15.0-all-deps.jar"
sha256 "a356bb0236b29c57a3ab678f17a7b027aad603b0960c183a18f1fe322e4f38ea"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, all: "0148b7af6496e069695eda9f889999cd942ab795e9ce8a8d911906d1b79b8b05"
end
depends_on "openjdk"
depends_on "[email protected]"
resource "google-java-format-diff" do
url "https://raw.githubusercontent.com/google/google-java-format/v1.15.0/scripts/google-java-format-diff.py"
sha256 "4c46a4ed6c39c2f7cbf2bc7755eefd7eaeb0a3db740ed1386053df822f15782b"
end
def install
  # Rename the versioned all-deps jar to a stable name under libexec and
  # expose it through a wrapper script in bin.
  libexec.install "google-java-format-#{version}-all-deps.jar" => "google-java-format.jar"
  bin.write_jar_script libexec/"google-java-format.jar", "google-java-format"

  resource("google-java-format-diff").stage do
    # Install the diff helper (dropping the .py suffix) and point its
    # shebang at Homebrew's Python.
    bin.install "google-java-format-diff.py" => "google-java-format-diff"
    rewrite_shebang detected_python_shebang, bin/"google-java-format-diff"
  end
end
test do
  # Round-trip a minimal class through the formatter itself.
  (testpath/"foo.java").write "public class Foo{\n}\n"
  assert_match "public class Foo {}", shell_output("#{bin}/google-java-format foo.java")

  (testpath/"bar.java").write <<~BAR
    class Bar{
    int x;
    }
  BAR
  patch = <<~PATCH
    --- a/bar.java
    +++ b/bar.java
    @@ -1,0 +2 @@ class Bar{
    + int x ;
  PATCH
  # Apply the patch through the diff helper in place (-i); only the
  # patched lines are reformatted.
  `echo '#{patch}' | #{bin}/google-java-format-diff -p1 -i`
  assert_equal <<~BAR, File.read(testpath/"bar.java")
    class Bar{
    int x;
    }
  BAR

  # Sanity-check the pinned resource version.
  assert_equal version, resource("google-java-format-diff").version
end
end
| 34.333333 | 117 | 0.697411 |
edde502373e8e84eac3ae03483b2cb94e1935e61 | 1,367 | # -*- encoding: utf-8 -*-
# stub: slim 2.0.2 ruby lib
Gem::Specification.new do |s|
s.name = "slim"
s.version = "2.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Daniel Mendler", "Andrew Stone", "Fred Wu"]
s.date = "2013-10-27"
s.description = "Slim is a template language whose goal is reduce the syntax to the essential parts without becoming cryptic."
s.email = ["[email protected]", "[email protected]", "[email protected]"]
s.executables = ["slimrb"]
s.files = ["bin/slimrb"]
s.homepage = "http://slim-lang.com/"
s.licenses = ["MIT"]
s.rubyforge_project = "slim"
s.rubygems_version = "2.2.2"
s.summary = "Slim is a template language."
s.installed_by_version = "2.2.2" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<temple>, ["~> 0.6.6"])
s.add_runtime_dependency(%q<tilt>, ["< 2.1", ">= 1.3.3"])
else
s.add_dependency(%q<temple>, ["~> 0.6.6"])
s.add_dependency(%q<tilt>, ["< 2.1", ">= 1.3.3"])
end
else
s.add_dependency(%q<temple>, ["~> 0.6.6"])
s.add_dependency(%q<tilt>, ["< 2.1", ">= 1.3.3"])
end
end
| 35.051282 | 128 | 0.633504 |
f727be05b43520d1bf6e93e9682b513b31a1a7d4 | 1,609 |
# FIXME policy = :recalc doesnt work
module EH::Game
class Goal
def initialize
@state = :before
end
def setup
@state = :progress
end
def state
return @state
end
def restart
@state = :before
end
def recalc
restart
end
end
class MotionGoal < Goal
attr_reader :dx, :dy, :x, :y
attr_accessor :state
def initialize(dx, dy, x, y)
super()
@dx, @dy = dx, dy
@x, @y = x, y
end
def recalc
@state = :recalc
end
end
class CompositeGoal < Array
attr_reader :state
def initialize(policy=:abort)
super()
@current = 0
@policy = policy
@state = :progress
end
def reset
self.clear
@current = 0
@state = :reset
end
def start
@state = :progress
end
def current
return self[@current]
end
def update
if @state == :progress
awesome_print(self) if !current
case current.state
when :before
if current.class != MotionGoal
current.setup
end
when :progress
return
when :failed
handle_failed
when :finished
advance
end
end
end
def advance
@current += 1
if @current == self.size
@state = :finished
@current = 0
end
end
private
def handle_failed
case @policy
when :abort
@state = :failed
when :retry
current.restart
when :recalc
current.recalc
end
end
end
end
| 17.117021 | 40 | 0.521442 |
f86f1fe16e75aa0d1376568f9b132c28578756ce | 869 | #
# Cookbook Name:: practicingruby
# Recipe:: god
#
# Installs, configures, and starts the God process-monitoring daemon,
# which supervises the app's delayed_job worker (see delayed_job.god below).
#
# Install Ruby first -- God is distributed as a gem
include_recipe "practicingruby::_ruby"
# Install god gem system-wide
gem_package "god"
# Create config directory for God's master configuration
directory "/etc/god" do
  owner "root"
  group "root"
  mode "0755"
end
# Create god config file; it loads the app's delayed_job.god definition
# from the current deploy (only if that file exists). Any change to this
# config triggers a God restart via the notifies below.
file "/etc/god/master.conf" do
  owner "root"
  group "root"
  mode "0644"
  notifies :restart, "service[god]"
  home = node["practicingruby"]["deploy"]["home_dir"]
  god_file = "#{home}/current/config/delayed_job.god"
  content "God.load('#{god_file}') if File.file?('#{god_file}')\n"
end
# Install Upstart startup script so God starts on boot
cookbook_file "/etc/init/god.conf" do
  source "god.upstart"
  owner "root"
  group "root"
  mode "0644"
end
# Enable and start god through Upstart
service "god" do
  provider Chef::Provider::Service::Upstart
  action [:enable, :start]
end
| 18.489362 | 66 | 0.675489 |
e2e1eea7e9d27d1af65efff3bccf3d84c1ef38fe | 1,337 | require 'spec_helper'
class ForgeriesController < ActionController::Base
include Clearance::Controller
protect_from_forgery
if respond_to?(:before_action)
before_action :require_login
else
before_filter :require_login
end
# This is off in test by default, but we need it for this test
self.allow_forgery_protection = true
def create
redirect_to action: 'index'
end
end
describe ForgeriesController do
context 'signed in user' do
before do
Rails.application.routes.draw do
resources :forgeries
get '/sign_in' => 'clearance/sessions#new', as: 'sign_in'
end
@user = create(:user)
@user.update_attribute(:remember_token, 'old-token')
@request.cookies['remember_token'] = 'old-token'
end
after do
Rails.application.reload_routes!
end
it 'succeeds with authentic token' do
token = controller.send(:form_authenticity_token)
post :create, params: {
authenticity_token: token,
}
expect(subject).to redirect_to(action: 'index')
end
it 'fails with invalid token' do
post :create, params: {
authenticity_token: "hax0r",
}
expect(subject).to deny_access
end
it 'fails with no token' do
post :create
expect(subject).to deny_access
end
end
end
| 22.283333 | 66 | 0.670157 |
6ab9c92c9ad00cb92e9a8432ee49e26c5297b5dc | 374 | # frozen_string_literal: true
# vi:ts=2 sw=2 tw=79 et lbr wrap
# Copyright 2018 by David Rabkin
require_relative 'action'
module Renamr
# Omits file names shorter than limit.
class OmitAction < Action
def initialize(lim)
raise 'lim cannot be nil.' if lim.nil?
@lim = lim
end
def do(src)
src.length < @lim ? nil : src
end
end
end
| 17 | 44 | 0.652406 |
26b1037be2de2c939c93b460c6ce63fa0f541d2e | 736 | # encoding: utf-8
#
# As of 40c7bde9690e5174b6a958a5df6b2aabc6b8b041 this code produces an extra
# empty line of text in row 2.
#
# Simple rounding of string_width floats seems to fix this issue, see the patch
# in 09c837466c31bb715f1276118c606e20477577df.
#
$LOAD_PATH.unshift File.join(File.dirname(__FILE__), '..', '..', 'lib')
require "rubygems"
require "prawn"
require "prawn/layout"
Prawn::Document.generate("broken_table.pdf") do
font "#{Prawn::BASEDIR}/data/fonts/comicsans.ttf"
table [["foo", "baaar", "1" ],
["This is","a sample", "2" ],
["Table", "dont\ncha\nknow?", "3" ]],
:font_size => 30,
:padding => 10,
:border => 2,
:position => :center
end
| 29.44 | 79 | 0.629076 |
91cc918c688bb28d2b601fa3be9a12888e9b62fb | 1,199 | $:.unshift( File.expand_path( "../lib", __FILE__ ) )
require 'lab42/open_object/version'
version = Lab42::OpenObject::VERSION
Gem::Specification.new do |s|
s.name = 'lab42_open_object'
s.version = version
s.summary = 'OpenObject an Immutable OpenStruct Enhancement'
s.description = %{An Open Object à la OpenStruct, but immutable and with a Hash and Enumerable Protocol Implementation }
s.authors = ["Robert Dober"]
s.email = '[email protected]'
s.files = Dir.glob("lib/**/*.rb")
s.files += %w{LICENSE README.md}
s.homepage = "https://github.com/RobertDober/lab42_open_object"
s.licenses = %w{Apache 2}
s.required_ruby_version = '>= 2.3.1'
s.add_dependency 'forwarder2', '~> 0.2'
s.add_development_dependency 'pry', '~> 0.11'
s.add_development_dependency 'pry-byebug', '~> 3.5'
s.add_development_dependency 'rspec', '~> 3.7'
s.add_development_dependency 'lab42_literate', '~> 0.1'
s.add_development_dependency 'simplecov', '~> 0.15'
s.add_development_dependency 'codeclimate-test-reporter', '~> 1.0'
s.add_development_dependency 'travis-lint', '~> 2.0'
# s.add_development_dependency 'rake', '~> 10.3'
end
| 38.677419 | 123 | 0.680567 |
01fe73869fb00188c0287f8a1a1c904e5bec9b46 | 1,055 | # typed: true
module KubeDSL::DSL::Apps::V1
class DeploymentCondition < ::KubeDSL::DSLObject
value_field :last_transition_time
value_field :last_update_time
value_field :message
value_field :reason
value_field :status
value_field :type
validates :last_transition_time, field: { format: :string }, presence: false
validates :last_update_time, field: { format: :string }, presence: false
validates :message, field: { format: :string }, presence: false
validates :reason, field: { format: :string }, presence: false
validates :status, field: { format: :string }, presence: false
validates :type, field: { format: :string }, presence: false
def serialize
{}.tap do |result|
result[:lastTransitionTime] = last_transition_time
result[:lastUpdateTime] = last_update_time
result[:message] = message
result[:reason] = reason
result[:status] = status
result[:type] = type
end
end
def kind_sym
:deployment_condition
end
end
end
| 30.142857 | 80 | 0.67109 |
87f3810d55cfde79d2d6f8d56085c876c0bb7eb5 | 404 | # frozen_string_literal: true
require 'spec_helper'
# Guards the public GraphQL surface of the DiffPosition type: a new or
# removed field must be reflected here to pass review.
describe GitlabSchema.types['DiffPosition'] do
  it 'exposes the expected fields' do
    # Kept alphabetized so additions produce minimal, reviewable diffs.
    expected_fields = %i[
      diff_refs
      file_path
      height
      new_line
      new_path
      old_line
      old_path
      position_type
      width
      x
      y
    ]
    expect(described_class).to have_graphql_fields(*expected_fields)
  end
end
| 16.833333 | 68 | 0.655941 |
e803a637012e49c50a41c82dd315978c179c2349 | 11,166 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
#
# frozen_string_literal: true
module Twilio
module REST
class Events < Domain
class V1 < Version
##
# PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
class EventTypeList < ListResource
##
# Initialize the EventTypeList
# @param [Version] version Version that contains the resource
# @return [EventTypeList] EventTypeList
def initialize(version)
super(version)
# Path Solution
@solution = {}
@uri = "/Types"
end
##
# Lists EventTypeInstance records from the API as a list.
# Unlike stream(), this operation is eager and will load `limit` records into
# memory before returning.
# @param [String] schema_id A string parameter filtering the results to return
# only the Event Types using a given schema.
# @param [Integer] limit Upper limit for the number of records to return. stream()
# guarantees to never return more than limit. Default is no limit
# @param [Integer] page_size Number of records to fetch per request, when
# not set will use the default value of 50 records. If no page_size is defined
# but a limit is defined, stream() will attempt to read the limit with the most
# efficient page size, i.e. min(limit, 1000)
# @return [Array] Array of up to limit results
def list(schema_id: :unset, limit: nil, page_size: nil)
self.stream(schema_id: schema_id, limit: limit, page_size: page_size).entries
end
##
# Streams EventTypeInstance records from the API as an Enumerable.
# This operation lazily loads records as efficiently as possible until the limit
# is reached.
# @param [String] schema_id A string parameter filtering the results to return
# only the Event Types using a given schema.
# @param [Integer] limit Upper limit for the number of records to return. stream()
# guarantees to never return more than limit. Default is no limit.
# @param [Integer] page_size Number of records to fetch per request, when
# not set will use the default value of 50 records. If no page_size is defined
# but a limit is defined, stream() will attempt to read the limit with the most
# efficient page size, i.e. min(limit, 1000)
# @return [Enumerable] Enumerable that will yield up to limit results
def stream(schema_id: :unset, limit: nil, page_size: nil)
limits = @version.read_limits(limit, page_size)
page = self.page(schema_id: schema_id, page_size: limits[:page_size], )
@version.stream(page, limit: limits[:limit], page_limit: limits[:page_limit])
end
##
# When passed a block, yields EventTypeInstance records from the API.
# This operation lazily loads records as efficiently as possible until the limit
# is reached.
def each
limits = @version.read_limits
page = self.page(page_size: limits[:page_size], )
@version.stream(page,
limit: limits[:limit],
page_limit: limits[:page_limit]).each {|x| yield x}
end
##
# Retrieve a single page of EventTypeInstance records from the API.
# Request is executed immediately.
# @param [String] schema_id A string parameter filtering the results to return
# only the Event Types using a given schema.
# @param [String] page_token PageToken provided by the API
# @param [Integer] page_number Page Number, this value is simply for client state
# @param [Integer] page_size Number of records to return, defaults to 50
# @return [Page] Page of EventTypeInstance
def page(schema_id: :unset, page_token: :unset, page_number: :unset, page_size: :unset)
params = Twilio::Values.of({
'SchemaId' => schema_id,
'PageToken' => page_token,
'Page' => page_number,
'PageSize' => page_size,
})
response = @version.page('GET', @uri, params: params)
EventTypePage.new(@version, response, @solution)
end
##
# Retrieve a single page of EventTypeInstance records from the API.
# Request is executed immediately.
# @param [String] target_url API-generated URL for the requested results page
# @return [Page] Page of EventTypeInstance
def get_page(target_url)
response = @version.domain.request(
'GET',
target_url
)
EventTypePage.new(@version, response, @solution)
end
##
# Provide a user friendly representation
def to_s
'#<Twilio.Events.V1.EventTypeList>'
end
end
##
# PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
class EventTypePage < Page
##
# Initialize the EventTypePage
# @param [Version] version Version that contains the resource
# @param [Response] response Response from the API
# @param [Hash] solution Path solution for the resource
# @return [EventTypePage] EventTypePage
def initialize(version, response, solution)
super(version, response)
# Path Solution
@solution = solution
end
##
# Build an instance of EventTypeInstance
# @param [Hash] payload Payload response from the API
# @return [EventTypeInstance] EventTypeInstance
def get_instance(payload)
EventTypeInstance.new(@version, payload, )
end
##
# Provide a user friendly representation
def to_s
'<Twilio.Events.V1.EventTypePage>'
end
end
##
# PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
class EventTypeContext < InstanceContext
##
# Initialize the EventTypeContext
# @param [Version] version Version that contains the resource
# @param [String] type A string that uniquely identifies this Event Type.
# @return [EventTypeContext] EventTypeContext
def initialize(version, type)
super(version)
# Path Solution
@solution = {type: type, }
@uri = "/Types/#{@solution[:type]}"
end
##
# Fetch the EventTypeInstance
# @return [EventTypeInstance] Fetched EventTypeInstance
def fetch
payload = @version.fetch('GET', @uri)
EventTypeInstance.new(@version, payload, type: @solution[:type], )
end
##
# Provide a user friendly representation
def to_s
context = @solution.map {|k, v| "#{k}: #{v}"}.join(',')
"#<Twilio.Events.V1.EventTypeContext #{context}>"
end
##
# Provide a detailed, user friendly representation
def inspect
context = @solution.map {|k, v| "#{k}: #{v}"}.join(',')
"#<Twilio.Events.V1.EventTypeContext #{context}>"
end
end
##
# PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
class EventTypeInstance < InstanceResource
##
# Initialize the EventTypeInstance
# @param [Version] version Version that contains the resource
# @param [Hash] payload payload that contains response from Twilio
# @param [String] type A string that uniquely identifies this Event Type.
# @return [EventTypeInstance] EventTypeInstance
def initialize(version, payload, type: nil)
super(version)
# Marshaled Properties
@properties = {
'type' => payload['type'],
'schema_id' => payload['schema_id'],
'date_created' => Twilio.deserialize_iso8601_datetime(payload['date_created']),
'date_updated' => Twilio.deserialize_iso8601_datetime(payload['date_updated']),
'description' => payload['description'],
'url' => payload['url'],
'links' => payload['links'],
}
# Context
@instance_context = nil
@params = {'type' => type || @properties['type'], }
end
##
# Generate an instance context for the instance, the context is capable of
# performing various actions. All instance actions are proxied to the context
# @return [EventTypeContext] EventTypeContext for this EventTypeInstance
def context
unless @instance_context
@instance_context = EventTypeContext.new(@version, @params['type'], )
end
@instance_context
end
##
# @return [String] The Event Type identifier.
def type
@properties['type']
end
##
# @return [String] The Schema identifier for this Event Type.
def schema_id
@properties['schema_id']
end
##
# @return [Time] The date this Event Type was created.
def date_created
@properties['date_created']
end
##
# @return [Time] The date this Event Type was updated.
def date_updated
@properties['date_updated']
end
##
# @return [String] Event Type description.
def description
@properties['description']
end
##
# @return [String] The URL of this resource.
def url
@properties['url']
end
##
# @return [String] The links
def links
@properties['links']
end
##
# Fetch the EventTypeInstance
# @return [EventTypeInstance] Fetched EventTypeInstance
def fetch
context.fetch
end
##
# Provide a user friendly representation
def to_s
values = @params.map{|k, v| "#{k}: #{v}"}.join(" ")
"<Twilio.Events.V1.EventTypeInstance #{values}>"
end
##
# Provide a detailed, user friendly representation
def inspect
values = @properties.map{|k, v| "#{k}: #{v}"}.join(" ")
"<Twilio.Events.V1.EventTypeInstance #{values}>"
end
end
end
end
end
end | 37.469799 | 111 | 0.566452 |
87624c49bb9bf167c7dffb0f957c1dfd030742f3 | 1,101 | require 'spec_helper'
describe Membership, :type => :model do
let(:membership) { create(:membership) }
it "should have a valid factory" do
expect(build(:membership)).to be_valid
end
describe "#user_group" do
it "must have a user group" do
expect(build(:membership, user_group: nil)).to_not be_valid
end
it "should belong to a user group" do
expect(membership.user_group).to be_a(UserGroup)
end
end
describe "#user" do
it "must have a user" do
expect(build(:membership, user: nil)).to_not be_valid
end
it "should belong to a user" do
expect(membership.user).to be_a(User)
end
end
describe "#state" do
it "must have a state" do
expect(build(:membership, state: nil)).to_not be_valid
end
end
describe "#enable" do
it "should set state to active" do
m = membership
m.enable!
expect(m.active?).to be_truthy
end
end
describe "#disable" do
it "should set state to inactive" do
m = membership
m.disable!
expect(m.inactive?).to be_truthy
end
end
end
| 21.588235 | 65 | 0.645777 |
f76aabcb2970ec9a139a42f1f89f625ac81f98a2 | 267 | # RAILS_ENV=production ./script/rails runner script/patch_expense_create_at_yyyymm.rb
# One-off data migration: backfill Expense#create_at_yyyymm ("YYYYMM")
# from each record's created_at timestamp.
logger = RAILS_DEFAULT_LOGGER
logger.info("Start batch")
Expense.all.each do |expense|
  expense.create_at_yyyymm = expense.created_at.strftime("%Y%m")
  expense.save!
end
logger.info("End batch")
| 24.272727 | 87 | 0.76779 |
79ee3be9582b4cf660344bc41576d4bf0db61980 | 18,261 | require 'ostruct'
require 'base64'
require 'tmpdir'
require 'fileutils'
require 'timeout'
require 'iron_worker_ng/api_client'
module IronWorkerNG
class ClientProxyCaller
def initialize(client, prefix)
@client = client
@prefix = prefix
end
def method_missing(name, *args, &block)
full_name = @prefix.to_s + '_' + name.to_s
if @client.respond_to?(full_name)
@client.send(full_name, *args, &block)
else
super(name, *args, &block)
end
end
end
class Client
attr_reader :api
def initialize(options = {}, &block)
@api = IronWorkerNG::APIClient.new(options)
unless block.nil?
instance_eval(&block)
end
end
def options
@api.options
end
def token
@api.token
end
def jwt
@api.jwt
end
def project_id
@api.project_id
end
def method_missing(name, *args, &block)
if args.length == 0
IronWorkerNG::ClientProxyCaller.new(self, name)
else
super(name, *args, &block)
end
end
def stacks_list
@api.stacks_list
end
def codes_list(options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.list with options='#{options.to_s}'"
all = options[:all] || options['all']
if all
result = []
page = options[:page] || options['page'] || 0
per_page = options[:per_page] || options['per_page'] || 100
while true
next_codes = codes_list(options.merge({:page => page}).delete_if { |name| name == :all || name == 'all' })
result += next_codes
break if next_codes.length != per_page
page += 1
end
result
else
@api.codes_list(options)['codes'].map { |c| OpenStruct.new(c.merge('_id' => c['id'])) }
end
end
def codes_get(code_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.get with code_id='#{code_id}'"
c = @api.codes_get(code_id)
c['_id'] = c['id']
OpenStruct.new(c)
end
def codes_create(code, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.create with code='#{code.to_s}' and options='#{options.to_s}'"
if options[:config] && options[:config].is_a?(Hash)
options = options.dup
options[:config] = options[:config].to_json
end
options.merge!(stack:code.stack) if code.stack
container_file = code.create_container
if code.zip_package
res = nil
IronWorkerNG::Fetcher.fetch_to_file(code.zip_package) do |file|
res = @api.codes_create(code.name, file, 'sh', '__runner__.sh', options)
end
elsif code.remote_build_command.nil? && (not code.full_remote_build)
res = @api.codes_create(code.name, container_file, 'sh', '__runner__.sh', options)
else
builder_code_name = code.name + (code.name[0 .. 0].upcase == code.name[0 .. 0] ? '::Builder' : '::builder')
@api.codes_create(builder_code_name, container_file, 'sh', '__runner__.sh', options)
builder_task = tasks.create(builder_code_name, :code_name => code.name, :client_options => @api.options.to_json, :codes_create_options => options.to_json)
builder_task = tasks.wait_for(builder_task._id)
if builder_task.status != 'complete'
log = tasks.log(builder_task._id)
File.unlink(container_file)
IronCore::Logger.error 'IronWorkerNG', "Error while remote building worker\n" + log, IronCore::Error
end
res = JSON.parse(builder_task.msg)
end
File.unlink(container_file) if code.zip_package.nil?
res['_id'] = res['id']
OpenStruct.new(res)
end
def codes_create_async(code, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.create_async with code='#{code.to_s}' and options='#{options.to_s}'"
if options[:config] && options[:config].is_a?(Hash)
options = options.dup
options[:config] = options[:config].to_json
end
options.merge!(stack:code.stack) if code.stack
container_file = code.create_container
if code.remote_build_command.nil? && (not code.full_remote_build)
res = @api.codes_create(code.name, container_file, 'sh', '__runner__.sh', options)
else
builder_code_name = code.name + (code.name[0 .. 0].upcase == code.name[0 .. 0] ? '::Builder' : '::builder')
@api.codes_create(builder_code_name, container_file, 'sh', '__runner__.sh', options)
builder_task = tasks.create(builder_code_name, :code_name => code.name, :client_options => @api.options.to_json, :codes_create_options => options.to_json)
File.unlink(container_file)
return builder_task._id
end
File.unlink(container_file)
res['_id'] = res['id']
OpenStruct.new(res)
end
def codes_patch(name, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.patch with name='#{name}' and options='#{options.to_s}'"
code = codes.list(per_page: 100).find { |c| c.name == name }
if code.nil?
IronCore::Logger.error 'IronWorkerNG', "Can't find code with name='#{name}' to patch", IronCore::Error
end
patcher_code_name = name + (name[0 .. 0].upcase == name[0 .. 0] ? '::Patcher' : '::patcher')
exec_dir = ::Dir.tmpdir + '/' + ::Dir::Tmpname.make_tmpname('iron-worker-ng-', 'exec')
exec_file_name = exec_dir + '/patchcer.rb'
FileUtils.mkdir_p(exec_dir)
exec_file = File.open(exec_file_name, 'w')
exec_file.write <<EXEC_FILE
# #{IronWorkerNG.full_version}
File.open('.gemrc', 'w') do |gemrc|
gemrc.puts('gem: --no-ri --no-rdoc')
end
`gem install iron_worker_ng`
require 'iron_worker_ng'
client = IronWorkerNG::Client.new(JSON.parse(params[:client_options]))
original_code = client.codes.get(params[:code_id])
original_code_data = client.codes.download(params[:code_id])
`mkdir code`
original_code_zip = File.open('code/code.zip', 'w')
original_code_zip.write(original_code_data)
original_code_zip.close
`cd code && unzip code.zip && rm code.zip && cd ..`
patch_params = JSON.parse(params[:patch])
patch_params.each {|k, v| system("cat patch/\#{k} > code/\#{v}")}
code_container = IronWorkerNG::Code::Container::Zip.new
Dir['code/*'].each do |entry|
code_container.add(entry[5 .. -1], entry)
end
code_container.close
res = client.api.codes_create(original_code.name, code_container.name, 'sh', '__runner__.sh', :config => original_code.config)
res['_id'] = res['id']
res = OpenStruct.new(res)
client.tasks.set_progress(iron_task_id, :msg => res.marshal_dump.to_json)
EXEC_FILE
exec_file.close
patcher_code = IronWorkerNG::Code::Base.new
patcher_code.runtime = :ruby
patcher_code.name = patcher_code_name
patcher_code.exec(exec_file_name)
options[:patch].keys.each {|v| patcher_code.file(v, 'patch')}
patch_params = Hash[options[:patch].map {|k,v| [File.basename(k), v]}]
patcher_container_file = patcher_code.create_container
@api.codes_create(patcher_code_name, patcher_container_file, 'sh', '__runner__.sh', {})
FileUtils.rm_rf(exec_dir)
File.unlink(patcher_container_file)
patcher_task = tasks.create(patcher_code_name, :code_id => code._id, :client_options => @api.options.to_json, patch: patch_params.to_json)
patcher_task = tasks.wait_for(patcher_task._id)
if patcher_task.status != 'complete'
log = tasks.log(patcher_task._id)
IronCore::Logger.error 'IronWorkerNG', "Error while patching worker\n" + log, IronCore::Error
end
res = JSON.parse(patcher_task.msg)
res['_id'] = res['id']
OpenStruct.new(res)
end
def codes_delete(code_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.delete with code_id='#{code_id}'"
@api.codes_delete(code_id)
true
end
def codes_revisions(code_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.revisions with code_id='#{code_id}' and options='#{options.to_s}'"
@api.codes_revisions(code_id, options)['revisions'].map { |c| OpenStruct.new(c.merge('_id' => c['id'])) }
end
def codes_download(code_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.download with code_id='#{code_id}' and options='#{options.to_s}'"
@api.codes_download(code_id, options)
end
def codes_pause_task_queue(code_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.pause_task_queue with code_id='#{code_id}' and options='#{options.to_s}'"
res = @api.codes_pause_task_queue(code_id, options)
OpenStruct.new(res)
end
def codes_resume_task_queue(code_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling codes.resume_task_queue with code_id='#{code_id}' and options='#{options.to_s}'"
res = @api.codes_resume_task_queue(code_id, options)
OpenStruct.new(res)
end
def tasks_list(options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.list with options='#{options.to_s}'"
@api.tasks_list(options)['tasks'].map { |t| OpenStruct.new(t.merge('_id' => t['id'])) }
end
def tasks_get(task_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.get with task_id='#{task_id}'"
t = @api.tasks_get(task_id)
t['_id'] = t['id']
OpenStruct.new(t)
end
def tasks_create(code_name, params = {}, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.create with code_name='#{code_name}', params='#{params.to_s}' and options='#{options.to_s}'"
res = @api.tasks_create(code_name, params.is_a?(String) ? params : params.to_json, options)
t = res['tasks'][0]
t['_id'] = t['id']
OpenStruct.new(t)
end
def tasks_create_legacy(code_name, params = {}, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.create_legacy with code_name='#{code_name}', params='#{params.to_s}' and options='#{options.to_s}'"
res = @api.tasks_create(code_name, params_for_legacy(code_name, params), options)
t = res['tasks'][0]
t['_id'] = t['id']
OpenStruct.new(t)
end
def tasks_run(code_name, params = {}, options = {})
options['sync'] = true
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.run with code_name='#{code_name}', params='#{params.to_s}' and options='#{options.to_s}'"
res = @api.tasks_create(code_name, params.is_a?(String) ? params : params.to_json, options)
t = res['tasks'][0]
task_id = t['id']
tasks_wait_for(task_id)
tasks_wait_for_stdout(task_id)
end
def tasks_cancel(task_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.cancel with task_id='#{task_id}'"
@api.tasks_cancel(task_id)
true
end
def tasks_cancel_all(code_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.cancel_all with code_id='#{code_id}'"
@api.tasks_cancel_all(code_id)
true
end
def tasks_log(task_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.log with task_id='#{task_id}'"
if block_given?
@api.tasks_log(task_id) { |chunk| yield(chunk) }
else
@api.tasks_log(task_id)
end
end
def tasks_stdout(task_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.stdout with task_id='#{task_id}'"
if block_given?
@api.tasks_stdout(task_id) {|chunk| yield(chunk)}
else
@api.tasks_stdout(task_id)
end
end
def tasks_set_progress(task_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.set_progress with task_id='#{task_id}' and options='#{options.to_s}'"
@api.tasks_set_progress(task_id, options)
true
end
def tasks_wait_for(task_id, options = {}, &block)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.wait_for with task_id='#{task_id}' and options='#{options.to_s}'"
options[:sleep] ||= options['sleep'] || 0.5
task = tasks_get(task_id)
Timeout::timeout(task.timeout + 15) do
while task.status == 'queued' || task.status == 'preparing' || task.status == 'running'
block.call(task) unless block.nil?
sleep options[:sleep]
options[:sleep] = sleep_between_retries options[:sleep]
task = tasks_get(task_id)
end
end
task
end
def tasks_wait_for_stdout(task_id, options = {}, &block)
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.wait_for_stdout with task_id='#{task_id}' and options='#{options.to_s}'"
options[:sleep] ||= options['sleep'] || 0.5
Timeout::timeout(60) do
while true
begin
stdout = tasks_stdout(task_id)
block.call(stdout) unless block.nil?
return stdout
rescue Rest::HttpError => e
raise e if e.code != 404
end
sleep options[:sleep]
options[:sleep] = sleep_between_retries options[:sleep]
end
end
end
def tasks_retry(task_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling tasks.retry with task_id='#{task_id}' and options='#{options.to_s}'"
res = @api.tasks_retry(task_id, options)
t = res['tasks'][0]
t['_id'] = t['id']
OpenStruct.new(t)
end
def schedules_list(options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling schedules.list with options='#{options.to_s}'"
@api.schedules_list(options)['schedules'].map { |s| OpenStruct.new(s.merge('_id' => s['id'])) }
end
def schedules_get(schedule_id)
IronCore::Logger.debug 'IronWorkerNG', "Calling schedules.get with schedule_id='#{schedule_id}"
s = @api.schedules_get(schedule_id)
s['_id'] = s['id']
OpenStruct.new(s)
end
def schedules_create(code_name, params = {}, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling schedules.create with code_name='#{code_name}', params='#{params.to_s}' and options='#{options.to_s}'"
res = @api.schedules_create(code_name, params.is_a?(String) ? params : params.to_json, options)
s = res['schedules'][0]
s['_id'] = s['id']
OpenStruct.new(s)
end
def schedules_update(id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling schedules.update with id='#{id}', options='#{options.to_s}'"
res = @api.schedules_update(id, options)
OpenStruct.new(res)
end
def schedules_create_legacy(code_name, params = {}, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling schedules.create_legacy with code_name='#{code_name}', params='#{params.to_s}' and options='#{options.to_s}'"
res = @api.schedules_create(code_name, params_for_legacy(code_name, params), options)
s = res['schedules'][0]
s['_id'] = s['id']
OpenStruct.new(s)
end
def schedules_cancel(schedule_id, options = {})
IronCore::Logger.debug 'IronWorkerNG', "Calling schedules.cancel with schedule_id='#{schedule_id}, options='#{options.to_s}'"
@api.schedules_cancel(schedule_id, options)
true
end
def projects_get
IronCore::Logger.debug 'IronWorkerNG', "Calling projects.get"
res = @api.projects_get
res['_id'] = res['id']
OpenStruct.new(res)
end
# Lists the clusters visible to the current credentials.
def clusters_list(options = {})
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.list"

  clusters_list_base(options)
end
# Fetches a single cluster by id as an OpenStruct with '_id' aliased to 'id'.
def clusters_get(id)
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.get"
  cluster = @api.clusters_get(id)['cluster']
  OpenStruct.new(cluster.merge('_id' => cluster['id']))
end
# Fetches the credentials for a cluster and wraps them in an OpenStruct.
def clusters_credentials(id)
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.credentials"

  OpenStruct.new(@api.clusters_credentials(id))
end
# Creates a cluster and returns the API response as an OpenStruct.
def clusters_create(params = {})
  # String interpolation calls #to_s implicitly, so no explicit to_s is needed.
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.create with params='#{params}'"
  res = @api.clusters_create(params)
  OpenStruct.new(res)
end
# Updates a cluster and returns the API response as an OpenStruct.
def clusters_update(cluster_id, params = {})
  # String interpolation calls #to_s implicitly, so no explicit to_s is needed.
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.update with params='#{params}'"
  res = @api.clusters_update(cluster_id, params)
  OpenStruct.new(res)
end
# Deletes a cluster and returns the API response as an OpenStruct.
def clusters_delete(cluster_id)
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.delete with cluster_id='#{cluster_id}'"

  OpenStruct.new(@api.clusters_delete(cluster_id))
end
# Shares a cluster with another user and returns the response as an OpenStruct.
def clusters_share(cluster_id, params = {})
  # String interpolation calls #to_s implicitly, so no explicit to_s is needed.
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.share with params='#{params}'"
  res = @api.clusters_share(cluster_id, params)
  OpenStruct.new(res)
end
# Lists the clusters that were shared with the current user.
def clusters_shared_list
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.shared.list"

  clusters_list_base(shared: true)
end
# Lists internal clusters.
def clusters_internal_list
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.internal.list"

  clusters_list_base(internal: true)
end
# Revokes a user's access to a shared cluster; returns the response
# wrapped in an OpenStruct.
def clusters_unshare(cluster_id, user_id)
  IronCore::Logger.debug 'IronWorkerNG', "Calling clusters.unshare with cluster_id='#{cluster_id}', user_id='#{user_id}'"

  OpenStruct.new(@api.clusters_unshare(cluster_id, user_id))
end
# Builds the legacy (SimpleWorker-style) payload used by the old API.
#
# code_name - String name of the worker class to run.
# params    - Hash or JSON String of attributes to pass to the worker.
#
# Returns a JSON String containing the class name, the Base64-encoded
# attributes (keys prefixed with '@', instance-variable style), and the
# project credentials.
def params_for_legacy(code_name, params = {})
  params = JSON.parse(params) if params.is_a?(String)

  # Legacy workers expect instance-variable style keys ('@name' => value).
  attrs = params.each_with_object({}) do |(key, value), acc|
    acc['@' + key.to_s] = value
  end

  {
    :class_name => code_name,
    :attr_encoded => Base64.encode64(attrs.to_json),
    :sw_config => { :project_id => project_id, :token => token }
  }.to_json
end
private
# Shared implementation behind the clusters_* listing methods: fetches the
# cluster list and wraps each entry in an OpenStruct with '_id' aliased to 'id'.
def clusters_list_base(options = {})
  clusters = @api.clusters_list(options)['clusters'] || []
  clusters.map { |cluster| OpenStruct.new(cluster.merge('_id' => cluster['id'])) }
end
# Computes the back-off delay before the next retry: the previous delay is
# doubled until it reaches the 60-second ceiling, after which it stays flat.
def sleep_between_retries(previous_duration)
  previous_duration >= 60 ? previous_duration : previous_duration * 2
end
end
end
| 31.268836 | 163 | 0.645857 |
87442493f7b5de06b88204055aef263dfa48dfe2 | 231 | class CreateOrders < ActiveRecord::Migration[6.0]
def change
create_table :orders do |t|
t.integer :qty
t.decimal :amount
t.integer :product_id
t.integer :user_id
t.timestamps
end
end
end
| 17.769231 | 49 | 0.640693 |
e8e9c263d2324b71af99ec2ac33f8437b4d66cc5 | 35,422 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Issuable do
include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
let(:issuable_class) { Issue }
let(:issue) { create(:issue, title: 'An issue', description: 'A description') }
let(:user) { create(:user) }
describe "Associations" do
subject { build(:issue) }
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:author) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:labels) }
context 'Notes' do
let!(:note) { create(:note, noteable: issue, project: issue.project) }
let(:scoped_issue) { Issue.includes(notes: :author).find(issue.id) }
it 'indicates if the notes have their authors loaded' do
expect(issue.notes).not_to be_authors_loaded
expect(scoped_issue.notes).to be_authors_loaded
end
describe 'note_authors' do
it { is_expected.to have_many(:note_authors).through(:notes) }
end
describe 'user_note_authors' do
let_it_be(:system_user) { create(:user) }
let!(:system_note) { create(:system_note, author: system_user, noteable: issue, project: issue.project) }
it 'filters the authors to those of user notes' do
authors = issue.user_note_authors
expect(authors).to include(note.author)
expect(authors).not_to include(system_user)
end
end
end
end
describe 'Included modules' do
let(:described_class) { issuable_class }
it { is_expected.to include_module(Awardable) }
end
describe "Validation" do
context 'general validations' do
subject { build(:issue) }
before do
allow(InternalId).to receive(:generate_next).and_return(nil)
end
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:author) }
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_length_of(:title).is_at_most(described_class::TITLE_LENGTH_MAX) }
it { is_expected.to validate_length_of(:description).is_at_most(described_class::DESCRIPTION_LENGTH_MAX).on(:create) }
it_behaves_like 'validates description length with custom validation' do
before do
allow(InternalId).to receive(:generate_next).and_call_original
end
end
it_behaves_like 'truncates the description to its allowed maximum length on import'
end
end
describe "Scope" do
it { expect(issuable_class).to respond_to(:opened) }
it { expect(issuable_class).to respond_to(:closed) }
it { expect(issuable_class).to respond_to(:assigned) }
describe '.includes_for_bulk_update' do
before do
stub_const('Example', Class.new(ActiveRecord::Base))
Example.class_eval do
include Issuable # adds :labels and :metrics, among others
belongs_to :author
has_many :assignees
end
end
it 'includes available associations' do
expect(Example.includes_for_bulk_update.includes_values).to eq([:author, :assignees, :labels, :metrics])
end
end
end
describe 'author_name' do
it 'is delegated to author' do
expect(issue.author_name).to eq issue.author.name
end
it 'returns nil when author is nil' do
issue.author_id = nil
issue.save!(validate: false)
expect(issue.author_name).to eq nil
end
end
describe '.initialize' do
it 'maps the state to the right state_id' do
described_class::STATE_ID_MAP.each do |key, value|
issuable = MergeRequest.new(state: key)
expect(issuable.state).to eq(key)
expect(issuable.state_id).to eq(value)
end
end
it 'maps a string version of the state to the right state_id' do
described_class::STATE_ID_MAP.each do |key, value|
issuable = MergeRequest.new('state' => key)
expect(issuable.state).to eq(key)
expect(issuable.state_id).to eq(value)
end
end
it 'gives preference to state_id if present' do
issuable = MergeRequest.new('state' => 'opened',
'state_id' => described_class::STATE_ID_MAP['merged'])
expect(issuable.state).to eq('merged')
expect(issuable.state_id).to eq(described_class::STATE_ID_MAP['merged'])
end
end
describe '.any_label' do
let_it_be(:issue_with_label) { create(:labeled_issue, labels: [create(:label)]) }
let_it_be(:issue_with_multiple_labels) { create(:labeled_issue, labels: [create(:label), create(:label)]) }
let_it_be(:issue_without_label) { create(:issue) }
it 'returns an issuable with at least one label' do
expect(issuable_class.any_label).to match_array([issue_with_label, issue_with_multiple_labels])
end
context 'for custom sorting' do
it 'returns an issuable with at least one label' do
expect(issuable_class.any_label('created_at')).to eq([issue_with_label, issue_with_multiple_labels])
end
end
end
describe ".search" do
let!(:searchable_issue) { create(:issue, title: "Searchable awesome issue") }
let!(:searchable_issue2) { create(:issue, title: 'Aw') }
it 'returns issues with a matching title' do
expect(issuable_class.search(searchable_issue.title))
.to eq([searchable_issue])
end
it 'returns issues with a partially matching title' do
expect(issuable_class.search('able')).to eq([searchable_issue])
end
it 'returns issues with a matching title regardless of the casing' do
expect(issuable_class.search(searchable_issue.title.upcase))
.to eq([searchable_issue])
end
it 'returns issues with a fuzzy matching title' do
expect(issuable_class.search('searchable issue')).to eq([searchable_issue])
end
it 'returns issues with a matching title for a query shorter than 3 chars' do
expect(issuable_class.search(searchable_issue2.title.downcase)).to eq([searchable_issue2])
end
end
describe ".full_search" do
let!(:searchable_issue) do
create(:issue, title: "Searchable awesome issue", description: 'Many cute kittens')
end
let!(:searchable_issue2) { create(:issue, title: "Aw", description: "Cu") }
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title))
.to eq([searchable_issue])
end
it 'returns issues with a partially matching title' do
expect(issuable_class.full_search('able')).to eq([searchable_issue])
end
it 'returns issues with a matching title regardless of the casing' do
expect(issuable_class.full_search(searchable_issue.title.upcase))
.to eq([searchable_issue])
end
it 'returns issues with a fuzzy matching title' do
expect(issuable_class.full_search('searchable issue')).to eq([searchable_issue])
end
it 'returns issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description))
.to eq([searchable_issue])
end
it 'returns issues with a partially matching description' do
expect(issuable_class.full_search('cut')).to eq([searchable_issue])
end
it 'returns issues with a matching description regardless of the casing' do
expect(issuable_class.full_search(searchable_issue.description.upcase))
.to eq([searchable_issue])
end
it 'returns issues with a fuzzy matching description' do
expect(issuable_class.full_search('many kittens')).to eq([searchable_issue])
end
it 'returns issues with a matching description for a query shorter than 3 chars' do
expect(issuable_class.full_search(searchable_issue2.description.downcase)).to eq([searchable_issue2])
end
it 'returns issues with a fuzzy matching description for a query shorter than 3 chars if told to do so' do
search = searchable_issue2.description.downcase.scan(/\w+/).sample[-1]
expect(issuable_class.full_search(search, use_minimum_char_limit: false)).to include(searchable_issue2)
end
it 'returns issues with a fuzzy matching title for a query shorter than 3 chars if told to do so' do
expect(issuable_class.full_search('i', use_minimum_char_limit: false)).to include(searchable_issue)
end
context 'when matching columns is "title"' do
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: 'title'))
.to eq([searchable_issue])
end
it 'returns no issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description, matched_columns: 'title'))
.to be_empty
end
end
context 'when matching columns is "description"' do
it 'returns no issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: 'description'))
.to be_empty
end
it 'returns issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description, matched_columns: 'description'))
.to eq([searchable_issue])
end
end
context 'when matching columns is "title,description"' do
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: 'title,description'))
.to eq([searchable_issue])
end
it 'returns issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description, matched_columns: 'title,description'))
.to eq([searchable_issue])
end
end
context 'when matching columns is nil"' do
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: nil))
.to eq([searchable_issue])
end
it 'returns issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description, matched_columns: nil))
.to eq([searchable_issue])
end
end
context 'when matching columns is "invalid"' do
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: 'invalid'))
.to eq([searchable_issue])
end
it 'returns issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description, matched_columns: 'invalid'))
.to eq([searchable_issue])
end
end
context 'when matching columns is "title,invalid"' do
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: 'title,invalid'))
.to eq([searchable_issue])
end
it 'returns no issues with a matching description' do
expect(issuable_class.full_search(searchable_issue.description, matched_columns: 'title,invalid'))
.to be_empty
end
end
end
# .to_ability_name maps the issuable class to the underscored name used by
# the permission (ability) system.
describe '.to_ability_name' do
  it { expect(Issue.to_ability_name).to eq("issue") }
  it { expect(MergeRequest.to_ability_name).to eq("merge_request") }
end
describe "#today?" do
it "returns true when created today" do
# Avoid timezone differences and just return exactly what we want
allow(Date).to receive(:today).and_return(issue.created_at.to_date)
expect(issue.today?).to be_truthy
end
it "returns false when not created today" do
allow(Date).to receive(:today).and_return(Date.yesterday)
expect(issue.today?).to be_falsey
end
end
describe "#new?" do
it "returns false when created 30 hours ago" do
allow(issue).to receive(:created_at).and_return(Time.current - 30.hours)
expect(issue.new?).to be_falsey
end
it "returns true when created 20 hours ago" do
allow(issue).to receive(:created_at).and_return(Time.current - 20.hours)
expect(issue.new?).to be_truthy
end
end
describe "#sort_by_attribute" do
let(:project) { create(:project) }
context "by milestone due date" do
# Correct order is:
# Issues/MRs with milestones ordered by date
# Issues/MRs with milestones without dates
# Issues/MRs without milestones
let!(:issue) { create(:issue, project: project) }
let!(:early_milestone) { create(:milestone, project: project, due_date: 10.days.from_now) }
let!(:late_milestone) { create(:milestone, project: project, due_date: 30.days.from_now) }
let!(:issue1) { create(:issue, project: project, milestone: early_milestone) }
let!(:issue2) { create(:issue, project: project, milestone: late_milestone) }
let!(:issue3) { create(:issue, project: project) }
it "sorts desc" do
issues = project.issues.sort_by_attribute('milestone_due_desc')
expect(issues).to match_array([issue2, issue1, issue, issue3])
end
it "sorts asc" do
issues = project.issues.sort_by_attribute('milestone_due_asc')
expect(issues).to match_array([issue1, issue2, issue, issue3])
end
end
context 'when all of the results are level on the sort key' do
let!(:issues) do
create_list(:issue, 10, project: project)
end
it 'has no duplicates across pages' do
sorted_issue_ids = 1.upto(10).map do |i|
project.issues.sort_by_attribute('milestone_due_desc').page(i).per(1).first.id
end
expect(sorted_issue_ids).to eq(sorted_issue_ids.uniq)
end
end
context 'by title' do
let!(:issue1) { create(:issue, project: project, title: 'foo') }
let!(:issue2) { create(:issue, project: project, title: 'bar') }
let!(:issue3) { create(:issue, project: project, title: 'baz') }
let!(:issue4) { create(:issue, project: project, title: 'Baz 2') }
it 'sorts asc' do
issues = project.issues.sort_by_attribute('title_asc')
expect(issues).to eq([issue2, issue3, issue4, issue1])
end
it 'sorts desc' do
issues = project.issues.sort_by_attribute('title_desc')
expect(issues).to eq([issue1, issue4, issue3, issue2])
end
end
end
describe '#subscribed?' do
  let(:project) { issue.project }

  context 'user is not a participant in the issue' do
    before do
      allow(issue).to receive(:participants).with(user).and_return([])
    end

    it 'returns false when no subscription exists' do
      expect(issue.subscribed?(user, project)).to be_falsey
    end

    it 'returns true when a subscription exists and subscribed is true' do
      issue.subscriptions.create!(user: user, project: project, subscribed: true)

      expect(issue.subscribed?(user, project)).to be_truthy
    end

    it 'returns false when a subscription exists and subscribed is false' do
      issue.subscriptions.create!(user: user, project: project, subscribed: false)

      expect(issue.subscribed?(user, project)).to be_falsey
    end
  end

  context 'user is a participant in the issue' do
    before do
      allow(issue).to receive(:participant?).with(user).and_return(true)
    end

    # Participants are implicitly subscribed unless they explicitly opt out.
    it 'returns true when no subscription exists' do
      expect(issue.subscribed?(user, project)).to be_truthy
    end

    it 'returns true when a subscription exists and subscribed is true' do
      issue.subscriptions.create!(user: user, project: project, subscribed: true)

      expect(issue.subscribed?(user, project)).to be_truthy
    end

    it 'returns false when a subscription exists and subscribed is false' do
      issue.subscriptions.create!(user: user, project: project, subscribed: false)

      expect(issue.subscribed?(user, project)).to be_falsey
    end
  end
end
describe '#time_estimate=' do
it 'coerces the value below Gitlab::Database::MAX_INT_VALUE' do
expect { issue.time_estimate = 100 }.to change { issue.time_estimate }.to(100)
expect { issue.time_estimate = Gitlab::Database::MAX_INT_VALUE + 100 }.to change { issue.time_estimate }.to(Gitlab::Database::MAX_INT_VALUE)
end
it 'skips coercion for not Integer values' do
expect { issue.time_estimate = nil }.to change { issue.time_estimate }.to(nil)
expect { issue.time_estimate = 'invalid time' }.not_to raise_error
expect { issue.time_estimate = 22.33 }.not_to raise_error
end
end
describe '#to_hook_data' do
let(:builder) { double }
context 'when old_associations is empty' do
let(:label) { create(:label) }
before do
issue.update!(labels: [label])
issue.assignees << user
issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build and does not set labels, assignees, nor total_time_spent' do
expect(builder).to receive(:build).with(
user: user,
changes: {})
# In some cases, old_associations is empty, e.g. on a close event
issue.to_hook_data(user)
end
end
context 'labels are updated' do
let(:labels) { create_list(:label, 2) }
before do
issue.update!(labels: [labels[1]])
expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]]
))
issue.to_hook_data(user, old_associations: { labels: [labels[0]] })
end
end
context 'total_time_spent is updated' do
before do
issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
issue.save!
expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'total_time_spent' => [1, 2]
))
issue.to_hook_data(user, old_associations: { total_time_spent: 1 })
end
end
context 'issue is assigned' do
let(:user2) { create(:user) }
before do
issue.assignees << user << user2
expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'assignees' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]]
))
issue.to_hook_data(user, old_associations: { assignees: [user] })
end
end
context 'merge_request is assigned' do
let(:merge_request) { create(:merge_request) }
let(:user2) { create(:user) }
before do
merge_request.update!(assignees: [user])
merge_request.update!(assignees: [user, user2])
expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(merge_request).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'assignees' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]]
))
merge_request.to_hook_data(user, old_associations: { assignees: [user] })
end
end
context 'incident severity is updated' do
let(:issue) { create(:incident) }
before do
issue.update!(issuable_severity_attributes: { severity: 'low' })
expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'severity' => %w(unknown low)
))
issue.to_hook_data(user, old_associations: { severity: 'unknown' })
end
end
context 'escalation status is updated' do
let(:issue) { create(:incident, :with_escalation_status) }
let(:acknowledged) { IncidentManagement::IssuableEscalationStatus::STATUSES[:acknowledged] }
before do
issue.escalation_status.update!(status: acknowledged)
expect(Gitlab::DataBuilder::Issuable).to receive(:new).with(issue).and_return(builder)
end
it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
'escalation_status' => %i(triggered acknowledged)
))
issue.to_hook_data(user, old_associations: { escalation_status: :triggered })
end
end
end
describe '#labels_array' do
let(:project) { create(:project) }
let(:bug) { create(:label, project: project, title: 'bug') }
let(:issue) { create(:issue, project: project) }
before do
issue.labels << bug
end
it 'loads the association and returns it as an array' do
expect(issue.reload.labels_array).to eq([bug])
end
end
describe "#labels_hook_attrs" do
let(:project) { create(:project) }
let(:label) { create(:label) }
let(:issue) { create(:labeled_issue, project: project, labels: [label]) }
it "returns a list of label hook attributes" do
expect(issue.labels_hook_attrs).to match_array([label.hook_attrs])
end
end
describe '.labels_hash' do
let(:feature_label) { create(:label, title: 'Feature') }
let(:second_label) { create(:label, title: 'Second Label') }
let!(:issues) { create_list(:labeled_issue, 3, labels: [feature_label, second_label]) }
let(:issue_id) { issues.first.id }
it 'maps issue ids to labels titles' do
expect(Issue.labels_hash[issue_id]).to include('Feature')
end
it 'works on relations filtered by multiple labels' do
relation = Issue.with_label(['Feature', 'Second Label'])
expect(relation.labels_hash[issue_id]).to include('Feature', 'Second Label')
end
# This tests the workaround for the lack of a NOT NULL constraint in
# label_links.label_id:
# https://gitlab.com/gitlab-org/gitlab/issues/197307
context 'with a NULL label ID in the link' do
let(:issue) { create(:labeled_issue, labels: [feature_label, second_label]) }
before do
label_link = issue.label_links.find_by(label_id: second_label.id)
label_link.label_id = nil
label_link.save!(validate: false)
end
it 'filters out bad labels' do
expect(Issue.where(id: issue.id).labels_hash[issue.id]).to match_array(['Feature'])
end
end
end
describe '#user_notes_count' do
let(:project) { create(:project) }
let(:issue1) { create(:issue, project: project) }
let(:issue2) { create(:issue, project: project) }
before do
create_list(:note, 3, noteable: issue1, project: project)
create_list(:note, 6, noteable: issue2, project: project)
end
it 'counts the user notes' do
expect(issue1.user_notes_count).to be(3)
expect(issue2.user_notes_count).to be(6)
end
end
describe "votes" do
let(:project) { issue.project }
before do
create(:award_emoji, :upvote, awardable: issue)
create(:award_emoji, :downvote, awardable: issue)
end
it "returns correct values" do
expect(issue.upvotes).to eq(1)
expect(issue.downvotes).to eq(1)
end
end
describe '.order_due_date_and_labels_priority' do
let(:project) { create(:project) }
def create_issue(milestone, labels)
create(:labeled_issue, milestone: milestone, labels: labels, project: project)
end
it 'sorts issues in order of milestone due date, then label priority' do
first_priority = create(:label, project: project, priority: 1)
second_priority = create(:label, project: project, priority: 2)
no_priority = create(:label, project: project)
first_milestone = create(:milestone, project: project, due_date: Time.current)
second_milestone = create(:milestone, project: project, due_date: Time.current + 1.month)
third_milestone = create(:milestone, project: project)
# The issues here are ordered by label priority, to ensure that we don't
# accidentally just sort by creation date.
second_milestone_first_priority = create_issue(second_milestone, [first_priority, second_priority, no_priority])
third_milestone_first_priority = create_issue(third_milestone, [first_priority, second_priority, no_priority])
first_milestone_second_priority = create_issue(first_milestone, [second_priority, no_priority])
second_milestone_second_priority = create_issue(second_milestone, [second_priority, no_priority])
no_milestone_second_priority = create_issue(nil, [second_priority, no_priority])
first_milestone_no_priority = create_issue(first_milestone, [no_priority])
second_milestone_no_labels = create_issue(second_milestone, [])
third_milestone_no_priority = create_issue(third_milestone, [no_priority])
result = Issue.order_due_date_and_labels_priority
expect(result).to eq([first_milestone_second_priority,
first_milestone_no_priority,
second_milestone_first_priority,
second_milestone_second_priority,
second_milestone_no_labels,
third_milestone_first_priority,
no_milestone_second_priority,
third_milestone_no_priority])
end
end
describe '.order_labels_priority' do
let(:label_1) { create(:label, title: 'label_1', project: issue.project, priority: 1) }
let(:label_2) { create(:label, title: 'label_2', project: issue.project, priority: 2) }
subject { Issue.order_labels_priority(excluded_labels: ['label_1']).first.highest_priority }
before do
issue.labels << label_1
issue.labels << label_2
end
it { is_expected.to eq(2) }
end
describe ".with_label" do
let(:project) { create(:project, :public) }
let(:bug) { create(:label, project: project, title: 'bug') }
let(:feature) { create(:label, project: project, title: 'feature') }
let(:enhancement) { create(:label, project: project, title: 'enhancement') }
let(:issue1) { create(:issue, title: "Bugfix1", project: project) }
let(:issue2) { create(:issue, title: "Bugfix2", project: project) }
let(:issue3) { create(:issue, title: "Feature1", project: project) }
before do
issue1.labels << bug
issue1.labels << feature
issue2.labels << bug
issue2.labels << enhancement
issue3.labels << feature
end
it 'finds the correct issue containing just enhancement label' do
expect(Issue.with_label(enhancement.title)).to match_array([issue2])
end
it 'finds the correct issues containing the same label' do
expect(Issue.with_label(bug.title)).to match_array([issue1, issue2])
end
it 'finds the correct issues containing only both labels' do
expect(Issue.with_label([bug.title, enhancement.title])).to match_array([issue2])
end
end
describe '#spend_time' do
let(:user) { create(:user) }
let(:issue) { create(:issue) }
def spend_time(seconds)
issue.spend_time(duration: seconds, user_id: user.id)
issue.save!
end
context 'adding time' do
it 'updates the total time spent' do
spend_time(1800)
expect(issue.total_time_spent).to eq(1800)
end
it 'stores the time change' do
spend_time(1800)
expect(issue.time_change).to eq(1800)
end
it 'updates issues updated_at' do
issue
travel_to(2.minutes.from_now) do
expect { spend_time(1800) }.to change { issue.updated_at }
end
end
end
context 'subtracting time' do
before do
spend_time(1800)
end
it 'updates the total time spent' do
spend_time(-900)
expect(issue.total_time_spent).to eq(900)
end
it 'stores negative time change' do
spend_time(-900)
expect(issue.time_change).to eq(-900)
end
context 'when time to subtract exceeds the total time spent' do
it 'raise a validation error' do
travel_to(1.minute.from_now) do
expect do
expect do
spend_time(-3600)
end.to raise_error(ActiveRecord::RecordInvalid)
end.not_to change { issue.updated_at }
end
end
end
end
end
describe '#first_contribution?' do
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
let(:other_project) { create(:project) }
let(:owner) { create(:owner) }
let(:maintainer) { create(:user) }
let(:reporter) { create(:user) }
let(:guest) { create(:user) }
let(:contributor) { create(:user) }
let(:first_time_contributor) { create(:user) }
before do
group.add_owner(owner)
project.add_maintainer(maintainer)
project.add_reporter(reporter)
project.add_guest(guest)
project.add_guest(contributor)
project.add_guest(first_time_contributor)
end
let(:merged_mr) { create(:merge_request, :merged, author: contributor, target_project: project, source_project: project) }
let(:open_mr) { create(:merge_request, author: first_time_contributor, target_project: project, source_project: project) }
let(:merged_mr_other_project) { create(:merge_request, :merged, author: first_time_contributor, target_project: other_project, source_project: other_project) }
context "for merge requests" do
it "is false for MAINTAINER" do
mr = create(:merge_request, author: maintainer, target_project: project, source_project: project)
expect(mr).not_to be_first_contribution
end
it "is false for OWNER" do
mr = create(:merge_request, author: owner, target_project: project, source_project: project)
expect(mr).not_to be_first_contribution
end
it "is false for REPORTER" do
mr = create(:merge_request, author: reporter, target_project: project, source_project: project)
expect(mr).not_to be_first_contribution
end
it "is true when you don't have any merged MR" do
expect(open_mr).to be_first_contribution
expect(merged_mr).not_to be_first_contribution
end
it "handles multiple projects separately" do
expect(open_mr).to be_first_contribution
expect(merged_mr_other_project).not_to be_first_contribution
end
end
context "for issues" do
let(:contributor_issue) { create(:issue, author: contributor, project: project) }
let(:first_time_contributor_issue) { create(:issue, author: first_time_contributor, project: project) }
it "is false even without merged MR" do
expect(merged_mr).to be
expect(first_time_contributor_issue).not_to be_first_contribution
expect(contributor_issue).not_to be_first_contribution
end
end
end
describe '#matches_cross_reference_regex?' do
context "issue description with long path string" do
let(:mentionable) { build(:issue, description: "/a" * 50000) }
it_behaves_like 'matches_cross_reference_regex? fails fast'
end
context "note with long path string" do
let(:mentionable) { build(:note, note: "/a" * 50000) }
it_behaves_like 'matches_cross_reference_regex? fails fast'
end
context "note with long path string" do
let(:project) { create(:project, :public, :repository) }
let(:mentionable) { project.commit }
before do
expect(mentionable.raw).to receive(:message).and_return("/a" * 50000)
end
it_behaves_like 'matches_cross_reference_regex? fails fast'
end
end
describe '#supports_time_tracking?' do
where(:issuable_type, :supports_time_tracking) do
:issue | true
:incident | true
:merge_request | true
end
with_them do
let(:issuable) { build_stubbed(issuable_type) }
subject { issuable.supports_time_tracking? }
it { is_expected.to eq(supports_time_tracking) }
end
end
describe '#supports_severity?' do
where(:issuable_type, :supports_severity) do
:issue | false
:incident | true
:merge_request | false
end
with_them do
let(:issuable) { build_stubbed(issuable_type) }
subject { issuable.supports_severity? }
it { is_expected.to eq(supports_severity) }
end
end
describe '#supports_escalation?' do
where(:issuable_type, :supports_escalation) do
:issue | false
:incident | true
:merge_request | false
end
with_them do
let(:issuable) { build_stubbed(issuable_type) }
subject { issuable.supports_escalation? }
it { is_expected.to eq(supports_escalation) }
context 'with feature disabled' do
before do
stub_feature_flags(incident_escalations: false)
end
it { is_expected.to eq(false) }
end
end
end
describe '#incident?' do
where(:issuable_type, :incident) do
:issue | false
:incident | true
:merge_request | false
end
with_them do
let(:issuable) { build_stubbed(issuable_type) }
subject { issuable.incident? }
it { is_expected.to eq(incident) }
end
end
describe '#supports_issue_type?' do
where(:issuable_type, :supports_issue_type) do
:issue | true
:merge_request | false
end
with_them do
let(:issuable) { build_stubbed(issuable_type) }
subject { issuable.supports_issue_type? }
it { is_expected.to eq(supports_issue_type) }
end
end
describe '#severity' do
  subject { issuable.severity }

  context 'when issuable is not an incident' do
    where(:issuable_type, :severity) do
      :issue         | 'unknown'
      :merge_request | 'unknown'
    end

    with_them do
      let(:issuable) { build_stubbed(issuable_type) }

      it { is_expected.to eq(severity) }
    end
  end

  context 'when issuable type is an incident' do
    let!(:issuable) { build_stubbed(:incident) }

    context 'when incident does not have issuable_severity' do
      it 'returns default severity' do
        is_expected.to eq(IssuableSeverity::DEFAULT)
      end
    end

    context 'when incident has issuable_severity' do
      let!(:issuable_severity) { build_stubbed(:issuable_severity, issue: issuable, severity: 'critical') }

      it 'returns issuable severity' do
        is_expected.to eq('critical')
      end
    end
  end
end
end
| 33.354049 | 163 | 0.664926 |
2162e8e7f7766db3e54adc713ef0d43bd9dd82ce | 713 | require 'mini_magick'
require 'aws-sdk-s3'
file_uri = STDIN.read.strip
image = MiniMagick::Image.open(file_uri)
image.contrast
image.resize "250x200"
image.rotate "-90"
s3 = Aws::S3::Client.new
bucket = ENV['AWS_S3_BUCKET']
obj = s3.put_object( bucket: bucket,
key: File.basename(file_uri),
body: image.tempfile,
acl: "public-read",
cache_control: "max-age=604800")
# Unfortunately put_object returns `put object output`, not an object.
# So we create another reference here. Probably there is a better way to do this in S3 API.
obj = Aws::S3::Object.new bucket_name: bucket, key: File.basename(file_uri)
puts obj.public_url
| 31 | 91 | 0.664797 |
require 'test_helper'

class SiteLayoutTest < ActionDispatch::IntegrationTest
  # Verifies the home page renders the expected template and that the
  # layout contains the expected root links. (Typo fixed: "liks" -> "links".)
  test "layout links" do
    get root_path
    assert_template 'static_pages/home'
    # The root path is expected to appear twice in the layout.
    assert_select "a[href=?]", root_path, count: 2
  end
end
| 21.9 | 54 | 0.748858 |
# Homebrew Cask definition for JetBrains WebStorm 8.0.2 (legacy Cask DSL).
class Webstorm < Cask
  url 'http://download.jetbrains.com/webstorm/WebStorm-8.0.2.dmg'
  homepage 'http://www.jetbrains.com/webstorm/'
  version '8.0.2'
  sha256 '4ba90cec20a7b115f840adc26892d76e71e049a65570af9fa5d0f54ba7caa9f8'

  link 'WebStorm.app'
end
| 32 | 75 | 0.777344 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::ServiceFabric::V7_0_0_42
  module Models
    #
    # Defines values for FailureAction
    #
    module FailureAction
      Invalid = "Invalid"
      Rollback = "Rollback"
      Manual = "Manual"
    end
  end
end
| 21.722222 | 70 | 0.68798 |
# Form object for support staff editing a school's device allocation.
# Validates that the new allocation neither decreases an allocation that is
# part of a virtual cap pool nor drops below the devices already ordered.
class Support::AllocationForm
  include ActiveModel::Model

  attr_reader :allocation
  attr_accessor :current_allocation, :school_allocation

  delegate :cap, :raw_devices_ordered, :is_in_virtual_cap_pool?, to: :school_allocation

  validate :check_decrease_allowed
  validate :check_minimum

  def initialize(params = {})
    super(params)
    # Snapshot the allocation as it stands before any change is applied,
    # so validations can compare old vs. new values.
    @current_allocation = @school_allocation.dup
  end

  # Coerce incoming form input (usually a String) to an Integer.
  def allocation=(value)
    @allocation = value.to_i
  end

  def order_state
    school_allocation&.school&.order_state
  end

  private

  def decreasing?
    allocation < current_allocation.raw_allocation
  end

  # Allocations that belong to a virtual cap pool must not be reduced.
  def check_decrease_allowed
    return unless decreasing?

    errors.add(:allocation, :decreasing_in_virtual_cap_pool) if is_in_virtual_cap_pool?
  end

  # The allocation can never drop below the number of devices already ordered.
  def check_minimum
    return if allocation >= raw_devices_ordered

    errors.add(:allocation, :gte_devices_ordered, devices_ordered: raw_devices_ordered)
  end
end
| 21.744186 | 89 | 0.767914 |
require 'test_helper'

# Functional tests covering the standard CRUD actions of CallBoxesController.
class CallBoxesControllerTest < ActionController::TestCase
  setup do
    @call_box = call_boxes(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:call_boxes)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create call_box" do
    assert_difference('CallBox.count') do
      post :create, :call_box => @call_box.attributes
    end

    assert_redirected_to call_box_path(assigns(:call_box))
  end

  test "should show call_box" do
    get :show, :id => @call_box.to_param
    assert_response :success
  end

  test "should get edit" do
    get :edit, :id => @call_box.to_param
    assert_response :success
  end

  test "should update call_box" do
    put :update, :id => @call_box.to_param, :call_box => @call_box.attributes
    assert_redirected_to call_box_path(assigns(:call_box))
  end

  test "should destroy call_box" do
    assert_difference('CallBox.count', -1) do
      delete :destroy, :id => @call_box.to_param
    end

    assert_redirected_to call_boxes_path
  end
end
| 22.4 | 77 | 0.703571 |
# View helper methods for bank-related pages (none are currently needed).
module BanksHelper
end
| 7.666667 | 18 | 0.869565 |
require "#{File.dirname(__FILE__)}/spec_helper"

describe "vCard subset matching" do
  # Reference vCard used by every example below. The heredoc body must stay
  # unindented: non-squiggly heredocs preserve leading whitespace verbatim.
  JOHN = <<end_vcard
BEGIN:VCARD
VERSION:3.0
N:Smith;John;M.;Mr.;Esq.
TEL;TYPE=WORK,VOICE,MSG:+1 (919) 555-1234
TEL;TYPE=CELL:+1 (919) 554-6758
TEL;TYPE=WORK,FAX:+1 (919) 555-9876
ADR;TYPE=WORK,PARCEL,POSTAL,DOM:Suite 101;1 Central St.;Any Town;NC;27654
END:VCARD
end_vcard

  before do
    @john = Vcard.from_vcard(JOHN)
  end

  # Each example removes some piece of the card and asserts the result is no
  # longer equal to, but still a subset of, the original card.
  specify "one field missing" do
    x = @john.deep_copy
    x.fields.delete_if {|f| f.name == 'ADR' }
    x.should_not == @john
    x.should be_subset_of(@john)
  end

  specify "a part of his name missing" do
    x = @john.deep_copy
    x.name.given = ''
    x.should_not == @john
    x.should be_subset_of(@john)
  end

  specify "another part of his name missing" do
    x = @john.deep_copy
    x.name.family = ''
    x.name.prefix = ''
    x.should_not == @john
    x.should be_subset_of(@john)
  end

  specify "a part of the address missing" do
    x = @john.deep_copy
    x.should == @john
    x.addresses.first.street = ''
    x.should_not == @john
    x.should be_subset_of(@john)
  end

  specify "labels missing" do
    x = @john.deep_copy
    x.should == @john
    x.telephones.each do |tel|
      tel.params.clear
    end
    x.should_not == @john
    x.should be_subset_of(@john)
  end
end
| 21.322581 | 73 | 0.65053 |
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

// Remark: The different fields are encoded in Big-endian.

// IEC 61131-3 elementary data types usable as OPC UA value types.
[enum uint 8 'OpcuaDataType'
    ['0'  NULL          ]
    ['1'  BOOL          ]
    ['2'  BYTE          ]
    ['3'  WORD          ]
    ['4'  DWORD         ]
    ['5'  LWORD         ]
    ['6'  SINT          ]
    ['7'  INT           ]
    ['8'  DINT          ]
    ['9'  LINT          ]
    ['10' USINT         ]
    ['11' UINT          ]
    ['12' UDINT         ]
    ['13' ULINT         ]
    ['14' REAL          ]
    ['15' LREAL         ]
    ['16' TIME          ]
    ['17' LTIME         ]
    ['18' DATE          ]
    ['19' LDATE         ]
    ['20' TIME_OF_DAY   ]
    ['21' LTIME_OF_DAY  ]
    ['22' DATE_AND_TIME ]
    ['23' LDATE_AND_TIME]
    ['24' CHAR          ]
    ['25' WCHAR         ]
    ['26' STRING        ]
    ['27' WSTRING       ]
]

// OPC UA NodeId identifier-type discriminators (the one-letter prefix of a
// node id string, e.g. "ns=2;s=MyNode").
[enum string '-1' 'OpcuaIdentifierType'
    ['s' STRING_IDENTIFIER]
    ['i' NUMBER_IDENTIFIER]
    ['g' GUID_IDENTIFIER]
    ['b' BINARY_IDENTIFIER]
]
| 26.116667 | 63 | 0.624761 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Monitor::Mgmt::V2019_10_17_preview
#
# Monitor Management Client
#
class PrivateLinkScopes
include MsRestAzure
#
# Creates and initializes a new instance of the PrivateLinkScopes class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [MonitorManagementClient] reference to the MonitorManagementClient
attr_reader :client
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<AzureMonitorPrivateLinkScope>] operation results.
#
def list(custom_headers:nil)
first_page = list_as_lazy(custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(custom_headers:nil)
list_async(custom_headers:custom_headers).value!
end
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(custom_headers:nil)
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/providers/microsoft.insights/privateLinkScopes'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScopeListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<AzureMonitorPrivateLinkScope>] operation results.
#
def list_by_resource_group(resource_group_name, custom_headers:nil)
first_page = list_by_resource_group_as_lazy(resource_group_name, custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_with_http_info(resource_group_name, custom_headers:nil)
list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value!
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_async(resource_group_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/privateLinkScopes'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScopeListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(resource_group_name, scope_name, custom_headers:nil)
response = delete_async(resource_group_name, scope_name, custom_headers:custom_headers).value!
nil
end
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(resource_group_name, scope_name, custom_headers:nil)
# Send request
promise = begin_delete_async(resource_group_name, scope_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Returns a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScope] operation results.
#
def get(resource_group_name, scope_name, custom_headers:nil)
response = get_async(resource_group_name, scope_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Returns a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, scope_name, custom_headers:nil)
get_async(resource_group_name, scope_name, custom_headers:custom_headers).value!
end
#
# Returns a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, scope_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'scope_name is nil' if scope_name.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/privateLinkScopes/{scopeName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id,'scopeName' => scope_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScope.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates (or updates) a Azure Monitor PrivateLinkScope. Note: You cannot
# specify a different value for InstrumentationKey nor AppId in the Put
# operation.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param azure_monitor_private_link_scope_payload
# [AzureMonitorPrivateLinkScope] Properties that need to be specified to create
# or update a Azure Monitor PrivateLinkScope.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScope] operation results.
#
def create_or_update(resource_group_name, scope_name, azure_monitor_private_link_scope_payload, custom_headers:nil)
response = create_or_update_async(resource_group_name, scope_name, azure_monitor_private_link_scope_payload, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Creates (or updates) a Azure Monitor PrivateLinkScope. Note: You cannot
# specify a different value for InstrumentationKey nor AppId in the Put
# operation.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param azure_monitor_private_link_scope_payload
# [AzureMonitorPrivateLinkScope] Properties that need to be specified to create
# or update a Azure Monitor PrivateLinkScope.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def create_or_update_with_http_info(resource_group_name, scope_name, azure_monitor_private_link_scope_payload, custom_headers:nil)
create_or_update_async(resource_group_name, scope_name, azure_monitor_private_link_scope_payload, custom_headers:custom_headers).value!
end
#
# Creates (or updates) a Azure Monitor PrivateLinkScope. Note: You cannot
# specify a different value for InstrumentationKey nor AppId in the Put
# operation.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param azure_monitor_private_link_scope_payload
# [AzureMonitorPrivateLinkScope] Properties that need to be specified to create
# or update a Azure Monitor PrivateLinkScope.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def create_or_update_async(resource_group_name, scope_name, azure_monitor_private_link_scope_payload, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'scope_name is nil' if scope_name.nil?
fail ArgumentError, 'azure_monitor_private_link_scope_payload is nil' if azure_monitor_private_link_scope_payload.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScope.mapper()
request_content = @client.serialize(request_mapper, azure_monitor_private_link_scope_payload)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/privateLinkScopes/{scopeName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id,'scopeName' => scope_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 201
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScope.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScope.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Updates an existing PrivateLinkScope's tags. To update other fields use the
# CreateOrUpdate method.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param private_link_scope_tags [TagsResource] Updated tag information to set
# into the PrivateLinkScope instance.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScope] operation results.
#
def update_tags(resource_group_name, scope_name, private_link_scope_tags, custom_headers:nil)
response = update_tags_async(resource_group_name, scope_name, private_link_scope_tags, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Updates an existing PrivateLinkScope's tags. To update other fields use the
# CreateOrUpdate method.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param private_link_scope_tags [TagsResource] Updated tag information to set
# into the PrivateLinkScope instance.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def update_tags_with_http_info(resource_group_name, scope_name, private_link_scope_tags, custom_headers:nil)
update_tags_async(resource_group_name, scope_name, private_link_scope_tags, custom_headers:custom_headers).value!
end
#
# Updates an existing PrivateLinkScope's tags. To update other fields use the
# CreateOrUpdate method.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param private_link_scope_tags [TagsResource] Updated tag information to set
# into the PrivateLinkScope instance.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def update_tags_async(resource_group_name, scope_name, private_link_scope_tags, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'scope_name is nil' if scope_name.nil?
fail ArgumentError, 'private_link_scope_tags is nil' if private_link_scope_tags.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::TagsResource.mapper()
request_content = @client.serialize(request_mapper, private_link_scope_tags)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/privateLinkScopes/{scopeName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id,'scopeName' => scope_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:patch, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScope.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete(resource_group_name, scope_name, custom_headers:nil)
response = begin_delete_async(resource_group_name, scope_name, custom_headers:custom_headers).value!
nil
end
#
# Deletes a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(resource_group_name, scope_name, custom_headers:nil)
begin_delete_async(resource_group_name, scope_name, custom_headers:custom_headers).value!
end
#
# Deletes a Azure Monitor PrivateLinkScope.
#
# @param resource_group_name [String] The name of the resource group.
# @param scope_name [String] The name of the Azure Monitor PrivateLinkScope
# resource.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_async(resource_group_name, scope_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'scope_name is nil' if scope_name.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/privateLinkScopes/{scopeName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id,'scopeName' => scope_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 204 || status_code == 202
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScopeListResult] operation results.
#
def list_next(next_page_link, custom_headers:nil)
response = list_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
list_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScopeListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScopeListResult] operation results.
#
def list_by_resource_group_next(next_page_link, custom_headers:nil)
response = list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_next_with_http_info(next_page_link, custom_headers:nil)
list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Monitor::Mgmt::V2019_10_17_preview::Models::AzureMonitorPrivateLinkScopeListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets a list of all Azure Monitor PrivateLinkScopes within a subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScopeListResult] which provide lazy access to
# pages of the response.
#
def list_as_lazy(custom_headers:nil)
response = list_async(custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
#
# Gets a list of Azure Monitor PrivateLinkScopes within a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AzureMonitorPrivateLinkScopeListResult] which provide lazy access to
# pages of the response.
#
def list_by_resource_group_as_lazy(resource_group_name, custom_headers:nil)
response = list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 44.641573 | 152 | 0.706451 |
62ada22dcad5f408b2b744982bc9684a69d74988 | 496 | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# NOTE(review): a hard-coded secret is only acceptable because this is the
# Dummy test application; a real application must never commit its secret.
Dummy::Application.config.secret_token = '05dd6adbe8c02b9758d5d792dbcde5e9127c1263143a8dc8d01dd51e50a018ee7a3397526f5c3f7c9dbd68226ca747145263a3b10baf2a8c389a57bf8f01d0ef'
| 62 | 171 | 0.832661 |
7ab0a546de0325badbe99fa1073e04dce33bad86 | 1,166 | # -*- encoding: utf-8 -*-
# stub: turbolinks 5.2.1 ruby lib
Gem::Specification.new do |s|
s.name = "turbolinks".freeze
s.version = "5.2.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "source_code_uri" => "https://github.com/turbolinks/turbolinks-rails" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["David Heinemeier Hansson".freeze]
s.date = "2019-09-18"
s.description = "Rails engine for Turbolinks 5 support".freeze
s.email = "[email protected]".freeze
s.homepage = "https://github.com/turbolinks/turbolinks".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.2.7".freeze
s.summary = "Turbolinks makes navigating your web application faster".freeze
s.installed_by_version = "3.2.7" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<turbolinks-source>.freeze, ["~> 5.2"])
else
s.add_dependency(%q<turbolinks-source>.freeze, ["~> 5.2"])
end
end
| 36.4375 | 116 | 0.711835 |
ffc355a92be4128a8962f2827ef20f2d1a2c9339 | 1,831 | # frozen_string_literal: true
require "abstract_unit"
require "console_helpers"
# Exercises the `bin/quails` executable (help, runner, console, dbconsole,
# server) from inside a freshly generated mountable engine.
#
# Fix: the PTY-based tests `skip` before `master`/`pid` are assigned; the
# unguarded `ensure` blocks then raised on nil (masking the skip). Guards
# added so cleanup only runs when the resource was actually created.
class Quails::Engine::CommandsTest < ActiveSupport::TestCase
  include ConsoleHelpers

  def setup
    @destination_root = Dir.mktmpdir("bukkits")
    Dir.chdir(@destination_root) { `bundle exec quails plugin new bukkits --mountable` }
  end

  def teardown
    FileUtils.rm_rf(@destination_root)
  end

  def test_help_command_work_inside_engine
    # NOTE(review): backticks capture only the subprocess stdout; this asserts
    # no NameError appeared on the *test process* stderr — confirm intended.
    output = capture(:stderr) do
      Dir.chdir(plugin_path) { `bin/quails --help` }
    end
    assert_no_match "NameError", output
  end

  def test_runner_command_work_inside_engine
    output = capture(:stdout) do
      Dir.chdir(plugin_path) { system("bin/quails runner 'puts Quails.env'") }
    end
    assert_equal "test", output.strip
  end

  def test_console_command_work_inside_engine
    skip "PTY unavailable" unless available_pty?
    master, slave = PTY.open
    spawn_command("console", slave)
    assert_output(">", master)
  ensure
    # Guard: `master` is nil when the test skipped before PTY.open.
    master&.puts "quit"
  end

  def test_dbconsole_command_work_inside_engine
    skip "PTY unavailable" unless available_pty?
    master, slave = PTY.open
    spawn_command("dbconsole", slave)
    assert_output("sqlite>", master)
  ensure
    # Guard: `master` is nil when the test skipped before PTY.open.
    master&.puts ".exit"
  end

  def test_server_command_work_inside_engine
    skip "PTY unavailable" unless available_pty?
    master, slave = PTY.open
    pid = spawn_command("server", slave)
    assert_output("Listening on", master)
  ensure
    # Guard: `pid` is nil when the test skipped before the server spawned.
    kill(pid) if pid
  end

  private
    # Path of the engine generated in setup.
    def plugin_path
      "#{@destination_root}/bukkits"
    end

    # Spawns `bin/quails <command>` with all stdio attached to the given PTY fd;
    # returns the child pid.
    def spawn_command(command, fd)
      Process.spawn(
        "#{plugin_path}/bin/quails #{command}",
        in: fd, out: fd, err: fd
      )
    end

    # Terminates and reaps the spawned server process; tolerates an
    # already-dead child.
    def kill(pid)
      Process.kill("TERM", pid)
      Process.wait(pid)
    rescue Errno::ESRCH
    end
end
| 22.604938 | 88 | 0.691972 |
ff2d39e1e37afe6fa27821ee7753bc6a60cf6c5a | 5,569 | class ManageIQ::Providers::Openstack::CloudManager::Vm < ManageIQ::Providers::CloudManager::Vm
include_concern 'Operations'
include_concern 'RemoteConsole'
include_concern 'Resize'
include_concern 'AssociateIp'
include_concern 'ManageSecurityGroups'
include ManageIQ::Providers::Openstack::HelperMethods
supports :smartstate_analysis do
feature_supported, reason = check_feature_support('smartstate_analysis')
unless feature_supported
unsupported_reason_add(:smartstate_analysis, reason)
end
end
supports :snapshots
POWER_STATES = {
"ACTIVE" => "on",
"SHUTOFF" => "off",
"SUSPENDED" => "suspended",
"PAUSED" => "paused",
"SHELVED" => "shelved",
"SHELVED_OFFLOADED" => "shelved_offloaded",
"HARD_REBOOT" => "reboot_in_progress",
"REBOOT" => "reboot_in_progress",
"ERROR" => "non_operational",
"BUILD" => "wait_for_launch",
"REBUILD" => "wait_for_launch",
"DELETED" => "terminated",
"MIGRATING" => "migrating",
}.freeze
alias_method :private_networks, :cloud_networks
has_many :public_networks, :through => :cloud_subnets
def floating_ip
# TODO(lsmola) NetworkProvider Backwards compatibility layer with simplified architecture where VM has only one
# network. Put this into ManageIQ::Providers::CloudManager::Vm when NetworkProvider is done in all providers
floating_ips.first
end
def associate_floating_ip_from_network(public_network, port = nil)
ext_management_system.with_provider_connection(:service => "Network",
:tenant_name => cloud_tenant.name) do |connection|
unless port
network_ports.each do |network_port|
# Cycle through all ports and find one that is actually connected to the public network with router,
if network_port.public_networks.detect { |x| x.try(:ems_ref) == public_network.ems_ref }
port = network_port
break
end
end
end
unless port
raise(MiqException::MiqNetworkPortNotDefinedError,
"Neutron port for floating IP association is not defined for OpenStack"\
"network #{public_network.ems_ref} and EMS '#{ext_management_system.name}'")
end
connection.create_floating_ip(public_network.ems_ref, :port_id => port.ems_ref)
end
end
def delete_floating_ips(floating_ips)
# TODO(lsmola) we have the method here because we need to take actual cloud_tenant from the VM.
# This should be refactored to FloatingIP, when we can take tenant from elsewhere, Like user
# session? They have it in session in Horizon, ehich correlates the teannt in keytsone token.
ext_management_system.with_provider_connection(:service => "Network",
:tenant_name => cloud_tenant.name) do |connection|
floating_ips.each do |floating_ip|
begin
connection.delete_floating_ip(floating_ip.ems_ref)
rescue StandardError => e
# The FloatingIp could have been deleted by another process
_log.info("Could not delete floating IP #{floating_ip} in EMS "\
"'#{ext_management_system.name}'. Error: #{e}")
end
# Destroy it also in db, so we don't have to wait for refresh.
floating_ip.destroy
end
end
end
def provider_object(connection = nil)
connection ||= ext_management_system.connect
connection.servers.get(ems_ref)
end
def with_provider_object
super(connection_options)
end
def with_provider_connection
super(connection_options)
end
def self.connection_options(cloud_tenant = nil)
connection_options = { :service => 'Compute' }
connection_options[:tenant_name] = cloud_tenant.name if cloud_tenant
connection_options
end
def connection_options
self.class.connection_options(cloud_tenant)
end
private :connection_options
def self.calculate_power_state(raw_power_state)
POWER_STATES[raw_power_state] || "unknown"
end
def perform_metadata_scan(ost)
require 'OpenStackExtract/MiqOpenStackVm/MiqOpenStackInstance'
_log.debug "instance_id = #{ems_ref}"
ost.scanTime = Time.now.utc unless ost.scanTime
ems = ext_management_system
os_handle = ems.openstack_handle
begin
miq_vm = MiqOpenStackInstance.new(ems_ref, os_handle)
scan_via_miq_vm(miq_vm, ost)
ensure
miq_vm.unmount if miq_vm
end
end
def perform_metadata_sync(ost)
sync_stashed_metadata(ost)
end
def remove_evm_snapshot(snapshot_ci_id)
# need vm_ci and os_id of snapshot
unless (snapshot_ci = ::Snapshot.find_by(:id => snapshot_ci_id))
_log.warn "snapshot with id #{snapshot_ci_id}, not found"
return
end
raise "Could not find snapshot's VM" unless (vm_ci = snapshot_ci.vm_or_template)
ext_management_system.vm_delete_evm_snapshot(vm_ci, snapshot_ci.ems_ref)
end
# TODO: Does this code need to be reimplemented?
def proxies4job(_job)
{
:proxies => [MiqServer.my_server],
:message => 'Perform SmartState Analysis on this Instance'
}
end
def has_active_proxy?
true
end
def has_proxy?
true
end
def requires_storage_for_scan?
false
end
def memory_mb_available?
true
end
def self.display_name(number = 1)
n_('Instance (OpenStack)', 'Instances (OpenStack)', number)
end
end
| 32.005747 | 115 | 0.679835 |
1dd1ca4e115214ca0275eebf7ece702e2318bb67 | 23,787 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Ci::JobArtifacts do
include HttpBasicAuthHelpers
include DependencyProxyHelpers
include HttpIOHelpers
let_it_be(:project, reload: true) do
create(:project, :repository, public_builds: false)
end
let_it_be(:pipeline, reload: true) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch)
end
let(:user) { create(:user) }
let(:api_user) { user }
let(:reporter) { create(:project_member, :reporter, project: project).user }
let(:guest) { create(:project_member, :guest, project: project).user }
let!(:job) do
create(:ci_build, :success, :tags, pipeline: pipeline,
artifacts_expire_at: 1.day.since)
end
before do
project.add_developer(user)
end
shared_examples 'returns unauthorized' do
it 'returns unauthorized' do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
describe 'DELETE /projects/:id/jobs/:job_id/artifacts' do
let!(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'when user is anonymous' do
let(:api_user) { nil }
it 'does not delete artifacts' do
expect(job.job_artifacts.size).to eq 2
end
it 'returns status 401 (unauthorized)' do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'with developer' do
it 'does not delete artifacts' do
expect(job.job_artifacts.size).to eq 2
end
it 'returns status 403 (forbidden)' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'with authorized user' do
let(:maintainer) { create(:project_member, :maintainer, project: project).user }
let!(:api_user) { maintainer }
it 'deletes artifacts' do
expect(job.job_artifacts.size).to eq 0
end
it 'returns status 204 (no content)' do
expect(response).to have_gitlab_http_status(:no_content)
end
end
end
describe 'DELETE /projects/:id/artifacts' do
context 'when user is anonymous' do
let(:api_user) { nil }
it 'does not execute Ci::JobArtifacts::DeleteProjectArtifactsService' do
expect(Ci::JobArtifacts::DeleteProjectArtifactsService)
.not_to receive(:new)
delete api("/projects/#{project.id}/artifacts", api_user)
end
it 'returns status 401 (unauthorized)' do
delete api("/projects/#{project.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'with developer' do
it 'does not execute Ci::JobArtifacts::DeleteProjectArtifactsService' do
expect(Ci::JobArtifacts::DeleteProjectArtifactsService)
.not_to receive(:new)
delete api("/projects/#{project.id}/artifacts", api_user)
end
it 'returns status 403 (forbidden)' do
delete api("/projects/#{project.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'with authorized user' do
let(:maintainer) { create(:project_member, :maintainer, project: project).user }
let!(:api_user) { maintainer }
it 'executes Ci::JobArtifacts::DeleteProjectArtifactsService' do
expect_next_instance_of(Ci::JobArtifacts::DeleteProjectArtifactsService, project: project) do |service|
expect(service).to receive(:execute).and_call_original
end
delete api("/projects/#{project.id}/artifacts", api_user)
end
it 'returns status 202 (accepted)' do
delete api("/projects/#{project.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(:accepted)
end
end
end
describe 'GET /projects/:id/jobs/:job_id/artifacts/:artifact_path' do
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
let(:artifact) do
'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif'
end
context 'when user is anonymous' do
let(:api_user) { nil }
context 'when project is public' do
it 'allows to access artifacts' do
project.update_column(:visibility_level,
Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when project is public with artifacts that are non public' do
let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
it 'rejects access to artifacts' do
project.update_column(:visibility_level,
Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it 'allows access to artifacts' do
project.update_column(:visibility_level,
Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when project is public with builds access disabled' do
it 'rejects access to artifacts' do
project.update_column(:visibility_level,
Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, false)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when project is private' do
it 'rejects access and hides existence of artifacts' do
project.update_column(:visibility_level,
Gitlab::VisibilityLevel::PRIVATE)
project.update_column(:public_builds, true)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when user is authorized' do
it 'returns a specific artifact file for a valid path' do
expect(Gitlab::Workhorse)
.to receive(:send_artifacts_entry)
.and_call_original
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Detect-Content-Type' => 'true')
expect(response.parsed_body).to be_empty
end
context 'when artifacts are locked' do
it 'allows access to expired artifact' do
pipeline.artifacts_locked!
job.update!(artifacts_expire_at: Time.now - 7.days)
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
context 'when job does not have artifacts' do
it 'does not return job artifact file' do
get_artifact_file('some/artifact')
expect(response).to have_gitlab_http_status(:not_found)
end
end
def get_artifact_file(artifact_path)
get api("/projects/#{project.id}/jobs/#{job.id}/" \
"artifacts/#{artifact_path}", api_user)
end
end
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
shared_examples 'downloads artifact' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) }
end
let(:expected_params) { { artifact_size: job.artifacts_file.size } }
let(:subject_proc) { proc { subject } }
it 'returns specific job artifacts' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h).to include(download_headers)
expect(response.body).to match_file(job.artifacts_file.file.file)
end
it_behaves_like 'storing arguments in the application context'
it_behaves_like 'not executing any extra queries for the application context'
end
context 'normal authentication' do
context 'job with artifacts' do
context 'when artifacts are stored locally' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
subject { get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) }
context 'authorized user' do
it_behaves_like 'downloads artifact'
end
context 'when job token is used' do
let(:other_job) { create(:ci_build, :running, user: user) }
subject { get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", job_token: other_job.token) }
before do
stub_licensed_features(cross_project_pipelines: true)
end
it_behaves_like 'downloads artifact'
context 'when job token scope is enabled' do
before do
other_job.project.ci_cd_settings.update!(job_token_scope_enabled: true)
end
it 'does not allow downloading artifacts' do
subject
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is added to the job token scope' do
let!(:link) { create(:ci_job_token_project_scope_link, source_project: other_job.project, target_project: job.project) }
it_behaves_like 'downloads artifact'
end
end
end
context 'unauthorized user' do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
subject
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when artifacts are stored remotely' do
let(:proxy_download) { false }
let(:job) { create(:ci_build, pipeline: pipeline) }
let(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
stub_artifacts_object_storage(proxy_download: proxy_download)
artifact
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'when proxy download is enabled' do
let(:proxy_download) { true }
it 'responds with the workhorse send-url' do
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(:found)
end
end
context 'authorized user' do
it 'returns the file remote URL' do
expect(response).to redirect_to(artifact.file.url)
end
end
context 'unauthorized user' do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when public project guest and artifacts are non public' do
let(:api_user) { guest }
let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
before do
project.update_column(:visibility_level,
Gitlab::VisibilityLevel::PUBLIC)
project.update_column(:public_builds, true)
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'rejects access and hides existence of artifacts' do
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'allows access to artifacts' do
expect(response).to have_gitlab_http_status(:ok)
end
end
end
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do
let(:api_user) { reporter }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
stub_artifacts_object_storage
job.success
end
def get_for_ref(ref = pipeline.ref, job_name = job.name)
get api("/projects/#{project.id}/jobs/artifacts/#{ref}/download", api_user), params: { job: job_name }
end
context 'when not logged in' do
let(:api_user) { nil }
before do
get_for_ref
end
it 'does not find a resource in a private project' do
expect(project).to be_private
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when logging as guest' do
let(:api_user) { guest }
before do
get_for_ref
end
it 'gives 403' do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'non-existing job' do
shared_examples 'not found' do
it { expect(response).to have_gitlab_http_status(:not_found) }
end
context 'has no such ref' do
before do
get_for_ref('TAIL')
end
it_behaves_like 'not found'
end
context 'has no such job' do
before do
get_for_ref(pipeline.ref, 'NOBUILD')
end
it_behaves_like 'not found'
end
end
context 'find proper job' do
let(:job_with_artifacts) { job }
shared_examples 'a valid file' do
context 'when artifacts are stored locally', :sidekiq_might_not_need_inline do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
%Q(attachment; filename="#{job_with_artifacts.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}) }
end
it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response.headers.to_h).to include(download_headers) }
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(:found)
end
end
end
context 'with regular branch' do
before do
pipeline.reload
pipeline.update!(ref: 'master',
sha: project.commit('master').sha)
get_for_ref('master')
end
it_behaves_like 'a valid file'
end
context 'with branch name containing slash' do
before do
pipeline.reload
pipeline.update!(ref: 'improve/awesome', sha: project.commit('improve/awesome').sha)
get_for_ref('improve/awesome')
end
it_behaves_like 'a valid file'
end
context 'with job name in a child pipeline' do
let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let!(:child_job) { create(:ci_build, :artifacts, :success, name: 'rspec', pipeline: child_pipeline) }
let(:job_with_artifacts) { child_job }
before do
get_for_ref('master', child_job.name)
end
it_behaves_like 'a valid file'
end
end
end
describe 'GET id/jobs/artifacts/:ref_name/raw/*artifact_path?job=name' do
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
let(:artifact) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
let(:public_builds) { true }
before do
stub_artifacts_object_storage
job.success
project.update!(visibility_level: visibility_level,
public_builds: public_builds)
get_artifact_file(artifact)
end
context 'when user is anonymous' do
let(:api_user) { nil }
context 'when project is public' do
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
let(:public_builds) { true }
it 'allows to access artifacts', :sidekiq_might_not_need_inline do
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
'Gitlab-Workhorse-Detect-Content-Type' => 'true')
end
end
context 'when project is public with builds access disabled' do
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
let(:public_builds) { false }
it 'rejects access to artifacts' do
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response).to have_key('message')
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
end
end
context 'when project is public with non public artifacts' do
let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline, user: api_user) }
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
let(:public_builds) { true }
it 'rejects access and hides existence of artifacts', :sidekiq_might_not_need_inline do
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response).to have_key('message')
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
end
context 'with the non_public_artifacts feature flag disabled' do
before do
stub_feature_flags(non_public_artifacts: false)
end
it 'allows access to artifacts', :sidekiq_might_not_need_inline do
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when project is private' do
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
let(:public_builds) { true }
it 'rejects access and hides existence of artifacts' do
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response).to have_key('message')
expect(response.headers.to_h)
.not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
end
end
end
context 'when user is authorized' do
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
let(:public_builds) { true }
it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do
expect(Gitlab::Workhorse)
.to receive(:send_artifacts_entry)
.and_call_original
get_artifact_file(artifact)
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
'Gitlab-Workhorse-Detect-Content-Type' => 'true')
expect(response.parsed_body).to be_empty
end
end
context 'with branch name containing slash' do
before do
pipeline.reload
pipeline.update!(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
end
it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do
get_artifact_file(artifact, 'improve/awesome')
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/,
'Gitlab-Workhorse-Detect-Content-Type' => 'true')
end
end
context 'non-existing job' do
shared_examples 'not found' do
it { expect(response).to have_gitlab_http_status(:not_found) }
end
context 'has no such ref' do
before do
get_artifact_file('some/artifact', 'wrong-ref')
end
it_behaves_like 'not found'
end
context 'has no such job' do
before do
get_artifact_file('some/artifact', pipeline.ref, 'wrong-job-name')
end
it_behaves_like 'not found'
end
end
end
context 'when job does not have artifacts' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
it 'does not return job artifact file' do
get_artifact_file('some/artifact')
expect(response).to have_gitlab_http_status(:not_found)
end
end
def get_artifact_file(artifact_path, ref = pipeline.ref, job_name = job.name)
get api("/projects/#{project.id}/jobs/artifacts/#{ref}/raw/#{artifact_path}", api_user), params: { job: job_name }
end
end
describe 'POST /projects/:id/jobs/:job_id/artifacts/keep' do
before do
post api("/projects/#{project.id}/jobs/#{job.id}/artifacts/keep", user)
end
context 'artifacts did not expire' do
let(:job) do
create(:ci_build, :trace_artifact, :artifacts, :success,
project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days)
end
it 'keeps artifacts' do
expect(response).to have_gitlab_http_status(:ok)
expect(job.reload.artifacts_expire_at).to be_nil
end
end
context 'no artifacts' do
let(:job) { create(:ci_build, project: project, pipeline: pipeline) }
it 'responds with not found' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
| 32.764463 | 137 | 0.616934 |
91ee6077798b0bd36e90f35d35b2fbbd2257f15d | 117 | require "Rescue_Story/version"
# Top-level namespace for the Rescue_Story gem.
module RescueStory
  # Base error class for the gem; rescue this to catch gem-specific failures.
  class Error < StandardError; end
  # Your code goes here...
end
| 16.714286 | 34 | 0.752137 |
ab236e6cda118da15756c5bdce953ce19e70b760 | 311 | # frozen_string_literal: true
module ActiveValidation
  module Type
    # ActiveModel attribute type that persists versions as plain integers and
    # materialises them as ActiveValidation::Values::Version value objects.
    class Version < ActiveModel::Type::Integer
      # Coerce to a bare integer for storage.
      def serialize(value)
        value.to_i
      end

      # Wrap a stored value in a Version value object; nil/false deserialize to nil.
      def deserialize(value)
        value ? ActiveValidation::Values::Version.new(value) : nil
      end
    end
  end
end
| 17.277778 | 51 | 0.659164 |
4abdeb0d1df4c138a169f0a093415a66e591369d | 198 | require 'spec_helper'
describe Outlearn do
  it 'has a version number' do
    expect(Outlearn::VERSION).not_to be nil
  end

  # NOTE(review): this is the untouched `bundle gem` scaffold example — it
  # always fails by design. Replace with a real behavioural example.
  it 'does something useful' do
    expect(false).to eq(true)
  end
end
| 16.5 | 43 | 0.707071 |
f85c1bdad686856bd39feaaa45717d702eaf6e81 | 1,171 | require 'image_optim/worker'
require 'image_optim/option_helpers'
require 'image_optim/non_negative_integer_range'
class ImageOptim
  class Worker
    # http://pngquant.org/
    # Lossy PNG optimizer wrapper for the pngquant binary.
    class Pngquant < Worker
      # quality: don't save below `min`, use fewer colors below `max`
      # (clamped so min <= max and both lie in 0..100).
      QUALITY_OPTION =
        option(:quality, 100..100, NonNegativeIntegerRange, 'min..max - don\'t '\
          'save below min, use less colors below max (both in range `0..100`; '\
          'in yaml - `!ruby/range 0..100`)') do |v|
          min = OptionHelpers.limit_with_range(v.begin, 0..100)
          min..OptionHelpers.limit_with_range(v.end, min..100)
        end

      # speed/quality trade-off, clamped to pngquant's 1..11 range.
      SPEED_OPTION =
        option(:speed, 3, 'speed/quality trade-off: '\
          '`1` - slow, '\
          '`3` - default, '\
          '`11` - fast & rough') do |v|
          OptionHelpers.limit_with_range(v.to_i, 1..11)
        end

      # Always run first
      # (negative run order sorts this worker ahead of the lossless workers).
      def run_order
        -5
      end

      # Runs pngquant on +src+, writing to +dst+; true when the result is
      # actually smaller (optimized?).
      def optimize(src, dst)
        args = %W[
          --quality=#{quality.begin}-#{quality.end}
          --speed=#{speed}
          --output=#{dst}
          --force
          --
          #{src}
        ]
        execute(:pngquant, *args) && optimized?(src, dst)
      end
    end
  end
end
| 26.613636 | 80 | 0.553373 |
875ef08e9d58e80623d07b14f25119be9216058e | 2,924 | module WikiPagesHelper
acts_as_wiki_pages_helper
def wiki_content(text)
Irwi.config.formatter.format(wiki_macros(wiki_linkify( wiki_show_attachments(text)))).html_safe
end
def wiki_macros(text)
wiki_css(wiki_nav(text))
end
def wiki_nav(text)
text.gsub(/\{\{nav.*?\}\}/) do |match|
page_titles = match[/nav(.*?)\}/, 1].split(',')
html = "<ul class='leftmenu'>"
page_titles.each do |page_title|
page_title.strip!
link_class = @page && @page.title.downcase == page_title.downcase ? 'active' : nil
html += content_tag :li do
link_to page_title, wiki_link(page_title), :class => link_class
end
end
html += "</ul>"
html
end
end
def wiki_topnav
pattern = /\{\{topnav.*?\}\}/
navtxt = @page.content[pattern, 0]
return if navtxt.blank?
@page.content.gsub!(pattern, '')
html = "<ul class='topmenu'>"
page_titles = navtxt[/nav(.*?)\}/, 1].split(',')
page_titles.each do |page_title|
page_title.strip!
link_class = @page && @page.title.downcase == page_title.downcase ? 'active' : nil
html += content_tag :li do
link_to page_title, wiki_link(page_title), :class => link_class
end
end
html += "</ul>"
raw html
end
def wiki_css(text)
text.gsub(/<style.*?>(.*?)<\/style>/m) do |match|
content_for(:extracss) { match.html_safe }
end
end
def wiki_page_attachments(page = @page)
return unless Irwi::config.page_attachment_class_name
html = ""
page.attachments.each do |attachment|
img = if attachment.image?
"<a target=\"_blank\" href=\"#{attachment.wiki_page_attachment.url(:original)}\">#{image_tag(attachment.wiki_page_attachment.url(:thumb))}</a>".html_safe
else
"<a target=\"_blank\" href=\"#{attachment.wiki_page_attachment.url(:original)}\">#{ t :view }</a>".html_safe
end
html += image_and_content(img, :image_size => 100,
:class => "stacked wiki_page_attachment") do
s = link_to(wt('Remove'), wiki_remove_page_attachment_path(attachment.id), :method => :delete, :class => "right")
s += content_tag(:label, "html")
s += "<br/>".html_safe
s += content_tag(:textarea, "<img src=\"#{attachment.wiki_page_attachment.url(:original)}\"/>")
s.html_safe
end
end
html += form_for(Irwi.config.page_attachment_class.new,
:as => :wiki_page_attachment,
:url => wiki_add_page_attachment_path(page),
:html => { :multipart => true }) do |form|
"<label>Add an attached image</label><br/>".html_safe +
form.file_field(:wiki_page_attachment) +
form.hidden_field(:page_id, :value => page.id) +
form.submit('Add attachment')
end
html.html_safe
end
def wiki_user(user)
"#{link_to(user_image(user), user)} #{link_to_user(user)}".html_safe
end
end
| 33.609195 | 161 | 0.621067 |
03b9ea7b889fc0a3a2b50399633fe034d10c0cc5 | 1,227 | # Doc placeholder
module ZimbraRestApi
  # REST-facing wrapper around a Zimbra account; most calls delegate straight
  # to the underlying zmobject from the Zimbra gem.
  class Account < ZimbraBase
    def add_alias(alias_name)
      zmobject.add_alias(alias_name)
    end

    def delegated_auth_token
      zmobject.delegated_auth_token
    end

    def disable_archive
      zmobject.disable_archive
    end

    # Both arguments optional; presumably the gem derives defaults — confirm.
    def enable_archive(cos_id = nil, archive_name = nil)
      zmobject.enable_archive(cos_id, archive_name)
    end

    def mailbox
      zmobject.mailbox
    end

    # Distribution-list memberships reduced to plain hashes for serialization.
    def memberships
      results = zmobject.memberships
      results.map { |r| {id: r.id, name: r.name, via: r.via} }
    end

    def remove_alias(alias_name)
      zmobject.remove_alias(alias_name)
    end

    # Creates the account from string-keyed params; 'name'/'password' are
    # pulled out, the rest are passed through as attributes.
    def self.create(params = {})
      name = params.delete('name')
      password = params.delete('password')
      result = Zimbra::Account.create(name, password, params)
      new(result)
    end

    def self.mailbox(account_id)
      Zimbra::Account.mailbox account_id
    end

    def set_password(new_password)
      zmobject.set_password new_password
    end

    # Applies the password (when truthy) via set_password instead of as a
    # plain attribute. The second delete is NOT redundant: it also strips a
    # present-but-falsy 'password' key so super never writes password=nil.
    def update_attributes(attributes)
      if attributes['password']
        set_password(attributes.delete('password'))
      end
      attributes.delete('password')
      super
    end
  end
end
| 20.79661 | 62 | 0.666667 |
d50fc68b904d6224589392bdb140bf88c36ba61b | 465 | require File.expand_path('../nellie/errors/errors', __FILE__)
require File.expand_path('../nellie/configuration', __FILE__)
require File.expand_path('../nellie/api', __FILE__)
require File.expand_path('../nellie/client', __FILE__)
require File.expand_path('../nellie/response', __FILE__)
module Nellie
  extend Configuration

  # Alias for Nellie::Client.new
  #
  # @param options [Hash] client options merged over the module configuration
  # @return [Nellie::Client]
  def self.client(options={})
    Nellie::Client.new(options)
  end
end
| 27.352941 | 61 | 0.735484 |
185f03f07be2a2ea2544f83041c9f217bc764d52 | 825 | require 'test_helper'
module Abid
  class StateManager
    # Unit tests for StateService's private params serialization and digest.
    class StateServiceTest < AbidTest
      # Builds a StateService bound to the shared test environment's states.
      def new_state_service(name, params)
        StateService.new(env.state_manager.states, name, params)
      end

      # Params are serialized as YAML with keys sorted (a before b).
      def test_params_text
        service = new_state_service('name', b: 1, a: Date.new(2000, 1, 1))
        assert_equal "---\n:a: 2000-01-01\n:b: 1\n", service.send(:params_text)
      end

      # Digest depends on param values, not on how equal values were built.
      def test_digest
        service1 = new_state_service('name', b: 1, a: Date.new(2000, 1, 1))
        service2 = new_state_service('name', b: 1, a: Date.parse('2000-01-01'))
        service3 = new_state_service('name', b: 2, a: Date.new(2000, 1, 1))
        assert_equal service1.send(:digest), service2.send(:digest)
        refute_equal service1.send(:digest), service3.send(:digest)
      end
    end
  end
end
| 30.555556 | 79 | 0.641212 |
18611f3bb38cfd397c3a64f6406c35921ff9906d | 2,207 | # Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
require 'search_container_test'
# System test: verifies that only one instance of the component runs at a
# given time when it acquires the lock, across a config redeploy.
class ComponentExclusive < SearchContainerTest

  def setup
    set_owner("valerijf")
    set_description("Verify that only one instance of the component is running at a given time if it acquires lock")
    @project_dir = dirs.tmpdir + "project"
    @def_file_path = "#{@project_dir}/src/main/resources/configdefinitions"
  end

  def timeout_seconds
    return 1200
  end

  # Writes +text+ to the named config-definition file under @def_file_path.
  # Fix: the path previously contained a garbled literal ("#(unknown)") and
  # ignored the +filename+ parameter entirely; the block form of File.open
  # also guarantees the handle is closed even if writing raises.
  def write_def(text, filename="exclusive-hit.def")
    File.open("#{@def_file_path}/#{filename}", "w") do |file|
      file.print(text)
    end
  end

  # Deploys with version "1" of the def-file, then redeploys with version "2"
  # and checks the searcher picks up the new config without overlap.
  def test_update_def
    system("mkdir -p #{@project_dir}/src")
    system("mkdir -p #{@project_dir}/app")
    system("cp -r #{selfdir}/app #{@project_dir}")
    system("cp -r #{selfdir}/src #{@project_dir}")

    write_def("namespace=vespatest\ncompId string default=\"1\"\n")
    clear_bundles
    add_bundle_dir(@project_dir, "com.yahoo.vespatest.ExclusiveHitSearcher")
    deploy(@project_dir + "/app")
    start
    verify_result("1")

    puts("Deploying with seconds version of the def-file..")
    write_def("namespace=vespatest\ncompId string default=\"2\"\n")

    # Redeploy, and verify that the searcher is updated
    clear_bundles
    add_bundle_dir(@project_dir, "com.yahoo.vespatest.ExclusiveHitSearcher")
    output = deploy(@project_dir + "/app")
    wait_for_application(vespa.qrserver.values.first, output)
    verify_result("2")
    # Give the old component instance time to shut down, then re-check.
    sleep 70
    verify_result("2")
  end

  def teardown
    stop
  end

  # Asserts the hit carries the expected component id and that the
  # exclusivity file shows no interleaved writes ("21" would mean the new
  # and old instances wrote concurrently).
  def verify_result(component_id)
    result = search("test")
    actual_component_id = result.hit[0].field["component_id"]
    if component_id == actual_component_id
      puts "Got expected component_id: #{component_id}"
    else
      flunk "Test failed: Expected the component id to be #{component_id}, but was #{actual_component_id}"
    end

    exclusivity_file = result.hit[0].field["exclusivity_file"]
    if exclusivity_file.include? "21"
      flunk "Test failed: Found overlapping writes to file: #{exclusivity_file}"
    else
      puts "No overlapping writes to file found!"
    end
  end
end
| 31.084507 | 116 | 0.702311 |
ff09bf5c42960cab7226e6bab721cf97260b5607 | 241 | class House < ApplicationRecord
  # Associations: a house has many viewings and one attached listing picture.
  has_many :viewings
  has_one_attached :picture
  # All core listing fields are mandatory.
  validates :location, presence: true
  validates :description, presence: true
  validates :bedrooms, presence: true
  validates :bathrooms, presence: true
end
| 26.777778 | 40 | 0.784232 |
3363b3e8788860c8c8822271e8bd9ec93e80c0f7 | 580 | worker_processes Integer(ENV['WEB_CONCURRENCY'] || 3)
# Kill workers that take longer than 15s per request.
timeout 15
# Load the app before forking so workers share memory (requires the
# disconnect/reconnect dance below for ActiveRecord).
preload_app true

before_fork do |server, worker|
  # Heroku-style shutdown: master turns TERM into a graceful QUIT.
  Signal.trap 'TERM' do
    puts 'Unicorn master intercepting TERM and sending myself QUIT instead'
    Process.kill 'QUIT', Process.pid
  end

  # Drop the preloaded DB connection; each worker opens its own after fork.
  defined?(ActiveRecord::Base) and
    ActiveRecord::Base.connection.disconnect!
end

after_fork do |server, worker|
  # Workers ignore TERM; the master's QUIT drives graceful shutdown.
  Signal.trap 'TERM' do
    puts 'Unicorn worker intercepting TERM and doing nothing. Wait for master to send QUIT'
  end

  defined?(ActiveRecord::Base) and
    ActiveRecord::Base.establish_connection
end
| 27.619048 | 91 | 0.756897 |
bb37cba3876724e4421c455cc147100e20e4ed63 | 618 | require 'castanet/testing'
require 'logger'
require 'net/http'
require 'openssl'
require 'uri'
module Castanet::Testing
  # Helpers for probing whether an HTTP(S) endpoint is reachable.
  module ConnectionTesting
    LOGGER = Logger.new($stderr)

    # True when a GET of +url+ yields a 2xx/3xx status; false on any request
    # failure (refused connection, TLS error, timeout, ...). TLS certificate
    # verification is deliberately disabled (test environments). A malformed
    # +url+ still raises, as URI parsing happens outside the rescue.
    def responding?(url, logger = LOGGER)
      uri = URI(url)

      begin
        client = Net::HTTP.new(uri.host, uri.port)
        client.use_ssl = (uri.scheme == 'https')
        client.verify_mode = OpenSSL::SSL::VERIFY_NONE
        status = client.get(uri.request_uri).code.to_i
        (200..399).cover?(status)
      rescue => e
        logger.debug "#{url}: #{e.class} (#{e.message})"
        false
      end
    end
  end
end
| 21.310345 | 56 | 0.593851 |
ff8fefea2395507de3ebbb26e10786c06c6d72f3 | 299 | # frozen_string_literal: true
class User < ApplicationRecord
  has_many :api_keys, foreign_key: "issuer_id", inverse_of: "issuer"

  # Keys permitted to appear in the settings hash.
  def self.settings_keys
    %w[name]
  end

  # Display name: explicit setting when present, otherwise derived from email.
  def name
    settings["name"] || default_name
  end

  private

  # Local part of the email address (text before the "@").
  def default_name
    email.split(/@/).first
  end
end
| 14.95 | 68 | 0.695652 |
7a41533806b94474b65e8c14d1ca2c482f371d2d | 579 |
require 'thp/utils/embeds'
module THP
module Commands
module Team
module Attributes
ALL = [
TEAM = {
description: 'Show the creators of bot.',
help_available: true,
max_args: 0,
rescue: 'Oh no, something terrible has happened. An Error occured executing this command :c',
usage: "#{THP_BOT.prefix}team"
}.freeze
].freeze
end
def self.team(event)
THP::Utils::Embeds.send_embed(event: event, title: "Here is my parent:\n")
end
end
end
end | 24.125 | 105 | 0.564767 |
1c709e0adb4a4be885accef86ad617e30f62ccfe | 226 | def template(from, to)
erb = File.read(File.expand_path("../templates/#{from}", __FILE__))
put ERB.new(erb).result(binding), to
end
# Defines the Capistrano variable +name+ only when it has not been set already.
def set_default(name, *args, &block)
  return if exists?(name)

  set(name, *args, &block)
end
| 25.111111 | 69 | 0.690265 |
e9ec8ef4c743c635b739eb6b651562b7bd3c55d1 | 1,072 | require 'rails_helper'
# View spec: the new-procedimiento template renders a form with one input
# per model attribute.
RSpec.describe "procedimientos/new", type: :view do
  before(:each) do
    # Scaffold fixture values; only field presence is asserted below.
    assign(:procedimiento, Procedimiento.new(
      :tratamiento => "MyString",
      :via_acceso => "MyString",
      :descripcion => "MyText",
      :glucometria1 => 1,
      :glucometria2 => 1,
      :electrocardiograma => "MyText"
    ))
  end

  it "renders new procedimiento form" do
    render

    assert_select "form[action=?][method=?]", procedimientos_path, "post" do
      assert_select "input#procedimiento_tratamiento[name=?]", "procedimiento[tratamiento]"
      assert_select "input#procedimiento_via_acceso[name=?]", "procedimiento[via_acceso]"
      assert_select "textarea#procedimiento_descripcion[name=?]", "procedimiento[descripcion]"
      assert_select "input#procedimiento_glucometria1[name=?]", "procedimiento[glucometria1]"
      assert_select "input#procedimiento_glucometria2[name=?]", "procedimiento[glucometria2]"
      assert_select "textarea#procedimiento_electrocardiograma[name=?]", "procedimiento[electrocardiograma]"
    end
  end
end
| 31.529412 | 108 | 0.709888 |
1d46f81f8c19ab6d504a9995773c00223d299ba1 | 1,401 | # Copyright (c) 2018 Public Library of Science
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# Join table linking users to the discussion topics they participate in.
class CreateDiscussionParticipants < ActiveRecord::Migration
  def change
    create_table :discussion_participants do |t|
      t.references :discussion_topic, index: true, foreign_key: true
      t.references :user, index: true, foreign_key: true
      t.timestamps null: false
    end
  end
end
| 45.193548 | 76 | 0.772305 |
e2708be5125f7bbf571ca664449774ff1c49c9c8 | 8,633 | module Cms
module AssetsPrecompile
def self.initialize_precompile
Cms::AssetsPrecompile::SprocketsExtension.init
end
class SprocketsExtension
def self.init_options(*args)
#puts "init_options: ARGV: #{ARGV.inspect}"
arr = args
if arr.empty?
arr = (ARGV[1] || "").gsub(/\AFILES\=/, "").split(",")
end
arr = arr.select(&:present?)
self.class_variable_set(:@@_precompile_files, arr)
#puts "init_options: @@_precompile_files: #{arr.inspect}"
end
def self.precompile_file?(s)
arr = self.class_variable_get(:@@_precompile_files) rescue true
return true if arr == true || arr.blank?
#puts "precompile_file?: #{s}"
#puts "files: #{arr}"
return s.in?(arr)
end
def self.normalize_args(*args)
#puts "normalize_args: args: #{args.inspect}"
allowed_files = self.class_variable_get(:@@_precompile_files) rescue []
#puts "normalize_args: allowed_files: #{allowed_files.inspect}"
return args if allowed_files.include?("all")
allowed_files = ["app"] if allowed_files.blank?
allowed_files = allowed_files.map{|path|
if path == "app"
assets_root = Rails.root.join("app/assets/").to_s
#sources = Dir[assets_root + "**/*.{png,jpg,jpeg,gif,svg,coffee,sass,scss,css,erb}"]
sources = Dir[assets_root + "**/*"].select{|path| !File.directory?(path) }
#puts "sources: #{sources.inspect}"
sources_logical_paths = sources.map{|path| s = path[assets_root.length, path.length]; slash_index = s.index("/"); slash_index && slash_index >= 0 ? s[slash_index + 1, s.length] : nil }.select{|s| s.present? }
#puts "sources_logical_paths: #{sources_logical_paths.inspect}"
precompile_paths = Rails.application.config.assets.precompile.select{|s| next false if !s.is_a?(String); true} + ["application.css", "application.js"]
#puts "precompile_paths: #{precompile_paths.inspect}"
precompile_path_groups = precompile_paths.group_by{|path| parts = path.split("/"); parts.count > 1 ? parts.first : "__root__" }
slp_normalized_exts = sources_logical_paths.map{|path|
next path if !path.end_with?(".coffee") && !path.end_with?(".sass") && !path.end_with?(".scss")
file_name = path.split("/").last
first_dot = file_name.index(".")
if !first_dot || first_dot < 0
next nil
end
full_ext = file_name[first_dot + 1, file_name.length]
ext = nil
normalized_ext = nil
source_ext = nil
if full_ext.end_with?("js.coffee")
source_ext = "js.coffee"
normalized_ext = "js"
elsif full_ext.end_with?("coffee")
source_ext = "coffee"
normalized_ext = "js"
elsif full_ext.end_with?("css.scss")
source_ext = "css.scss"
normalized_ext = "css"
elsif full_ext.end_with?("css.sass")
source_ext = "css.sass"
normalized_ext = "css"
elsif full_ext.end_with?("scss")
source_ext = "scss"
normalized_ext = "css"
elsif full_ext.end_with?("sass")
source_ext = "sass"
normalized_ext = "css"
end
if source_ext && normalized_ext
path[0, path.length - source_ext.length] + normalized_ext
end
}.select(&:present?).uniq
sources_logical_paths_to_precompile = slp_normalized_exts.select{|s| ext = s.split(".").last; ext.in?(["jpg", "jpeg", "png", "gif", "svg", "woff", "ttf", "eot"]) || s.in?(precompile_paths) }
#puts "sources_logical_paths_to_precompile: #{sources_logical_paths_to_precompile.inspect}"
next sources_logical_paths_to_precompile
end
path
}
return [allowed_files.flatten.uniq]
sources = args.first.select{|item|
if item.is_a?(Proc) || item.is_a?(Regexp)
next item
end
if item.is_a?(String)
next item if item.in?(allowed_files)
end
}
[sources]
end
def self.init
Sprockets::Manifest.class_eval do
def compile(*args)
dont_invoke_precompile = ENV["invoke_precompile"] == false || ENV["invoke_precompile"] == 'false'
Cms::AssetsPrecompile::SprocketsExtension.init_options
#puts args.inspect
normalized_args = Cms::AssetsPrecompile::SprocketsExtension.normalize_args(*args)
logger = Cms::AssetsPrecompile::AssetLogger.new(STDOUT)
unless environment
raise Error, "manifest requires environment for compilation"
end
filenames = []
concurrent_compressors = []
concurrent_writers = []
logger.info("Compile args: #{normalized_args.first.count}")
logger.info "Start finding assets"
#return
current_file_number = 0
logger.set("total_files", normalized_args.flatten.count)
if ENV["debug_precompile"]
puts "normalized_args: #{normalized_args.inspect}"
end
processed_assets = []
find_assets(*normalized_args) do |asset|
next if processed_assets.include?(asset.logical_path)
processed_assets << asset.logical_path
if ENV["debug_precompile"]
puts "asset logical_path: " + asset.logical_path
end
next if !Cms::AssetsPrecompile::SprocketsExtension.precompile_file?(asset.logical_path)
current_file_number += 1
files[asset.digest_path] = {
'logical_path' => asset.logical_path,
'mtime' => asset.mtime.iso8601,
'size' => asset.bytesize,
'digest' => asset.hexdigest,
# Deprecated: Remove beta integrity attribute in next release.
# Callers should DigestUtils.hexdigest_integrity_uri to compute the
# digest themselves.
'integrity' => Sprockets::DigestUtils.hexdigest_integrity_uri(asset.hexdigest)
}
assets[asset.logical_path] = asset.digest_path
if alias_logical_path = self.class.compute_alias_logical_path(asset.logical_path)
assets[alias_logical_path] = asset.digest_path
end
target = File.join(dir, asset.digest_path)
if File.exist?(target)
logger.skipping(target, current_file_number)
else
logger.writing(target, current_file_number)
write_file = Concurrent::Future.execute { asset.write_to target }
concurrent_writers << write_file
end
filenames << asset.filename
next if environment.skip_gzip?
gzip = Sprockets::Utils::Gzip.new(asset)
next if gzip.cannot_compress?(environment.mime_types)
next if dont_invoke_precompile
if File.exist?("#{target}.gz")
logger.skipping("#{target}.gz", current_file_number)
else
logger.writing("#{target}.gz", current_file_number)
concurrent_compressors << Concurrent::Future.execute do
write_file.wait! if write_file
gzip.compress(target)
end
end
end
logger.info("Finishing")
concurrent_writers.each(&:wait!)
concurrent_compressors.each(&:wait!)
save
filenames
end
def find_assets(*paths, &block)
unless environment
raise Error, "manifest requires environment for compilation"
end
environment = self.environment.cached
find_asset_options = {}
paths.flatten.each do |path|
puts "find_asset: path: #{path}"
asset = environment.cached.find_asset(path, find_asset_options)
puts "find_assets: asset=nil: #{asset.nil?.inspect}"
next unless asset
yield asset
end
nil
end
end
end
end
end
end | 37.534783 | 220 | 0.562493 |
ed6860516074d0cb62fc331a9062ec739b055f0c | 4,001 | module FCB
  # Client for the First Credit Bureau (Kazakhstan) SOAP verification service.
  class Verification
    TEST_API_PATH = "http://www-test2.1cb.kz/VerService/VerificationService".freeze
    PROD_API_PATH = "https://secure2.1cb.kz/VerService/VerificationService".freeze

    # Service error codes mapped to symbolic failure reasons returned via M.Failure.
    ERRORS = {
      "-1000" => :authentication_error,
      "-1011" => :duplication_error,
      "-1012" => :subject_not_found,
      "-1013" => :subject_is_not_physical,
      "-1014" => :contracts_not_found,
      "-1015" => :not_enough_information,
      "-1017" => :subject_consent_needed,
      "-1018" => :active_contracts_not_found
    }

    # Service-side numeric codes for marital status values.
    MARITAL_STATUSES = {
      "single" => 1,
      "married" => 2,
      "divorced" => 3,
      "widow" => 4,
      "civil_marriage" => 5
    }
def initialize(env: :production, culture: "ru-RU", user_name:, password:)
@culture = culture
@user_name = user_name
@password = password
@parser = Nori.new
@env = env.to_sym
end
def call(args={})
args[:addresses] = [] unless args[:addresses]
uri = URI(@env == :production ? PROD_API_PATH : TEST_API_PATH)
request = Net::HTTP::Post.new(uri)
request.body = xml(transform_args(args))
puts xml(transform_args(args))
request.content_type = "text/xml; charset=utf-8"
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = true if uri.port == 443
http.verify_mode = OpenSSL::SSL::VERIFY_NONE if uri.port == 443
response = http.start do |_|
_.request(request)
end
hash = @parser.parse(response.body)
error = hash.dig("S:Envelope", "S:Body", "S:Fault")
return M.Failure(:request_error) if error
data = hash["S:Envelope"]["S:Body"]["StoreVerificationReqResponse"]["return"]
return M.Failure(ERRORS[data["ErrorCode"]]) unless data["errorCode"] == "1"
M.Success(data)
end
private
def transform_args(args)
args.reduce({}) do |acc, elem|
if elem[0] == :marital_status
acc[elem[0]] = MARITAL_STATUSES[elem[1]]
else
acc[elem[0]] = elem[1]
end
acc
end
end
def xml(args)
xml = Builder::XmlMarkup.new
xml.instruct!(:xml, :encoding => "UTF-8")
xml.x(:Envelope, {
"xmlns:x" => "http://schemas.xmlsoap.org/soap/envelope/",
"xmlns:ws" => "http://verification.ws.creditinfo.com/"
}) do
xml.x :Header do
xml.ws :CigWsHeader do
xml.ws :Culture, @culture
xml.ws :UserName, @user_name
xml.ws :Password, @password
end
end
xml.x :Body do
xml.ws :StoreVerificationReq do
xml.ws :application do
xml.ws :IIN, args[:iin]
xml.ws :lastName, args[:last_name]
xml.ws :firstName, args[:first_name]
xml.ws :fatherName, args[:middle_name]
xml.ws :email, args[:email]
xml.ws :maritalstatus, args[:marital_status] if args[:marital_status]
xml.ws :Documents do
xml.ws :Typeid, 7
xml.ws :Number, args[:gov_id_number]
xml.ws :IssueDate, args[:gov_id_issued_at]
xml.ws :ExpirationDate, args[:gov_id_expire_at]
end
xml.ws :Addresses do
args[:addresses].map do |a|
xml.ws :KatoId, a[:kato]
xml.ws :StreetName, a[:street]
xml.ws :StreetNumber, a[:building]
xml.ws :AppartementNo, a[:apartment]
end
end if args[:addresses].any?
xml.ws :Phones do
args[:phones].map { |phone| xml.ws :PhoneNumber, phone }
end
xml.ws :consentConfirmed, 1
end
end
end
end
end
# Extracts the reports payload from a parsed SOAP response.
#
# Digs through the nested response hash inside the Maybe monad,
# short-circuiting to None as soon as any level is missing.
#
# @param body [Hash, nil] parsed response body (symbol keys)
# @return [M::Maybe] Some(reports payload) or None
def parse(body)
  # Symbol literals instead of "str".to_sym; block params renamed so
  # they no longer shadow the `body` method argument.
  M.Maybe(body).bind do |parsed|
    M.Maybe(parsed[:get_reports_response]).bind do |response|
      M.Maybe(response[:get_reports_result])
    end
  end
end
end
end
| 32.795082 | 83 | 0.553112 |
require 'spec_helper'

describe Tablado do
  it 'has a version number' do
    expect(Tablado::VERSION).not_to be nil
  end
end
describe Tablado::Presentation do
  let(:presentation) { Tablado::Presentation.new }

  # The original `expect(obj.respond_to? :attr)` had no matcher, so the
  # examples could never fail; use the respond_to matcher so they assert.
  # (Also fixes the :witdh typo, which would have failed a real check.)
  it 'should have attribute FPS' do expect(presentation).to respond_to(:fps) end
  it 'should have attribute width' do expect(presentation).to respond_to(:width) end
  it 'should have attribute height' do expect(presentation).to respond_to(:height) end

  describe '#width' do
    it 'default value should be 1280' do expect(presentation.width).to eq(1280) end
    it 'should be set' do expect{presentation.width = 800}.to change{presentation.width}.from(1280).to(800) end
  end

  describe '#height' do
    it 'default value should be 720' do expect(presentation.height).to eq(720) end
    it 'should be set' do expect{presentation.height = 400}.to change{presentation.height}.from(720).to(400) end
  end

  describe '#fps' do
    it 'default value should be 60' do expect(presentation.fps).to eq(60) end
    it 'should be set' do expect{presentation.fps = 10}.to change{presentation.fps}.from(60).to(10) end
  end
end
describe Tablado::Presentation::Slide do
  let(:slide) { Tablado::Presentation::Slide.new }

  # Matcherless `expect(slide.respond_to? :duration)` never asserted;
  # use the respond_to matcher.
  it 'should have attribute duration' do expect(slide).to respond_to(:duration) end

  describe '#duration' do
    it 'default value should be 1' do expect(slide.duration).to eq(1) end
    it 'should be set' do expect{slide.duration = 10}.to change{slide.duration}.from(1).to(10) end
  end
end
describe Tablado::Presentation::Slide::Background do
  let(:background) { Tablado::Presentation::Slide::Background.new }

  # Matcherless expects replaced with the respond_to matcher so the
  # examples actually assert.
  it 'should have attribute image' do expect(background).to respond_to(:image) end
  it 'should have attribute slide' do expect(background).to respond_to(:slide) end

  describe '#image' do
    it 'default value should be as Magick::Image' do expect(background.image.class).to eq(Magick::Image) end
  end
end
describe Tablado::Presentation::Slide::Element do
  let(:element) { Tablado::Presentation::Slide::Element.new }

  # Matcherless `expect(element.respond_to? a.to_sym)` never asserted;
  # use the respond_to matcher.
  %w[width height top left animations].each do |a|
    it "should have attribute #{a}" do expect(element).to respond_to(a.to_sym) end
  end

  describe '#draw' do
    it 'should return Magick::Image Class' do
      expect(element.draw.class).to eq(Magick::Image)
    end
  end

  describe '#animations' do
    it 'should return an Array' do
      expect(element.animations.class).to eq(Array)
    end
  end

  describe '#animate' do
    it 'should return an animated Element' do
      expect(element.animate.class).to eq(Tablado::Presentation::Slide::Element)
    end
  end
end
end | 30.034091 | 112 | 0.716988 |
require "rails_helper"

describe ContactPolicy do
  let(:user) { create(:user) }
  let(:provider) { create(:provider) }
  let(:contact) { create(:contact, provider: provider) }

  subject { described_class }

  permissions :show?, :update? do
    context "a user that belongs to the provider" do
      before do
        provider.users << user
      end

      it { is_expected.to permit(user, contact) }
    end

    context "a user doesn't belong to the provider" do
      it { is_expected.not_to permit(user, contact) }
    end

    context "a user that is an admin" do
      # Overrides the outer `user` with an admin-trait factory.
      let(:user) { create(:user, :admin) }

      it { is_expected.to permit(user, contact) }
    end
  end
end
| 22.8 | 56 | 0.638889 |
# Join model between a User and a Course, carrying the user's rating.
class Enrollment < ApplicationRecord
  belongs_to :user
  belongs_to :course

  # A user may rate a given course only once.
  validates_uniqueness_of :user_id, scope: :course_id, message: "You have already rated this course."
  validates :rating, presence: true
  # Hash syntax normalized to match the `scope:` style used above.
  validates_inclusion_of :rating, in: 1..5

  accepts_nested_attributes_for :course
end
| 25.666667 | 101 | 0.775974 |
26ab0f10ae55645a85be0ce8a89117db8bfa6809 | 1,161 | cask "protonvpn" do
version "1.8.0"
sha256 "549c9a19a75cc23e238b276fb27006b7c310434ce3539b588a990f2b43ba3e05"
url "https://protonvpn.com/download/ProtonVPN_mac_v#{version}.dmg"
appcast "https://protonvpn.com/download/macos-update2.xml"
name "ProtonVPN"
desc "VPN client focusing on security"
homepage "https://protonvpn.com/"
auto_updates true
depends_on macos: ">= :sierra"
app "ProtonVPN.app"
uninstall launchctl: "ch.protonvpn.ProtonVPNStarter",
quit: "ch.protonvpn.mac"
zap trash: [
"~/Library/Application Scripts/ch.protonvpn.ProtonVPNStarter",
"~/Library/Application Scripts/ch.protonvpn.mac",
"~/Library/Application Support/CrashReporter/ProtonVPN*",
"~/Library/Application Support/ProtonVPN",
"~/Library/Caches/SentryCrash/ProtonVPN",
"~/Library/Caches/ch.protonvpn.mac",
"~/Library/Caches/com.apple.nsurlsessiond/Downloads/ch.protonvpn.mac",
"~/Library/Containers/ch.protonvpn.*",
"~/Library/Cookies/ch.protonvpn.mac.binarycookies",
"~/Library/Logs/ProtonVPN.log",
"~/Library/Preferences/ch.protonvpn.mac.plist",
"~/Library/WebKit/ch.protonvpn.mac",
]
end
| 34.147059 | 75 | 0.719208 |
module PopcorntimeSearch
  # Lightweight value object wrapping a single movie search result.
  class MovieResult
    attr_accessor :title, :year, :imdb

    # @param result [Hash] raw API result with "title", "year" and
    #   "imdb_id" string keys
    def initialize(result)
      @title = result['title']
      @year = result['year']
      @imdb = result['imdb_id']
    end

    # @return [String] human-readable label, e.g. "Heat (1995)"
    def to_s
      "#{title} (#{year})"
    end
  end
end
| 17.0625 | 38 | 0.582418 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.