hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
f8f54e767545f0e74b8f8dd74c6f9e37cdacc1a3 | 6,803 | ########################################################################################################################
# OpenStudio(R), Copyright (c) 2008-2019, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
# derived from this software without specific prior written permission from the respective party.
#
# (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
# may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
# written permission from Alliance for Sustainable Energy, LLC.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
# STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
########################################################################################################################
require("openstudio/sketchup_plugin/lib/interfaces/SurfaceGroup")
module OpenStudio
  # Drawing interface pairing a SketchUp group entity with an
  # OpenStudio::Model::InteriorPartitionSurfaceGroup model object.
  class InteriorPartitionSurfaceGroup < SurfaceGroup
    def initialize
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      super
    end

    ##### Begin override methods for the input object #####

    # Resolves +handle+ to an InteriorPartitionSurfaceGroup in the currently
    # open OpenStudio model. Returns the model object, or nil (after printing
    # a console diagnostic) when the handle does not resolve.
    def self.model_object_from_handle(handle)
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      model_object = Plugin.model_manager.model_interface.openstudio_model.getInteriorPartitionSurfaceGroup(handle)
      if not model_object.empty?
        model_object = model_object.get
      else
        puts "InteriorPartitionSurfaceGroup: model_object is empty for #{handle.class}, #{handle.to_s}, #{Plugin.model_manager.model_interface.openstudio_model}"
        model_object = nil
      end
      return model_object
    end

    # Builds a new drawing interface wired to the model object found for
    # +handle+ and registers a watcher on it.
    # NOTE(review): if model_object_from_handle returns nil, the
    # `model_object.drawing_interface =` line below raises NoMethodError —
    # presumably callers only pass valid handles; confirm.
    def self.new_from_handle(handle)
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      drawing_interface = InteriorPartitionSurfaceGroup.new
      model_object = model_object_from_handle(handle)
      drawing_interface.model_object = model_object
      model_object.drawing_interface = drawing_interface
      drawing_interface.add_watcher
      return(drawing_interface)
    end

    # Creates the backing model object, temporarily disabling the model
    # watcher so the creation itself does not trigger watcher callbacks.
    def create_model_object
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      model_watcher_enabled = @model_interface.model_watcher.disable
      @model_object = OpenStudio::Model::InteriorPartitionSurfaceGroup.new(@model_interface.openstudio_model)
      @model_interface.model_watcher.enable if model_watcher_enabled
      super
    end

    # Returns false (after recording an error) when the referenced parent
    # space cannot be found; otherwise defers to the superclass check.
    def check_model_object
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      # Look up the parent drawing interface (might fail if the reference is bad)
      if (not parent_from_model_object)
        @model_interface.add_error("Error: " + @model_object.name.to_s + "\n")
        @model_interface.add_error("The space referenced by this interior partition surface group does not exist, it cannot be drawn.\n\n")
        return(false)
      end
      return(super)
    end

    # Updates the ModelObject with new information from the SketchUp entity.
    def update_model_object
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      super
      if (valid_entity?)
        if (@parent.class == Space)
          # Suspend this interface's watcher while writing the parent space so
          # the write does not feed back into the watcher.
          watcher_enabled = disable_watcher
          @model_object.setSpace(@parent.model_object) # Parent should already have been updated.
          enable_watcher if watcher_enabled
        end
      end
    end

    # The parent interface is the space
    # Returns the space's drawing interface, or nil when the model object is
    # missing or references no space.
    def parent_from_model_object
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      parent = nil
      if (@model_object)
        space = @model_object.space
        if (not space.empty?)
          parent = space.get.drawing_interface
        end
      end
      return(parent)
    end

    ##### Begin override methods for the entity #####

    # Called from InteriorPartitionSurfaceGroup.new_from_entity(entity).
    # Needed for recreating the Group when a partition surface is reassociated.
    def create_from_entity(entity)
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      @entity = entity
      @entity.drawing_interface = self
      if (check_entity)
        #create_model_object
        #update_model_object
        update_entity
        update_parent_from_entity # kludge...this is out of place here, but works: it adds itself as a child of model interface
        #paint_entity
        #add_observers # should be added ModelInterface
      else
        puts "DrawingInterface.create_from_entity: check_entity failed"
      end
      return(self)
    end

    def create_entity
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      super
      update_entity
    end

    ##### Begin override methods for the interface #####

    # Names the SketchUp entity after the model object; falls back to
    # "(Untitled)" when the model object has no name.
    def set_entity_name
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      if (@model_object.name.empty?)
        @entity.name = "Interior Partition Surface Group: " + "(Untitled)"
      else
        @entity.name = "Interior Partition Surface Group: " + @model_object.name.get
      end
    end

    ##### Begin new methods for the interface #####

    # surface area in in^2
    # Sum of the gross area of all child surface interfaces.
    def surface_area
      Plugin.log(OpenStudio::Trace, "#{current_method_name}")
      area = 0.0
      for child in @children
        area += child.gross_area
      end
      return(area)
    end
  end
end
| 35.994709 | 161 | 0.690284 |
618e51d10b8a6d3975b36e874fcccee601989b77 | 131 | require "test_helper"
# Placeholder test class generated by the model scaffold for FutureGoal;
# contains no assertions yet.
class FutureGoalTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 16.375 | 47 | 0.671756 |
e9b4b2f6c431f4e9ea74909c93e10b83f470b53b | 472 | module DiscountNetwork
# Base class for DiscountNetwork API wrappers: unknown class-level calls are
# delegated to a freshly-built instance via method_missing.
class Base
  # Delegate unknown class-level calls to a new instance.
  #
  # BUG FIX: the original passed `include_private: false` as the second
  # argument to #respond_to?. That argument is a positional boolean
  # (include_all), so the truthy Hash actually *included* private methods.
  # Using `true` below keeps that runtime behavior while making the intent
  # explicit.
  def self.method_missing(method_name, *arguments, &block)
    if new.respond_to?(method_name, true)
      new.send(method_name, *arguments, &block)
    else
      super
    end
  end

  # Keep respond_to? in sync with method_missing so that
  # Base.respond_to?(:some_instance_method) reflects the delegation — the
  # standard companion whenever method_missing is overridden.
  def self.respond_to_missing?(method_name, include_private = false)
    new.respond_to?(method_name, true) || super
  end

  private

  # Normalizes +array_params+ (a single value or an Array) into a Hash of
  # { index => attribute } pairs, e.g. [:a, :b] => { 0 => :a, 1 => :b }.
  def build_array_params(array_params)
    array_params = [array_params].flatten
    array_params.map.each_with_index do |attribute, index|
      [index, attribute]
    end.to_h
  end
end
end
| 22.47619 | 61 | 0.669492 |
039adec39558231cbe6a806f668890463792afeb | 372 | require 'rubygems'
# Dependency bootstrap for the cartodb-rb-client test/console environment.
require 'bundler/setup'
require 'ostruct'
require 'oauth'
require 'typhoeus'
require 'mime/types'
require 'active_support/core_ext/hash/slice'
require 'rgeo'
require 'rgeo/geo_json'
require 'pg'
require 'json/ext'
require 'cartodb-rb-client/cartodb'
# SECURITY WARNING: the two lines below globally disable TLS certificate
# verification for every OpenSSL client in this process, permitting
# man-in-the-middle attacks. Acceptable only in an isolated test
# environment — never ship this to production code paths.
OpenSSL::SSL.send :remove_const, :VERIFY_PEER
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
bbb9b0ceacd4837b6a334a53ce12f044075d109f | 290 | $:.unshift(File.dirname(__FILE__) + '/../lib/')
# Bootstrap for the legacy RSpec 1.x ("Spec") suite of the adt library.
require 'rubygems'
require 'spec'
require 'adt'
require 'fileutils'
# Directory holding fixture database files; a caller may predefine DB_PATH.
DB_PATH = File.dirname(__FILE__) + '/fixtures' unless defined?(DB_PATH)
# No global RSpec configuration needed yet.
Spec::Runner.configure do |config|
end
self.class.send :remove_const, 'Test' if defined? Test | 22.307692 | 71 | 0.713793 |
285a2ab2ed1f7eeef4a93320758f6c85cd2274fa | 8,934 | # rubocop:disable Style/CaseEquality
# rubocop:disable Style/MultilineTernaryOperator
# rubocop:disable Style/NestedTernaryOperator
module Fastlane
  module Actions
    # fastlane action that posts a build success/error notification to a
    # Rocket.Chat incoming webhook, with optional attachment fields (lane,
    # git branch/author/commit, custom payloads).
    class RocketChatAction < Action
      # Available on every platform.
      def self.is_supported?(platform)
        true
      end

      # As there is a text limit in the notifications, we are
      # usually interested in the last part of the message
      # e.g. for tests
      def self.trim_message(message)
        # We want the last 7000 characters, instead of the first 7000, as the error is at the bottom
        start_index = [message.length - 7000, 0].max
        message = message[start_index..-1]
        message
      end

      # Entry point: builds the attachment and pings the webhook.
      # In test mode, returns [notifier, attachment] instead of sending.
      def self.run(options)
        require 'rocket-chat-notifier'
        # NOTE(review): .to_s never returns nil, so the "|| ''" fallback is redundant.
        options[:message] = self.trim_message(options[:message].to_s || '')
        notifier = ::RocketChat::Notifier.new(options[:rocket_chat_url])
        # When the webhook's own username/icon should be used, leave these nil.
        notifier.username = options[:use_webhook_configured_username_and_icon] ? nil : options[:username]
        icon_url = options[:use_webhook_configured_username_and_icon] ? nil : options[:icon_url]
        if options[:channel].to_s.length > 0
          notifier.channel = options[:channel]
          notifier.channel = ('#' + notifier.channel) unless ['#', '@'].include?(notifier.channel[0]) # send message to channel by default
        end
        attachment = generate_attachments(options)
        return [notifier, attachment] if Helper.is_test? # tests will verify the rocket chat attachments and other properties
        result = notifier.ping '',
                               icon_url: icon_url,
                               attachments: [attachment]
        if result.code.to_i == 200
          UI.success('Successfully sent RocketChat notification')
        else
          UI.verbose(result)
          UI.user_error!("Error pushing RocketChat message, maybe the integration has no permission to post on this channel? Try removing the channel parameter in your Fastfile.")
        end
      end

      def self.description
        "Send a success/error message to your RocketChat group"
      end

      # fastlane option declarations: message, channel, webhook URL,
      # username/icon overrides, extra payloads, and success flag.
      def self.available_options
        [
          FastlaneCore::ConfigItem.new(key: :message,
                                       env_name: "FL_ROCKET_CHAT_MESSAGE",
                                       description: "The message that should be displayed on Rocket.Chat. This supports the standard Rocket.Chat markup language",
                                       optional: true),
          FastlaneCore::ConfigItem.new(key: :channel,
                                       env_name: "FL_ROCKET_CHAT_CHANNEL",
                                       description: "#channel or @username",
                                       optional: true),
          FastlaneCore::ConfigItem.new(key: :use_webhook_configured_username_and_icon,
                                       env_name: "FL_ROCKET_CHAT_USE_WEBHOOK_CONFIGURED_USERNAME_AND_ICON",
                                       description: "Use webook's default username and icon settings? (true/false)",
                                       default_value: false,
                                       is_string: false,
                                       optional: true),
          FastlaneCore::ConfigItem.new(key: :rocket_chat_url,
                                       env_name: "ROCKET_CHAT_URL",
                                       description: "Create an Incoming WebHook for your Rocket.Chat group"),
          FastlaneCore::ConfigItem.new(key: :username,
                                       env_name: "FL_ROCKET_CHAT_USERNAME",
                                       description: "Overrides the webook's username property if use_webhook_configured_username_and_icon is false",
                                       default_value: "fastlane",
                                       is_string: true,
                                       optional: true),
          FastlaneCore::ConfigItem.new(key: :icon_url,
                                       env_name: "FL_ROCKET_CHAT_ICON_URL",
                                       description: "Overrides the webook's image property if use_webhook_configured_username_and_icon is false",
                                       default_value: "https://s3-eu-west-1.amazonaws.com/fastlane.tools/fastlane.png",
                                       is_string: true,
                                       optional: true),
          FastlaneCore::ConfigItem.new(key: :payload,
                                       env_name: "FL_ROCKET_CHAT_PAYLOAD",
                                       description: "Add additional information to this post. payload must be a hash containg any key with any value",
                                       default_value: {},
                                       is_string: false),
          FastlaneCore::ConfigItem.new(key: :default_payloads,
                                       env_name: "FL_ROCKET_CHAT_DEFAULT_PAYLOADS",
                                       description: "Remove some of the default payloads. More information about the available payloads on GitHub",
                                       optional: true,
                                       is_string: false),
          FastlaneCore::ConfigItem.new(key: :attachment_properties,
                                       env_name: "FL_ROCKET_CHAT_ATTACHMENT_PROPERTIES",
                                       description: "Merge additional properties in the Rocket.Chat attachment",
                                       default_value: {},
                                       is_string: false),
          FastlaneCore::ConfigItem.new(key: :success,
                                       env_name: "FL_ROCKET_CHAT_SUCCESS",
                                       description: "Was this build successful? (true/false)",
                                       optional: true,
                                       default_value: true,
                                       is_string: false)
        ]
      end

      def self.author
        "thiagofelix"
      end

      #####################################################
      # @!group Helper
      #####################################################

      # Builds the Rocket.Chat attachment hash: message text, a color keyed on
      # success, and one field per enabled payload. A payload is enabled when
      # :default_payloads is nil (all on) or lists its name.
      def self.generate_attachments(options)
        color = (options[:success] ? 'good' : 'danger')
        should_add_payload = ->(payload_name) { options[:default_payloads].nil? || options[:default_payloads].include?(payload_name) }
        attachment = {
          fallback: options[:message],
          text: options[:message],
          color: color,
          mrkdwn_in: ["pretext", "text", "fields", "message"],
          fields: []
        }
        # custom user payloads
        attachment[:fields] += options[:payload].map do |k, v|
          {
            title: k.to_s,
            value: v.to_s
          }
        end
        # Add the lane to the Rocket.Chat message
        # This might be nil, if Rocket.Chat is called as "one-off" action
        if should_add_payload[:lane] && Actions.lane_context[Actions::SharedValues::LANE_NAME]
          attachment[:fields] << {
            title: 'Lane',
            value: Actions.lane_context[Actions::SharedValues::LANE_NAME]
          }
        end
        # test_result
        if should_add_payload[:test_result]
          attachment[:fields] << {
            title: 'Result',
            value: (options[:success] ? 'Success' : 'Error')
          }
        end
        # git branch
        if Actions.git_branch && should_add_payload[:git_branch]
          attachment[:fields] << {
            title: 'Git Branch',
            value: Actions.git_branch
          }
        end
        # git_author (suppressed on success when the env opt-out is set)
        if Actions.git_author_email && should_add_payload[:git_author]
          unless ENV['FASTLANE_ROCKET_CHAT_HIDE_AUTHOR_ON_SUCCESS'] && options[:success]
            attachment[:fields] << {
              title: 'Git Author',
              value: Actions.git_author_email
            }
          end
        end
        # last_git_commit
        if Actions.last_git_commit_message && should_add_payload[:last_git_commit]
          attachment[:fields] << {
            title: 'Git Commit',
            value: Actions.last_git_commit_message
          }
        end
        # merge additional properties
        deep_merge(attachment, options[:attachment_properties])
      end

      # Adapted from http://stackoverflow.com/a/30225093/158525
      # Recursive merge: nested Hashes merge, Arrays union, and values from +b+
      # win unless they are nil/:nil/:undefined (then +a+'s value is kept).
      def self.deep_merge(a, b)
        merger = proc do |key, v1, v2|
          Hash === v1 && Hash === v2 ?
            v1.merge(v2, &merger) : Array === v1 && Array === v2 ?
              v1 | v2 : [:undefined, nil, :nil].include?(v2) ? v1 : v2
        end
        a.merge(b, &merger)
      end
    end
  end
end
# rubocop:enable Style/CaseEquality
# rubocop:enable Style/MultilineTernaryOperator
# rubocop:enable Style/NestedTernaryOperator
| 44.447761 | 179 | 0.531901 |
79a987d8dfdb1b78f83b4033a91db75abfc70a82 | 1,874 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# (Body of Rails.application.configure for the test environment.)

# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true

# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false

# Configure static file server for tests with Cache-Control for performance.
config.serve_static_files = true
config.static_cache_control = 'public, max-age=3600'

# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false

# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false

# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false

# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test

# Randomize the order test cases are executed.
config.active_support.test_order = :random

# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr

# No caching in tests.
config.cache_store = :null_store

# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true

# NOTE(review): setting route URL options inside the environment configure
# block is unusual (it runs at load time, not via `config`); it works, but
# confirm it is intentional rather than leftover from a URL-helper fix.
Rails.application.routes.default_url_options = { host: 'localhost:5000' }
| 39.87234 | 85 | 0.771612 |
62b2b64a4fb1787d57eecb2d2ccc6574cb5c93fa | 1,653 | # Copyright (c) 2017-present, Facebook, Inc. All rights reserved.
#
# You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
# copy, modify, and distribute this software in source code or binary form for use
# in connection with the web services and APIs provided by Facebook.
#
# As with any software that integrates with the Facebook platform, your use of
# this software is subject to the Facebook Platform Policy
# [http://developers.facebook.com/policy/]. This copyright notice shall be
# included in all copies or substantial portions of the software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Example script: configure the Facebook Marketing API client for an ad
# account (the ad-creation call follows below).
require 'facebook_ads'

# Placeholder credentials/IDs — substitute real values before running.
# NOTE(review): app_id is assigned but not used in the visible code.
access_token = '<ACCESS_TOKEN>'
app_secret = '<APP_SECRET>'
app_id = '<APP_ID>'
id = '<AD_ACCOUNT_ID>'

# Register the credentials with the SDK's global configuration.
FacebookAds.configure do |config|
  config.access_token = access_token
  config.app_secret = app_secret
end

ad_account = FacebookAds::AdAccount.get(id)
ads = ad_account.ads.create({
name: 'My New Offers Ad',
adset_id: '<adSetID>',
creative: {'object_story_spec':{'page_id':'<pageID>','link_data':{'offer_id':'<offerID>','link':'https:\/\/www.facebook.com\/','message':'Great Deal','name':'30% off','image_hash':'<imageHash>'}}},
status: 'PAUSED',
}) | 44.675676 | 201 | 0.745917 |
e858861891a122507c11e9670cd3f966371d3291 | 360 | class Paper < ApplicationRecord
# Associations and validations for Paper (class body; joined to its
# authors through a HABTM join table).
has_and_belongs_to_many :authors

validates :year, :title, :venue, presence: true
validates :year, numericality: { only_integer: true }

# Papers published in the given year.
# FIX: the original fragment used the SQL operator "==" ("year == ?"),
# which only SQLite accepts; the hash form below is equivalent and
# database-agnostic.
scope :written_in, ->(year) { where(year: year) }
# Space-separated list of this paper's author names.
#
# Replaces manual string accumulation with map/join. The historical
# format — each name followed by a single space, so the result carries a
# trailing space (and is "" for no authors) — is preserved for callers
# that may rely on it.
def authors_names
  authors.map { |author| "#{author.name} " }.join
end
end
| 21.176471 | 58 | 0.65 |
ed0a07744e258da085d3eec48a40b37e79a4715b | 2,510 | class Artwork < ActiveRecord::Base
belongs_to :neighborhood
belongs_to :artist
# Data Import code
# def self.import!
# new.import!("SF_Civic_Art_Collection.csv")
# end
# def import!(file)
# CSV.foreach(file, encoding: "iso-8859-1:UTF-8", headers: true, header_converters: :symbol) do |row|
# art_lat = find_lat(row[:geometry])
# art_long = find_long(row[:geometry])
# result = Geocoder.search("#{art_lat}, #{art_long}").first
# attributes = {
# address: find_address(result),
# neighborhood_id: check_neighborhood(result),
# latitude: art_lat,
# longitude: art_long,
# artist_id: check_artist(row[:artist]),
# credit: row[:credit_line],
# title: row[:title],
# dimensions: row[:display_dimensions],
# medium: row[:medium],
# location_name: row[:location]
# }
# puts "Creating new artwork: #{attributes.inspect}"
# Artwork.create!(attributes)
# end
# end
# def check_neighborhood(obj)
# if hood_exists?(find_neighborhood(obj))
# return Neighborhood.where(name: find_neighborhood(obj)).first.id
# else
# @neighborhood = Neighborhood.create!(name: find_neighborhood(obj))
# p "creating neighborhood: #{@neighborhood.name}"
# return @neighborhood.id
# end
# end
# def check_artist(obj)
# if art_exists?(obj)
# return Artist.where(name: obj).first.id
# else
# @artist = Artist.create!(name: obj)
# p "creating artist: #{@artist.name}"
# return @artist.id
# end
# end
# def find_address(data)
# if data != nil
# data.formatted_address
# else
# return nil
# end
# end
# def find_neighborhood(data)
# if data != nil
# result = data.address_components[2]["long_name"]
# else
# return nil
# end
# end
# def find_long(string)
# if string != nil
# result = string.match /-\d+.\d+/
# result[0].to_f
# else
# return nil
# end
# end
# def find_lat(string)
# if string != nil
# result = string.match /,(\d+.\d+)/
# result[1].to_f
# else
# return nil
# end
# end
# def hood_exists?(string)
# if Neighborhood.where(name: string).first != nil
# return true
# end
# end
# def art_exists?(string)
# if Artist.where(name: string).first != nil
# return true
# end
# end
end
| 24.134615 | 105 | 0.5749 |
e2370e0bda1e88e8764dec6a3a1f6088990d8f3d | 442 | # A special router to use to instantiate an OodApp
# Router used to instantiate an OodApp object when nothing but the
# filesystem path of the app is known. Exposes the same attributes as the
# other routers: a fixed "App" category, no caption, a placeholder URL,
# and name/token derived from the path's basename.
class PathRouter
  attr_reader :category, :caption, :url, :type, :path, :name, :token

  # @param app_path [String, Pathname] path to the app directory
  def initialize(app_path)
    @path = Pathname.new(app_path)
    @name = @path.basename.to_s
    @token = @name
    @category = "App"
    @caption = nil
    @url = "#"
    @type = :path
  end

  # Username of the filesystem owner of the app directory (memoized).
  def owner
    @owner ||= Etc.getpwuid(path.stat.uid).name
  end
end
| 22.1 | 68 | 0.635747 |
ac58fba2c82619a56484252a3b8e4865ae1cb948 | 2,870 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_09_01
  module Models
    #
    # Result of the request to list VirtualHubs. It contains a list of
    # VirtualHubs and a URL nextLink to get the next set of results.
    #
    # (Auto-generated Azure SDK model; edit the generator, not this file.)
    class ListVirtualHubsResult

      include MsRestAzure
      include MsRest::JSONable
      # @return [Array<VirtualHub>] List of VirtualHubs.
      attr_accessor :value

      # @return [String] URL to get the next set of operation list results if
      # there are any.
      attr_accessor :next_link

      # return [Proc] with next page method call.
      attr_accessor :next_method

      #
      # Gets the rest of the items for the request, enabling auto-pagination.
      #
      # @return [Array<VirtualHub>] operation results.
      #
      # NOTE(review): items aliases @value and concat mutates it, so fetched
      # pages accumulate into this object's own value array.
      def get_all_items
        items = @value
        page = self
        while page.next_link != nil && !page.next_link.strip.empty? do
          page = page.get_next_page
          items.concat(page.value)
        end
        items
      end

      #
      # Gets the next page of results.
      #
      # @return [ListVirtualHubsResult] with next page content.
      #
      # Returns nil when @next_method is unset or yields no response.
      def get_next_page
        response = @next_method.call(@next_link).value! unless @next_method.nil?
        unless response.nil?
          @next_link = response.body.next_link
          @value = response.body.value
          self
        end
      end

      #
      # Mapper for ListVirtualHubsResult class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'ListVirtualHubsResult',
          type: {
            name: 'Composite',
            class_name: 'ListVirtualHubsResult',
            model_properties: {
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'value',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'VirtualHubElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'VirtualHub'
                      }
                  }
                }
              },
              next_link: {
                client_side_validation: true,
                required: false,
                serialized_name: 'nextLink',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 28.415842 | 80 | 0.529965 |
1a50d246b3b042fc701fbabc80dbfe744b424696 | 153 | class CreatePhotos < ActiveRecord::Migration
# Creates the photos table: a string :filename column plus the standard
# created_at/updated_at timestamp columns.
def change
  create_table :photos do |t|
    t.string :filename
    t.timestamps
  end
end
end
| 15.3 | 44 | 0.673203 |
e90c502d01a2f1f8692f8ef48006ba0c9df8c62f | 721 | Rails.application.routes.draw do
# (Body of Rails.application.routes.draw.)

# Scaffold-generated GET routes (the named routes below are the ones
# actually linked from the app).
get 'password_resets/new'
get 'password_resets/edit'
get 'sessions/new'

# Static pages.
root 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'

# Signup and session management.
get '/signup', to: 'users#new'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'

# Users, with follower/following listing pages on each member.
resources :users do
  member do
    get :following, :followers
  end
end
resources :account_activations, only: [:edit]
resources :password_resets, only: [:new, :create, :edit, :update]
resources :microposts, only: [:create, :destroy]
resources :relationships, only: [:create, :destroy]
end
| 31.347826 | 67 | 0.68516 |
382a5283aaf921aa274b92820d93a5e5747f84ab | 23,936 | require 'test_helper'
class VersionTest < ActiveSupport::TestCase
should belong_to :rubygem
should have_many :dependencies
context "#as_json" do
setup do
@version = create(:version)
end
should "only have relevant API fields" do
json = @version.as_json
assert_equal %w[number built_at summary description authors platform ruby_version prerelease downloads_count licenses requirements].map(&:to_s).sort, json.keys.sort
assert_equal @version.authors, json["authors"]
assert_equal @version.built_at, json["built_at"]
assert_equal @version.description, json["description"]
assert_equal @version.downloads_count, json["downloads_count"]
assert_equal @version.number, json["number"]
assert_equal @version.platform, json["platform"]
assert_equal @version.prerelease, json["prerelease"]
assert_equal @version.ruby_version, json["ruby_version"]
assert_equal @version.summary, json["summary"]
assert_equal @version.licenses, json["licenses"]
assert_equal @version.requirements, json["requirements"]
end
end
context "#to_xml" do
setup do
@version = create(:version)
end
should "only have relevant API fields" do
xml = Nokogiri.parse(@version.to_xml)
assert_equal %w[number built-at summary description authors platform ruby-version prerelease downloads-count licenses requirements].map(&:to_s).sort, xml.root.children.map{|a| a.name}.reject{|t| t == "text"}.sort
assert_equal @version.authors, xml.at_css("authors").content
assert_equal @version.built_at.to_i, xml.at_css("built-at").content.to_time.to_i
assert_equal @version.description, xml.at_css("description").content
assert_equal @version.downloads_count, xml.at_css("downloads-count").content.to_i
assert_equal @version.number, xml.at_css("number").content
assert_equal @version.platform, xml.at_css("platform").content
assert_equal @version.prerelease.to_s, xml.at_css("prerelease").content
assert_equal @version.ruby_version, xml.at_css("ruby-version").content
assert_equal @version.summary.to_s, xml.at_css("summary").content
assert_equal @version.licenses, xml.at_css("licenses").content
assert_equal @version.requirements, xml.at_css("requirements").content
end
end
context ".most_recent" do
setup do
@gem = create(:rubygem)
end
should "return most recently created version for versions with multiple non-ruby platforms" do
create(:version, :rubygem => @gem, :number => '0.1', :platform => 'linux')
@most_recent = create(:version, :rubygem => @gem, :number => '0.2', :platform => 'universal-rubinius')
create(:version, :rubygem => @gem, :number => '0.1', :platform => 'mswin32')
assert_equal @most_recent, Version.most_recent
end
end
context ".reverse_dependencies" do
setup do
@dep_rubygem = create(:rubygem)
@gem_one = create(:rubygem)
@gem_two = create(:rubygem)
@gem_three = create(:rubygem)
@version_one_latest = create(:version, :rubygem => @gem_one, :number => '0.2')
@version_one_earlier = create(:version, :rubygem => @gem_one, :number => '0.1')
@version_two_latest = create(:version, :rubygem => @gem_two, :number => '1.0')
@version_two_earlier = create(:version, :rubygem => @gem_two, :number => '0.5')
@version_three = create(:version, :rubygem => @gem_three, :number => '1.7')
@version_one_latest.dependencies << create(:dependency, :version => @version_one_latest, :rubygem => @dep_rubygem)
@version_two_earlier.dependencies << create(:dependency, :version => @version_two_earlier, :rubygem => @dep_rubygem)
@version_three.dependencies << create(:dependency, :version => @version_three, :rubygem => @dep_rubygem)
end
should "return all depended gem versions" do
version_list = Version.reverse_dependencies(@dep_rubygem.name)
assert_equal 3, version_list.size
assert version_list.include?(@version_one_latest)
assert version_list.include?(@version_two_earlier)
assert version_list.include?(@version_three)
assert ! version_list.include?(@version_one_earlier)
assert ! version_list.include?(@version_two_latest)
end
end
context "updated gems" do
setup do
Timecop.freeze Date.today
@existing_gem = create(:rubygem)
@second = create(:version, :rubygem => @existing_gem, :created_at => 1.day.ago)
@fourth = create(:version, :rubygem => @existing_gem, :created_at => 4.days.ago)
@another_gem = create(:rubygem)
@third = create(:version, :rubygem => @another_gem, :created_at => 3.days.ago)
@first = create(:version, :rubygem => @another_gem, :created_at => 1.minute.ago)
@yanked = create(:version, :rubygem => @another_gem, :created_at => 30.seconds.ago)
@yanked.yank!
@bad_gem = create(:rubygem)
@only_one = create(:version, :rubygem => @bad_gem, :created_at => 1.minute.ago)
end
teardown do
Timecop.return
end
should "order gems by created at and show only gems that have more than one version" do
versions = Version.just_updated
assert_equal 4, versions.size
assert_equal [@first, @second, @third, @fourth], versions
end
end
context "with a rubygem" do
setup do
@rubygem = create(:rubygem)
end
should "not allow duplicate versions" do
@version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "ruby")
@dup_version = @version.dup
@number_version = build(:version, :rubygem => @rubygem, :number => "2.0.0", :platform => "ruby")
@platform_version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "mswin32")
assert @version.save
assert @number_version.save
assert @platform_version.save
assert ! @dup_version.valid?
end
should "be able to find dependencies" do
@dependency = create(:rubygem)
@version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "ruby")
@version.dependencies << create(:dependency, :version => @version, :rubygem => @dependency)
assert ! Version.with_deps.first.dependencies.empty?
end
should "sort dependencies alphabetically" do
@version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "ruby")
@first_dependency_by_alpha = create(:rubygem, :name => 'acts_as_indexed')
@second_dependency_by_alpha = create(:rubygem, :name => 'friendly_id')
@third_dependency_by_alpha = create(:rubygem, :name => 'refinerycms')
@version.dependencies << create(:dependency, :version => @version, :rubygem => @second_dependency_by_alpha)
@version.dependencies << create(:dependency, :version => @version, :rubygem => @third_dependency_by_alpha)
@version.dependencies << create(:dependency, :version => @version, :rubygem => @first_dependency_by_alpha)
assert @first_dependency_by_alpha.name, @version.dependencies.first.name
assert @second_dependency_by_alpha.name, @version.dependencies[1].name
assert @third_dependency_by_alpha.name, @version.dependencies.last.name
end
end
context "with a ruby version" do
setup do
@ruby_version = ">= 1.9.3"
@version = create(:version)
end
subject { @version }
should "have a ruby version" do
@version.ruby_version = @ruby_version
@version.save!
new_version = Version.find(@version.id)
assert_equal new_version.ruby_version, @ruby_version
end
end
context "without a ruby version" do
setup do
@ruby_version = ">= 1.9.3"
@version = create(:version)
end
subject { @version }
should "not have a ruby version" do
@version.ruby_version = nil
@version.save!
nil_version = Version.find(@version.id)
assert_nil nil_version.ruby_version
end
end
  context "with a version" do
    setup do
      @version = create(:version)
      @info = "some info"
    end
    subject { @version }
    # Numbers and platforms come straight from user-supplied gemspecs, so
    # anything YAML-ish, newline-bearing or class-path-like is rejected.
    should_not allow_value("#YAML<CEREALIZATION-FAIL>").for(:number)
    should_not allow_value("1.2.3-\"[javalol]\"").for(:number)
    should_not allow_value("0.8.45::Gem::PLATFORM::FAILBOAT").for(:number)
    should_not allow_value("1.2.3\n<bad>").for(:number)
    should allow_value("ruby").for(:platform)
    should allow_value("mswin32").for(:platform)
    should allow_value("x86_64-linux").for(:platform)
    should_not allow_value("Gem::Platform::Ruby").for(:platform)
    should "give number for #to_s" do
      assert_equal @version.number, @version.to_s
    end
    # Default factory platform is treated as non-platformed (ruby).
    should "not be platformed" do
      assert ! @version.platformed?
    end
    # full_name is "<gem>-<number>" and the slug is just the number for
    # the default platform.
    should "save full name" do
      assert_equal "#{@version.rubygem.name}-#{@version.number}", @version.full_name
      assert_equal @version.number, @version.slug
    end
    # Creating a version writes its metadata hash into redis, keyed by
    # Version.info_key(full_name).
    should "save info into redis" do
      info = Redis.current.hgetall(Version.info_key(@version.full_name))
      assert_equal @version.rubygem.name, info["name"]
      assert_equal @version.number, info["number"]
      assert_equal @version.platform, info["platform"]
    end
    should "add version onto redis versions list" do
      assert_equal @version.full_name, Redis.current.lindex(Rubygem.versions_key(@version.rubygem.name), 0)
    end
    should "raise an ActiveRecord::RecordNotFound if an invalid slug is given" do
      assert_raise ActiveRecord::RecordNotFound do
        Version.find_from_slug!(@version.rubygem_id, "some stupid version 399")
      end
    end
    # Platformed versions use "<number>-<platform>" as their slug.
    %w[x86_64-linux java mswin x86-mswin32-60].each do |platform|
      should "be able to find with platform of #{platform}" do
        version = create(:version, :platform => platform)
        slug = "#{version.number}-#{platform}"
        assert version.platformed?
        assert_equal version.reload, Version.find_from_slug!(version.rubygem_id, slug)
        assert_equal slug, version.slug
      end
    end
    should "have a default download count" do
      assert @version.downloads_count.zero?
    end
    # `gem install` hint strings: only non-latest versions carry -v.
    should "give no version flag for the latest version" do
      new_version = create(:version, :rubygem => @version.rubygem, :built_at => 1.day.from_now)
      assert_equal "gem install #{@version.rubygem.name} -v #{@version.number}", @version.to_install
      assert_equal "gem install #{new_version.rubygem.name}", new_version.to_install
    end
    # Prerelease versions always need --pre; older ones also need -v.
    should "tack on prerelease flag" do
      @version.update_attributes(:number => "0.3.0.pre")
      new_version = create(:version, :rubygem => @version.rubygem,
                           :built_at => 1.day.from_now,
                           :number => "0.4.0.pre")
      assert @version.prerelease
      assert new_version.prerelease
      @version.rubygem.reorder_versions
      assert_equal "gem install #{@version.rubygem.name} -v #{@version.number} --pre",
        @version.to_install
      assert_equal "gem install #{new_version.rubygem.name} --pre",
        new_version.to_install
    end
    should "give no version count for the latest prerelease version" do
      @version.update_attributes(:number => "0.3.0.pre")
      old_version = create(:version, :rubygem => @version.rubygem,
                           :built_at => 1.day.from_now,
                           :number => "0.2.0")
      assert @version.prerelease
      assert !old_version.prerelease
      @version.rubygem.reorder_versions
      assert_equal "gem install #{@version.rubygem.name} --pre", @version.to_install
      assert_equal "gem install #{old_version.rubygem.name}", old_version.to_install
    end
    should "give title for #to_title" do
      assert_equal "#{@version.rubygem.name} (#{@version.to_s})", @version.to_title
    end
    should "give version with twiddle-wakka for #to_bundler" do
      assert_equal %{gem '#{@version.rubygem.name}', '~> #{@version.to_s}'}, @version.to_bundler
    end
    should "give title and platform for #to_title" do
      @version.platform = "zomg"
      assert_equal "#{@version.rubygem.name} (#{@version.number}-zomg)", @version.to_title
    end
    # #info fallback chain: description, then summary, then stock text.
    should "have description for info" do
      @version.description = @info
      assert_equal @info, @version.info
    end
    should "have summary for info if description does not exist" do
      @version.description = nil
      @version.summary = @info
      assert_equal @info, @version.info
    end
    should "have summary for info if description is blank" do
      @version.description = ""
      @version.summary = @info
      assert_equal @info, @version.info
    end
    should "have some text for info if neither summary or description exist" do
      @version.description = nil
      @version.summary = nil
      assert_equal "This rubygem does not have a description or summary.", @version.info
    end
    should "give 'N/A' for size when size not available" do
      @version.size = nil
      assert_equal 'N/A', @version.size
    end
    # Yanking removes the version from the index, the redis version list
    # and the latest flag; unyanking restores all three.
    context "when yanked" do
      setup do
        @version.yank!
      end
      should("unindex") { assert !@version.indexed? }
      should("be considered yanked") { assert Version.yanked.include?(@version) }
      should("no longer be latest") { assert !@version.latest?}
      should "not appear in the version list" do
        assert ! Redis.current.exists(Rubygem.versions_key(@version.rubygem.name))
      end
      context "and consequently unyanked" do
        setup do
          @version.unyank!
          @version.reload
        end
        should("re-index") { assert @version.indexed? }
        should("become the latest again") { assert @version.latest? }
        should("be considered unyanked") { assert !Version.yanked.include?(@version) }
        should "appear in the version list" do
          assert_equal @version.full_name, Redis.current.lindex(Rubygem.versions_key(@version.rubygem.name), 0)
        end
      end
    end
  end
  # Authors can legitimately be a long, many-entry list including
  # non-ASCII bytes; creation must not blow up.
  context "with a very long authors string." do
    should "create without error" do
      create(:version, :authors => ["Fbdoorman: David Pelaez", "MiniFB:Appoxy", "Dan Croak", "Mike Burns", "Jason Morrison", "Joe Ferris", "Eugene Bolshakov", "Nick Quaranto", "Josh Nichols", "Mike Breen", "Marcel G\303\266rner", "Bence Nagy", "Ben Mabey", "Eloy Duran", "Tim Pope", "Mihai Anca", "Mark Cornick", "Shay Arnett", "Jon Yurek", "Chad Pytel"])
    end
  end
context "when indexing" do
setup do
@rubygem = create(:rubygem)
@first_version = create(:version, :rubygem => @rubygem, :number => "0.0.1", :built_at => 7.days.ago)
@second_version = create(:version, :rubygem => @rubygem, :number => "0.0.2", :built_at => 6.days.ago)
@third_version = create(:version, :rubygem => @rubygem, :number => "0.0.3", :built_at => 5.days.ago)
@fourth_version = create(:version, :rubygem => @rubygem, :number => "0.0.4", :built_at => 5.days.ago)
end
should "always sort properly" do
assert_equal -1, (@first_version <=> @second_version)
assert_equal -1, (@first_version <=> @third_version)
assert_equal -1, (@first_version <=> @fourth_version)
assert_equal 1, (@second_version <=> @first_version)
assert_equal -1, (@second_version <=> @third_version)
assert_equal -1, (@second_version <=> @fourth_version)
assert_equal 1, (@third_version <=> @first_version)
assert_equal 1, (@third_version <=> @second_version)
assert_equal -1, (@third_version <=> @fourth_version)
assert_equal 1, (@fourth_version <=> @first_version)
assert_equal 1, (@fourth_version <=> @second_version)
assert_equal 1, (@fourth_version <=> @third_version)
end
end
context "with mixed release and prerelease versions" do
setup do
@prerelease = create(:version, :number => '1.0.rc1')
@release = create(:version, :number => '1.0')
end
should "know if it is a prelease version" do
assert @prerelease.prerelease?
assert [email protected]?
end
should "return prerelease gems from the prerelease named scope" do
assert_equal [@prerelease], Version.prerelease
assert_equal [@release], Version.release
end
end
context "with only prerelease versions" do
setup do
@rubygem = create(:rubygem)
@one = create(:version, :rubygem => @rubygem, :number => '1.0.0.pre')
@two = create(:version, :rubygem => @rubygem, :number => '1.0.1.pre')
@three = create(:version, :rubygem => @rubygem, :number => '1.0.2.pre')
@rubygem.reload
end
should "show last pushed as latest version" do
assert_equal @three, @rubygem.versions.most_recent
end
end
context "with versions created out of order" do
setup do
@gem = create(:rubygem)
create(:version, :rubygem => @gem, :number => '0.5')
create(:version, :rubygem => @gem, :number => '0.3')
create(:version, :rubygem => @gem, :number => '0.7')
create(:version, :rubygem => @gem, :number => '0.2')
@gem.reload # make sure to reload the versions just created
end
should "be in the proper order" do
assert_equal %w[0.7 0.5 0.3 0.2], @gem.versions.by_position.map(&:number)
end
should "know its latest version" do
assert_equal '0.7', @gem.versions.most_recent.number
end
end
context "with multiple rubygems and versions created out of order" do
setup do
@gem_one = create(:rubygem)
@gem_two = create(:rubygem)
@version_one_latest = create(:version, :rubygem => @gem_one, :number => '0.2')
@version_one_earlier = create(:version, :rubygem => @gem_one, :number => '0.1')
@version_two_latest = create(:version, :rubygem => @gem_two, :number => '1.0')
@version_two_earlier = create(:version, :rubygem => @gem_two, :number => '0.5')
end
should "be able to fetch the latest versions" do
assert_contains Version.latest.map(&:id), @version_one_latest.id
assert_contains Version.latest.map(&:id), @version_two_latest.id
assert_does_not_contain Version.latest.map(&:id), @version_one_earlier.id
assert_does_not_contain Version.latest.map(&:id), @version_two_earlier.id
end
end
context "with a few versions" do
setup do
@thin = create(:version, :authors => %w[thin], :built_at => 1.year.ago)
@rake = create(:version, :authors => %w[rake], :built_at => 1.month.ago)
@json = create(:version, :authors => %w[json], :built_at => 1.week.ago)
@thor = create(:version, :authors => %w[thor], :built_at => 2.days.ago)
@rack = create(:version, :authors => %w[rack], :built_at => 1.day.ago)
@haml = create(:version, :authors => %w[haml], :built_at => 1.hour.ago)
@dust = create(:version, :authors => %w[dust], :built_at => 1.day.from_now)
@fake = create(:version, :authors => %w[fake], :indexed => false, :built_at => 1.minute.ago)
end
should "get the latest versions up to today" do
assert_equal [@haml, @rack, @thor, @json, @rake].map(&:authors), Version.published(5).map(&:authors)
assert_equal [@haml, @rack, @thor, @json, @rake, @thin].map(&:authors), Version.published(6).map(&:authors)
end
end
context "with a few versions some owned by a user" do
setup do
@user = create(:user)
@gem = create(:rubygem)
@owned_one = create(:version, :rubygem => @gem, :built_at => 1.day.ago)
@owned_two = create(:version, :rubygem => @gem, :built_at => 2.days.ago)
@unowned = create(:version)
create(:ownership, :rubygem => @gem, :user => @user)
end
should "return the owned gems from #owned_by" do
assert_contains Version.owned_by(@user).map(&:id), @owned_one.id
assert_contains Version.owned_by(@user).map(&:id), @owned_two.id
end
should "not return the unowned versions from #owned_by" do
assert_does_not_contain Version.owned_by(@user).map(&:id), @unowned.id
end
end
context "with a few versions some subscribed to by a user" do
setup do
@user = create(:user)
@gem = create(:rubygem)
@subscribed_one = create(:version, :rubygem => @gem)
@subscribed_two = create(:version, :rubygem => @gem)
@unsubscribed = create(:version)
create(:subscription, :rubygem => @gem, :user => @user)
end
should "return the owned gems from #owned_by" do
assert_contains Version.subscribed_to_by(@user).map(&:id), @subscribed_one.id
assert_contains Version.subscribed_to_by(@user).map(&:id), @subscribed_two.id
end
should "not return the unowned versions from #owned_by" do
assert_does_not_contain Version.subscribed_to_by(@user).map(&:id), @unsubscribed.id
end
should "order them from latest-oldest pushed to Gemcutter, not build data" do
# Setup so that gem one was built earlier than gem two, but pushed to Gemcutter after gem two
# We do this so that:
# a) people with RSS will get smooth results, rather than gem versions jumping around the place
# b) people can't hijack the latest gem spot by building in the far future, but pushing today
@subscribed_one.update_attributes(:built_at => Time.now - 3.days, :created_at => Time.now - 1.day)
@subscribed_two.update_attributes(:built_at => Time.now - 2.days, :created_at => Time.now - 2.days)
# Even though gem two was build before gem one, it was pushed to gemcutter first
# Thus, we should have from newest to oldest, gem one, then gem two
expected = [@subscribed_one, @subscribed_two].map do |s|
s.created_at.to_s(:db)
end
actual = Version.subscribed_to_by(@user).map do |s|
s.created_at.to_s(:db)
end
assert_equal expected, actual
end
end
context "with a Gem::Specification" do
setup do
@spec = new_gemspec "test", "1.0.0", "a test gem", "ruby"
@version = build(:version)
end
[/foo/, 1337, {:foo => "bar"}].each do |example|
should "be invalid with authors as an Array of #{example.class}'s" do
assert_raise ActiveRecord::RecordInvalid do
@spec.authors = [example]
@version.update_attributes_from_gem_specification!(@spec)
end
end
end
should "have attributes set properly from the specification" do
@version.update_attributes_from_gem_specification!(@spec)
assert @version.indexed
assert_equal @spec.authors.join(', '), @version.authors
assert_equal @spec.description, @version.description
assert_equal @spec.summary, @version.summary
assert_equal @spec.date, @version.built_at
end
end
context "indexes" do
setup do
@first_rubygem = create(:rubygem, :name => "first")
@second_rubygem = create(:rubygem, :name => "second")
@first_version = create(:version, :rubygem => @first_rubygem, :number => "0.0.1", :platform => "ruby")
@second_version = create(:version, :rubygem => @first_rubygem, :number => "0.0.2", :platform => "ruby")
@other_version = create(:version, :rubygem => @second_rubygem, :number => "0.0.2", :platform => "java")
@pre_version = create(:version, :rubygem => @second_rubygem, :number => "0.0.2.pre", :platform => "java", :prerelease => true)
end
should "select all gems" do
assert_equal [
["first", "0.0.1", "ruby"],
["first", "0.0.2", "ruby"],
["second", "0.0.2", "java"]
], Version.rows_for_index
end
should "select only most recent" do
assert_equal [
["first", "0.0.2", "ruby"],
["second", "0.0.2", "java"]
], Version.rows_for_latest_index
end
should "select only prerelease" do
assert_equal [
["second", "0.0.2.pre", "java"]
], Version.rows_for_prerelease_index
end
end
end
| 39.563636 | 355 | 0.653702 |
39d1e0fd6a2b8ccfdbb67079e054d59599f510fc | 4,538 | class GdkPixbuf < Formula
  desc "Toolkit for image loading and pixel buffer manipulation"
  homepage "https://gtk.org"
  url "https://download.gnome.org/sources/gdk-pixbuf/2.36/gdk-pixbuf-2.36.11.tar.xz"
  sha256 "ae62ab87250413156ed72ef756347b10208c00e76b222d82d9ed361ed9dde2f3"
  bottle do
    sha256 "bd9e4d72a827f75ea2a1cd9463be0cf123ba1cda8f2e4d0a3ef0b1a1c46945f6" => :high_sierra
    sha256 "a6280e13fe29c5c06548e4c8d0ed80755b50432778b6f668495327a289693cf3" => :sierra
    sha256 "70aa88fda9b08b1cbd7fdd3c21d378ce1a95c1c936d5eba9dbe9efcd75254f04" => :el_capitan
    sha256 "6801ba8c53a0384e7cee403056b9855c56a8c6d27df9280e5a3ae6bb0dd829d0" => :x86_64_linux
  end
  option "with-relocations", "Build with relocation support for bundles"
  option "without-modules", "Disable dynamic module loading"
  option "with-included-loaders=", "Build the specified loaders into gdk-pixbuf"
  depends_on "pkg-config" => :build
  depends_on "glib"
  depends_on "jpeg"
  depends_on "libtiff"
  depends_on "libpng"
  depends_on "gobject-introspection"
  # Only required on non-macOS platforms.
  depends_on "shared-mime-info" unless OS.mac?
  # gdk-pixbuf has an internal version number separate from the overall
  # version number that specifies the location of its module and cache
  # files, this will need to be updated if that internal version number
  # is ever changed (as evidenced by the location no longer existing)
  def gdk_so_ver
    "2.0"
  end
  def gdk_module_ver
    "2.10.0"
  end
  def install
    # fix libtool versions
    # https://bugzilla.gnome.org/show_bug.cgi?id=776892
    inreplace "configure", /LT_VERSION_INFO=.+$/, "LT_VERSION_INFO=\"3602:0:3602\""
    # Bake the Homebrew-wide lib path into the build so loaders resolve
    # from HOMEBREW_PREFIX/lib rather than from this keg.
    ENV.append_to_cflags "-DGDK_PIXBUF_LIBDIR=\\\"#{HOMEBREW_PREFIX}/lib\\\""
    args = %W[
      --disable-dependency-tracking
      --disable-maintainer-mode
      --enable-debug=no
      --prefix=#{prefix}
      --enable-introspection=yes
      --disable-Bsymbolic
      --enable-static
      --without-gdiplus
    ]
    # Translate the formula options into configure switches.
    args << "--enable-relocations" if build.with?("relocations")
    args << "--disable-modules" if build.without?("modules")
    included_loaders = ARGV.value("with-included-loaders")
    args << "--with-included-loaders=#{included_loaders}" if included_loaders
    system "./configure", *args
    system "make"
    system "make", "install"
    # Other packages should use the top-level modules directory
    # rather than dumping their files into the gdk-pixbuf keg.
    inreplace lib/"pkgconfig/gdk-pixbuf-#{gdk_so_ver}.pc" do |s|
      libv = s.get_make_var "gdk_pixbuf_binary_version"
      s.change_make_var! "gdk_pixbuf_binarydir",
        HOMEBREW_PREFIX/"lib/gdk-pixbuf-#{gdk_so_ver}"/libv
    end
    # Remove the cache. We will regenerate it in post_install
    (lib/"gdk-pixbuf-#{gdk_so_ver}/#{gdk_module_ver}/loaders.cache").unlink
  end
  # The directory that loaders.cache gets linked into, also has the "loaders"
  # directory that is scanned by gdk-pixbuf-query-loaders in the first place
  def module_dir
    "#{HOMEBREW_PREFIX}/lib/gdk-pixbuf-#{gdk_so_ver}/#{gdk_module_ver}"
  end
  # Rebuild the loader cache against the shared modules directory after
  # every (re)install.
  def post_install
    ENV["GDK_PIXBUF_MODULEDIR"] = "#{module_dir}/loaders"
    system "#{bin}/gdk-pixbuf-query-loaders", "--update-cache"
  end
  def caveats
    if build.with?("relocations") || HOMEBREW_PREFIX.to_s != "/usr/local"
      <<~EOS
        Programs that require this module need to set the environment variable
          export GDK_PIXBUF_MODULEDIR="#{module_dir}/loaders"
        If you need to manually update the query loader cache, set these variables then run
          #{bin}/gdk-pixbuf-query-loaders --update-cache
      EOS
    end
  end
  # Smoke test: compile and run a tiny program that touches the
  # gdk-pixbuf GObject type system.
  test do
    (testpath/"test.c").write <<~EOS
      #include <gdk-pixbuf/gdk-pixbuf.h>

      int main(int argc, char *argv[]) {
        GType type = gdk_pixbuf_get_type();
        return 0;
      }
    EOS
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    libpng = Formula["libpng"]
    pcre = Formula["pcre"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/gdk-pixbuf-2.0
      -I#{libpng.opt_include}/libpng16
      -I#{pcre.opt_include}
      -D_REENTRANT
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{lib}
      -lgdk_pixbuf-2.0
      -lglib-2.0
      -lgobject-2.0
    ]
    # libintl is only linked explicitly on macOS.
    flags << "-lintl" if OS.mac?
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
| 34.378788 | 94 | 0.684442 |
39d41fc0e744ee2863c926ed6e3fdea249a614b5 | 8,035 | require 'diego/action_builder'
require 'cloud_controller/diego/lifecycle_bundle_uri_generator'
require 'cloud_controller/diego/buildpack/task_action_builder'
require 'cloud_controller/diego/docker/task_action_builder'
require 'cloud_controller/diego/bbs_environment_builder'
require 'cloud_controller/diego/task_completion_callback_generator'
require 'cloud_controller/diego/task_cpu_weight_calculator'
module VCAP::CloudController
  module Diego
    # Translates Cloud Controller task and staging models into Diego BBS
    # TaskDefinition protobufs.
    class TaskRecipeBuilder
      include ::Diego::ActionBuilder

      def initialize
        @egress_rules = Diego::EgressRules.new
      end

      # Builds the TaskDefinition used to run an app task.
      #
      # config - CC config object (accessed via Config#get)
      # task   - task model; its droplet's lifecycle type selects the
      #          action builder (buildpack vs docker)
      def build_app_task(config, task)
        task_completion_callback = VCAP::CloudController::Diego::TaskCompletionCallbackGenerator.new(config).generate(task)
        app_volume_mounts = VCAP::CloudController::Diego::Protocol::AppVolumeMounts.new(task.app).as_json
        task_action_builder = LifecycleProtocol.protocol_for_type(task.droplet.lifecycle_type).task_action_builder(config, task)

        ::Diego::Bbs::Models::TaskDefinition.new(
          completion_callback_url: task_completion_callback,
          cpu_weight: cpu_weight(task),
          disk_mb: task.disk_in_mb,
          egress_rules: generate_running_egress_rules(task.app),
          log_guid: task.app.guid,
          log_source: TASK_LOG_SOURCE,
          max_pids: config.get(:diego, :pid_limit),
          memory_mb: task.memory_in_mb,
          network: generate_network(task),
          privileged: config.get(:diego, :use_privileged_containers_for_running),
          trusted_system_certificates_path: STAGING_TRUSTED_SYSTEM_CERT_PATH,
          volume_mounts: generate_volume_mounts(app_volume_mounts),
          action: task_action_builder.action,
          cached_dependencies: task_action_builder.cached_dependencies,
          root_fs: task_action_builder.stack,
          environment_variables: task_action_builder.task_environment_variables,
          PlacementTags: [VCAP::CloudController::IsolationSegmentSelector.for_space(task.space)],
          certificate_properties: ::Diego::Bbs::Models::CertificateProperties.new(
            organizational_unit: [
              "organization:#{task.app.organization.guid}",
              "space:#{task.app.space.guid}",
              "app:#{task.app.guid}"
            ]
          ),
          image_username: task.droplet.docker_receipt_username,
          image_password: task.droplet.docker_receipt_password,
        )
      end

      # Builds the TaskDefinition used to stage a package.
      def build_staging_task(config, staging_details)
        lifecycle_type = staging_details.lifecycle.type
        action_builder = LifecycleProtocol.protocol_for_type(lifecycle_type).staging_action_builder(config, staging_details)

        ::Diego::Bbs::Models::TaskDefinition.new(
          completion_callback_url: staging_completion_callback(config, staging_details),
          cpu_weight: STAGING_TASK_CPU_WEIGHT,
          disk_mb: staging_details.staging_disk_in_mb,
          egress_rules: generate_egress_rules(staging_details),
          log_guid: staging_details.package.app_guid,
          log_source: STAGING_LOG_SOURCE,
          memory_mb: staging_details.staging_memory_in_mb,
          network: generate_network(staging_details.package),
          privileged: config.get(:diego, :use_privileged_containers_for_staging),
          result_file: STAGING_RESULT_FILE,
          trusted_system_certificates_path: STAGING_TRUSTED_SYSTEM_CERT_PATH,
          root_fs: "preloaded:#{action_builder.stack}",
          action: timeout(action_builder.action, timeout_ms: config.get(:staging, :timeout_in_seconds).to_i * 1000),
          environment_variables: action_builder.task_environment_variables,
          cached_dependencies: action_builder.cached_dependencies,
          PlacementTags: find_staging_isolation_segment(staging_details),
          max_pids: config.get(:diego, :pid_limit),
          certificate_properties: ::Diego::Bbs::Models::CertificateProperties.new(
            organizational_unit: [
              "organization:#{staging_details.package.app.organization.guid}",
              "space:#{staging_details.package.app.space.guid}",
              "app:#{staging_details.package.app_guid}"
            ]
          ),
          image_username: staging_details.package.docker_username,
          image_password: staging_details.package.docker_password,
        )
      end

      private

      # Callback hits CC's internal TLS endpoint, authenticated with the
      # internal API basic-auth credentials.
      def staging_completion_callback(config, staging_details)
        port = config.get(:tls_port)
        scheme = 'https'
        auth = "#{config.get(:internal_api, :auth_user)}:#{CGI.escape(config.get(:internal_api, :auth_password))}"
        host_port = "#{config.get(:internal_service_hostname)}:#{port}"
        path = "/internal/v3/staging/#{staging_details.staging_guid}/build_completed?start=#{staging_details.start_after_staging}"
        "#{scheme}://#{auth}@#{host_port}#{path}"
      end

      def cpu_weight(task)
        TaskCpuWeightCalculator.new(memory_in_mb: task.memory_in_mb).calculate
      end

      def generate_network(task)
        Protocol::ContainerNetworkInfo.new(task.app).to_bbs_network
      end

      # Staging runs in the app's isolation segment when one is assigned;
      # otherwise no placement tags.
      def find_staging_isolation_segment(staging_details)
        if staging_details.isolation_segment
          [staging_details.isolation_segment]
        else
          []
        end
      end

      def generate_egress_rules(staging_details)
        security_group_rules(@egress_rules.staging(app_guid: staging_details.package.app_guid))
      end

      def generate_running_egress_rules(process)
        security_group_rules(@egress_rules.running(process))
      end

      # Shared mapping from CC egress-rule hashes to BBS SecurityGroupRule
      # protos (previously duplicated in both generate_*_egress_rules).
      def security_group_rules(rules)
        rules.map do |rule|
          ::Diego::Bbs::Models::SecurityGroupRule.new(
            protocol: rule['protocol'],
            destinations: rule['destinations'],
            ports: rule['ports'],
            port_range: rule['port_range'],
            icmp_info: rule['icmp_info'],
            log: rule['log'],
            annotations: rule['annotations'],
          )
        end
      end

      # Maps CC volume-mount hashes to BBS VolumeMount protos.
      def generate_volume_mounts(app_volume_mounts)
        app_volume_mounts.map do |volume_mount|
          device = volume_mount['device']
          # mount_config is serialized to JSON only when present; BBS
          # expects an empty string otherwise.
          mount_config = device['mount_config'].present? ? device['mount_config'].to_json : ''

          ::Diego::Bbs::Models::VolumeMount.new(
            driver: volume_mount['driver'],
            container_dir: volume_mount['container_dir'],
            mode: volume_mount['mode'],
            shared: ::Diego::Bbs::Models::SharedDevice.new(
              volume_id: device['volume_id'],
              mount_config: mount_config
            )
          )
        end
      end

      def logger
        @logger ||= Steno.logger('cc.diego.tr')
      end
    end
  end
end
| 47.544379 | 142 | 0.603858 |
bb882c4c263fc822dab957d10191abcb5d65257a | 6,720 | =begin
#Selling Partner API for Merchant Fulfillment
#The Selling Partner API for Merchant Fulfillment helps you build applications that let sellers purchase shipping for non-Prime and Prime orders using Amazon’s Buy Shipping Services.
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.24
=end
require 'date'
module AmzSpApi::MerchantFulfillmentApiModel
  # Response schema.
  #
  # NOTE: swagger-codegen generated model. Both fields are typed loosely
  # as Object in openapi_types; payload carries the eligible shipping
  # service data and errors any API error list (per the spec above).
  class GetEligibleShipmentServicesResponse
    attr_accessor :payload
    attr_accessor :errors
    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'payload' => :'payload',
        :'errors' => :'errors'
      }
    end
    # Attribute type mapping.
    def self.openapi_types
      {
        :'payload' => :'Object',
        :'errors' => :'Object'
      }
    end
    # List of attributes with nullable: true
    def self.openapi_nullable
      Set.new([
      ])
    end
    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      if (!attributes.is_a?(Hash))
        fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::MerchantFulfillmentApiModel::GetEligibleShipmentServicesResponse` initialize method"
      end
      # check to see if the attribute exists and convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h|
        if (!self.class.attribute_map.key?(k.to_sym))
          fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::MerchantFulfillmentApiModel::GetEligibleShipmentServicesResponse`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
        end
        h[k.to_sym] = v
      }
      if attributes.key?(:'payload')
        self.payload = attributes[:'payload']
      end
      if attributes.key?(:'errors')
        self.errors = attributes[:'errors']
      end
    end
    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end
    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      # no constrained fields in this schema, so always valid
      true
    end
    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          payload == o.payload &&
          errors == o.errors
    end
    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end
    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    def hash
      [payload, errors].hash
    end
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def self.build_from_hash(attributes)
      new.build_from_hash(attributes)
    end
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.openapi_types.each_pair do |key, type|
        # NOTE(review): openapi_types here yields Symbols, and Symbol
        # inherits Object#=~ (always nil), so the Array branch appears
        # unreachable for this class — kept as generated.
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
          self.send("#{key}=", nil)
        end
      end
      self
    end
    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :Boolean
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        # resolve sibling generated models by constant name
        AmzSpApi::MerchantFulfillmentApiModel.const_get(type).build_from_hash(value)
      end
    end
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        if value.nil?
          is_nullable = self.class.openapi_nullable.include?(attr)
          next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
        end

        hash[param] = _to_hash(value)
      end
      hash
    end
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end end
end
| 30.967742 | 252 | 0.632292 |
910f1015c95044ed5216592e9c8064e484baf36d | 6,612 | # frozen_string_literal: true
RSpec.describe RuboCop::Cop::Lint::SafeNavigationConsistency, :config do
let(:cop_config) do
{ 'AllowedMethods' => %w[present? blank? try presence] }
end
it 'allows && without safe navigation' do
expect_no_offenses(<<~RUBY)
foo.bar && foo.baz
RUBY
end
it 'allows || without safe navigation' do
expect_no_offenses(<<~RUBY)
foo.bar || foo.baz
RUBY
end
it 'allows safe navigation when different variables are used' do
expect_no_offenses(<<~RUBY)
foo&.bar || foobar.baz
RUBY
end
it 'allows calls to methods that nil responds to' do
expect_no_offenses(<<~RUBY)
return true if a.nil? || a&.whatever?
RUBY
end
it 'registers an offense and corrects using safe navigation ' \
'on the left of &&' do
expect_offense(<<~RUBY)
foo&.bar && foo.baz
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foo&.baz
RUBY
end
it 'registers an offense and corrects using safe navigation ' \
'on the right of &&' do
expect_offense(<<~RUBY)
foo.bar && foo&.baz
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foo&.baz
RUBY
end
it 'registers an offense and corrects using safe navigation ' \
'on the left of ||' do
expect_offense(<<~RUBY)
foo&.bar || foo.baz
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar || foo&.baz
RUBY
end
it 'registers an offense and corrects using safe navigation ' \
'on the right of ||' do
expect_offense(<<~RUBY)
foo.bar || foo&.baz
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar || foo&.baz
RUBY
end
it 'registers an offense and corrects when there is code ' \
'before or after the condition' do
expect_offense(<<~RUBY)
foo = nil
foo&.bar || foo.baz
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
something
RUBY
expect_correction(<<~RUBY)
foo = nil
foo&.bar || foo&.baz
something
RUBY
end
it 'registers an offense but does not correct non dot method calls' do
expect_offense(<<~RUBY)
foo&.zero? || foo > 5
^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_no_corrections
end
it 'registers an offense and corrects assignment' do
expect_offense(<<~RUBY)
foo&.bar && foo.baz = 1
^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foo&.baz = 1
RUBY
end
it 'registers an offense and corrects using safe navigation ' \
'inside of separated conditions' do
expect_offense(<<~RUBY)
foo&.bar && foobar.baz && foo.qux
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foobar.baz && foo&.qux
RUBY
end
it 'registers an offense and corrects using safe navigation in conditions ' \
'on the right hand side' do
expect_offense(<<~RUBY)
foobar.baz && foo&.bar && foo.qux
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foobar.baz && foo&.bar && foo&.qux
RUBY
end
it 'registers and corrects multiple offenses' do
expect_offense(<<~RUBY)
foobar.baz && foo&.bar && foo.qux && foo.foobar
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foobar.baz && foo&.bar && foo&.qux && foo&.foobar
RUBY
end
it 'registers an offense and corrects using unsafe navigation ' \
'with both && and ||' do
expect_offense(<<~RUBY)
foo&.bar && foo.baz || foo.qux
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foo&.baz || foo&.qux
RUBY
end
it 'registers an offense and corrects using unsafe navigation ' \
'with grouped conditions' do
expect_offense(<<~RUBY)
foo&.bar && (foo.baz || foo.qux)
^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && (foo&.baz || foo&.qux)
RUBY
end
it 'registers an offense and corrects unsafe navigation that appears ' \
'before safe navigation' do
expect_offense(<<~RUBY)
foo.bar && foo.baz || foo&.qux
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foo&.baz || foo&.qux
RUBY
end
it 'registers an offense and corrects using unsafe navigation ' \
'and the safe navigation appears in a group' do
expect_offense(<<~RUBY)
(foo&.bar && foo.baz) || foo.qux
^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
(foo&.bar && foo&.baz) || foo&.qux
RUBY
end
it 'registers a single offense and corrects when safe navigation is ' \
'used multiple times' do
expect_offense(<<~RUBY)
foo&.bar && foo&.baz || foo.qux
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Ensure that safe navigation is used consistently inside of `&&` and `||`.
RUBY
expect_correction(<<~RUBY)
foo&.bar && foo&.baz || foo&.qux
RUBY
end
end
| 30.611111 | 127 | 0.575923 |
ab2c84cb083105e49506a657a6b854b3c363dc6b | 9,442 | require 'rails_helper'
describe "items", js: true do
let!(:standup) { FactoryGirl.create(:standup, title: 'San Francisco', subject_prefix: "[Standup][SF]", closing_message: 'Woohoo', image_urls: 'http://example.com/bar.png', image_days: ['Mon']) }
let!(:other_standup) { FactoryGirl.create(:standup, title: 'New York') }
let(:timezone) { ActiveSupport::TimeZone.new(standup.time_zone_name) }
let(:date_today) { timezone.now.strftime("%Y-%m-%d") }
let(:date_tomorrow) { (timezone.now + 1.day).strftime("%Y-%m-%d") }
let(:date_five_days) { (timezone.now + 5.days).strftime("%Y-%m-%d") }
before do
Timecop.travel(Time.zone.local(2013, 9, 2, 12, 0, 0)) #monday
ENV["ENABLE_WINS"] = 'true'
end
after do
Timecop.return
end
it 'setup and deck.js for standup' do
login
visit '/'
click_link(standup.title)
find('a[data-kind="New face"] i').click
fill_in 'item_title', :with => "Fred Flintstone"
select 'New York', :from => 'item[standup_id]'
click_button 'Create New Face'
find('a[data-kind="New face"] i').click
fill_in 'item_title', :with => "Johnathon McKenzie"
fill_date_selector_with date_today
select 'San Francisco', from: 'item[standup_id]'
click_button 'Create New Face'
find('a[data-kind="New face"] i').click
fill_in 'item_title', :with => "Jane Doe"
fill_date_selector_with date_five_days
select 'San Francisco', from: 'item[standup_id]'
click_button 'Create New Face'
find('a[data-kind="Event"] i').click
fill_in 'item_title', :with => "Meetup"
fill_date_selector_with date_five_days
select 'New York', from: 'item[standup_id]'
click_button 'Create Item'
find('a[data-kind="Event"] i').click
fill_in 'item_title', :with => "Party"
fill_date_selector_with date_five_days
select 'San Francisco', from: 'item[standup_id]'
click_button 'Create Item'
find('a[data-kind="Event"] i').click
fill_in 'item_title', :with => "Happy Hour"
fill_date_selector_with date_today
select 'San Francisco', from: 'item[standup_id]'
click_button 'Create Item'
find('a[data-kind="Event"] i').click
fill_in 'item_title', :with => "Baseball"
fill_date_selector_with date_tomorrow
select 'San Francisco', from: 'item[standup_id]'
click_button 'Create Item'
find('a[data-kind="Interesting"] i').click
fill_in 'item_title', :with => "Linux 3.2 out"
fill_in 'item_author', :with => "Linus Torvalds"
fill_in 'item_description', with: "Check it out: `inline code!` and www.links.com"
click_button 'Create Item'
find('a[data-kind="Event"] i').click
click_button('Interesting')
fill_in 'item_title', :with => "Rails 62 is out"
fill_in 'item_author', :with => "DHH"
fill_in 'item_description', with: "Now with more f-bombs"
click_button 'Create Item'
find('a[data-kind="Win"] i').click
click_button('Win')
fill_in 'item_title', :with => "Tracker iOS 7 app"
fill_in 'item_author', :with => "Tracker team"
fill_in 'item_description', with: "In the app store now! New and shiny!"
select 'San Francisco', from: 'item[standup_id]'
click_button 'Create Item'
visit '/'
click_link(standup.title)
within '.event' do
expect(page).to have_css('.subheader.today', text: 'Today')
expect(page).to have_css('.today + .item', text: 'Happy Hour')
expect(page).to have_css('.subheader.tomorrow', text: 'Tomorrow')
expect(page).to have_css('.tomorrow + .item', text: 'Baseball')
expect(page).to have_css('.subheader.upcoming', text: 'Upcoming')
expect(page).to have_css('.upcoming + .item', text: 'Party')
end
within '.new_face' do
expect(page).to have_css('.subheader.today', text: 'Today')
expect(page).to have_css('.today + .item', text: 'Johnathon McKenzie')
expect(page).to have_css('.subheader.upcoming', text: 'Upcoming')
expect(page).to have_css('.upcoming + .item', text: 'Jane Doe')
end
within '.interesting' do
expect(page).to have_css('.item', text: 'Linus Torvalds')
first('a[data-toggle]').click
expect(page).to have_selector('.in')
expect(page).to have_selector('code', text: 'inline code!')
expect(page).to have_link('www.links.com')
end
within '.win' do
expect(page).to have_css('.item', text: 'Tracker iOS 7 app')
end
visit presentation_standup_items_path(standup)
within 'section.deck-current' do
expect(page).to have_content "Standup"
expect(page).to have_css('.countdown')
end
page.execute_script("$.deck('next')")
within 'section.deck-current' do
expect(page).to have_content "New faces"
expect(page).to have_content "Today"
expect(page).to have_content "Upcoming"
expect(page).to have_content "Johnathon McKenzie"
end
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "Helps"
page.execute_script("$.deck('next')")
within 'section.deck-current' do
expect(page).to have_content "Interestings"
expect(page).to have_content("Linux 3.2 out")
expect(page).to have_content("Linus Torvalds")
expect(page).to have_content("Rails 62 is out")
expect(page).to_not have_selector('.in')
first('a[data-toggle]').click
expect(page).to have_selector('.in')
expect(page).to have_content("Check it out:")
expect(page).to have_link("www.links.com")
expect(page).to have_selector("code", text: "inline code!")
end
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "Events"
expect(page).to have_css('section.deck-current', text: 'Today')
expect(page).to have_css('.today + ul li', text: 'Happy Hour')
expect(page).to have_css('section.deck-current', text: 'Tomorrow')
expect(page).to have_css('.tomorrow + ul li', text: 'Baseball')
expect(page).to have_css('section.deck-current', text: 'Upcoming')
expect(page).to have_css('.upcoming + ul li', text: 'Party')
expect(find('section.deck-current')).to_not have_content "Meetup"
expect(find('section.deck-current')).to_not have_content("Rails 62 is out")
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "Wins"
expect(page).to have_css('section.deck-current', text: 'Tracker iOS 7 app')
expect(find('section.deck-current')).to_not have_content 'Happy Hour'
expect(find('section.deck-current')).to_not have_content 'Baseball'
page.execute_script("$.deck('next')")
within 'section.deck-current' do
expect(page).to_not have_content "Wins"
expect(page).to have_content "Woohoo"
expect(page).to have_css('img[src="http://example.com/bar.png"]')
end
all('.exit-presentation').first.click
expect(current_path).to eq standup_items_path(standup)
end
it 'does not let you create wins if the feature flag is off' do
ENV["ENABLE_WINS"] = 'false'
login
visit '/'
click_link(standup.title)
expect(page).to_not have_css('a[data-kind="Win"] i')
end
it 'hides wins if there are none' do
login
visit presentation_standup_items_path(standup)
within 'section.deck-current' do
expect(page).to have_content "Standup"
expect(page).to have_css('.countdown')
end
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "New faces"
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "Helps"
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "Interestings"
page.execute_script("$.deck('next')")
expect(find('section.deck-current')).to have_content "Events"
page.execute_script("$.deck('next')")
within 'section.deck-current' do
expect(page).to_not have_content "Wins"
expect(page).to have_content "Woohoo"
end
end
describe "the bottom navbar" do
context "when the screen width starts at or above 737px" do
it "locks to the bottom of the screen but unlocks whenever the width goes below 737px" do
page.current_window.resize_to 737, 2000
login
visit '/'
click_link(standup.title)
expect(page.find('div.content-wrapper')).to have_css('.navbar-fixed-bottom')
page.current_window.resize_to 736, 2000
expect(page.find('div.content-wrapper')).to_not have_css('.navbar-fixed-bottom')
page.current_window.resize_to 737, 2000
expect(page.find('div.content-wrapper')).to have_css('.navbar-fixed-bottom')
end
end
context "when the screen width starts below 737px" do
it "is unlocked from the bottom of the screen but locks whenever the width goes above 737px" do
page.current_window.resize_to 736, 2000
login
visit '/'
click_link(standup.title)
expect(page.find('div.content-wrapper')).to_not have_css('.navbar-fixed-bottom')
page.current_window.resize_to 737, 2000
expect(page.find('div.content-wrapper')).to have_css('.navbar-fixed-bottom')
page.current_window.resize_to 736, 2000
expect(page.find('div.content-wrapper')).to_not have_css('.navbar-fixed-bottom')
end
end
end
end
# Capybara helper: types +date+ into the item date field, then blurs the
# page so the date-picker JavaScript commits the typed value.
def fill_date_selector_with(date)
  fill_in('item_date', with: date)
  blur(page)
end
| 37.320158 | 196 | 0.667867 |
919f46a7537b785b87ef8e39f8576386e044273e | 338 | class HomeController < ApplicationController
before_action :authenticate_user!, only: :index
def index
@log = LogEntry.new(day: Date.today, time: Time.now.localtime.strftime("%H:%M"))
@google = Gloc.new()
@entries = LogEntry.all.where(user: current_user).order(:day, :time)
end
def policy
end
def help
end
end
| 22.533333 | 84 | 0.695266 |
62201b9cb5134a02eeac2d718eb13e7fe20e22d1 | 6,665 | # vim: set ft=javascript:
# Collision checking algorithm, implemented in JavaScript.
# Available as `Opal.DXOpal.CollisionChecker` in the runtime.
%x{ (function(){
var intersect = function(x1, y1, x2, y2, x3, y3, x4, y4){
return ((x1 - x2) * (y3 - y1) + (y1 - y2) * (x1 - x3)) *
((x1 - x2) * (y4 - y1) + (y1 - y2) * (x1 - x4));
};
var check_line_line = function(x1, y1, x2, y2, x3, y3, x4, y4){
return !((((x1 - x2) * (y3 - y1) + (y1 - y2) * (x1 - x3)) *
((x1 - x2) * (y4 - y1) + (y1 - y2) * (x1 - x4)) > 0.0) ||
(((x3 - x4) * (y1 - y3) + (y3 - y4) * (x3 - x1)) *
((x3 - x4) * (y2 - y3) + (y3 - y4) * (x3 - x2)) > 0.0 ));
};
var check_circle_line = function(x, y, r, x1, y1, x2, y2) {
var vx = x2-x1, vy = y2-y1;
var cx = x-x1, cy = y-y1;
if (vx == 0 && vy == 0 )
return CCk.check_point_circle(x, y, r, x1, y1);
var n1 = vx * cx + vy * cy;
if (n1 < 0)
return cx*cx + cy*cy < r * r;
var n2 = vx * vx + vy * vy;
if (n1 > n2) {
var len = (x2 - x)*(x2 - x) + (y2 - y)*(y2 - y);
return len < r * r;
}
else
{
var n3 = cx * cx + cy * cy;
return n3-(n1/n2)*n1 < r * r;
}
};
var CCk = {
check_point_circle: function(px, py, cx, cy, cr) {
return (cr*cr) >= ((cx-px) * (cx-px) + (cy-py) * (cy-py));
},
check_point_straight_rect: function(x, y, x1, y1, x2, y2) {
return ((x) >= (x1) &&
(y) >= (y1) &&
(x) < (x2) &&
(y) < (y2));
},
check_point_triangle: function(x, y, x1, y1, x2, y2, x3, y3){
if ((x1 - x3) * (y1 - y2) == (x1 - x2) * (y1 - y3))
return false;
var cx = (x1 + x2 + x3) / 3,
cy = (y1 + y2 + y3) / 3;
if (intersect( x1, y1, x2, y2, x, y, cx, cy ) < 0.0 ||
intersect( x2, y2, x3, y3, x, y, cx, cy ) < 0.0 ||
intersect( x3, y3, x1, y1, x, y, cx, cy ) < 0.0 ) {
return false;
}
return true;
},
check_circle_circle: function(ox, oy, or, dx, dy, dr) {
return ((or+dr) * (or+dr) >= (ox-dx) * (ox-dx) + (oy-dy) * (oy-dy));
},
check_ellipse_ellipse: function(E1, E2) {
var DefAng = E1.fAngle-E2.fAngle;
var Cos = Math.cos( DefAng );
var Sin = Math.sin( DefAng );
var nx = E2.fRad_X * Cos;
var ny = -E2.fRad_X * Sin;
var px = E2.fRad_Y * Sin;
var py = E2.fRad_Y * Cos;
var ox = Math.cos( E1.fAngle )*(E2.fCx-E1.fCx) + Math.sin(E1.fAngle)*(E2.fCy-E1.fCy);
var oy = -Math.sin( E1.fAngle )*(E2.fCx-E1.fCx) + Math.cos(E1.fAngle)*(E2.fCy-E1.fCy);
var rx_pow2 = 1/(E1.fRad_X*E1.fRad_X);
var ry_pow2 = 1/(E1.fRad_Y*E1.fRad_Y);
var A = rx_pow2*nx*nx + ry_pow2*ny*ny;
var B = rx_pow2*px*px + ry_pow2*py*py;
var D = 2*rx_pow2*nx*px + 2*ry_pow2*ny*py;
var E = 2*rx_pow2*nx*ox + 2*ry_pow2*ny*oy;
var F = 2*rx_pow2*px*ox + 2*ry_pow2*py*oy;
var G = (ox/E1.fRad_X)*(ox/E1.fRad_X) + (oy/E1.fRad_Y)*(oy/E1.fRad_Y) - 1;
var tmp1 = 1/(D*D-4*A*B);
var h = (F*D-2*E*B)*tmp1;
var k = (E*D-2*A*F)*tmp1;
var Th = (B-A)==0 ? 0 : Math.atan( D/(B-A) ) * 0.5;
var CosTh = Math.cos(Th);
var SinTh = Math.sin(Th);
var A_tt = A*CosTh*CosTh + B*SinTh*SinTh - D*CosTh*SinTh;
var B_tt = A*SinTh*SinTh + B*CosTh*CosTh + D*CosTh*SinTh;
var KK = A*h*h + B*k*k + D*h*k - E*h - F*k + G > 0 ? 0 : A*h*h + B*k*k + D*h*k - E*h - F*k + G;
var Rx_tt = 1+Math.sqrt(-KK/A_tt);
var Ry_tt = 1+Math.sqrt(-KK/B_tt);
var x_tt = CosTh*h-SinTh*k;
var y_tt = SinTh*h+CosTh*k;
var JudgeValue = x_tt*x_tt/(Rx_tt*Rx_tt) + y_tt*y_tt/(Ry_tt*Ry_tt);
return (JudgeValue <= 1);
},
check_circle_tilted_rect: function(cx, cy, cr, x1, y1, x2, y2, x3, y3, x4, y4){
return CCk.check_point_triangle(cx, cy, x1, y1, x2, y2, x3, y3) ||
CCk.check_point_triangle(cx, cy, x1, y1, x3, y3, x4, y4) ||
check_circle_line(cx, cy, cr, x1, y1, x2, y2) ||
check_circle_line(cx, cy, cr, x2, y2, x3, y3) ||
check_circle_line(cx, cy, cr, x3, y3, x4, y4) ||
check_circle_line(cx, cy, cr, x4, y4, x1, y1);
},
check_circle_triangle: function(cx, cy, cr, x1, y1, x2, y2, x3, y3) {
return CCk.check_point_triangle(cx, cy, x1, y1, x2, y2, x3, y3) ||
check_circle_line(cx, cy, cr, x1, y1, x2, y2) ||
check_circle_line(cx, cy, cr, x2, y2, x3, y3) ||
check_circle_line(cx, cy, cr, x3, y3, x1, y1);
},
check_rect_rect: function(ax1, ay1, ax2, ay2, bx1, by1, bx2, by2) {
return ax1 < bx2 &&
ay1 < by2 &&
bx1 < ax2 &&
by1 < ay2;
},
// Rect(may be tilted) vs Triangle
check_tilted_rect_triangle: function(ox1, oy1, ox2, oy2, ox3, oy3, ox4, oy4,
dx1, dy1, dx2, dy2, dx3, dy3) {
return check_line_line(ox1, oy1, ox2, oy2, dx1, dy1, dx2, dy2) ||
check_line_line(ox1, oy1, ox2, oy2, dx2, dy2, dx3, dy3) ||
check_line_line(ox1, oy1, ox2, oy2, dx3, dy3, dx1, dy1) ||
check_line_line(ox2, oy2, ox3, oy3, dx1, dy1, dx2, dy2) ||
check_line_line(ox2, oy2, ox3, oy3, dx2, dy2, dx3, dy3) ||
check_line_line(ox2, oy2, ox3, oy3, dx3, dy3, dx1, dy1) ||
check_line_line(ox3, oy3, ox4, oy4, dx1, dy1, dx2, dy2) ||
check_line_line(ox3, oy3, ox4, oy4, dx2, dy2, dx3, dy3) ||
check_line_line(ox3, oy3, ox4, oy4, dx3, dy3, dx1, dy1) ||
check_line_line(ox4, oy4, ox1, oy1, dx1, dy1, dx2, dy2) ||
check_line_line(ox4, oy4, ox1, oy1, dx2, dy2, dx3, dy3) ||
check_line_line(ox4, oy4, ox1, oy1, dx3, dy3, dx1, dy1) ||
CCk.check_point_triangle(dx1, dy1, ox1, oy1, ox2, oy2, ox3, oy3) ||
CCk.check_point_triangle(dx1, dy1, ox1, oy1, ox3, oy3, ox4, oy4) ||
CCk.check_point_triangle(ox1, oy1, dx1, dy1, dx2, dy2, dx3, dy3);
},
// Triangle vs Triangle
check_triangle_triangle: function(ox1, oy1, ox2, oy2, ox3, oy3,
dx1, dy1, dx2, dy2, dx3, dy3) {
return check_line_line(ox1, oy1, ox2, oy2, dx2, dy2, dx3, dy3) ||
check_line_line(ox1, oy1, ox2, oy2, dx3, dy3, dx1, dy1) ||
check_line_line(ox2, oy2, ox3, oy3, dx1, dy1, dx2, dy2) ||
check_line_line(ox2, oy2, ox3, oy3, dx3, dy3, dx1, dy1) ||
check_line_line(ox3, oy3, ox1, oy1, dx1, dy1, dx2, dy2) ||
check_line_line(ox3, oy3, ox1, oy1, dx2, dy2, dx3, dy3) ||
CCk.check_point_triangle(ox1, oy1, dx1, dy1, dx2, dy2, dx3, dy3) ||
CCk.check_point_triangle(dx1, dy1, ox1, oy1, ox2, oy2, ox3, oy3);
}
};
Opal.DXOpal.CollisionChecker = CCk;
Opal.DXOpal.CCk = CCk; // Alias
})(); }
| 38.75 | 100 | 0.525281 |
1db73fa0262f8a7de56b52fd72521c954c6bc3bc | 2,237 | class E2fsprogs < Formula
desc "Utilities for the ext2, ext3, and ext4 file systems"
homepage "https://e2fsprogs.sourceforge.io/"
url "https://downloads.sourceforge.net/project/e2fsprogs/e2fsprogs/v1.46.4/e2fsprogs-1.46.4.tar.gz"
sha256 "7524520b291e901431ce59ea085955b601126de371bf3cfc0f5e4fad78684265"
license all_of: [
"GPL-2.0-or-later",
"LGPL-2.0-or-later", # lib/ex2fs
"LGPL-2.0-only", # lib/e2p
"BSD-3-Clause", # lib/uuid
"MIT", # lib/et, lib/ss
]
head "https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git"
livecheck do
url :stable
regex(%r{url=.*?/e2fsprogs[._-]v?(\d+(?:\.\d+)+)\.t}i)
end
bottle do
sha256 arm64_big_sur: "b089beb986fdbc2f9a699c98ea0d7453b434a819b18e09183c8a2e54368b4652"
sha256 big_sur: "93c43050723e83dc54e9acda04b49bb9651d561a8f179b0a2837dc0b4dbc488d"
sha256 catalina: "e629177b97c03f0c073ab805dd1d452b210f4b206e63da826793420c64d151eb"
sha256 mojave: "d494d4d21d05c76acdeb381b38d2bd343cd4d1b5e536a1d2f99ebceb8fb5d917"
sha256 x86_64_linux: "cf06e4cdcc4588246eb66b3fd10d9a8424494578e7821e6e273a030fcea09d28"
end
keg_only "this installs several executables which shadow macOS system commands"
depends_on "pkg-config" => :build
depends_on "gettext"
def install
# Fix "unknown type name 'loff_t'" issue
inreplace "lib/ext2fs/imager.c", "loff_t", "off_t"
inreplace "misc/e2fuzz.c", "loff_t", "off_t"
# Enforce MKDIR_P to work around a configure bug
# see https://github.com/Homebrew/homebrew-core/pull/35339
# and https://sourceforge.net/p/e2fsprogs/discussion/7053/thread/edec6de279/
args = [
"--prefix=#{prefix}",
"--disable-e2initrd-helper",
"MKDIR_P=mkdir -p",
]
on_macos do
args << "--enable-bsd-shlibs" unless Hardware::CPU.arm?
end
on_linux do
args << "--enable-elf-shlibs"
end
system "./configure", *args
system "make"
# Fix: lib/libcom_err.1.1.dylib: No such file or directory
ENV.deparallelize
system "make", "install"
system "make", "install-libs"
end
test do
assert_equal 36, shell_output("#{bin}/uuidgen").strip.length
system bin/"lsattr", "-al"
end
end
| 32.42029 | 101 | 0.689763 |
ff200c78254ba8b12f5a890d244b253e2144d97b | 1,801 | require 'spec_helper'
describe Sidekiq::Hierarchy::Observers::WorkflowUpdate do
let(:callback_registry) { Sidekiq::Hierarchy::CallbackRegistry.new }
subject(:observer) { described_class.new }
describe '#register' do
let(:msg) { double('message') }
before { observer.register(callback_registry) }
it 'adds the observer to the registry listening for workflow updates' do
expect(observer).to receive(:call).with(msg)
callback_registry.publish(Sidekiq::Hierarchy::Notifications::WORKFLOW_UPDATE, msg)
end
end
describe '#call' do
let(:job_info) { {'class' => 'HardWorker', 'args' => [1, 'foo']} }
let(:root) { Sidekiq::Hierarchy::Job.create('0', job_info) }
let(:workflow) { Sidekiq::Hierarchy::Workflow.find(root) }
let(:running_set) { Sidekiq::Hierarchy::RunningSet.new }
let(:failed_set) { Sidekiq::Hierarchy::FailedSet.new }
context 'when the workflow is new' do
it 'adds the target workflow to the new status set' do
expect(failed_set.contains?(workflow)).to be_falsey
observer.call(workflow, :failed, workflow.status)
expect(failed_set.contains?(workflow)).to be_truthy
end
end
context 'when the workflow is already in a set' do
before { running_set.add(workflow) }
it 'removes the target workflow from its current status set' do
expect(running_set.contains?(workflow)).to be_truthy
observer.call(workflow, :failed, :running)
expect(running_set.contains?(workflow)).to be_falsey
end
it 'adds the target workflow to the new status set' do
expect(failed_set.contains?(workflow)).to be_falsey
observer.call(workflow, :failed, :running)
expect(failed_set.contains?(workflow)).to be_truthy
end
end
end
end
| 36.02 | 88 | 0.68573 |
ac3f8884ecd223d4d30589ded9c67ad2382d2405 | 731 | cask "zotero" do
version "5.0.96.3"
sha256 "72ca698334ce4f453271c9fa7fd01ed5592eadcf69095044bea7f9539ef5edb6"
url "https://download.zotero.org/client/release/#{version}/Zotero-#{version}.dmg"
name "Zotero"
desc "Collect, organize, cite, and share research sources"
homepage "https://www.zotero.org/"
livecheck do
url "https://www.zotero.org/download/"
strategy :page_match
regex(/standaloneVersions.*?"mac"\s*:\s*"(\d+(?:\.\d+)*)"/i)
end
auto_updates true
app "Zotero.app"
zap trash: [
"~/Library/Application Support/Zotero",
"~/Library/Caches/Zotero",
"~/Library/Preferences/org.zotero.zotero.plist",
"~/Library/Saved Application State/org.zotero.zotero.savedState",
]
end
| 27.074074 | 83 | 0.69357 |
edb7afb46035912e984e0339f3517ff9ba3ed9c1 | 2,229 | class QuestionnaireNode < Node
belongs_to :questionnaire, class_name: 'Questionnaire', foreign_key: 'node_object_id', inverse_of: false
belongs_to :node_object, class_name: 'Questionnaire', foreign_key: 'node_object_id', inverse_of: false
def self.table
'questionnaires'
end
def self.get(sortvar = nil, sortorder = nil, user_id = nil, show = nil, parent_id = nil, _search = nil)
conditions = if show
if User.find(user_id).role.name != 'Teaching Assistant'
'questionnaires.instructor_id = ?'
else
'questionnaires.instructor_id in (?)'
end
elsif User.find(user_id).role.name != 'Teaching Assistant'
'(questionnaires.private = 0 or questionnaires.instructor_id = ?)'
else
'(questionnaires.private = 0 or questionnaires.instructor_id in (?))'
end
values = if User.find(user_id).role.name == 'Teaching Assistant'
Ta.get_mapped_instructor_ids(user_id)
else
user_id
end
if parent_id
name = TreeFolder.find(parent_id).name + 'Questionnaire'
name.gsub!(/[^\w]/, '')
conditions += " and questionnaires.type = \"#{name}\""
end
sortvar = 'name' if sortvar.nil? || (sortvar == 'directory_path')
sortorder = 'ASC' if sortorder.nil?
(includes(:questionnaire).where([conditions, values]).order("questionnaires.#{sortvar} #{sortorder}") if Questionnaire.column_names.include?(sortvar) &&
%w[ASC DESC asc desc].include?(sortorder))
end
def get_name
Questionnaire.find_by(id: node_object_id).try(:name)
end
# this method return instructor id associated with a questionnaire
# expects no arguments
# returns int
def get_instructor_id
Questionnaire.find_by(id: node_object_id).try(:instructor_id)
end
def get_private
Questionnaire.find_by(id: node_object_id).try(:private)
end
def get_creation_date
Questionnaire.find_by(id: node_object_id).try(:created_at)
end
def get_modified_date
Questionnaire.find_by(id: node_object_id).try(:updated_at)
end
def is_leaf
true
end
end
| 33.772727 | 156 | 0.644235 |
ab916ad8c381b8e60e82e0f2cfe7286d19723c56 | 318 | module Fog
module Compute
class Ninefold
class Real
def query_async_job_result(options = {})
request('queryAsyncJobResult', options, :expects => [200],
:response_prefix => 'queryasyncjobresultresponse', :response_type => Array)
end
end
end
end
end
| 21.2 | 93 | 0.610063 |
62d7073966e12b289fc5647b3babdcc08164f551 | 173 | module StringHelpers
def remove_suffix(word, suffix_size)
word[0, word.size - suffix_size]
end
def ends_with?(word, suffix)
!!(word =~ /#{suffix}$/)
end
end | 19.222222 | 38 | 0.66474 |
b992bc1c59aa9b2142b59d60c2276b9d1eb79262 | 2,581 | #
# Author:: AJ Christensen (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'chef/config'
require 'chef/exceptions'
require 'chef/log'
require 'mixlib/cli'
# Base class for Chef command-line applications. Wires up Mixlib::CLI
# option parsing, Unix signal handling, configuration loading and logging;
# concrete applications override #setup_application and #run_application.
class Chef::Application
  include Mixlib::CLI

  def initialize
    super

    # SIGTERM/SIGINT terminate the process; SIGHUP reloads configuration.
    trap("TERM") do
      Chef::Application.fatal!("SIGTERM received, stopping", 1)
    end

    trap("INT") do
      Chef::Application.fatal!("SIGINT received, stopping", 2)
    end

    trap("HUP") do
      Chef::Log.info("SIGHUP received, reconfiguring")
      reconfigure
    end

    at_exit do
      # tear down the logger
    end
  end

  # Reconfigure the application. You'll want to override and super this method.
  def reconfigure
    configure_chef
    configure_logging
  end

  # Get this party started: configure, set up, then run the application.
  def run
    reconfigure
    setup_application
    run_application
  end

  # Parse CLI options, then merge them on top of the configuration file
  # (only when one was given, exists, and is readable).
  def configure_chef
    parse_options
    # File.exist? replaces File.exists?, which is deprecated and was
    # removed in Ruby 3.2.
    Chef::Config.from_file(config[:config_file]) if !config[:config_file].nil? && File.exist?(config[:config_file]) && File.readable?(config[:config_file])
    Chef::Config.merge!(config)
  end

  # Initialize and configure the logger from Chef::Config settings.
  def configure_logging
    Chef::Log.init(Chef::Config[:log_location])
    Chef::Log.level = Chef::Config[:log_level]
  end

  # Called prior to starting the application, by the run method
  def setup_application
    raise Chef::Exceptions::Application, "#{self.to_s}: you must override setup_application"
  end

  # Actually run the application
  def run_application
    raise Chef::Exceptions::Application, "#{self.to_s}: you must override run_application"
  end

  class << self
    # Log a fatal error message to both STDERR and the Logger, exit the application
    def fatal!(msg, err = -1)
      STDERR.puts("FATAL: #{msg}")
      Chef::Log.fatal(msg)
      Process.exit err
    end

    # Log the message at debug level and exit with the given status.
    def exit!(msg, err = -1)
      Chef::Log.debug(msg)
      Process.exit err
    end
  end
end
| 26.070707 | 156 | 0.686943 |
017d7f9f143a1243ec17c5023943ec86226eec09 | 784 | # frozen_string_literal: true
module SpreeMailchimpEcommerce
class ProductMailchimpPresenter
attr_reader :product
def initialize(product)
@product = product
end
def json
{
id: Digest::MD5.hexdigest(product.id.to_s),
title: product.name || "",
description: product.description || "",
url: "#{::Rails.application.routes.url_helpers.spree_url}products/#{product.slug}" || "",
vendor: product.category&.name || "",
image_url: image_url,
variants: variants
}.as_json
end
private
def variants
product.has_variants? ? product.variants.map(&:mailchimp_variant) : [product.master.mailchimp_variant]
end
def image_url
product.mailchimp_image_url
end
end
end
| 23.058824 | 108 | 0.649235 |
1a9018af584ee15a6044ff3bb0c1034f4c9cfb3a | 152 | class AddPromeseEndpointToPromeseSettings < ActiveRecord::Migration
def change
add_column :promese_settings, :promese_endpoint, :string
end
end
| 25.333333 | 67 | 0.815789 |
b9041f71f5ee060e91680e301bd8eda919dd2572 | 3,172 | require 'spec_helper'
# Specs for LogDecorator: module-level configuration accessors, the `_log`
# class/instance helpers mixed into including classes, and the customisable
# message prefix / log level behaviour. TestClass1 and TestLog are fixtures
# provided by spec_helper.
describe LogDecorator do
  it 'has a version number' do
    expect(LogDecorator::VERSION).not_to be nil
  end

  # Public configuration surface of the top-level module.
  describe "module methods" do
    it "should respond to :prefix" do
      expect(LogDecorator.respond_to?(:prefix)).to be true
    end

    it "should respond to :prefix=" do
      expect(LogDecorator.respond_to?(:prefix=)).to be true
    end

    it "should respond to :logger" do
      expect(LogDecorator.respond_to?(:logger)).to be true
    end

    it "should respond to :logger=" do
      expect(LogDecorator.respond_to?(:logger=)).to be true
    end
  end

  describe "include in class" do
    before(:each) do
      # Reset the prefix and rewind the shared StringIO sink between examples.
      TestClass1._log_prefix = LogDecorator::DEFAULT_PREFIX
      TestLog.sio.rewind
    end

    it "should respond to _log class method" do
      expect(TestClass1.respond_to?(:_log)).to be true
    end

    it "should respond to _log instance method" do
      expect(TestClass1.new.respond_to?(:_log)).to be true
    end

    it "should log expected message from class method" do
      TestClass1.cmethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to eq("TestClass1.cmethod called\n")
    end

    it "should log expected message from instance method" do
      TestClass1.new.imethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to eq("TestClass1#imethod called\n")
    end
  end

  describe "change prefix" do
    before(:each) do
      # Wrap the default prefix in MIQ(...) to prove custom lambdas are honoured.
      TestClass1._log_prefix = lambda do |klass, separator, location|
        "MIQ(#{LogDecorator::DEFAULT_PREFIX.call(klass, separator, location)})"
      end
      TestLog.sio.rewind
    end

    it "should log expected message from class method" do
      TestClass1.cmethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to eq("MIQ(TestClass1.cmethod) called\n")
    end

    it "should log expected message from instance method" do
      TestClass1.new.imethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to eq("MIQ(TestClass1#imethod) called\n")
    end
  end

  describe "change loglevel" do
    before(:each) do
      TestClass1._log_prefix = LogDecorator::DEFAULT_PREFIX
      TestClass1._log.level = Log4r::DEBUG
      # reopen("") empties the sink rather than merely rewinding it.
      TestLog.sio.reopen("")
    end

    it "should log expected message from class method at DEBUG" do
      expect(TestClass1._log.debug?).to be true
      TestClass1.cmethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to eq("TestClass1.cmethod called\n")
    end

    it "should NOT log expected message from class method at INFO" do
      TestClass1._log.level = Log4r::INFO
      expect(TestClass1._log.info?).to be true
      TestClass1.cmethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to be_nil
    end

    it "should log expected message from instance method at DEBUG" do
      TestClass1.new.imethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to eq("TestClass1#imethod called\n")
    end

    it "should NOT log expected message from instance method at INFO" do
      obj = TestClass1.new
      obj._log.level = Log4r::INFO
      expect(obj._log.info?).to be true
      obj.imethod
      TestLog.sio.rewind
      expect(TestLog.sio.gets).to be_nil
    end
  end
end
| 28.321429 | 79 | 0.678436 |
282ea4c4620566a4a5b0f7af1c94b722701d8a4a | 1,384 | lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "eloquant/version"
# Gem specification for eloquant, a Ruby wrapper around Oracle Eloqua's REST API.
Gem::Specification.new do |spec|
  spec.name        = "eloquant"
  spec.version     = Eloquant::VERSION
  spec.authors     = ["Jason Stumbaugh"]
  spec.email       = ["[email protected]"]

  spec.summary     = "Eloqua REST API wrapper"
  spec.description = "This gem provides a nice ruby interface to work with Oracle Eloqua's REST API"
  spec.homepage    = "https://github.com/jstumbaugh/eloquant"
  spec.license     = "MIT"

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path("..", __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  # Runtime dependency.
  spec.add_dependency "faraday_middleware", "> 0.9.0", "< 0.11.0"

  # Development-only dependencies.
  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "simplecov", "~> 0.16.0"
  spec.add_development_dependency "webmock", "~> 1.21", ">= 1.21.0"
end
| 41.939394 | 102 | 0.665462 |
269df7bc8ee648a468b297ab8c558146707e7cb7 | 396 | # frozen_string_literal: true
require 'yaml'
# Ensure MAILCHIMP_API_KEY is always populated for the test suite: prefer a
# developer-supplied key file, otherwise use the dummy key the recorded VCR
# cassettes were captured with.
unless ENV['MAILCHIMP_API_KEY']
  ENV['MAILCHIMP_API_KEY'] =
    if File.exist?('api_key.yml')
      YAML.load_file('api_key.yml')['api_key'] # api_key.yml is ignored in .gitignore
    else
      'vcr_playback-us11' # Will successfully replay the VCR cassettes
    end
end

# The data-centre identifier is the suffix after the dash in the API key.
ENV['MAILCHIMP_DC'] = ENV['MAILCHIMP_API_KEY'].split('-')[1]
| 30.461538 | 110 | 0.724747 |
38a072dda7a7eab7f2ab4216d17db4cf0679be7d | 816 | require 'test_helper'
# Verifies the shared layout links for both anonymous and logged-in visitors.
class SiteLayoutTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:agata)
  end

  test "layout links" do
    get root_path
    assert_template 'static_pages/home'
    assert_select "a[href=?]", root_path, count: 2
    assert_select "a[href=?]", help_path
    assert_select "a[href=?]", users_path, count: 0
    assert_select "a[href=?]", about_path
    assert_select "a[href=?]", contact_path
    get contact_path
    assert_select "title", full_title("Contact")
    get signup_path
    assert_select "title", full_title("Sign up")
    log_in_as(@user)
    follow_redirect!
    # Was `count = 0` — a local-variable assignment typo, not the :count
    # option used elsewhere in this test. Fixed to the keyword form.
    assert_select "a[href=?]", login_path, count: 0
    assert_select "a[href=?]", users_path
    assert_select "a[href=?]", edit_user_path
    assert_select "a[href=?]", logout_path
  end
end
| 28.137931 | 54 | 0.683824 |
bb999f02fd4614d0da94bdf4978ce7de0f98e85e | 912 | # frozen_string_literal: true
# Test-app controller exercising PaperTrail's whodunnit/metadata callbacks.
class ApplicationController < ActionController::Base
  protect_from_forgery

  # Some applications and libraries modify `current_user`. Their changes need
  # to be reflected in `whodunnit`, so the `set_paper_trail_whodunnit` below
  # must happen after this.
  before_action :modify_current_user

  # PT used to add this callback automatically. Now people are required to add
  # it themselves, like this, allowing them to control the order of callbacks.
  # The `modify_current_user` callback above shows why this control is useful.
  before_action :set_paper_trail_whodunnit

  # Re-raise so errors surface in tests instead of rendering an error page.
  def rescue_action(e)
    raise e
  end

  # Returns id of hypothetical current user
  attr_reader :current_user

  # Extra metadata PaperTrail stores alongside each version record.
  def info_for_paper_trail
    { ip: request.remote_ip, user_agent: request.user_agent }
  end

  private

  # Stub a user-like object so whodunnit has a stable id to record.
  def modify_current_user
    @current_user = OpenStruct.new(id: 153)
  end
end
| 27.636364 | 78 | 0.770833 |
1c6ea97b872df199caec97b8557762375ed9b130 | 1,356 | require_relative '../../puppet_x/puppetlabs/netdev_stdlib/check'
# Registers the syslog_facility type either through the legacy netdev
# Puppet::Type DSL or through the modern Resource API, depending on the
# capabilities detected by PuppetX::NetdevStdlib::Check.
if PuppetX::NetdevStdlib::Check.use_old_netdev_type
  Puppet::Type.newtype(:syslog_facility) do
    @doc = 'Configure severity level for syslog facilities'

    apply_to_all
    ensurable

    newparam(:name, namevar: true) do
      desc 'Facility'

      validate do |value|
        if value.is_a? String then super(value)
        else raise "value #{value.inspect} is invalid, must be a String."
        end
      end
    end

    newproperty(:level) do
      desc 'Syslog severity level to log'
      # Accept numeric strings and coerce them to Integer.
      munge { |v| Integer(v) }
    end
  end
else
  require 'puppet/resource_api'

  Puppet::ResourceApi.register_type(
    name: 'syslog_facility',
    docs: 'Configure severity level for syslog facilities',
    # remote_resource is only enabled when running against a network device.
    features: ['canonicalize','simple_get_filter'] + ( Puppet::Util::NetworkDevice.current.nil? ? [] : ['remote_resource'] ),
    attributes: {
      ensure: {
        type: 'Enum[present, absent]',
        desc: 'Whether this syslog facility configuration should be present or absent on the target system.',
        default: 'present'
      },
      name: {
        type: 'String',
        desc: 'Facility',
        behaviour: :namevar
      },
      level: {
        type: 'Optional[Integer[0, 7]]',
        desc: 'Syslog severity level to log'
      }
    }
  )
end
| 27.12 | 125 | 0.620944 |
280d35adcb902aa414de2f63671ebf15de977cfa | 319 | module ActionPack
# Returns the version of the currently loaded Action Pack as a <tt>Gem::Version</tt>
def self.gem_version
Gem::Version.new VERSION::STRING
end
module VERSION
MAJOR = 4
MINOR = 2
TINY = 0
PRE = "beta1"
STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
end
end
| 19.9375 | 86 | 0.642633 |
61407f6d08806bdddf87955dc7d38b867254c68d | 1,232 | RSpec.describe ParallelReportPortal do
it 'has a version number' do
expect(ParallelReportPortal::VERSION).not_to be nil
end
context 'extends the correct modules' do
let(:extensions) { ParallelReportPortal.singleton_class.included_modules }
it 'extends ParallelReportPortal::HTTP' do
expect(extensions).to include(ParallelReportPortal::HTTP)
end
it 'extends ParallelReportPortal::Clock' do
expect(extensions).to include(ParallelReportPortal::Clock)
end
it 'extends ParallelReportPortal::FileUtils' do
expect(extensions).to include(ParallelReportPortal::FileUtils)
end
end
context 'is configurable' do
context 'if not explicitly configured' do
it 'has default configuration object' do
expect(ParallelReportPortal.configuration).not_to be_nil
end
it 'uses ParallelReportPortal::Configuration' do
expect(ParallelReportPortal.configuration).to be_kind_of(ParallelReportPortal::Configuration)
end
end
context 'allows configuration' do
it 'yield a configuration object' do
expect { |x| ParallelReportPortal.configure(&x) }.to yield_with_args(ParallelReportPortal.configuration)
end
end
end
end
| 30.8 | 112 | 0.736201 |
610d7f8c5d0bd6498d2cb949eab1dcc3bae066cd | 370 | maintainer "Opscode, Inc."
maintainer_email "[email protected]"
license "Apache 2.0"
description "Installs packages for working with XFS"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.rdoc'))
version "0.1.1"
recipe "xfs", "Installs packages for working with XFS"
%w{ debian ubuntu }.each do |os|
supports os
end
| 28.461538 | 74 | 0.7 |
1c4e217579bdd0e8f3f02df32f6e2d2bfa94514d | 3,517 | # frozen_string_literal: true
require 'spec_helper'
require 'bolt_spec/conn'
require 'bolt_spec/transport'
require 'bolt/transport/docker'
require 'bolt/inventory'
require 'shared_examples/transport'
# Integration specs for Bolt's docker transport. Tagged `docker: true` so
# they only run when a docker test environment is available.
describe Bolt::Transport::Docker, docker: true do
  include BoltSpec::Conn
  include BoltSpec::Transport

  let(:transport) { 'docker' }
  let(:hostname) { conn_info('docker')[:host] }
  let(:uri) { "docker://#{hostname}" }
  let(:docker) { Bolt::Transport::Docker.new }
  let(:inventory) { Bolt::Inventory.empty }
  let(:target) { make_target }
  let(:transport_config) { {} }

  def make_target
    inventory.get_target(uri)
  end

  context 'with docker' do
    let(:transport) { :docker }
    let(:os_context) { posix_context }

    it "can test whether the target is available" do
      expect(runner.connected?(target)).to eq(true)
    end

    it "returns false if the target is not available" do
      expect(runner.connected?(inventory.get_target('unknownfoo'))).to eq(false)
    end

    # Shared examples covering the full transport contract.
    include_examples 'transport api'
  end

  context 'with_connection' do
    it "fails with an unknown host" do
      # Test fails differently on Windows due to issues in the docker-api gem.
      expect {
        docker.with_connection(inventory.get_target('not_a_target')) {}
      }.to raise_error(Bolt::Node::ConnectError, /Could not find a container with name or ID matching 'not_a_target'/)
    end

    context "when connecting to containers by ID" do
      let(:container_id) { docker.with_connection(target, &:container_id) }

      it "succeeds when using full container IDs" do
        expect(docker.connected?(inventory.get_target("docker://#{container_id}"))).to eq(true)
      end

      it "succeeds when using short container IDs" do
        # Docker accepts the 12-character abbreviated form of a container ID.
        short_id = container_id[0..11]
        expect(docker.connected?(inventory.get_target("docker://#{short_id}"))).to eq(true)
      end
    end
  end

  context 'when url is specified' do
    it 'uses the url' do
      set_config(target, 'service-url' => 'tcp://localhost:55555')
      expect {
        docker.with_connection(target) {}
      }.to raise_error(Bolt::Node::ConnectError, /Could not find a container with name or ID matching/)
    end
  end

  context 'when there is no host in the target' do
    # Directly create an inventory target, since Inventory#get_target doesn't allow
    # for passing config and would set the host as the name passed to it
    let(:target) { Bolt::Target.from_hash({ 'name' => 'hostless' }, inventory) }

    it 'errors' do
      expect { docker.run_command(target, 'whoami') }.to raise_error(/does not have a host/)
    end
  end

  context 'with shell-command specified' do
    let(:target_data) {
      { 'uri' => uri,
        'config' => {
          'docker' => { 'shell-command' => '/bin/bash -c' }
        } }
    }
    let(:target) { Bolt::Target.from_hash(target_data, inventory) }

    it 'uses the specified shell' do
      result = docker.run_command(target, 'echo $SHELL')
      expect(result.value['stdout'].strip).to eq('/bin/bash')
    end
  end

  context 'with run_as specified' do
    let(:target_data) {
      { 'uri' => uri,
        'config' => {
          'docker' => { 'run-as' => 'root' }
        } }
    }
    let(:target) { Bolt::Target.from_hash(target_data, inventory) }

    it 'runs as the specified user' do
      result = docker.run_command(target, 'whoami')
      expect(result.value['stdout'].strip).to eq('root')
    end
  end
end
| 30.850877 | 118 | 0.642877 |
5dc86171eb439dc22c595c98ff28205237ba7aa5 | 36 | module Sway
VERSION = "0.0.1"
end
| 9 | 19 | 0.638889 |
38e21687c49f5f2b7765b6669d9e934e108e494c | 280 | require "shoelace/rails/ui/version"
require "shoelace/rails/ui/engine"
# Eagerly require every sibling Ruby file under lib/ by its bare name.
Dir[File.dirname(__FILE__) + '/../lib/*.rb'].each do |file|
  require File.basename(file, File.extname(file))
end

# Namespace for the Shoelace Rails UI engine.
module Shoelace
  module Rails
    module Ui
      # Your code goes here...
    end
  end
end
| 21.538462 | 60 | 0.689286 |
87f7f10563a649c6f558b496651e839898cbd755 | 736 | module ChangeHealth
class Connection
URI_BUILDER = ->(host) { "https://#{host}apis.changehealthcare.com/".freeze }
QA_ENDPOINT = URI_BUILDER.call('sandbox.')
PROD_ENDPOINT = URI_BUILDER.call('')
include HTTParty
base_uri QA_ENDPOINT
headers 'Content-Type' => 'application/json;charset=UTF-8'
format :json
def request(endpoint: , body: nil, headers: {}, auth: true, verb: :post)
body = body.to_json if body.is_a?(Hash)
headers = auth_header.merge(headers) if auth
self.class.send("#{verb}", endpoint, body: body, headers: headers)
end
private
def auth_header
@auth ||= Authentication.new
return @auth.authenticate.access_header
end
end
end
| 23 | 81 | 0.657609 |
ab34f8c0843c9c439677bcda54b14fb6aac1dbfb | 452 | require 'spec_helper'
# Catalogue-compilation spec for the sec_ntalk rule on all supported OSes.
describe 'security_baseline::rules::sec_ntalk' do
  on_supported_os.each do |os, os_facts|
    context "on #{os}" do
      # Pretend the ntalk service is enabled so the rule has work to do.
      let(:facts) do
        os_facts.merge(
          'srv_ntalk' => 'enabled',
        )
      end
      let(:params) do
        {
          'enforce' => true,
          'message' => 'ntalk service',
          'loglevel' => 'warning',
        }
      end

      it { is_expected.to compile }
    end
  end
end
| 19.652174 | 49 | 0.519912 |
d5c86ec28e8ebb928c353f0a976c205ccde1ded4 | 1,046 | module Messages
class DeadLink
include Rails.application.routes.url_helpers
attr_reader :flag
def initialize(flag)
@flag = flag
end
def title
"One of your submissions has been flagged on #{flagged_date}"
end
def content
"Hey #{username}, your project #{lesson_name} has a broken link in your submission. " \
"Please update it by #{submission_deletion_date} so it doesn\'t get removed!"
end
def url
path_course_lesson_url(flag.project_submission.lesson.course.path,
flag.project_submission.lesson.course,
flag.project_submission.lesson,
only_path: true)
end
private
def flagged_date
flag.created_at.strftime('%d %b %Y')
end
def username
flag.project_submission.user.username
end
def lesson_name
flag.project_submission.lesson.title
end
def submission_deletion_date
7.days.from_now.strftime('%d %b %Y')
end
end
end
| 22.73913 | 93 | 0.628107 |
bf47133111a75f59899ea3f2030729e04235f83c | 799 | require "active_support"
require "active_support/core_ext"
require "builder"
require "yaml"
require "json"
module Electretri
  class << self
    # Builds a Calculate instance from an already-parsed input hash.
    def parse_file(file)
      Electretri::Calculate.new(file)
    end
  end

  # File-based entry points: each loads an input file in its format, runs
  # the calculation and writes the serialized result into the current
  # working directory.
  module API
    class << self
      def load_yml(path)
        write_result('result.yml', Electretri.parse_file(
          YAML.load_file(path)
        ).project.to_yaml)
      end

      def load_json(path)
        write_result('result.json', Electretri.parse_file(
          JSON.parse(File.read(path))
        ).project.to_json)
      end

      def load_xml(path)
        write_result('result.xml', Electretri.parse_file(
          Hash.from_xml(File.read(path))["hash"]
        ).project.to_xml)
      end

      private

      # Shared writer extracted from the three previously copy-pasted
      # File.open blocks.
      def write_result(filename, content)
        File.open(filename, 'w') { |f| f.write content }
      end
    end
  end
end
| 21.594595 | 73 | 0.607009 |
1cc4b6b74bdd9bbc48fda8fd4816eee0d7bd9a14 | 1,722 | Rails.application.routes.draw do
devise_for :users
resources :users
root :to => 'users#index'
get 'static_pages/home'
get 'static_pages/help'
# The priority is based upon order of creation: first created -> highest priority.
# See how all your routes lay out with "rake routes".
# You can have the root of your site routed with "root"
# root 'welcome#index'
# Example of regular route:
# get 'products/:id' => 'catalog#view'
# Example of named route that can be invoked with purchase_url(id: product.id)
# get 'products/:id/purchase' => 'catalog#purchase', as: :purchase
# Example resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Example resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Example resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Example resource route with more complex sub-resources:
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', on: :collection
# end
# end
# Example resource route with concerns:
# concern :toggleable do
# post 'toggle'
# end
# resources :posts, concerns: :toggleable
# resources :photos, concerns: :toggleable
# Example resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
end
| 26.90625 | 84 | 0.649826 |
089e7aef73d89cf53959120d21cef3f8f33fc058 | 3,094 | require File.expand_path('../../../spec_helper', __FILE__)
require 'bigdecimal'
# Specs for BigDecimal#< covering ordinary values, signed zeros,
# infinities, NaN, coercible objects and invalid (nil) comparisons.
describe "BigDecimal#<" do
  before :each do
    @zero = BigDecimal("0")
    @zero_pos = BigDecimal("+0")
    @zero_neg = BigDecimal("-0")

    @mixed = BigDecimal("1.23456789")
    @pos_int = BigDecimal("2E5555")
    @neg_int = BigDecimal("-2E5555")
    @pos_frac = BigDecimal("2E-9999")
    @neg_frac = BigDecimal("-2E-9999")

    # An object that coerces itself into a BigDecimal for comparisons.
    @int_mock = mock('123')
    class << @int_mock
      def coerce(other)
        return [other, BigDecimal('123')]
      end
      def < (other)
        BigDecimal('123') < other
      end
    end

    @values = [@mixed, @pos_int, @neg_int, @pos_frac, @neg_frac,
      -2**32, -2**31, -2**30, -2**16, -2**8, -100, -10, -1,
      @zero , 1, 2, 10, 10.5, 2**8, 2**16, 2**32, @int_mock, @zero_pos, @zero_neg]

    @infinity = BigDecimal("Infinity")
    @infinity_neg = BigDecimal("-Infinity")
    @float_infinity = Float::INFINITY
    @float_infinity_neg = -Float::INFINITY
    @nan = BigDecimal("NaN")
  end

  it "returns true if a < b" do
    one = BigDecimal("1")
    two = BigDecimal("2")

    frac_1 = BigDecimal("1E-99999")
    frac_2 = BigDecimal("0.9E-99999")

    (@zero < one).should == true
    (two < @zero).should == false
    (frac_2 < frac_1).should == true
    (@neg_int < @pos_int).should == true
    (@pos_int < @neg_int).should == false
    (@neg_int < @pos_frac).should == true
    (@pos_frac < @neg_int).should == false
    # Signed zeros compare equal, so none is strictly less than another.
    (@zero < @zero_pos).should == false
    (@zero < @zero_neg).should == false
    (@zero_neg < @zero_pos).should == false
    (@zero_pos < @zero_neg).should == false
  end

  it "properly handles infinity values" do
    @values.each { |val|
      (val < @infinity).should == true
      (@infinity < val).should == false
      (val < @infinity_neg).should == false
      (@infinity_neg < val).should == true
    }

    (@infinity < @infinity).should == false
    (@infinity_neg < @infinity_neg).should == false
    (@infinity < @infinity_neg).should == false
    (@infinity_neg < @infinity).should == true
  end

  # Guarded by a known MRI bug fixed in 2.4 (see ruby bug #13674).
  ruby_bug "#13674", ""..."2.4" do
    it "properly handles Float infinity values" do
      @values.each { |val|
        (val < @float_infinity).should == true
        (@float_infinity < val).should == false
        (val < @float_infinity_neg).should == false
        (@float_infinity_neg < val).should == true
      }
    end
  end

  it "properly handles NaN values" do
    # NaN is unordered: every comparison involving it is false.
    @values += [@infinity, @infinity_neg, @nan]

    @values.each { |val|
      (@nan < val).should == false
      (val < @nan).should == false
    }
  end

  it "raises an ArgumentError if the argument can't be coerced into a BigDecimal" do
    lambda {@zero < nil }.should raise_error(ArgumentError)
    lambda {@infinity < nil }.should raise_error(ArgumentError)
    lambda {@infinity_neg < nil }.should raise_error(ArgumentError)
    lambda {@mixed < nil }.should raise_error(ArgumentError)
    lambda {@pos_int < nil }.should raise_error(ArgumentError)
    lambda {@neg_frac < nil }.should raise_error(ArgumentError)
  end
end
| 31.896907 | 84 | 0.603749 |
4af80a5af6766ebcf50622f1899713d41893d453 | 11,954 | # frozen_string_literal: true
class Source::URL::Null < Source::URL
attr_reader :work_id, :page_url, :profile_url
def self.match?(url)
true
end
def site_name
case host
when /ask\.fm\z/i
"Ask.fm"
when /bcy\.net\z/i
"BCY"
when /carrd\.co\z/i
"Carrd"
when /circle\.ms\z/i
"Circle.ms"
when /dlsite\.(com|net)\z/i
"DLSite"
when /doujinshi\.org\z/i, /doujinshi\.mugimugi\.org\z/i
"Doujinshi.org"
when /ko-fi\.com\z/i
"Ko-fi"
when /lit\.link\z/i
"Lit.link"
when /mixi\.jp\z/i
"Mixi.jp"
when /piapro\.jp\z/i
"Piapro.jp"
when /sakura\.ne\.jp\z/i
"Sakura.ne.jp"
else
# "www.melonbooks.co.jp" => "Melonbooks"
parsed_domain.sld.titleize
end
end
def parse
case [subdomain, domain, *path_segments]
# http://about.me/rig22
in _, "about.me", username
@username = username
@profile_url = "https://about.me/#{username}"
# http://marilyn77.ameblo.jp/
in username, "ameblo.jp", *rest unless subdomain.in?(["www", "s", nil])
@username = username
@profile_url = "https://ameblo.jp/#{username}"
# https://ameblo.jp/g8set55679
# http://ameblo.jp/hanauta-os/entry-11860045489.html
# http://s.ameblo.jp/ma-chi-no/
in _, "ameblo.jp", username, *rest
@username = username
@profile_url = "https://ameblo.jp/#{username}"
# http://stat.ameba.jp/user_images/20130802/21/moment1849/38/bd/p
# http://stat001.ameba.jp/user_images/20100212/15/weekend00/74/31/j/
in /^stat\d*$/, "ameba.jp", "user_images", _, _, username, *rest
@username = username
@profile_url = "https://ameblo.jp/#{username}"
# https://profile.ameba.jp/ameba/kbnr32rbfs
in "profile", "ameba.jp", "ameba", username
@username = username
@profile_url = "https://ameblo.jp/#{username}"
# https://anidb.net/creator/65313
in _, "anidb.net", "creator", user_id
@user_id = user_id
@profile_url = "https://anidb.net/creator/#{user_id}"
# https://anidb.net/perl-bin/animedb.pl?show=creator&creatorid=3903
in _, "anidb.net", "perl-bin", "animedb.pl" if params[:show] == "creator" and params[:creatorid].present?
@user_id = params[:creatorid]
@profile_url = "https://anidb.net/creator/#{user_id}"
# https://www.animenewsnetwork.com/encyclopedia/people.php?id=17056
in _, ("animenewsnetwork.com" | "animenewsnetwork.cc"), "encyclopedia", "people.php" if params[:id].present?
@user_id = params[:id]
@profile_url = "https://www.animenewsnetwork.com/encyclopedia/people.php?id=#{params[:id]}"
# https://ask.fm/kiminaho
# https://m.ask.fm/kiminaho
# http://ask.fm/cyoooooon/best
in _, "ask.fm", username, *rest
@username = username
@profile_url = "https://ask.fm/#{username}"
# http://nekomataya.net/diarypro/data/upfile/66-1.jpg
# http://www117.sakura.ne.jp/~cat_rice/diarypro/data/upfile/31-1.jpg
# http://webknight0.sakura.ne.jp/cgi-bin/diarypro/data/upfile/9-1.jpg
in _, _, *subdirs, "diarypro", "data", "upfile", /^(\d+)-\d+\.(jpg|png|gif)$/ => file
@work_id = $1
@page_url = [site, *subdirs, "diarypro/diary.cgi?no=#{@work_id}"].join("/")
# http://akimbo.sakura.ne.jp/diarypro/diary.cgi?mode=image&upfile=723-4.jpg
# http://www.danshaku.sakura.ne.jp/cgi-bin/diarypro/diary.cgi?mode=image&upfile=56-1.jpg
# http://www.yanbow.com/~myanie/diarypro/diary.cgi?mode=image&upfile=279-1.jpg
in _, _, *subdirs, "diarypro", "diary.cgi" if params[:mode] == "image" && params[:upfile].present?
@work_id = params[:upfile][/^\d+/]
@page_url = [site, *subdirs, "diarypro/diary.cgi?no=#{@work_id}"].join("/")
# http://com2.doujinantena.com/contents_jpg/cf0224563cf7a75450596308fe651d5f/018.jpg
# http://sozai.doujinantena.com/contents_jpg/cf0224563cf7a75450596308fe651d5f/009.jpg
in _, "doujinantena.com", "contents_jpg", /^\h{32}$/ => md5, *rest
@md5 = md5
@page_url = "http://doujinantena.com/page.php?id=#{md5}"
# https://e-shuushuu.net/images/2017-07-19-915628.jpeg
in _, "e-shuushuu.net", "images", /^\d{4}-\d{2}-\d{2}-(\d+)\.(jpeg|jpg|png|gif)$/i
@work_id = $1
@page_url = "https://e-shuushuu.net/image/#{@work_id}"
# https://scontent.fmnl9-2.fna.fbcdn.net/v/t1.6435-9/196345051_961754654392125_8855002558147907833_n.jpg?_nc_cat=103&ccb=1-5&_nc_sid=0debeb&_nc_ohc=EB1RGiEOtyEAX9XE7aL&_nc_ht=scontent.fmnl9-2.fna&oh=00_AT8NNz_keqQ6VJeC1UVSMULhjaP3iykm-ONSMR7IrtarUQ&oe=6257862E
# https://scontent.fmnl8-2.fna.fbcdn.net/v/t1.6435-9/fr/cp0/e15/q65/80900683_480934615898749_6481759463945535488_n.jpg?_nc_cat=107&ccb=1-3&_nc_sid=8024bb&_nc_ohc=cCYFUzyHDmUAX-YHJIw&_nc_ht=scontent.fmnl8-2.fna&oh=e45c3837afcfefb6a4d93adfecef88c1&oe=60F6E392
# https://scontent.fmnl13-1.fna.fbcdn.net/v/t31.18172-8/22861751_1362164640578443_432921612329393062_o.jpg
# https://scontent-sin1-1.xx.fbcdn.net/hphotos-xlp1/t31.0-8/s960x960/12971037_586686358150819_495608200196301072_o.jpg
in _, "fbcdn.net", *subdirs, /^\d+_(\d+)_(?:\d+_){1,3}[no]\.(jpg|png)$/
@work_id = $1
@page_url = "https://www.facebook.com/photo?fbid=#{@work_id}"
# https://fbcdn-sphotos-h-a.akamaihd.net/hphotos-ak-xlp1/t31.0-8/s960x960/13173066_623015164516858_1844421675339995359_o.jpg
# https://fbcdn-sphotos-h-a.akamaihd.net/hphotos-ak-xpf1/v/t1.0-9/s720x720/12032214_991569624217563_4908408819297057893_n.png?oh=efe6ea26aed89c8a12ddc1832b1f0157&oe=5667D5B1&__gda__=1453845772_c742c726735047f2feb836b845ff296f
in /fbcdn/, "akamaihd.net", *subdirs, /^\d_(\d+)_(?:\d+_){1,3}[no]\.(jpg|png)$/
@work_id = $1
@page_url = "https://www.facebook.com/photo.php?fbid=#{work_id}"
# https://gelbooru.com//images/ee/5c/ee5c9a69db9602c95debdb9b98fb3e3e.jpeg
# http://simg.gelbooru.com//images/2003/edd1d2b3881cf70c3acf540780507531.png
# https://simg3.gelbooru.com//samples/0b/3a/sample_0b3ae5e225072b8e391c827cb470d29c.jpg
# https://video-cdn3.gelbooru.com/images/62/95/6295154d082f04009160261b90e7176e.mp4
# https://img2.gelbooru.com//images/a9/64/a96478bbf9bc3f0584f2b5ddf56025fa.webm
in _, "gelbooru.com", ("images" | "samples"), *subdirs, /^(?:sample_)?(\h{32})\.(jpeg|jpg|png|gif|mp4|webm)$/
@md5 = $1
@page_url = "https://gelbooru.com/index.php?page=post&s=list&tags=md5:#{@md5}"
# https://a.hitomi.la/galleries/907838/1.png
# https://0a.hitomi.la/galleries/1169701/23.png
# https://aa.hitomi.la/galleries/990722/003_01_002.jpg
# https://la.hitomi.la/galleries/1054851/001_main_image.jpg
in _, "hitomi.la", "galleries", gallery_id, /^(\d+)\w*\.(jpg|png|gif)$/ => image_id
@gallery_id = gallery_id
@image_id = $1.to_i
@page_url = "https://hitomi.la/reader/#{gallery_id}.html##{@image_id}"
# https://aa.hitomi.la/galleries/883451/t_rena1g.png
in _, "hitomi.la", "galleries", gallery_id, file
@gallery_id = gallery_id
@page_url = "https://hitomi.la/galleries/#{gallery_id}.html"
# http://www.karabako.net/images/karabako_43878.jpg
# http://www.karabako.net/imagesub/karabako_43222_215.jpg
in _, "karabako.net", ("images" | "imagesub"), /^karabako_(\d+)/
@work_id = $1
@page_url = "http://www.karabako.net/post/view/#{work_id}"
# http://static.minitokyo.net/downloads/31/33/764181.jpg
in _, "minitokyo.net", "downloads", /^\d{2}$/, /^\d{2}$/, file
@work_id = filename
@page_url = "http://gallery.minitokyo.net/view/#{@work_id}"
# http://i.minus.com/j2LcOC52dGLtB.jpg
# http://i5.minus.com/ik26grnRJAmYh.jpg
in _, "minus.com", /^[ij]([a-zA-Z0-9]{12,})\.(jpg|png|gif)$/
@work_id = $1
@page_url = "http://minus.com/i/#{@work_id}"
# http://jpg.nijigen-daiaru.com/7364/013.jpg
in "jpg", "nijigen-daiaru.com", /^\d+$/ => work_id, file
@work_id = work_id
@page_url = "http://nijigen-daiaru.com/book.php?idb=#{@work_id}"
# http://art59.photozou.jp/pub/212/1986212/photo/118493247_org.v1534644005.jpg
# http://kura3.photozou.jp/pub/741/2662741/photo/160341863_624.v1353780834.jpg
in _, "photozou.jp", "pub", /^\d+$/, user_id, "photo", /^(\d+)/ => file
@user_id = user_id
@work_id = $1
@page_url = "https://photozou.jp/photo/show/#{@user_id}/#{@work_id}"
# https://tulip.paheal.net/_images/4f309b2b680da9c3444ed462bb172214/3910816%20-%20Dark_Magician_Girl%20MINK343%20Yu-Gi-Oh!.jpg
# http://rule34-data-002.paheal.net/_images/2ab55f9291c8f2c68cdbeac998714028/2401510%20-%20Ash_Ketchum%20Lillie%20Porkyman.jpg
# http://rule34-images.paheal.net/c4710f05e76bdee22fcd0d62bf1ac840/262685%20-%20mabinogi%20nao.jpg
in _, "paheal.net", *subdirs, /^\h{32}$/ => md5, /^(\d+)/ => file
@md5 = md5
@work_id = $1
@page_url = "https://rule34.paheal.net/post/view/#{@work_id}"
# https://api-cdn-mp4.rule34.xxx/images/4330/2f85040320f64c0e42128a8b8f6071ce.mp4
# https://ny5webm.rule34.xxx//images/4653/3c63956b940d0ff565faa8c7555b4686.mp4?5303486
# https://img.rule34.xxx//images/4977/7d76919c2f713c580f69fe129d2d1a44.jpeg?5668795
# http://rule34.xxx//images/993/5625625970c9ce8c5121fde518c2c4840801cd29.jpg?992983
# http://img3.rule34.xxx/img/rule34//images/1180/76c6497b5138c4122710c2d05458e729a8d34f7b.png?1190815
# http://aimg.rule34.xxx//samples/1267/sample_d628f215f27815dc9c1d365a199ee68e807efac1.jpg?1309664
in _, "rule34.xxx", ("images" | "samples"), *subdirs, /^(?:sample_)?(\h{32})\.(jpg|jpeg|png|gif|webm|mp4)$/
@md5 = $1
@page_url = "https://rule34.xxx/index.php?page=post&s=list&md5=#{$1}"
# https://cs.sankakucomplex.com/data/68/6c/686ceee03af38fe4ceb45bf1c50947e0.jpg?e=1591893718&m=fLlJfTrK_j2Rnc0uIHNC3w
# https://v.sankakucomplex.com/data/24/ff/24ff5da1fd7ed051b083b36e4e51de8e.mp4?e=1644999580&m=-OtZg2QdtKbibMte8vlsdw&expires=1644999580&token=0YUdUKKwTmvpozhG1WW_nRvSUQw3WJd574andQv-KYY
# https://cs.sankakucomplex.com/data/sample/2a/45/sample-2a45c67281b0fcfd26208063f81a3114.jpg?e=1590609355&m=cexHhVyJguoZqPB3z3N7aA
# http://c3.sankakucomplex.com/data/sample/8a/44/preview8a44211650e818ef07e5d00284c20a14.jpg
in _, "sankakucomplex.com", "data", *subdirs, /^(?:preview|sample-)?(\h{32})\.(jpg|jpeg|gif|png|webm|mp4)$/
@md5 = $1
@page_url = "https://chan.sankakucomplex.com/post/show?md5=#{@md5}"
# http://shimmie.katawa-shoujo.com/image/3657.jpg
in "shimmie", "katawa-shoujo.com", "image", file
@work_id = filename
@page_url = "https://shimmie.katawa-shoujo.com/post/view/#{@work_id}"
# http://img.toranoana.jp/popup_img/04/0030/09/76/040030097695-2p.jpg
# http://img.toranoana.jp/popup_img18/04/0010/22/87/040010228714-1p.jpg
# http://img.toranoana.jp/popup_blimg/04/0030/08/30/040030083068-1p.jpg
# https://ecdnimg.toranoana.jp/ec/img/04/0030/65/34/040030653417-6p.jpg
in ("img" | "ecdnimg"), "toranoana.jp", *subdirs, /^\d{2}$/, /^\d{4}$/, /^\d{2}$/, /^\d{2}$/, /^(\d{12})-\d+p\.jpg$/ => file
@work_id = $1
@page_url = "https://ec.toranoana.jp/tora_r/ec/item/#{@work_id}"
# http://p.twpl.jp/show/orig/DTaCZ
# http://p.twpl.jp/show/large/5zack
# http://p.twipple.jp/show/orig/vXqaU
in _, ("twpl.jp" | "twipple.jp"), "show", ("large" | "orig"), work_id
@work_id = work_id
@page_url = "http://p.twipple.jp/#{work_id}"
# https://static.zerochan.net/Fullmetal.Alchemist.full.2831797.png
# https://s1.zerochan.net/Cocoa.Cookie.600.2957938.jpg
# http://static.zerochan.net/full/24/13/90674.jpg
in _, "zerochan.net", *subdirs, /(\d+)\.(jpg|png|gif)$/
@work_id = $1
@page_url = "https://www.zerochan.net/#{@work_id}#full"
# http://www.zerochan.net/full/1567893
in _, "zerochan.net", "full", /^\d+$/ => work_id
@work_id = work_id
@page_url = "https://www.zerochan.net/#{@work_id}#full"
else
nil
end
end
end
| 48.396761 | 264 | 0.665468 |
e2611613f704dbaf892713a33e38b1ee009e8882 | 37,263 | module JSONAPI
class ActiveRelationResource < BasicResource
root_resource
class << self
# Finds Resources matching the `filters`, applying any sort and pagination
# options that were supplied.
#
# @param filters [Hash] the filters hash
# @option options [Hash] :context The context of the request, set in the controller
# @option options [Hash] :sort_criteria The `sort criteria`
# @option options [Hash] :include_directives The `include_directives`
#
# @return [Array<Resource>] the Resource instances matching the filters, sorting and pagination rules.
def find(filters, options = {})
  sorting = options.fetch(:sort_criteria) { [] }
  # The join manager works out which tables must be joined (and under which
  # aliases) to satisfy the filters and sort criteria.
  joins = ActiveRelation::JoinManager.new(resource_klass: self,
                                          filters: filters,
                                          sort_criteria: sorting)
  found = apply_request_settings_to_records(records: records(options),
                                            sort_criteria: sorting,
                                            filters: filters,
                                            join_manager: joins,
                                            paginator: options[:paginator],
                                            options: options)
  resources_for(found, options[:context])
end
# Counts the Resources matched by the `filters`.
#
# @param filters [Hash] the filters hash
# @option options [Hash] :context The context of the request, set in the controller
#
# @return [Integer] the count
def count(filters, options = {})
  joins = ActiveRelation::JoinManager.new(resource_klass: self,
                                          filters: filters)
  scoped = apply_request_settings_to_records(records: records(options),
                                             filters: filters,
                                             join_manager: joins,
                                             options: options)
  count_records(scoped)
end
# Returns the single Resource identified by `key`.
#
# @param key the primary key of the resource to find
# @option options [Hash] :context The context of the request, set in the controller
def find_by_key(key, options = {})
  # Raises RecordNotFound (inside find_record_by_key) when no record matches.
  resource_for(find_record_by_key(key, options), options[:context])
end
# Returns the Resources identified by the `keys` array.
#
# @param keys [Array<key>] Array of primary keys to find resources for
# @option options [Hash] :context The context of the request, set in the controller
def find_by_keys(keys, options = {})
  matching = find_records_by_keys(keys, options)
  resources_for(matching, options[:context])
end
# Returns the Resources identified by the `keys` array, for populating a
# ResourceSet. No filtering is applied here because the keys were produced by
# an earlier, already-filtered step.
#
# @param keys [Array<key>] Array of primary keys to find resources for
# @option options [Hash] :context The context of the request, set in the controller
def find_to_populate_by_keys(keys, options = {})
  scope = records_for_populate(options).where(_primary_key => keys)
  resources_for(scope, options[:context])
end
# Finds Resource fragments using the `filters`. Pagination and sort options are used when provided.
# Retrieving the ResourceIdentities and attributes does not instantiate a model instance.
# Note: This is incompatible with Polymorphic resources (which are going to come from two separate tables)
#
# @param filters [Hash] the filters hash
# @option options [Hash] :context The context of the request, set in the controller
# @option options [Hash] :sort_criteria The `sort criteria`
# @option options [Hash] :include_directives The `include_directives`
# @option options [Hash] :attributes Additional fields to be retrieved.
# @option options [Boolean] :cache Return the resources' cache field
#
# @return [Hash{ResourceIdentity => {identity: => ResourceIdentity, cache: cache_field, attributes: => {name => value}}}]
#   the ResourceInstances matching the filters, sorting, and pagination rules along with any request
#   additional_field values
def find_fragments(filters, options = {})
  include_directives = options[:include_directives] ? options[:include_directives].include_directives : {}
  resource_klass = self
  # To-one relationships whose linkage must be emitted even though the related
  # resources are not part of the requested includes.
  linkage_relationships = to_one_relationships_for_linkage(include_directives[:include_related])
  sort_criteria = options.fetch(:sort_criteria) { [] }
  join_manager = ActiveRelation::JoinManager.new(resource_klass: resource_klass,
                                                 source_relationship: nil,
                                                 relationships: linkage_relationships,
                                                 sort_criteria: sort_criteria,
                                                 filters: filters)
  paginator = options[:paginator]
  records = apply_request_settings_to_records(records: records(options),
                                              filters: filters,
                                              sort_criteria: sort_criteria,
                                              paginator: paginator,
                                              join_manager: join_manager,
                                              options: options)
  # This alias is going to be resolve down to the model's table name and will not actually be an alias
  resource_table_alias = resource_klass._table_name
  # Columns are plucked rather than instantiating models; the offsets computed
  # below interpret each plucked row positionally.
  pluck_fields = [Arel.sql("#{concat_table_field(resource_table_alias, resource_klass._primary_key)} AS #{resource_table_alias}_#{resource_klass._primary_key}")]
  cache_field = attribute_to_model_field(:_cache_field) if options[:cache]
  if cache_field
    pluck_fields << Arel.sql("#{concat_table_field(resource_table_alias, cache_field[:name])} AS #{resource_table_alias}_#{cache_field[:name]}")
  end
  linkage_fields = []
  linkage_relationships.each do |name|
    linkage_relationship = resource_klass._relationship(name)
    if linkage_relationship.polymorphic? && linkage_relationship.belongs_to?
      # A polymorphic belongs_to contributes one plucked key column per
      # possible related resource type, each joined under its own alias.
      linkage_relationship.resource_types.each do |resource_type|
        klass = resource_klass_for(resource_type)
        linkage_fields << {relationship_name: name, resource_klass: klass}
        linkage_table_alias = join_manager.join_details_by_polymorphic_relationship(linkage_relationship, resource_type)[:alias]
        primary_key = klass._primary_key
        pluck_fields << Arel.sql("#{concat_table_field(linkage_table_alias, primary_key)} AS #{linkage_table_alias}_#{primary_key}")
      end
    else
      klass = linkage_relationship.resource_klass
      linkage_fields << {relationship_name: name, resource_klass: klass}
      linkage_table_alias = join_manager.join_details_by_relationship(linkage_relationship)[:alias]
      primary_key = klass._primary_key
      pluck_fields << Arel.sql("#{concat_table_field(linkage_table_alias, primary_key)} AS #{linkage_table_alias}_#{primary_key}")
    end
  end
  model_fields = {}
  attributes = options[:attributes]
  attributes.try(:each) do |attribute|
    model_field = resource_klass.attribute_to_model_field(attribute)
    model_fields[attribute] = model_field
    pluck_fields << Arel.sql("#{concat_table_field(resource_table_alias, model_field[:name])} AS #{resource_table_alias}_#{model_field[:name]}")
  end
  # Sort fields collected by `apply_single_sort` must also appear in the
  # select list so databases that require it can order the plucked rows.
  sort_fields = options.dig(:_relation_helper_options, :sort_fields)
  sort_fields.try(:each) do |field|
    pluck_fields << Arel.sql(field)
  end
  fragments = {}
  rows = records.pluck(*pluck_fields)
  rows.each do |row|
    # With a single pluck field AR returns scalars instead of arrays.
    rid = JSONAPI::ResourceIdentity.new(resource_klass, pluck_fields.length == 1 ? row : row[0])
    fragments[rid] ||= JSONAPI::ResourceFragment.new(rid)
    # Positional offset of the next column after the primary key.
    attributes_offset = 1
    if cache_field
      fragments[rid].cache = cast_to_attribute_type(row[1], cache_field[:type])
      attributes_offset+= 1
    end
    linkage_fields.each do |linkage_field_details|
      fragments[rid].initialize_related(linkage_field_details[:relationship_name])
      related_id = row[attributes_offset]
      if related_id
        related_rid = JSONAPI::ResourceIdentity.new(linkage_field_details[:resource_klass], related_id)
        fragments[rid].add_related_identity(linkage_field_details[:relationship_name], related_rid)
      end
      attributes_offset+= 1
    end
    model_fields.each_with_index do |k, idx|
      fragments[rid].attributes[k[0]]= cast_to_attribute_type(row[idx + attributes_offset], k[1][:type])
    end
  end
  # More rows than fragments means duplicate primary keys came back, i.e. a
  # non-normalized `records` relation.
  if JSONAPI.configuration.warn_on_performance_issues && (rows.length > fragments.length)
    warn "Performance issue detected: `#{self.name.to_s}.records` returned non-normalized results in `#{self.name.to_s}.find_fragments`."
  end
  fragments
end
# Finds Resource Fragments related to the source resources through the specified relationship
#
# @param source_rids [Array<ResourceIdentity>] The resources to find related ResourcesIdentities for
# @param relationship_name [String | Symbol] The name of the relationship
# @option options [Hash] :context The context of the request, set in the controller
# @option options [Hash] :attributes Additional fields to be retrieved.
# @option options [Boolean] :cache Return the resources' cache field
#
# @return [Hash{ResourceIdentity => {identity: => ResourceIdentity, cache: cache_field, attributes: => {name => value}, related: {relationship_name: [] }}}]
#   the ResourceInstances matching the filters, sorting, and pagination rules along with any request
#   additional_field values
def find_related_fragments(source_rids, relationship_name, options = {})
  relationship = _relationship(relationship_name)
  # Polymorphic relationships span multiple tables and need the specialized
  # query path. (Upstream note: possibly only when foreign_key_on == :self.)
  finder = relationship.polymorphic? ? :find_related_polymorphic_fragments : :find_related_monomorphic_fragments
  send(finder, source_rids, relationship, options, false)
end
# Like `find_related_fragments`, but additionally connects each found fragment
# back to its source identity (used when building included resources).
def find_included_fragments(source_rids, relationship_name, options)
  relationship = _relationship(relationship_name)
  finder = relationship.polymorphic? ? :find_related_polymorphic_fragments : :find_related_monomorphic_fragments
  send(finder, source_rids, relationship, options, true)
end
# Counts Resources related to the source resource through the specified relationship
#
# @param source_rid [ResourceIdentity] Source resource identifier
# @param relationship_name [String | Symbol] The name of the relationship
# @option options [Hash] :context The context of the request, set in the controller
#
# @return [Integer] the count
def count_related(source_rid, relationship_name, options = {})
  relationship = _relationship(relationship_name)
  related_klass = relationship.resource_klass
  filter_params = options.fetch(:filters, {})
  # Joins in this case are related to the related_klass
  joins = ActiveRelation::JoinManager.new(resource_klass: self,
                                          source_relationship: relationship,
                                          filters: filter_params)
  scoped = apply_request_settings_to_records(records: records(options),
                                             resource_klass: related_klass,
                                             primary_keys: source_rid.id,
                                             join_manager: joins,
                                             filters: filter_params,
                                             options: options)
  # Count only the related table's primary key column.
  related_alias = joins.join_details_by_relationship(relationship)[:alias]
  selected = scoped.select(Arel.sql("#{concat_table_field(related_alias, related_klass._primary_key)}"))
  count_records(selected)
end
# This resource class (ActiveRelationResource) uses an `ActiveRecord::Relation` as the starting point for
# retrieving models. From this relation filters, sorts and joins are applied as needed.
# Depending on which phase of the request processing different `records` methods will be called, giving the user
# the opportunity to override them differently for performance and security reasons.
# begin `records` methods
# Base for the `records` methods that follow and is not directly used for accessing model data by this class.
# Overriding this method gives a single place to affect the `ActiveRecord::Relation` used for the resource.
#
# @option options [Hash] :context The context of the request, set in the controller
#
# @return [ActiveRecord::Relation]
def records_base(_options = {})
  # Unscoped starting relation for the resource's model.
  _model_class.all
end
# The `ActiveRecord::Relation` used for finding user requested models. This may be overridden to enforce
# permissions checks on the request.
#
# @option options [Hash] :context The context of the request, set in the controller
#
# @return [ActiveRecord::Relation]
def records(options = {})
  # Override point: scope this relation (e.g. by the requesting user from
  # options[:context]) to restrict which models a request can reach.
  records_base(options)
end
# The `ActiveRecord::Relation` used for populating the ResourceSet. Only resources that have been previously
# identified through the `records` method will be accessed. Thus it should not be necessary to reapply permissions
# checks. However if the model needs to include other models adding `includes` is appropriate
#
# @option options [Hash] :context The context of the request, set in the controller
#
# @return [ActiveRecord::Relation]
def records_for_populate(options = {})
  # Override point: add `.includes(...)` here to avoid N+1 queries when
  # serializing attributes that read associated models.
  records_base(options)
end
# The `ActiveRecord::Relation` used for the finding related resources. Only resources that have been previously
# identified through the `records` method will be accessed and used as the basis to find related resources. Thus
# it should not be necessary to reapply permissions checks.
#
# @option options [Hash] :context The context of the request, set in the controller
#
# @return [ActiveRecord::Relation]
def records_for_source_to_related(options = {})
  records_base(options)
end
# end `records` methods
# Joins the relationship's table onto `records` using the requested join type.
# For a polymorphic belongs_to the join target is derived from the concrete
# `resource_type`; otherwise the relationship supplies its relation name.
def apply_join(records:, relationship:, resource_type:, join_type:, options:)
  join_target =
    if relationship.polymorphic? && relationship.belongs_to?
      resource_type.to_s.singularize.to_sym
    else
      relationship.relation_name(options)
    end
  case join_type
  when :inner
    records.joins(join_target)
  when :left
    records.joins_left(join_target)
  else
    # Unknown join types leave the relation untouched, as before.
    records
  end
end
# Builds the relation for records reachable through `relationship`, starting
# from the parent resource's source-to-related relation. A relationship may
# provide a custom `:apply_join` strategy; otherwise the default join applies.
def relationship_records(relationship:, join_type: :inner, resource_type: nil, options: {})
  base = relationship.parent_resource.records_for_source_to_related(options)
  custom_join = relationship.options[:apply_join]
  if custom_join
    call_method_or_proc(custom_join, base, relationship, resource_type, join_type, options)
  else
    apply_join(records: base,
               relationship: relationship,
               resource_type: resource_type,
               join_type: join_type,
               options: options)
  end
end
# Merges the relationship's relation (built by `relationship_records`) into
# `records`, producing a relation joined across the relationship.
def join_relationship(records:, relationship:, resource_type: nil, join_type: :inner, options: {})
  records.merge(
    relationship_records(relationship: relationship,
                         join_type: join_type,
                         resource_type: resource_type,
                         options: options)
  )
end
protected
# Returns the names of to-one relationships whose linkage data should always
# be emitted, excluding any relationship already covered by `include_related`.
def to_one_relationships_for_linkage(include_related)
  include_related ||= {}
  _relationships.each_with_object([]) do |(name, relationship), linkage_names|
    next unless relationship.is_a?(JSONAPI::Relationship::ToOne)
    next if include_related.has_key?(name)
    linkage_names << name if relationship.include_optional_linkage_data?
  end
end
# Finds the single model identified by `key`, or raises RecordNotFound.
def find_record_by_key(key, options = {})
  matches = apply_request_settings_to_records(records: records(options), primary_keys: key, options: options)
  found = matches.first
  raise JSONAPI::Exceptions::RecordNotFound.new(key) if found.nil?
  found
end
# Finds the models identified by the `keys` array, applying request settings.
def find_records_by_keys(keys, options = {})
  scoped = records(options)
  apply_request_settings_to_records(records: scoped, primary_keys: keys, options: options)
end
# Finds ResourceFragments related to `source_rids` through a non-polymorphic
# `relationship`. Rows are plucked (no model instantiation); each row is
# interpreted positionally: [source id, related id, cache?, attributes...,
# linkage ids..., sort fields...]. When `connect_source_identity` is true the
# inverse relationship back to the source is recorded on each fragment.
def find_related_monomorphic_fragments(source_rids, relationship, options, connect_source_identity)
  filters = options.fetch(:filters, {})
  source_ids = source_rids.collect {|rid| rid.id}
  include_directives = options[:include_directives] ? options[:include_directives].include_directives : {}
  resource_klass = relationship.resource_klass
  linkage_relationships = resource_klass.to_one_relationships_for_linkage(include_directives[:include_related])
  sort_criteria = []
  options[:sort_criteria].try(:each) do |sort|
    # 'id' is translated to the related resource's actual primary key.
    field = sort[:field].to_s == 'id' ? resource_klass._primary_key : sort[:field]
    sort_criteria << { field: field, direction: sort[:direction] }
  end
  join_manager = ActiveRelation::JoinManager.new(resource_klass: self,
                                                 source_relationship: relationship,
                                                 relationships: linkage_relationships,
                                                 sort_criteria: sort_criteria,
                                                 filters: filters)
  paginator = options[:paginator]
  records = apply_request_settings_to_records(records: records_for_source_to_related(options),
                                              resource_klass: resource_klass,
                                              sort_criteria: sort_criteria,
                                              primary_keys: source_ids,
                                              paginator: paginator,
                                              filters: filters,
                                              join_manager: join_manager,
                                              options: options)
  resource_table_alias = join_manager.join_details_by_relationship(relationship)[:alias]
  # First two columns are always the source id and the related primary key.
  pluck_fields = [
    Arel.sql("#{_table_name}.#{_primary_key} AS source_id"),
    Arel.sql("#{concat_table_field(resource_table_alias, resource_klass._primary_key)} AS #{resource_table_alias}_#{resource_klass._primary_key}")
  ]
  cache_field = resource_klass.attribute_to_model_field(:_cache_field) if options[:cache]
  if cache_field
    pluck_fields << Arel.sql("#{concat_table_field(resource_table_alias, cache_field[:name])} AS #{resource_table_alias}_#{cache_field[:name]}")
  end
  linkage_fields = []
  linkage_relationships.each do |name|
    linkage_relationship = resource_klass._relationship(name)
    if linkage_relationship.polymorphic? && linkage_relationship.belongs_to?
      # One plucked key column per possible related type of the polymorphic
      # belongs_to, each joined under its own alias.
      linkage_relationship.resource_types.each do |resource_type|
        klass = resource_klass_for(resource_type)
        linkage_fields << {relationship_name: name, resource_klass: klass}
        linkage_table_alias = join_manager.join_details_by_polymorphic_relationship(linkage_relationship, resource_type)[:alias]
        primary_key = klass._primary_key
        pluck_fields << Arel.sql("#{concat_table_field(linkage_table_alias, primary_key)} AS #{linkage_table_alias}_#{primary_key}")
      end
    else
      klass = linkage_relationship.resource_klass
      linkage_fields << {relationship_name: name, resource_klass: klass}
      linkage_table_alias = join_manager.join_details_by_relationship(linkage_relationship)[:alias]
      primary_key = klass._primary_key
      pluck_fields << Arel.sql("#{concat_table_field(linkage_table_alias, primary_key)} AS #{linkage_table_alias}_#{primary_key}")
    end
  end
  model_fields = {}
  attributes = options[:attributes]
  attributes.try(:each) do |attribute|
    model_field = resource_klass.attribute_to_model_field(attribute)
    model_fields[attribute] = model_field
    pluck_fields << Arel.sql("#{concat_table_field(resource_table_alias, model_field[:name])} AS #{resource_table_alias}_#{model_field[:name]}")
  end
  # Sort fields collected by `apply_single_sort` must be selected as well.
  sort_fields = options.dig(:_relation_helper_options, :sort_fields)
  sort_fields.try(:each) do |field|
    pluck_fields << Arel.sql(field)
  end
  fragments = {}
  rows = records.distinct.pluck(*pluck_fields)
  rows.each do |row|
    rid = JSONAPI::ResourceIdentity.new(resource_klass, row[1])
    fragments[rid] ||= JSONAPI::ResourceFragment.new(rid)
    # Offset of the next positional column after source id and related id.
    attributes_offset = 2
    if cache_field
      fragments[rid].cache = cast_to_attribute_type(row[attributes_offset], cache_field[:type])
      attributes_offset+= 1
    end
    model_fields.each_with_index do |k, idx|
      fragments[rid].add_attribute(k[0], cast_to_attribute_type(row[idx + attributes_offset], k[1][:type]))
      attributes_offset+= 1
    end
    source_rid = JSONAPI::ResourceIdentity.new(self, row[0])
    fragments[rid].add_related_from(source_rid)
    linkage_fields.each do |linkage_field|
      fragments[rid].initialize_related(linkage_field[:relationship_name])
      related_id = row[attributes_offset]
      if related_id
        related_rid = JSONAPI::ResourceIdentity.new(linkage_field[:resource_klass], related_id)
        fragments[rid].add_related_identity(linkage_field[:relationship_name], related_rid)
      end
      attributes_offset+= 1
    end
    if connect_source_identity
      # Record the inverse linkage back to the source, when one is defined.
      related_relationship = resource_klass._relationships[relationship.inverse_relationship]
      if related_relationship
        fragments[rid].add_related_identity(related_relationship.name, source_rid)
      end
    end
  end
  fragments
end
# Gets resource identities where the related resource is polymorphic and the resource type and id
# are stored on the primary resources. Cache fields will always be on the related resources.
#
# Rows are plucked positionally: [source id, foreign key, polymorphic type,
# per-type cache/attribute columns..., linkage ids...]. `relation_positions`
# remembers, per concrete type, where that type's columns sit in the row.
def find_related_polymorphic_fragments(source_rids, relationship, options, connect_source_identity)
  filters = options.fetch(:filters, {})
  source_ids = source_rids.collect {|rid| rid.id}
  resource_klass = relationship.resource_klass
  include_directives = options[:include_directives] ? options[:include_directives].include_directives : {}
  linkage_relationships = []
  # Collect linkage relationships for each concrete type this polymorphic
  # relationship can point at, namespaced as "#type.relationship".
  resource_types = relationship.resource_types
  resource_types.each do |resource_type|
    related_resource_klass = resource_klass_for(resource_type)
    relationships = related_resource_klass.to_one_relationships_for_linkage(include_directives[:include_related])
    relationships.each do |r|
      linkage_relationships << "##{resource_type}.#{r}"
    end
  end
  join_manager = ActiveRelation::JoinManager.new(resource_klass: self,
                                                 source_relationship: relationship,
                                                 relationships: linkage_relationships,
                                                 filters: filters)
  paginator = options[:paginator]
  # Note: We will sort by the source table. Without using unions we can't sort on a polymorphic relationship
  # in any manner that makes sense
  records = apply_request_settings_to_records(records: records_for_source_to_related(options),
                                              resource_klass: resource_klass,
                                              sort_primary: true,
                                              primary_keys: source_ids,
                                              paginator: paginator,
                                              filters: filters,
                                              join_manager: join_manager,
                                              options: options)
  # The type/id of the related record live on the source table.
  primary_key = concat_table_field(_table_name, _primary_key)
  related_key = concat_table_field(_table_name, relationship.foreign_key)
  related_type = concat_table_field(_table_name, relationship.polymorphic_type)
  pluck_fields = [
    Arel.sql("#{primary_key} AS #{_table_name}_#{_primary_key}"),
    Arel.sql("#{related_key} AS #{_table_name}_#{relationship.foreign_key}"),
    Arel.sql("#{related_type} AS #{_table_name}_#{relationship.polymorphic_type}")
  ]
  # Get the additional fields from each relation. There's a limitation that the fields must exist in each relation
  relation_positions = {}
  relation_index = pluck_fields.length
  attributes = options.fetch(:attributes, [])
  # Add resource specific fields
  if resource_types.nil? || resource_types.length == 0
    # :nocov:
    warn "No resource types found for polymorphic relationship."
    # :nocov:
  else
    resource_types.try(:each) do |type|
      related_klass = resource_klass_for(type.to_s)
      cache_field = related_klass.attribute_to_model_field(:_cache_field) if options[:cache]
      table_alias = join_manager.source_join_details(type)[:alias]
      # Remember the positional offsets of this type's columns in the row.
      cache_offset = relation_index
      if cache_field
        pluck_fields << Arel.sql("#{concat_table_field(table_alias, cache_field[:name])} AS cache_#{type}_#{cache_field[:name]}")
        relation_index+= 1
      end
      model_fields = {}
      field_offset = relation_index
      attributes.try(:each) do |attribute|
        model_field = related_klass.attribute_to_model_field(attribute)
        model_fields[attribute] = model_field
        pluck_fields << Arel.sql("#{concat_table_field(table_alias, model_field[:name])} AS #{table_alias}_#{model_field[:name]}")
        relation_index+= 1
      end
      model_offset = relation_index
      model_fields.each do |_k, v|
        pluck_fields << Arel.sql("#{concat_table_field(table_alias, v[:name])}")
        relation_index+= 1
      end
      relation_positions[type] = {relation_klass: related_klass,
                                  cache_field: cache_field,
                                  cache_offset: cache_offset,
                                  model_fields: model_fields,
                                  model_offset: model_offset,
                                  field_offset: field_offset}
    end
  end
  # Add to_one linkage fields
  linkage_fields = []
  linkage_offset = relation_index
  linkage_relationships.each do |linkage_relationship_path|
    path = JSONAPI::Path.new(resource_klass: self,
                             path_string: "#{relationship.name}#{linkage_relationship_path}",
                             ensure_default_field: false)
    linkage_relationship = path.segments[-1].relationship
    if linkage_relationship.polymorphic? && linkage_relationship.belongs_to?
      linkage_relationship.resource_types.each do |resource_type|
        klass = resource_klass_for(resource_type)
        linkage_fields << {relationship: linkage_relationship, resource_klass: klass}
        linkage_table_alias = join_manager.join_details_by_polymorphic_relationship(linkage_relationship, resource_type)[:alias]
        primary_key = klass._primary_key
        pluck_fields << Arel.sql("#{concat_table_field(linkage_table_alias, primary_key)} AS #{linkage_table_alias}_#{primary_key}")
      end
    else
      klass = linkage_relationship.resource_klass
      linkage_fields << {relationship: linkage_relationship, resource_klass: klass}
      linkage_table_alias = join_manager.join_details_by_relationship(linkage_relationship)[:alias]
      primary_key = klass._primary_key
      pluck_fields << Arel.sql("#{concat_table_field(linkage_table_alias, primary_key)} AS #{linkage_table_alias}_#{primary_key}")
    end
  end
  rows = records.distinct.pluck(*pluck_fields)
  related_fragments = {}
  rows.each do |row|
    # row[1] = foreign key, row[2] = stored type; skip sources with no related record.
    unless row[1].nil? || row[2].nil?
      related_klass = resource_klass_for(row[2])
      rid = JSONAPI::ResourceIdentity.new(related_klass, row[1])
      related_fragments[rid] ||= JSONAPI::ResourceFragment.new(rid)
      source_rid = JSONAPI::ResourceIdentity.new(self, row[0])
      related_fragments[rid].add_related_from(source_rid)
      if connect_source_identity
        related_relationship = related_klass._relationships[relationship.inverse_relationship]
        if related_relationship
          related_fragments[rid].add_related_identity(related_relationship.name, source_rid)
        end
      end
      # `relation_positions` keys are pluralized type names.
      relation_position = relation_positions[row[2].downcase.pluralize]
      model_fields = relation_position[:model_fields]
      cache_field = relation_position[:cache_field]
      cache_offset = relation_position[:cache_offset]
      field_offset = relation_position[:field_offset]
      if cache_field
        related_fragments[rid].cache = cast_to_attribute_type(row[cache_offset], cache_field[:type])
      end
      if attributes.length > 0
        model_fields.each_with_index do |k, idx|
          related_fragments[rid].add_attribute(k[0], cast_to_attribute_type(row[idx + field_offset], k[1][:type]))
        end
      end
      linkage_fields.each_with_index do |linkage_field_details, idx|
        relationship = linkage_field_details[:relationship]
        related_fragments[rid].initialize_related(relationship.name)
        related_id = row[linkage_offset + idx]
        if related_id
          related_rid = JSONAPI::ResourceIdentity.new(linkage_field_details[:resource_klass], related_id)
          related_fragments[rid].add_related_identity(relationship.name, related_rid)
        end
      end
    end
  end
  related_fragments
end
# Applies the request's joins, primary-key scoping, filters, sorting and
# pagination to `records`, in that order. `sort_primary` forces ordering by
# the source primary key (used for polymorphic related lookups) instead of
# user sort criteria. Also seeds options[:_relation_helper_options], which
# downstream helpers (filters/sorts) use to resolve table aliases and to
# collect plucked sort fields.
def apply_request_settings_to_records(records:,
                                      join_manager: ActiveRelation::JoinManager.new(resource_klass: self),
                                      resource_klass: self,
                                      filters: {},
                                      primary_keys: nil,
                                      sort_criteria: nil,
                                      sort_primary: nil,
                                      paginator: nil,
                                      options: {})
  options[:_relation_helper_options] = { join_manager: join_manager, sort_fields: [] }
  records = resource_klass.apply_joins(records, join_manager, options)
  if primary_keys
    records = records.where(_primary_key => primary_keys)
  end
  unless filters.empty?
    records = resource_klass.filter_records(records, filters, options)
  end
  if sort_primary
    records = records.order(_primary_key => :asc)
  else
    order_options = resource_klass.construct_order_options(sort_criteria)
    records = resource_klass.sort_records(records, order_options, options)
  end
  if paginator
    # NOTE(review): when `sort_primary` is set, `order_options` is nil here, so
    # paginators receive nil order options in that path — confirm paginators
    # tolerate this.
    records = resource_klass.apply_pagination(records, paginator, order_options)
  end
  records
end
# Delegates joining of tables to the join manager, which tracks the aliases
# needed by filters and sorts. Override point for custom join behavior.
def apply_joins(records, join_manager, options)
  join_manager.join(records, options)
end
# Applies the paginator to the relation; a nil paginator is a no-op.
def apply_pagination(records, paginator, order_options)
  return records unless paginator
  paginator.apply(records, order_options)
end
# Applies each requested sort (field => direction) to the relation in order.
def apply_sort(records, order_options, options)
  return records unless order_options.any?
  order_options.each_pair.reduce(records) do |sorted, (field, direction)|
    apply_single_sort(sorted, field, direction, options)
  end
end
# Applies a single sort to the relation. A resource may declare a custom sort
# strategy (:apply) for the field; otherwise the field is resolved to its
# aliased column (when a join manager is present) and ordered via SQL. The
# resolved field is also recorded in options so pluck-based callers can add it
# to their select list.
def apply_single_sort(records, field, direction, options)
  context = options[:context]
  strategy = _allowed_sort.fetch(field.to_sym, {})[:apply]
  # Mutates the shared options hash so the collected sort fields are visible
  # to the caller (e.g. find_fragments).
  options[:_relation_helper_options] ||= {}
  options[:_relation_helper_options][:sort_fields] ||= []
  if strategy
    records = call_method_or_proc(strategy, records, direction, context)
  else
    join_manager = options.dig(:_relation_helper_options, :join_manager)
    sort_field = join_manager ? get_aliased_field(field, join_manager) : field
    options[:_relation_helper_options][:sort_fields].push("#{sort_field}")
    records = records.order(Arel.sql("#{sort_field} #{direction}"))
  end
  records
end
# Assumes ActiveRecord's counting. Override if you need a different counting method
#
# ActiveRecord 5.1+ needs `count(:all)` when the relation has a custom select
# (otherwise the selected column is counted). The previous check compared
# `Rails::VERSION::MAJOR` against `ActiveRecord::VERSION::MINOR`, which broke
# for ActiveRecord 6.0 (MINOR == 0 fell back to plain `count`). Compare the
# ActiveRecord version directly instead.
def count_records(records)
  if ActiveRecord::VERSION::MAJOR > 5 ||
     (ActiveRecord::VERSION::MAJOR == 5 && ActiveRecord::VERSION::MINOR >= 1)
    records.count(:all)
  else
    records.count
  end
end
# Applies filters to the relation. Polymorphic resources delegate filtering to
# each concrete resource class in turn; otherwise this resource filters directly.
def filter_records(records, filters, options)
  if _polymorphic
    _polymorphic_resource_klasses.reduce(records) do |filtered, klass|
      klass.apply_filters(filtered, filters, options)
    end
  else
    apply_filters(records, filters, options)
  end
end
# Builds the field => direction hash used for ordering. Sorting is not
# supported on polymorphic relationships, so that case warns and yields no
# ordering.
#
# Previously the polymorphic branch returned the result of `warn` (nil), which
# made downstream callers such as `apply_sort` (via `sort_records`) raise
# NoMethodError on `order_options.any?`. Return an empty hash instead so the
# relation is simply left unsorted.
def construct_order_options(sort_params)
  if _polymorphic
    warn "Sorting is not supported on polymorphic relationships"
    {}
  else
    super(sort_params)
  end
end
# Hook point for overriding how sorting is applied; delegates to `apply_sort`.
def sort_records(records, order_options, options)
  apply_sort(records, order_options, options)
end
# Produces a "table.field" reference (optionally double-quoted). The field is
# returned bare when no table is given or when it already contains a dot
# (i.e. it is already qualified).
def concat_table_field(table, field, quoted = false)
  field_name = field.to_s
  if table.blank? || field_name.include?('.')
    # :nocov:
    quoted ? "\"#{field_name}\"" : field_name
    # :nocov:
  else
    table_name = table.to_s
    if quoted
      # :nocov:
      "\"#{table_name}\".\"#{field_name}\""
      # :nocov:
    else
      "#{table_name}.#{field_name}"
    end
  end
end
# Applies each filter (name => value) to the relation in turn. A nil filters
# hash leaves the relation unchanged.
def apply_filters(records, filters, options = {})
  return records unless filters
  filters.reduce(records) do |filtered, (filter, value)|
    apply_filter(filtered, filter, value, options)
  end
end
# Resolves a dotted "relationship.field" path to its aliased SQL column. A
# bare field (no relationship segment) resolves against this resource's table.
def get_aliased_field(path_with_field, join_manager)
  path = JSONAPI::Path.new(resource_klass: self, path_string: path_with_field)
  field_segment = path.segments[-1]
  table_alias =
    if path.segments[-2]
      join_manager.join_details[path.last_relationship][:alias]
    else
      _table_name
    end
  concat_table_field(table_alias, field_segment.delegated_field_name)
end
# Applies one filter to the relation. A filter may declare a custom :apply
# strategy; otherwise the filter name is resolved to its (possibly aliased)
# column and matched with a WHERE clause.
def apply_filter(records, filter, value, options = {})
  strategy = _allowed_filters.fetch(filter.to_sym, Hash.new)[:apply]
  return call_method_or_proc(strategy, records, value, options) if strategy
  join_manager = options.dig(:_relation_helper_options, :join_manager)
  column = join_manager ? get_aliased_field(filter, join_manager) : filter
  records.where(Arel.sql(column) => value)
end
end
end
end
| 43.787309 | 167 | 0.627432 |
08b7009b7b3106a522a35aba6f13fdfc1547aab1 | 18,543 | RSpec.describe KikEventsService do
let!(:admin_user) { create :user }
let!(:timestamp) { Time.now.to_i * 1000 }
let!(:bot) { create :bot, provider: 'kik' }
let!(:bc1) { create :bot_collaborator, bot: bot, user: admin_user }
let!(:bot_instance) { create :bot_instance, provider: 'kik', bot: bot }
let!(:kik_client) { double(Kik) }
let!(:first_name) { Faker::Name.first_name }
let!(:last_name) { Faker::Name.last_name }
let!(:profile_pic_url) { Faker::Internet.url }
let!(:profile_pic_last_modified) { Faker::Date.between(2.days.ago, Date.today) }
def do_request
KikEventsService.new(bot_id: bot.uid, events: events).create_events!
end
before do
allow(Kik).to receive(:new).with(bot_instance.token, bot_instance.uid).and_return(kik_client)
allow(kik_client).to receive(:call).
with("user/#{kik_user_id}", :get).
and_return(firstName: first_name,
lastName: last_name,
profilePicUrl: profile_pic_url,
profilePicLastModified: profile_pic_last_modified)
end
shared_examples "associates event with custom dashboard if custom dashboards exist" do
let!(:dashboard1) { create :dashboard, bot: bot, regex: 'hello', dashboard_type: 'custom', provider: 'slack' }
let!(:dashboard2) { create :dashboard, bot: bot, regex: 'eLLo', dashboard_type: 'custom', provider: 'slack' }
let!(:dashboard3) { create :dashboard, bot: bot, regex: 'welcome', dashboard_type: 'custom', provider: 'slack' }
it 'should associate events with dashboards that match the text' do
do_request
dashboard1.reload; dashboard2.reload; dashboard3.reload
e = bot_instance.events.last
expect(dashboard1.raw_events.to_a).to eql [e]
expect(dashboard2.raw_events.to_a).to eql [e]
expect(dashboard3.raw_events.to_a).to be_empty
end
end
shared_examples "should create an event as well as create the bot users" do
it "should create an event" do
expect {
do_request
bot_instance.reload
}.to change(bot_instance.events, :count).by(2)
event = bot_instance.events.find_by(event_type: event_type)
expect(event.event_type).to eql event_type
expect(event.provider).to eql 'kik'
expect(event.user).to eql BotUser.find_by(uid: kik_user_id)
expect(event.event_attributes.slice(*required_event_attributes.keys)).to eql required_event_attributes
expect(event.text).to eql text
expect(event.created_at.to_i).to eql timestamp / 1000
expect(event.is_from_bot).to be is_from_bot
expect(event.is_im).to be is_im
expect(event.is_for_bot).to be is_for_bot
end
it "should create a new BotUser" do
expect {
do_request
bot_instance.reload
}.to change(bot_instance.users, :count).by(1)
user = bot_instance.users.last
expect(user.user_attributes['first_name']).to eql first_name
expect(user.user_attributes['last_name']).to eql last_name
expect(user.user_attributes['profile_pic_url']).to eql profile_pic_url
expect(user.user_attributes['profile_pic_last_modified']).to eql profile_pic_last_modified.to_s
expect(user.uid).to eql kik_user_id
expect(user.provider).to eql 'kik'
expect(user.membership_type).to eql 'user'
end
it 'should create a user-added event' do
expect {
do_request
bot_instance.reload
}.to change(bot_instance.events, :count).by(2)
user = bot_instance.users.last
event = bot_instance.events.find_by(event_type: 'user-added')
expect(event.user).to eql user
expect(event.provider).to eql 'kik'
end
it 'should increment bot_interaction_count if is_for_bot, otherwise do not increment' do
do_request
user = bot_instance.users.last
if is_for_bot
expect(user.bot_interaction_count).to eql 1
else
expect(user.bot_interaction_count).to eql 0
end
end
it "should set last_interacted_with_bot_at to the event's created_at timestamp if is_for_bot, otherwise don't do anything" do
do_request
user = bot_instance.users.last
event = bot_instance.events.last
if is_for_bot
expect(user.last_interacted_with_bot_at).to eql event.created_at
else
expect(user.last_interacted_with_bot_at).to be_nil
end
end
end
shared_examples "should create an event but not create any bot users" do
let!(:user) { create :bot_user, provider: 'kik', bot_instance: bot_instance, uid: kik_user_id }
it "should create an event" do
expect {
do_request
bot_instance.reload
}.to change(bot_instance.events, :count).by(1)
event = bot_instance.events.last
expect(event.event_type).to eql event_type
expect(event.provider).to eql 'kik'
expect(event.user).to eql user
expect(event.event_attributes.slice(*required_event_attributes.keys)).to eql required_event_attributes
expect(event.text).to eql text
expect(event.created_at.to_i).to eql timestamp / 1000
expect(event.is_from_bot).to be is_from_bot
expect(event.is_im).to be is_im
expect(event.is_for_bot).to be is_for_bot
end
it "should NOT create new BotUsers" do
expect {
do_request
bot_instance.reload
}.to_not change(bot_instance.users, :count)
end
it 'should increment bot_interaction_count if is_for_bot, otherwise do not increment' do
if is_for_bot
expect {
do_request
user.reload
}.to change(user, :bot_interaction_count).from(0).to(1)
else
expect {
do_request
user.reload
}.to_not change(user, :bot_interaction_count)
end
end
it "should set last_interacted_with_bot_at to the event's created_at timestamp if is_for_bot, otherwise don't do anything" do
if is_for_bot
expect {
do_request
user.reload
}.to change(user, :last_interacted_with_bot_at)
expect(user.last_interacted_with_bot_at).to eql bot_instance.events.last.created_at
else
expect {
do_request
user.reload
}.to_not change(user, :last_interacted_with_bot_at)
end
end
end
describe 'event sub_types' do
let(:kik_user_id) { "kik-user-id" }
let(:bot_user_id) { bot.uid }
let(:text) { event_text }
let(:event_type) { 'message' }
let(:is_from_bot) { false }
let(:is_for_bot) { true }
let(:is_im) { false }
let(:required_event_attributes) {
Hash["id", "id-1", "chat_id", "chat_id-1"]
}
context 'text sub_type' do
let(:event_text) { 'Hello' }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "text",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"timestamp": timestamp,
"body": text,
"mention": nil
}
]
}
let(:event_type) { 'message' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
it_behaves_like "associates event with custom dashboard if custom dashboards exist"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
it_behaves_like "associates event with custom dashboard if custom dashboards exist"
end
end
context 'link sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "link",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"timestamp": timestamp,
"url": Faker::Internet.url,
"attribution": {
"name": "name",
"iconUrl": Faker::Avatar.image("my-own-slug")
},
"noForward": true,
"readReceiptRequested": true,
"mention": nil
}
]
}
let(:event_type) { 'message:link-uploaded' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'picture sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "picture",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"picUrl": "http://example.kik.com/apicture.jpg",
"timestamp": timestamp,
"readReceiptRequested": true,
"attribution": {
"name": "A Title",
"iconUrl": "http://example.kik.com/anicon.png"
},
"mention": nil
}
]
}
let(:event_type) { 'message:image-uploaded' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'video sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "video",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"timestamp": timestamp,
"readReceiptRequested": true,
"videoUrl": "http://example.kik.com/video.mp4",
"attribution": {
"name": "A Title",
"iconUrl": "http://example.kik.com/anicon.png"
},
"mention": nil
}
]
}
let(:event_type) { 'message:video-uploaded' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'start-chatting sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "start-chatting",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"timestamp": timestamp,
"readReceiptRequested": false,
"mention": nil
}
]
}
let(:event_type) { 'message:start-chatting' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'scan-data sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "scan-data",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"timestamp": timestamp,
"data": "{\"store_id\": \"2538\"}",
"mention": nil
}
]
}
let(:event_type) { 'message:scanned-data' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'sticker sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "sticker",
"id": required_event_attributes['id'],
"timestamp": timestamp,
"from": kik_user_id,
"participants": [kik_user_id],
"stickerPackId": "memes",
"stickerUrl": "http://cards-sticker-dev.herokuapp.com/stickers/memes/okay.png",
"readReceiptRequested": true,
"mention": nil
}
]
}
let(:event_type) { 'message:sticker-uploaded' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'is-typing sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"id": required_event_attributes['id'],
"type": "is-typing",
"from": kik_user_id,
"participants": [kik_user_id],
"timestamp": timestamp,
"isTyping": false,
"readReceiptRequested": false,
"mention": nil
}
]
}
let(:event_type) { 'message:is-typing' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
context 'friend-picker sub_type' do
let(:event_text) { nil }
let(:events) {
[
{
"chatId": required_event_attributes['chat_id'],
"type": "friend-picker",
"from": kik_user_id,
"participants": [kik_user_id],
"id": required_event_attributes['id'],
"picked": ["aleem"],
"timestamp": timestamp,
"readReceiptRequested": true,
"mention": nil
}
]
}
let(:event_type) { 'message:friend-picker-chosen' }
context "bot user exists" do
it_behaves_like "should create an event as well as create the bot users"
end
context "bot user does not exist" do
it_behaves_like "should create an event but not create any bot users"
end
end
end
describe '"delivery-receipt" event' do
let(:kik_user_id) { "kik-user-id" }
let(:bot_user_id) { bot.uid }
let!(:user) { create :bot_user, bot_instance: bot_instance, provider: 'kik' }
let!(:first_name) { Faker::Name.first_name }
let!(:last_name) { Faker::Name.last_name }
let!(:profile_pic_url) { Faker::Internet.url }
let!(:profile_pic_last_modified) { Faker::Date.between(2.days.ago, Date.today) }
let!(:e1) do
create :event, user: user, bot_instance: bot_instance, event_type: 'message', provider: 'kik',
event_attributes: { id: "id-1", chat_id: "chat_id-1", sub_type: 'text' }
end
let!(:e2) do
create :event, user: user, bot_instance: bot_instance, event_type: 'message', provider: 'kik',
event_attributes: { id: "id-2", chat_id: "chat_id-1", sub_type: 'text' }
end
let!(:e3) do
create :event, user: user, bot_instance: bot_instance, event_type: 'message', provider: 'kik',
event_attributes: { id: "id-3", chat_id: "chat_id-1", sub_type: 'text' }
end
let(:events) {
[
{
chatId: "chat_id",
type: "delivery-receipt",
from: kik_user_id,
participants: [kik_user_id],
id: "id",
messageIds: ["id-1", "id-2"],
timestamp: 1399303478832,
readReceiptRequested: false,
mention: nil
}
]
}
it "should update the 'has_been_delivered' value for all of the events that belong to the bot_instance to 'true'" do
skip "disabled until faster method to update events is found"
do_request
expect(e1.reload.has_been_delivered).to be true
expect(e2.reload.has_been_delivered).to be true
expect(e3.reload.has_been_delivered).to be false
end
end
describe '"read-receipt" event' do
let(:kik_user_id) { "kik-user-id" }
let(:bot_user_id) { bot.uid }
let!(:user) { create :bot_user, bot_instance: bot_instance, provider: 'kik' }
let!(:first_name) { Faker::Name.first_name }
let!(:last_name) { Faker::Name.last_name }
let!(:profile_pic_url) { Faker::Internet.url }
let!(:profile_pic_last_modified) { Faker::Date.between(2.days.ago, Date.today) }
let!(:e1) do
create :event, user: user, bot_instance: bot_instance, event_type: 'message', provider: 'kik',
event_attributes: { id: "id-1", chat_id: "chat_id-1", sub_type: 'text' }
end
let!(:e2) do
create :event, user: user, bot_instance: bot_instance, event_type: 'message', provider: 'kik',
event_attributes: { id: "id-2", chat_id: "chat_id-1", sub_type: 'text' }
end
let!(:e3) do
create :event, user: user, bot_instance: bot_instance, event_type: 'message', provider: 'kik',
event_attributes: { id: "id-3", chat_id: "chat_id-1", sub_type: 'text' }
end
let(:events) {
[
{
chatId: "chat_id",
type: "read-receipt",
from: kik_user_id,
participants: [kik_user_id],
id: "id",
messageIds: ["id-1", "id-2"],
timestamp: 1399303478832,
readReceiptRequested: false,
mention: nil
}
]
}
it "should update the 'has_been_read' value for all of the events that belong to the bot_instance to 'true'" do
skip "disabled until faster method to update events is found"
do_request
expect(e1.reload.has_been_read).to be true
expect(e2.reload.has_been_read).to be true
expect(e3.reload.has_been_read).to be false
end
end
end
| 33.899452 | 129 | 0.592569 |
ac7742a9afff866727e274f318935374786cc1e8 | 621 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
class Php73Lz4 < AbstractPhp73Extension
init
desc "Handles LZ4 de/compression"
homepage "https://github.com/kjdev/php-ext-lz4"
url "https://github.com/kjdev/php-ext-lz4/archive/0.3.5.tar.gz"
sha256 "fcea0792f22e337950682129e72ba07c1fd0b3f72ceae2f5691059def3663b55"
head "https://github.com/kjdev/php-ext-lz4.git"
revision 1
def install
safe_phpize
system "./configure", "--prefix=#{prefix}", phpconfig
system "make"
prefix.install "modules/lz4.so"
write_config_file if build.with? "config-file"
end
end
| 28.227273 | 75 | 0.7343 |
b900c34372d25b2eafe41cc1242a3332bff9fcf0 | 413 | cask :v1 => 'mediabrowser-server' do
version :latest
sha256 :no_check
# github.com is the official download host per the vendor homepage
url 'https://github.com/MediaBrowser/MediaBrowser.Releases/raw/master/Server/MediaBrowser.Server.Mac.pkg'
homepage 'http://mediabrowser.tv/'
license :gpl
pkg 'MediaBrowser.Server.Mac.pkg'
uninstall :pkgutil => 'com.MediaBrowser.MediaBrowser.Server.Mac'
end
| 29.5 | 107 | 0.757869 |
e2438834c92944955d0b8e2ea8a40ce95f7aa13d | 639 | require 'rails_helper'
RSpec.describe LockGridCells do
let(:values) { [1, nil] }
let(:cells) { values.map { |value| Cell.new(value: value) } }
let(:sample_grid) { Grid.new(cells: cells) }
let(:service) { LockGridCells.new(sample_grid) }
let(:new_grid) { service.call }
let(:locked_cell) { new_grid.cells.first }
let(:unlocked_cell) { new_grid.cells.last }
it "constructs an entirely new grid" do
expect(new_grid).to_not be sample_grid
end
it "locks cells" do
expect(locked_cell.locked).to be true
end
it "doesn't lock nil cells" do
expect(unlocked_cell.locked).to be false
end
end
| 24.576923 | 69 | 0.674491 |
6a70b743997e136897f9f5e65b266454b9421927 | 145 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_monitoria_app_session'
| 36.25 | 83 | 0.813793 |
26d9c31b4d276620d8501b215cbd51f814f84a44 | 4,055 | class ApplicationController < ActionController::Base
protect_from_forgery with: :exception, unless: :devise_token_controller
before_action :configure_permitted_devise_parameters, if: :devise_controller?
before_action :store_current_location, unless: -> { devise_controller? || !request.format.html? }
before_action :set_locale
before_action :prepare_meta_tags, if: -> { request.get? }
before_action :verify_terms_version, if: -> { !request.get? }
helper_method :alert_update, :alert_create, :alert_destroy,
:can_administrate?, :authorize_admin!
include Alerts
include InstanceAuthorization
extend ControllerAuthorization
rescue_from CanCan::AccessDenied do |ex|
if current_user.nil?
redirect_to(new_user_session_path, alert: ex.message)
else
redirect_to :root, alert: ex.message
end
end
rescue_from ActiveRecord::RecordInvalid do |ex|
flash[:alert] = ex.record.errors.full_messages.join '; '
render referring_action, status: :unprocessable_entity
end
rescue_from ActiveRecord::RecordNotFound do
# translate record not found -> HTTP 404
fail ActionController::RoutingError.new 'not found'
end
protected
def prepare_meta_tags(options = {})
site_name = I18n.t('global.title')
description = options[:description] || I18n.t('global.description')
image = options[:image] || view_context.image_url('sektionsmarke.png')
current_url = request.url
defaults = {
site: site_name,
image: image,
description: description,
keywords: I18n.t('global.keywords'),
twitter: {
site_name: site_name,
site: '@fsektionen',
card: 'summary',
description: description,
image: image
},
og: {
url: current_url,
site_name: site_name,
image: image,
description: description,
type: 'website',
locale: I18n.locale.to_s
}
}
options.reverse_merge!(defaults)
set_meta_tags(options)
end
def configure_permitted_devise_parameters
devise_parameter_sanitizer.permit(:sign_in, keys: [:email, :password, :remember_me])
devise_parameter_sanitizer.permit(:sign_up, keys: [:firstname, :lastname, :email,
:password, :password_confirmation])
devise_parameter_sanitizer.permit(:account_update, keys: [:password, :current_password,
:password_confirmation])
end
def set_locale
I18n.locale = params[:locale] || I18n.default_locale
Rails.application.routes.default_url_options[:locale] = I18n.locale
end
def referring_action
Rails.application.routes.recognize_path(request.referer)[:action]
end
# Makes redirect after sign_in work
def store_current_location
store_location_for(:user, request.url)
end
def recache_menu
I18n.available_locales.each do |loc|
expire_fragment("main_menu/#{loc}")
end
end
# Ignore session cookies when we want to sign in with devise token auth!
def devise_token_controller
params[:controller].split('/')[0] == 'devise_token_auth'
end
# Adds pagination meta data (from kaminari) to a serializer
# Usage: `render json: @collection, meta: pagination_dict(collection)`
def pagination_meta(collection)
{
current_page: collection.current_page,
next_page: collection.next_page,
prev_page: collection.prev_page,
total_pages: collection.total_pages,
total_count: collection.total_count
}
end
# Make sure the user terms are accepted before POST/PATCH/PUT/DELETE are alllowed
# We must still allow the user controllers so that the terms can be accepted and logouts allowed
# Get requests are handled in `applications.html`
def verify_terms_version
if current_user.present? && current_user.terms_version != Versions.get(:terms)
unless controller_name == 'users' || devise_controller?
redirect_to root_path
end
end
end
end
| 32.701613 | 99 | 0.691245 |
219e880647e52e938a570f42b28730151ed8493a | 1,589 | require 'devise'
require 'rack/oauth2'
require 'koala'
require 'devise_oauth2_providable'
require 'devise/oauth2_facebook_grantable/strategies/facebook_grant_type'
require 'devise/oauth2_facebook_grantable/models/oauth2_facebook_grantable'
module Devise
module Oauth2ProvidableFacebook
def self.logger
@@logger
end
def self.logger=(logger)
@@logger = logger
end
def self.debugging?
@@debugging
end
def self.debugging=(boolean)
@@debugging = boolean
end
def self.facebook_user_for_token(token)
begin
@@logger.error("Oauth2FacebookGrantable => Getting information from user token: #{token}")
@graph = Koala::Facebook::API.new(token)
return @graph.get_object("me")
rescue => e
@@logger.error("Oauth2FacebookGrantable => Could not authenticate with token: #{e}")
return false
end
end
class Railties < ::Rails::Railtie
initializer 'Rails logger' do
Devise::Oauth2ProvidableFacebook.logger = Rails.logger
end
end
class Engine < Rails::Engine
engine_name 'oauth2_facebook_grantable'
isolate_namespace Devise::Oauth2ProvidableFacebook
initializer "oauth2_facebook_grantable.initialize_application", :before=> :load_config_initializers do |app|
app.config.filter_parameters << :facebook_access_token
end
end
end
end
Devise.add_module(:oauth2_facebook_grantable,
:strategy => true,
:model => "devise/oauth2_facebook_grantable/models/oauth2_facebook_grantable")
| 28.375 | 114 | 0.696035 |
01ca4c9ad5de45f9c4f698ff8b8a756ef12dec44 | 1,575 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require_dependency 'roles_helper'
module OpenProject::GlobalRoles::Patches
module RolesHelperPatch
def self.included(base)
base.class_eval do
def permissions_id(permissions)
'permissions_' + permissions[0].hash.to_s
end
end
end
end
end
RolesHelper.send(:include, OpenProject::GlobalRoles::Patches::RolesHelperPatch)
| 35.795455 | 91 | 0.753016 |
26195e90762aa470068f4c888f4a7915659d0925 | 1,887 | # encoding: utf-8
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require '../azure_mgmt_event_grid/lib/module_definition'
require '../azure_mgmt_event_grid/lib/version'
Gem::Specification.new do |spec|
spec.name = 'azure_mgmt_event_grid'
spec.version = Azure::EventGrid::Mgmt::VERSION
spec.authors = 'Microsoft Corporation'
spec.email = '[email protected]'
spec.description = 'Microsoft Azure Event Grid Services Library for Ruby'
spec.summary = 'Official Ruby client library to consume Microsoft Azure Event Grid.'
spec.homepage = 'https://aka.ms/azure-sdk-for-ruby'
spec.license = 'MIT'
spec.metadata = {
'bug_tracker_uri' => 'https://github.com/Azure/azure-sdk-for-ruby/issues',
'changelog_uri' => 'https://github.com/Azure/azure-sdk-for-ruby/blob/master/ChangeLog.md',
'documentation_uri' => 'https://azure.microsoft.com/en-us/develop/ruby/',
'homepage_uri' => 'https://aka.ms/azure-sdk-for-ruby',
'source_code_uri' => 'https://github.com/Azure/azure-sdk-for-ruby/tree/master/management/azure_mgmt_event_grid'
}
spec.files = Dir["LICENSE.txt", "lib/**/*"]
spec.files.reject! { |fn| fn.include? "build.json" }
spec.bindir = 'bin'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.required_ruby_version = '>= 2.0.0'
spec.add_development_dependency 'bundler', '~> 2.2.10'
spec.add_development_dependency 'rake', '~> 10'
spec.add_development_dependency 'rspec', '~> 3'
spec.add_development_dependency 'dotenv', '~> 2'
spec.add_runtime_dependency 'ms_rest_azure', '~> 0.12.0'
end
| 43.883721 | 115 | 0.691044 |
792354c9e70d0c4f1f85ac155c560d0508a7fdb5 | 7,103 | # frozen_string_literal: true
# title: Export plugin example
# description: Speak the most recent entry (macOS)
# author: Brett Terpstra
# url: https://brettterpstra.com
# Example
#
# doing show -o sayit
#
# ## Configuration
#
# Change what the plugin says by generating a template with
# `doing template --type say`, saving it to a file, and
# putting the path to that file in `export_templates->say` in
# config.yml.
#
# export_templates:
# say: /path/to/template.txt
#
# Use a different voice by adding a `say_voice` key to your
# config.yml. Use `say -v ?` to see available voices.
#
# say_voice: Zarvox
module Doing
##
## Plugin class
##
class SayExport
include Doing::Util
#-------------------------------------------------------
## Plugin Settings. A plugin must have a self.settings
## method that returns a hash with plugin settings.
##
## trigger: (required) Regular expression to match
## FORMAT when used with `--output FORMAT`. Registered
## name of plugin must be able to match the trigger, but
## alternatives can be included
##
## templates: (optional) Array of templates this plugin
## can export (plugin must have :template method)
##
## Each template is a hash containing:
## - name: display name for template
## - trigger: regular expression for
## `template --type FORMAT`
## - format: a descriptor of the file format (erb, haml, stylus, etc.)
## - filename: a default filename used when the template is written to disk
##
## If a template is included, a config key will
## automatically be added for the user to override
## The config key will be available at:
##
## wwid.config['export_templates'][PLUGIN_NAME]
##
## config: (optional) A Hash which will be
## added to the main configuration in the plugins section.
## Options defined here are included when config file is
## created or updated with `config --update`. Use this to
## add new configuration keys, not to override existing
## ones.
##
## The configuration keys will be available at:
##
## wwid.config['plugins'][PLUGIN_NAME][KEY]
##
## Method to return plugin settings (required)
##
## @return Hash of settings for this plugin
##
def self.settings
{
trigger: 'say(?:it)?',
templates: [
{ name: 'say', trigger: 'say(?:it)?', format: 'text', filename: 'say.txt' }
],
config: {
'say_voice' => 'Fiona'
}
}
end
#-------------------------------------------------------
## Output a template. Only required if template(s) are
## included in settings. The method should return a
## string (not output it to the STDOUT).
##
## Method to return template (optional)
##
## @param trigger The trigger passed to the
## template function. When this
## method defines multiple
## templates, the trigger can be
## used to determine which one is
## output.
##
## @return [String] template contents
##
def self.template(trigger)
return unless trigger =~ /^say(it)?$/
'On %date, you were %title, recorded in section %section%took'
end
##
## Render data received from an output
## command
##
## @param wwid The wwid object with config
## and public methods
## @param items An array of items to be output
## { <Date>date, <String>title,
## <String>section, <Array>note }
## @param variables Additional variables including
## flags passed to command
## (variables[:options])
##
## @return [String] Rendered output
##
def self.render(wwid, items, variables: {})
return if items.nil? || items.empty?
# the :options key includes the flags passed to the
# command that called the plugin use `puts
# variables.inspect` to see properties and methods
# when run
opt = variables[:options]
# This plugin just grabs the last item in the `items`
# list (which could be the oldest or newest, depending
# on the sort order of the command that called the
# plugin). Most of the time you'll want to use :each
# or :map to generate output.
i = items[-1]
# Format the item. Items are an object with 4 methods:
# date, title, section (parent section), and note.
# Start time is in item.date. The wwid object has some
# methods for calculation and formatting, including
# wwid.item.end_date to convert the @done timestamp to
# an end date.
if opt[:times]
interval = i.interval
if interval
took = '. You finished on '
finished_at = i.end_date
took += finished_at.strftime('%A %B %e at %I:%M%p')
d, h, m = wwid.format_time(interval)
took += ' and it took'
took += " #{d.to_i} days" if d.to_i.positive?
took += " #{h.to_i} hours" if h.to_i.positive?
took += " #{m.to_i} minutes" if m.to_i.positive?
end
end
date = i.date.strftime('%A %B %e at %I:%M%p')
title = i.title.gsub(/@/, 'hashtag ')
tpl = template('say')
if wwid.config['export_templates'].key?('say')
cfg_tpl = wwid.config['export_templates']['say']
tpl = cfg_tpl unless cfg_tpl.nil? || cfg_tpl.empty?
end
output = tpl.dup
output.gsub!(/%date/, date)
output.gsub!(/%title/, title)
output.gsub!(/%section/, i.section)
output.gsub!(/%took/, took || '')
# Debugging output
# warn "Saying: #{output}"
# To provide results on the command line after the
# command runs, use Doing.logger, which responds to
# :debug, :info, :warn, and :error. e.g.:
#
# Doing.logger.info("This plugin has run")
# Doing.logger.error("This message will be displayed even if run in --quiet mode.")
#
# Results are
# provided on STDERR unless doing is run with
# `--stdout` or non-interactively.
Doing.logger.info('Spoke the last entry. Did you hear it?')
# This export runs a command for fun, most plugins won't
voice = wwid.config['plugins']['say']['say_voice'] || 'Alex'
`say -v "#{voice}" "#{output}"`
# Return the result (don't output to terminal with puts or print)
output
end
# Register the plugin with doing.
# Doing::Plugins.register 'NAME', TYPE, Class
#
# Name should be lowercase, no spaces
#
# TYPE is :import or :export
#
# Class is the plugin class (e.g. Doing::SayExport), or
# self if called within the class
Doing::Plugins.register 'say', :export, self
end
end
| 33.504717 | 93 | 0.571871 |
03561e7382863803c414c3413fe26aa0487622a8 | 5,147 | # frozen_string_literal: true
class Projects::FeatureFlagsController < Projects::ApplicationController
  respond_to :html

  # Authorization: every action requires read access; the mutating actions
  # additionally require their matching fine-grained permission.
  before_action :authorize_read_feature_flag!
  before_action :authorize_create_feature_flag!, only: [:new, :create]
  before_action :authorize_update_feature_flag!, only: [:edit, :update]
  before_action :authorize_destroy_feature_flag!, only: [:destroy]
  # Memoize the flag for member actions before the writability check runs.
  before_action :feature_flag, only: [:edit, :update, :destroy]
  before_action :ensure_flag_writable!, only: [:update]
  before_action do
    push_frontend_feature_flag(:feature_flag_permissions)
  end

  feature_category :feature_flags

  # GET index — paginated list (30 per page), optionally filtered by the
  # `scope` param; JSON responses carry a 10s frontend polling interval.
  def index
    @feature_flags = FeatureFlagsFinder
      .new(project, current_user, scope: params[:scope])
      .execute
      .page(params[:page])
      .per(30)

    respond_to do |format|
      format.html
      format.json do
        Gitlab::PollingInterval.set_header(response, interval: 10_000)
        render json: { feature_flags: feature_flags_json }.merge(summary_json)
      end
    end
  end

  def new
  end

  # GET show — JSON only; also sets the 10s polling header.
  def show
    respond_to do |format|
      format.json do
        Gitlab::PollingInterval.set_header(response, interval: 10_000)
        render_success_json(feature_flag)
      end
    end
  end

  # POST create — delegates to CreateService and renders either the created
  # flag or the service's error messages as JSON.
  def create
    result = FeatureFlags::CreateService.new(project, current_user, create_params).execute

    if result[:status] == :success
      respond_to do |format|
        format.json { render_success_json(result[:feature_flag]) }
      end
    else
      respond_to do |format|
        format.json { render_error_json(result[:message]) }
      end
    end
  end

  def edit
    exclude_legacy_flags_check
  end

  # PUT/PATCH update — delegates to UpdateService; on failure the HTTP
  # status comes from the service result.
  def update
    result = FeatureFlags::UpdateService.new(project, current_user, update_params).execute(feature_flag)

    if result[:status] == :success
      respond_to do |format|
        format.json { render_success_json(result[:feature_flag]) }
      end
    else
      respond_to do |format|
        format.json { render_error_json(result[:message], result[:http_status]) }
      end
    end
  end

  # DELETE destroy — delegates to DestroyService; HTML redirects back to the
  # index with a flash message, JSON mirrors the service outcome.
  def destroy
    result = FeatureFlags::DestroyService.new(project, current_user).execute(feature_flag)

    if result[:status] == :success
      respond_to do |format|
        format.html { redirect_to_index(notice: _('Feature flag was successfully removed.')) }
        format.json { render_success_json(feature_flag) }
      end
    else
      respond_to do |format|
        format.html { redirect_to_index(alert: _('Feature flag was not removed.')) }
        format.json { render_error_json(result[:message]) }
      end
    end
  end

  protected

  # Looks up the flag by IID (raising RecordNotFound when missing), memoizes
  # it, and also exposes it as @noteable.
  def feature_flag
    @feature_flag ||= @noteable = project.operations_feature_flags.find_by_iid!(params[:iid])
  end

  # Legacy flags are read-only: reject updates to them with an error body.
  def ensure_flag_writable!
    if feature_flag.legacy_flag?
      render_error_json(['Legacy feature flags are read-only'])
    end
  end

  # Strong parameters for create, including nested scope/strategy attributes.
  def create_params
    params.require(:operations_feature_flag)
          .permit(:name, :description, :active, :version,
                  scopes_attributes: [:environment_scope, :active,
                                      strategies: [:name, parameters: [:groupId, :percentage, :userIds]]],
                  strategies_attributes: [:name, :user_list_id,
                                          parameters: [:groupId, :percentage, :userIds, :rollout, :stickiness],
                                          scopes_attributes: [:environment_scope]])
  end

  # Strong parameters for update; nested records additionally carry :id and
  # :_destroy so existing scopes/strategies can be modified or removed.
  def update_params
    params.require(:operations_feature_flag)
          .permit(:name, :description, :active,
                  scopes_attributes: [:id, :environment_scope, :active, :_destroy,
                                      strategies: [:name, parameters: [:groupId, :percentage, :userIds]]],
                  strategies_attributes: [:id, :name, :user_list_id, :_destroy,
                                          parameters: [:groupId, :percentage, :userIds, :rollout, :stickiness],
                                          scopes_attributes: [:id, :environment_scope, :_destroy]])
  end

  # Serializes a single flag.
  def feature_flag_json(feature_flag)
    FeatureFlagSerializer
      .new(project: @project, current_user: @current_user)
      .represent(feature_flag)
  end

  # Serializes the current page of flags, adding pagination headers.
  def feature_flags_json
    FeatureFlagSerializer
      .new(project: @project, current_user: @current_user)
      .with_pagination(request, response)
      .represent(@feature_flags)
  end

  # Project-level summary payload merged into the index JSON response.
  def summary_json
    FeatureFlagSummarySerializer
      .new(project: @project, current_user: @current_user)
      .represent(@project)
  end

  def redirect_to_index(**args)
    redirect_to project_feature_flags_path(@project), status: :found, **args
  end

  def render_success_json(feature_flag)
    render json: feature_flag_json(feature_flag), status: :ok
  end

  def render_error_json(messages, status = :bad_request)
    render json: { message: messages },
           status: status
  end

  # Returns 404 for legacy flags once the remove_legacy_flags rollout (and
  # not its override) applies to this project.
  def exclude_legacy_flags_check
    if Feature.enabled?(:remove_legacy_flags, project, default_enabled: :yaml) &&
        Feature.disabled?(:remove_legacy_flags_override, project, default_enabled: :yaml) &&
        feature_flag.legacy_flag?
      not_found
    end
  end
end
| 30.099415 | 110 | 0.668156 |
33f86f36c621bfc2e5abe432782dec16b4c30f48 | 1,614 | class Dosbox < Formula
# Formula metadata: stable release tarball plus checksum and license.
desc "DOS Emulator"
homepage "https://www.dosbox.com/"
url "https://downloads.sourceforge.net/project/dosbox/dosbox/0.74-3/dosbox-0.74-3.tar.gz"
sha256 "c0d13dd7ed2ed363b68de615475781e891cd582e8162b5c3669137502222260a"
license "GPL-2.0"

# Pre-built bottles per platform/OS version.
bottle do
  sha256 cellar: :any, arm64_big_sur: "999bf1d034d6cd7eae80c5439fc07bd5681ccc315edd872872050adcf76dffc7"
  sha256 cellar: :any, big_sur: "7adbfaa213d56b44eb98645794f954e298dda776f37d5106e40c563704f1a7ab"
  sha256 cellar: :any, catalina: "b204c9a07dce5bf4f476c9912f177481a69e8843045ab19d01f3e016d875dceb"
  sha256 cellar: :any, mojave: "de46ee6c3c638829ba3b9dc3ee009811d26a19359d10804b9ff93706df2a6863"
  sha256 cellar: :any, high_sierra: "66b1b073b1ae7db629c64f66249254aefcb8fb6585c065c858a364bd258785d4"
  sha256 cellar: :any, sierra: "3bd2c41c7f76e214c0964acec02723d2a2a611eca92cf5edb93c029333a78adf"
  sha256 cellar: :any, x86_64_linux: "e39aea7e583b6ec129959c2bba3aa75d70a80ce21820bfc54bd15a57b106197e" # linuxbrew-core
end

# HEAD builds come from SVN trunk and need autotools to regenerate configure.
head do
  url "https://svn.code.sf.net/p/dosbox/code-0/dosbox/trunk"

  depends_on "autoconf" => :build
  depends_on "automake" => :build
end

# Runtime dependencies: SDL 1.x stack plus libpng.
depends_on "libpng"
depends_on "sdl"
depends_on "sdl_net"
depends_on "sdl_sound"
# Configure-and-make install. --disable-sdltest skips SDL's sanity program
# (which would need a display) and --enable-core-inline selects the faster
# inlined CPU core. HEAD checkouts must regenerate ./configure first.
def install
  configure_args = [
    "--prefix=#{prefix}",
    "--disable-dependency-tracking",
    "--disable-sdltest",
    "--enable-core-inline",
  ]

  system "./autogen.sh" if build.head?
  system "./configure", *configure_args
  system "make", "install"
end
# Smoke test: the installed binary responds to -version.
test do
  system "#{bin}/dosbox", "-version"
end
end
| 35.086957 | 123 | 0.739157 |
bf5842d937e7ec2f6601345f9843ffabf10eea3e | 718 | require 'net/smtp'
# The Net::HTTP* exception classes referenced below live in net/http, which
# was previously assumed to be loaded elsewhere; require it explicitly.
require 'net/http'

# Commonly-rescued exception groups, e.g.:
#
#   begin
#     some http call
#   rescue *HTTP_ERRORS => error
#     notify_hoptoad error
#   end
#
# All lists are frozen so they cannot be mutated at runtime.

# Errors raised by Net::HTTP and the underlying socket layer.
HTTP_ERRORS = [Timeout::Error,
               Errno::EINVAL,
               Errno::ECONNRESET,
               EOFError,
               Net::HTTPBadResponse,
               Net::HTTPHeaderSyntaxError,
               Net::ProtocolError].freeze

# Errors indicating the SMTP server failed, timed out or rejected auth.
SMTP_SERVER_ERRORS = [Timeout::Error,
                      IOError,
                      Net::SMTPUnknownError,
                      Net::SMTPServerBusy,
                      Net::SMTPAuthenticationError].freeze

# Errors indicating the SMTP client sent an invalid request.
SMTP_CLIENT_ERRORS = [Net::SMTPFatalError,
                      Net::SMTPSyntaxError].freeze

# Union of all SMTP error classes.
SMTP_ERRORS = (SMTP_SERVER_ERRORS + SMTP_CLIENT_ERRORS).freeze
081f9deae5cfcd91a1a3566ee0a24fdd9f668fe4 | 3,460 | #
# Copyright:: Copyright 2015-2016, Chef Software, Inc
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
# Specs for the manifest-driven knife subcommand loader: listing, file
# enumeration, loading, and fuzzy subcommand-name resolution.
describe Chef::Knife::SubcommandLoader::HashedCommandLoader do
  before do
    allow(ChefConfig).to receive(:windows?) { false }
  end

  # A minimal plugin manifest with two plugins in two categories.
  let(:plugin_manifest) {
    {
      "_autogenerated_command_paths" => {
        "plugins_paths" => {
          "cool_a" => ["/file/for/plugin/a"],
          "cooler_b" => ["/file/for/plugin/b"],
        },
        "plugins_by_category" => {
          "cool" => [
            "cool_a",
          ],
          "cooler" => [
            "cooler_b",
          ],
        },
      },
    }
  }

  let(:loader) { Chef::Knife::SubcommandLoader::HashedCommandLoader.new(
    File.join(CHEF_SPEC_DATA, "knife-site-subcommands"),
    plugin_manifest)}

  describe "#list_commands" do
    before do
      # Pretend every plugin file in the manifest exists on disk.
      allow(File).to receive(:exists?).and_return(true)
    end

    it "lists all commands by category when no argument is given" do
      expect(loader.list_commands).to eq({ "cool" => ["cool_a"], "cooler" => ["cooler_b"] })
    end

    it "lists only commands in the given category when a category is given" do
      expect(loader.list_commands("cool")).to eq({ "cool" => ["cool_a"] })
    end

    context "when the plugin path is invalid" do
      before do
        expect(File).to receive(:exists?).with("/file/for/plugin/b").and_return(false)
      end

      # Missing files are logged and the command list comes back empty.
      it "lists all commands by category when no argument is given" do
        expect(Chef::Log).to receive(:error).with(/There are files specified in the manifest that are missing/)
        expect(Chef::Log).to receive(:error).with("Missing files:\n\t/file/for/plugin/b")
        expect(loader.list_commands).to eq({})
      end
    end
  end

  describe "#subcommand_files" do
    it "lists all the files" do
      expect(loader.subcommand_files).to eq(["/file/for/plugin/a", "/file/for/plugin/b"])
    end
  end

  describe "#load_commands" do
    before do
      allow(Kernel).to receive(:load).and_return(true)
    end

    it "returns false for non-existant commands" do
      expect(loader.load_command(["nothere"])).to eq(false)
    end

    it "loads the correct file and returns true if the command exists" do
      allow(File).to receive(:exists?).and_return(true)
      expect(Kernel).to receive(:load).with("/file/for/plugin/a").and_return(true)
      expect(loader.load_command(["cool_a"])).to eq(true)
    end
  end

  describe "#subcommand_for_args" do
    it "returns the subcommands for an exact match" do
      expect(loader.subcommand_for_args(["cooler_b"])).to eq("cooler_b")
    end

    # "cooler b" should resolve to "cooler_b" even without the underscore.
    it "finds the right subcommand even when _'s are elided" do
      expect(loader.subcommand_for_args(%w{cooler b})).to eq("cooler_b")
    end

    it "returns nil if the the subcommand isn't in our manifest" do
      expect(loader.subcommand_for_args(["cooler c"])).to eq(nil)
    end
  end
end
e2b5c3ec8672311ad1ebf869f2da7512cadb1dbe | 2,198 | # frozen_string_literal: true
# Copyright, 2021, by Samuel G. D. Williams. <http://www.codeotaku.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'traces'
# Minimal base fixture for the tracing specs: #my_method is an identity
# method that returns its argument unchanged.
class MyClass
  def my_method(argument)
    return argument
  end
end
# Subclass fixture: #my_method doubles whatever the parent returns, and
# #my_other_method (only defined here) echoes its argument.
class MySubClass < MyClass
  def my_method(argument)
    parent_result = super
    parent_result * 2
  end

  def my_other_method(argument)
    return argument
  end
end
# Instruments MyClass#my_method: each call is recorded as a 'my_method'
# trace carrying the argument as an attribute, then delegates via super.
Traces::Provider(MyClass) do
	def my_method(argument)
		trace('my_method', attributes: {argument: argument}) {super}
	end
end
# Instruments only #my_other_method on the subclass; #my_method keeps the
# wrapper it inherits from MyClass's provider above.
Traces::Provider(MySubClass) do
	def my_other_method(argument)
		trace('my_other_method', attributes: {argument: argument}) {super}
	end
end
RSpec.describe Traces do
	it "has a version number" do
		expect(Traces::VERSION).not_to be nil
	end

	# The provider wraps the method: trace is invoked and the original
	# behaviour (identity) is preserved.
	describe MyClass do
		it "can invoke trace wrapper" do
			expect(subject).to receive(:trace).and_call_original
			expect(subject.my_method(10)).to be == 10
		end
	end

	describe MySubClass do
		# The inherited wrapper still fires, and super chaining doubles 10 -> 20.
		it "can invoke trace wrapper" do
			expect(subject).to receive(:trace).and_call_original
			expect(subject.my_method(10)).to be == 20
		end

		# my_other_method is only instrumented/defined on the subclass.
		it "does not affect the base class" do
			expect(MyClass.new).to_not respond_to(:my_other_method)
		end
	end
end
| 28.179487 | 79 | 0.754322 |
1a1588641d667896fc380a76bd6a272de51dea18 | 2,221 | require_relative '../lib/validator.rb'
# Specs for Validator's user-input predicates. Each validator returns a
# plain true/false and treats the empty string as invalid.
describe Validator do
  let(:validator) { Validator.new }

  # Menu choice between search type '1' and '2'.
  context '#search_type_validator' do
    it "returns true if '1' is given" do
      expect(validator.search_type_validator('1')).to eql(true)
    end

    it "returns true if '2' is given" do
      expect(validator.search_type_validator('2')).to eql(true)
    end

    it "returns false if not '1' or '2' are given" do
      expect(validator.search_type_validator('9')).to eql(false)
    end

    it 'returns false if empty string' do
      expect(validator.search_type_validator('')).to eql(false)
    end
  end

  # Numeric rank must fall in 1..250; note ''.to_i is 0, which is rejected.
  context '#place_validator' do
    it 'returns true if the number in (1..250) range' do
      expect(validator.place_validator(50)).to eql(true)
    end

    it 'returns false if the number is not in (1..250) range' do
      expect(validator.place_validator(300)).to eql(false)
    end

    it 'returns false if string in empty' do
      expect(validator.place_validator(''.to_i)).to eql(false)
    end
  end

  context '#empty_validator' do
    it 'returns true if string is empty' do
      expect(validator.empty_validator('')).to eql(true)
    end

    it 'returns false if string is not empty' do
      expect(validator.empty_validator('hello')).to eql(false)
    end
  end

  # Only the literal '1' is an accepted choice.
  context '#choice_validator' do
    it "returns true if the value is equal to '1'" do
      expect(validator.choice_validator('1')).to eql(true)
    end

    it "returns false if the value other than '1'" do
      expect(validator.choice_validator('2')).to eql(false)
    end

    it 'returns false if the string is empty' do
      expect(validator.choice_validator('')).to eql(false)
    end
  end

  # Yes/no prompt accepts exactly 'y' or 'n'.
  context '#yes_no_validator' do
    it "returns true if the value equal to 'y'" do
      expect(validator.yes_no_validator('y')).to eql(true)
    end

    it "returns true if the value equal to 'n'" do
      expect(validator.yes_no_validator('n')).to eql(true)
    end

    it "returns false if the value other than 'y' or 'n'" do
      expect(validator.yes_no_validator('o')).to eql(false)
    end

    it 'returns false if the string is empty' do
      expect(validator.yes_no_validator('')).to eql(false)
    end
  end
end
| 28.113924 | 64 | 0.668167 |
214aace5cb50f2a4b242add01092c3be0121f92e | 750 | module Intrigue
module Issue
# Issue definition for a Wordpress site exposing its configuration file.
class WordpressConfigLeak < BaseIssue
  # Builds the issue hash from the defaults below, overlaying any
  # caller-supplied instance details on top.
  def self.generate(instance_details={})
    defaults = {
      name: "wordpress_config_leak",
      pretty_name: "Wordpress Configuration Information Leak",
      severity: 1,
      category: "application",
      status: "confirmed",
      description: "A wordpress site was found with an exposed configuration.",
      remediation: "Set permissions on the configuration file to prevent anonymous users being able to read it.",
      affected_software: [{ :vendor => "Wordpress", :product => "Wordpress" }],
      # reference types: description, remediation, detection_rule, exploit, threat_intel
      references: []
    }
    defaults.merge!(instance_details)
  end
end
end
end | 35.714286 | 115 | 0.669333 |
626e91b658633db9e157a8e8e95ff03b50ceed75 | 116 | class ServersRenameUrl < ActiveRecord::Migration
# Renames servers.url to servers.site_url; rename_column is reversible,
# so this migration can be rolled back automatically.
def change
  rename_column :servers, :url, :site_url
end
end
| 19.333333 | 48 | 0.758621 |
f8d59ffc1a7da6cc09cffdeb0a3a01b0c2ab2cc4 | 3,794 | #
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Ramaze::Helper
  # REST/JSON response helpers: serialize controller results and build the
  # standard ok / notok / validation payloads returned to API clients.
  module Rest
    # Serializes @ctrl_results as JSON after checking it has the expected
    # "rest" form. If the first JSON.generate raises (e.g. on a bad
    # encoding), the data is normalized to UTF-8 and serialization retried.
    def rest_response
      unless @ctrl_results.is_a?(BundleAndReturnHelper::ControllerResultsRest)
        fail Error.new("controller results are in wrong form; it should have 'rest' form")
      end
      begin
        JSON.generate(@ctrl_results)
      rescue Exception => ex
        ::DTK::Log.warn "Encoding error has occured, trying to fix it. Error #{ex.class} #{ex.message}"
        JSON.generate DTK::ActionResultsQueue::Result.normalize_data_to_utf8_output!(@ctrl_results)
      end
    end

    # Builds a `status: :ok` payload around `data`.
    # opts can have keys:
    # :encode_into - currently only :yaml; replaces data with its encoding
    # :datatype - added to the payload as-is
    # :info
    # :warn
    # :error # TODO: is this possible
    def rest_ok_response(data = nil, opts = {})
      data ||= {}
      if encode_format = opts[:encode_into]
        # This might be a misnomer in that payload is still a hash which then in RestResponse.new becomes json
        # for case of yaml, the data will be a string formed by yaml encoding
        data =
          case encode_format
          when :yaml then encode_into_yaml(data)
          else fail Error.new("Unexpected encode format (#{encode_format})")
          end
      end
      payload = { status: :ok, data: data }
      payload.merge!(datatype: opts[:datatype]) if opts[:datatype]
      # set custom messages in response
      [:info, :warn, :error].each do |msg_type|
        payload.merge!(msg_type => opts[msg_type]) if opts[msg_type]
      end
      RestResponse.new(payload)
    end

    #
    # Actions needed is Array of Hashes with following attributes:
    #
    # :action => Name of action to be executed
    # :params => Parameters needed to execute that action
    # :wait_for_complete => In case we need to wait for end of that action, type and id
    # It will call task_status for given entity.
    # Example:
    #[
    # :action => :start,
    # :params => {:assembly_id => assembly[:id]},
    # :wait_for_complete => {:type => :assembly, :id => assembly[:id]}
    #]
    def rest_validate_response(message, actions_needed)
      RestResponse.new(status: :notok,
                       validation: {
                         message: message,
                         actions_needed: actions_needed
                       })
    end

    # Builds a `status: :notok` payload; a single error Hash is wrapped in
    # an Array so `errors` is always a list.
    def rest_notok_response(errors = [{ code: :error }])
      if errors.is_a?(Hash)
        errors = [errors]
      end
      RestResponse.new(status: :notok, errors: errors)
    end

    private

    # YAML-encodes `data` (optionally stripping nil-valued keys first) and
    # appends a trailing newline.
    def encode_into_yaml(data, opts = {})
      data_to_encode = data
      if opts[:remove_null_keys]
        data_to_encode = remove_null_keys(data)
      end
      ::DTK::Aux.serialize(data_to_encode, :yaml) + "\n"
    end

    # Recursively removes nil-valued hash keys, descending into nested
    # hashes and arrays; non-collection values are returned unchanged.
    def remove_null_keys(data)
      if data.is_a?(Hash)
        ret = {}
        data.each_pair { |k, v| ret[k] = remove_null_keys(v) unless v.nil? }
        ret
      elsif data.is_a?(Array)
        data.map { |el| remove_null_keys(el) }
      else
        data
      end
    end

    # Thin Hash wrapper representing a response payload.
    class RestResponse < Hash
      def initialize(hash)
        replace(hash)
      end

      # True when the payload status is :ok.
      def is_ok?
        self[:status] == :ok
      end

      # The :data portion of the payload.
      def data
        self[:data]
      end
    end
  end
end
| 29.640625 | 110 | 0.622035 |
8707c6476ea8886aa90b9d04ccd9d873df733672 | 1,875 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright 2010-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "chef/mixin/deprecation"
# Specs for constant deprecation: registering a deprecated constant makes
# access return the replacement and emit a warning to the log.
describe Chef::Mixin do
  describe "deprecating constants (Class/Module)" do
    before do
      Chef::Mixin.deprecate_constant(:DeprecatedClass, Chef::Node, "This is a test deprecation")
      # Capture log output so the warning text can be asserted on.
      @log_io = StringIO.new
      Chef::Log.init(@log_io)
    end

    it "has a list of deprecated constants" do
      expect(Chef::Mixin.deprecated_constants).to have_key(:DeprecatedClass)
    end

    it "returns the replacement when accessing the deprecated constant" do
      expect(Chef::Mixin::DeprecatedClass).to eq(Chef::Node)
    end

    it "warns when accessing the deprecated constant" do
      Chef::Mixin::DeprecatedClass # rubocop:disable Lint/Void
      expect(@log_io.string).to include("This is a test deprecation")
    end
  end
end
# The deprecated-ivar proxy should transparently forward calls to the
# wrapped target object ("value" here).
describe Chef::Mixin::Deprecation::DeprecatedInstanceVariable do
  before do
    # Silence the deprecation warnings the proxy logs on each access.
    Chef::Log.logger = Logger.new(StringIO.new)
    @deprecated_ivar = Chef::Mixin::Deprecation::DeprecatedInstanceVariable.new("value", "an_ivar")
  end

  it "forward method calls to the target object" do
    expect(@deprecated_ivar.length).to eq(5)
    expect(@deprecated_ivar.to_sym).to eq(:value)
  end
end
| 32.327586 | 99 | 0.730667 |
e2d03dde296ec6690d69edc303bbd3b18adbaf6c | 162 | require 'test/unit'
require 'wikk_configuration'
# Placeholder test case: deliberately fails until real tests are written.
class TestBlah < Test::Unit::TestCase
  def test_sanity
    flunk 'write tests or I will kneecap you'
  end
end
| 18 | 45 | 0.746914 |
03cece5885c6af639d8c0183c0c4ef2b24cebf81 | 137,542 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module ToolresultsV1beta3
# Android app information.
class AndroidAppInfo
  include Google::Apis::Core::Hashable

  # The name of the app. Optional.
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

  # The package name of the app. Required.
  # Corresponds to the JSON property `packageName`
  # @return [String]
  attr_accessor :package_name

  # The internal version code of the app. Optional.
  # Corresponds to the JSON property `versionCode`
  # @return [String]
  attr_accessor :version_code

  # The version name of the app. Optional.
  # Corresponds to the JSON property `versionName`
  # @return [String]
  attr_accessor :version_name

  def initialize(**args)
    update!(**args)
  end

  # Copies any supplied keyword arguments onto the matching attributes;
  # attributes absent from +args+ are left untouched.
  def update!(**args)
    %i[name package_name version_code version_name].each do |attribute|
      instance_variable_set(:"@#{attribute}", args[attribute]) if args.key?(attribute)
    end
  end
end
# A test of an Android application that can control an Android component
# independently of its normal lifecycle.
# See for more information on types of Android tests.
class AndroidInstrumentationTest
  include Google::Apis::Core::Hashable

  # The java package for the test to be executed. Required.
  # Corresponds to the JSON property `testPackageId`
  # @return [String]
  attr_accessor :test_package_id

  # The InstrumentationTestRunner class. Required.
  # Corresponds to the JSON property `testRunnerClass`
  # @return [String]
  attr_accessor :test_runner_class

  # Fully qualified targets, each in one of these forms:
  # "package package_name", "class package_name.class_name", or
  # "class package_name.class_name#method_name".
  # If empty, all targets in the module will be run.
  # Corresponds to the JSON property `testTargets`
  # @return [Array<String>]
  attr_accessor :test_targets

  # Whether Android Test Orchestrator will be used to run the test.
  # Corresponds to the JSON property `useOrchestrator`
  # @return [Boolean]
  attr_accessor :use_orchestrator
  alias_method :use_orchestrator?, :use_orchestrator

  def initialize(**args)
    update!(**args)
  end

  # Copies any supplied keyword arguments onto the matching attributes;
  # attributes absent from +args+ are left untouched.
  def update!(**args)
    %i[test_package_id test_runner_class test_targets use_orchestrator].each do |attribute|
      instance_variable_set(:"@#{attribute}", args[attribute]) if args.key?(attribute)
    end
  end
end
# A test of an android application that explores the application on a virtual or
# physical Android device, finding culprits and crashes as it goes.
class AndroidRoboTest
  include Google::Apis::Core::Hashable

  # The initial activity that should be used to start the app. Optional.
  # Corresponds to the JSON property `appInitialActivity`
  # @return [String]
  attr_accessor :app_initial_activity

  # The java package for the bootstrap. Optional.
  # Corresponds to the JSON property `bootstrapPackageId`
  # @return [String]
  attr_accessor :bootstrap_package_id

  # The runner class for the bootstrap. Optional.
  # Corresponds to the JSON property `bootstrapRunnerClass`
  # @return [String]
  attr_accessor :bootstrap_runner_class

  # The max depth of the traversal stack Robo can explore. Optional.
  # Corresponds to the JSON property `maxDepth`
  # @return [Fixnum]
  attr_accessor :max_depth

  # The max number of steps/actions Robo can execute; 0 means no limit.
  # Corresponds to the JSON property `maxSteps`
  # @return [Fixnum]
  attr_accessor :max_steps

  def initialize(**args)
    update!(**args)
  end

  # Copies any supplied keyword arguments onto the matching attributes;
  # attributes absent from +args+ are left untouched.
  def update!(**args)
    %i[app_initial_activity bootstrap_package_id bootstrap_runner_class max_depth max_steps].each do |attribute|
      instance_variable_set(:"@#{attribute}", args[attribute]) if args.key?(attribute)
    end
  end
end
# An Android mobile test specification.
class AndroidTest
  include Google::Apis::Core::Hashable

  # Android app information.
  # Corresponds to the JSON property `androidAppInfo`
  # @return [Google::Apis::ToolresultsV1beta3::AndroidAppInfo]
  attr_accessor :android_app_info

  # An instrumentation test specification (controls an Android component
  # independently of its normal lifecycle).
  # Corresponds to the JSON property `androidInstrumentationTest`
  # @return [Google::Apis::ToolresultsV1beta3::AndroidInstrumentationTest]
  attr_accessor :android_instrumentation_test

  # A Robo test specification (automated crawl of the app on a virtual or
  # physical device).
  # Corresponds to the JSON property `androidRoboTest`
  # @return [Google::Apis::ToolresultsV1beta3::AndroidRoboTest]
  attr_accessor :android_robo_test

  # Maximum test run time, as a google.protobuf.Duration: a signed,
  # fixed-length span of time at nanosecond resolution (seconds + nanos),
  # encoded in JSON as a string such as "3.000001s".
  # Corresponds to the JSON property `testTimeout`
  # @return [Google::Apis::ToolresultsV1beta3::Duration]
  attr_accessor :test_timeout

  def initialize(**args)
    update!(**args)
  end

  # Copies any supplied keyword arguments onto the matching attributes;
  # attributes absent from +args+ are left untouched.
  def update!(**args)
    %i[android_app_info android_instrumentation_test android_robo_test test_timeout].each do |attribute|
      instance_variable_set(:"@#{attribute}", args[attribute]) if args.key?(attribute)
    end
  end
end
# `Any` contains an arbitrary serialized protocol buffer message along with a
# URL that describes the type of the serialized message.
# Protobuf library provides support to pack/unpack Any values in the form of
# utility functions or additional generated methods of the Any type.
# Example 1: Pack and unpack a message in C++.
# Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) ` ... `
# Example 2: Pack and unpack a message in Java.
# Foo foo = ...; Any any = Any.pack(foo); ... if (any.is(Foo.class)) ` foo = any.
# unpack(Foo.class); `
# Example 3: Pack and unpack a message in Python.
# foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.
# Unpack(foo) ...
# Example 4: Pack and unpack a message in Go
# foo := &pb.Foo`...` any, err := ptypes.MarshalAny(foo) ... foo := &pb.Foo`` if
# err := ptypes.UnmarshalAny(any, foo); err != nil ` ... `
# The pack methods provided by protobuf library will by default use 'type.
# googleapis.com/full.type.name' as the type URL and the unpack methods only use
# the fully qualified type name after the last '/' in the type URL, for example "
# foo.bar.com/x/y.z" will yield type name "y.z".
# JSON ==== The JSON representation of an `Any` value uses the regular
# representation of the deserialized, embedded message, with an additional field
# `@type` which contains the type URL. Example:
# package google.profile; message Person ` string first_name = 1; string
# last_name = 2; `
# ` "@type": "type.googleapis.com/google.profile.Person", "firstName": , "
# lastName": `
# If the embedded message type is well-known and has a custom JSON
# representation, that representation will be embedded adding a field `value`
# which holds the custom JSON in addition to the `@type` field. Example (for
# message [google.protobuf.Duration][]):
# ` "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" `
class Any
  include Google::Apis::Core::Hashable

  # A URL/resource name uniquely identifying the type of the serialized
  # protocol buffer message. Must contain at least one "/"; the last path
  # segment is the fully qualified type name (e.g.
  # `type.googleapis.com/google.protobuf.Duration`). Scheme defaults to
  # `https` when omitted.
  # Corresponds to the JSON property `typeUrl`
  # @return [String]
  attr_accessor :type_url

  # A valid serialized protocol buffer of the type named by `type_url`.
  # Corresponds to the JSON property `value`
  # NOTE: Values are automatically base64 encoded/decoded in the client library.
  # @return [String]
  attr_accessor :value

  def initialize(**args)
    update!(**args)
  end

  # Copies any supplied keyword arguments onto the matching attributes;
  # attributes absent from +args+ are left untouched.
  def update!(**args)
    %i[type_url value].each do |attribute|
      instance_variable_set(:"@#{attribute}", args[attribute]) if args.key?(attribute)
    end
  end
end
# Start-up timing for an app under test, expressed as a pair of Durations.
class AppStartTime
  include Google::Apis::Core::Hashable

  # Time from app launch until the app is fully drawn, as a signed
  # nanosecond-resolution span of time (see Duration).
  # Corresponds to the JSON property `fullyDrawnTime`.
  # @return [Google::Apis::ToolresultsV1beta3::Duration]
  attr_accessor :fully_drawn_time

  # Time from app launch until the initial display, as a signed
  # nanosecond-resolution span of time (see Duration).
  # Corresponds to the JSON property `initialDisplayTime`.
  # @return [Google::Apis::ToolresultsV1beta3::Duration]
  attr_accessor :initial_display_time

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[fully_drawn_time initial_display_time].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Encapsulates the metadata for basic sample series represented by a line chart.
class BasicPerfSampleSeries
  include Google::Apis::Core::Hashable

  # Corresponds to the JSON property `perfMetricType`.
  # @return [String]
  attr_accessor :perf_metric_type

  # Corresponds to the JSON property `perfUnit`.
  # @return [String]
  attr_accessor :perf_unit

  # Corresponds to the JSON property `sampleSeriesLabel`.
  # @return [String]
  attr_accessor :sample_series_label

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[perf_metric_type perf_unit sample_series_label].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# The request must provide up to a maximum of 5000 samples to be created;
# a larger sample size will cause an INVALID_ARGUMENT error.
class BatchCreatePerfSamplesRequest
  include Google::Apis::Core::Hashable

  # The set of PerfSamples to create; should not include existing timestamps.
  # Corresponds to the JSON property `perfSamples`.
  # @return [Array<Google::Apis::ToolresultsV1beta3::PerfSample>]
  attr_accessor :perf_samples

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    instance_variable_set(:@perf_samples, args[:perf_samples]) if args.key?(:perf_samples)
  end
end
# Response carrying the PerfSamples created by a batch-create request.
class BatchCreatePerfSamplesResponse
  include Google::Apis::Core::Hashable

  # Corresponds to the JSON property `perfSamples`.
  # @return [Array<Google::Apis::ToolresultsV1beta3::PerfSample>]
  attr_accessor :perf_samples

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    instance_variable_set(:@perf_samples, args[:perf_samples]) if args.key?(:perf_samples)
  end
end
# Basic CPU information about a test device.
class CpuInfo
  include Google::Apis::Core::Hashable

  # Description of the device processor, e.g. '1.8 GHz hexa core 64-bit ARMv8-A'.
  # Corresponds to the JSON property `cpuProcessor`.
  # @return [String]
  attr_accessor :cpu_processor

  # The CPU clock speed in GHz.
  # Corresponds to the JSON property `cpuSpeedInGhz`.
  # @return [Float]
  attr_accessor :cpu_speed_in_ghz

  # The number of CPU cores.
  # Corresponds to the JSON property `numberOfCores`.
  # @return [Fixnum]
  attr_accessor :number_of_cores

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[cpu_processor cpu_speed_in_ghz number_of_cores].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# A Duration represents a signed, fixed-length span of time at nanosecond
# resolution, independent of any calendar concepts like "day" or "month".
# The difference of two Timestamps is a Duration, and a Duration can be
# added to or subtracted from a Timestamp. Range is approximately
# +-10,000 years.
# In JSON format a Duration is encoded as a string: the number of seconds
# with nanoseconds expressed as fractional seconds, ending in the suffix
# "s" — e.g. "3s", "3.000000001s" (3s 1ns), "3.000001s" (3s 1us).
class Duration
  include Google::Apis::Core::Hashable

  # Signed fractions of a second at nanosecond resolution. Durations of
  # less than one second use a 0 `seconds` field and a positive or negative
  # `nanos` field; for one second or more, a non-zero `nanos` must have the
  # same sign as `seconds`. Must be from -999,999,999 to +999,999,999
  # inclusive. Corresponds to the JSON property `nanos`.
  # @return [Fixnum]
  attr_accessor :nanos

  # Signed seconds of the span of time. Must be from -315,576,000,000 to
  # +315,576,000,000 inclusive (60 sec/min * 60 min/hr * 24 hr/day *
  # 365.25 days/year * 10000 years). Corresponds to the JSON property `seconds`.
  # @return [Fixnum]
  attr_accessor :seconds

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[nanos seconds].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# An Execution represents a collection of Steps. For instance, it could
# represent: a mobile test executed across a range of device configurations,
# or a jenkins job with a build step followed by a test step.
# The maximum size of an execution message is 1 MiB. An Execution can be
# updated until its state is set to COMPLETE, at which point it becomes
# immutable.
class Execution
  include Google::Apis::Core::Hashable

  # When the execution completed, as a Timestamp (point in time encoded as
  # seconds plus nanoseconds since the Unix epoch, RFC 3339 in JSON form).
  # Corresponds to the JSON property `completionTime`.
  # @return [Google::Apis::ToolresultsV1beta3::Timestamp]
  attr_accessor :completion_time

  # When the execution was created, as a Timestamp (point in time encoded
  # as seconds plus nanoseconds since the Unix epoch, RFC 3339 in JSON form).
  # Corresponds to the JSON property `creationTime`.
  # @return [Google::Apis::ToolresultsV1beta3::Timestamp]
  attr_accessor :creation_time

  # A unique identifier within a History for this Execution.
  # Returns INVALID_ARGUMENT if this field is set or overwritten by the caller.
  # - In response: always set. - In create/update request: never set.
  # Corresponds to the JSON property `executionId`.
  # @return [String]
  attr_accessor :execution_id

  # Interprets a result so that humans and machines can act on it.
  # Corresponds to the JSON property `outcome`.
  # @return [Google::Apis::ToolresultsV1beta3::Outcome]
  attr_accessor :outcome

  # The details about how to run the execution.
  # Corresponds to the JSON property `specification`.
  # @return [Google::Apis::ToolresultsV1beta3::Specification]
  attr_accessor :specification

  # The initial state is IN_PROGRESS. The only legal state transition is
  # from IN_PROGRESS to COMPLETE; an invalid transition returns
  # PRECONDITION_FAILED, and setting COMPLETE more than once returns
  # FAILED_PRECONDITION. Setting COMPLETE marks all in-progress steps in
  # the execution COMPLETE; steps without an outcome become INCONCLUSIVE.
  # - In response: always set. - In create/update request: optional.
  # Corresponds to the JSON property `state`.
  # @return [String]
  attr_accessor :state

  # TestExecution Matrix ID that the TestExecutionService uses.
  # - In response: present if set by create. - In create: optional.
  # - In update: never set.
  # Corresponds to the JSON property `testExecutionMatrixId`.
  # @return [String]
  attr_accessor :test_execution_matrix_id

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[completion_time creation_time execution_id outcome specification
       state test_execution_matrix_id].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Detailed reasons for a test failure.
class FailureDetail
  include Google::Apis::Core::Hashable

  # If the failure was severe because the system (app) under test crashed.
  # Corresponds to the JSON property `crashed`.
  # @return [Boolean]
  attr_accessor :crashed
  alias_method :crashed?, :crashed

  # If an app is not installed and thus no test can be run with the app.
  # This might be caused by trying to run a test on an unsupported platform.
  # Corresponds to the JSON property `notInstalled`.
  # @return [Boolean]
  attr_accessor :not_installed
  alias_method :not_installed?, :not_installed

  # If a native process (including any other than the app) crashed.
  # Corresponds to the JSON property `otherNativeCrash`.
  # @return [Boolean]
  attr_accessor :other_native_crash
  alias_method :other_native_crash?, :other_native_crash

  # If the test overran some time limit, and that is why it failed.
  # Corresponds to the JSON property `timedOut`.
  # @return [Boolean]
  attr_accessor :timed_out
  alias_method :timed_out?, :timed_out

  # If the robo was unable to crawl the app, perhaps because the app did
  # not start. Corresponds to the JSON property `unableToCrawl`.
  # @return [Boolean]
  attr_accessor :unable_to_crawl
  alias_method :unable_to_crawl?, :unable_to_crawl

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[crashed not_installed other_native_crash timed_out unable_to_crawl].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# A reference to a file.
class FileReference
  include Google::Apis::Core::Hashable

  # The URI of a file stored in Google Cloud Storage. For example:
  # http://storage.googleapis.com/mybucket/path/to/test.xml, or in gsutil
  # format: gs://mybucket/path/to/test.xml (with version-specific info:
  # gs://mybucket/path/to/test.xml#1360383693690000).
  # An INVALID_ARGUMENT error is returned for unsupported URI formats.
  # - In response: always set. - In create/update request: always set.
  # Corresponds to the JSON property `fileUri`.
  # @return [String]
  attr_accessor :file_uri

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    instance_variable_set(:@file_uri, args[:file_uri]) if args.key?(:file_uri)
  end
end
# Graphics statistics for the App, collected from 'adb shell dumpsys
# graphicsstats'. See https://developer.android.com/training/testing/performance.html
# Statistics will only be present for API 23+.
class GraphicsStats
  include Google::Apis::Core::Hashable

  # Histogram of frame render times. There should be 154 buckets ranging
  # from [5ms, 6ms) to [4950ms, infinity).
  # Corresponds to the JSON property `buckets`.
  # @return [Array<Google::Apis::ToolresultsV1beta3::GraphicsStatsBucket>]
  attr_accessor :buckets

  # Total "high input latency" events.
  # Corresponds to the JSON property `highInputLatencyCount`.
  # @return [Fixnum]
  attr_accessor :high_input_latency_count

  # Total frames with slow render time. Should be <= total_frames.
  # Corresponds to the JSON property `jankyFrames`.
  # @return [Fixnum]
  attr_accessor :janky_frames

  # Total "missed vsync" events.
  # Corresponds to the JSON property `missedVsyncCount`.
  # @return [Fixnum]
  attr_accessor :missed_vsync_count

  # 50th percentile frame render time in milliseconds.
  # Corresponds to the JSON property `p50Millis`.
  # @return [Fixnum]
  attr_accessor :p50_millis

  # 90th percentile frame render time in milliseconds.
  # Corresponds to the JSON property `p90Millis`.
  # @return [Fixnum]
  attr_accessor :p90_millis

  # 95th percentile frame render time in milliseconds.
  # Corresponds to the JSON property `p95Millis`.
  # @return [Fixnum]
  attr_accessor :p95_millis

  # 99th percentile frame render time in milliseconds.
  # Corresponds to the JSON property `p99Millis`.
  # @return [Fixnum]
  attr_accessor :p99_millis

  # Total "slow bitmap upload" events.
  # Corresponds to the JSON property `slowBitmapUploadCount`.
  # @return [Fixnum]
  attr_accessor :slow_bitmap_upload_count

  # Total "slow draw" events.
  # Corresponds to the JSON property `slowDrawCount`.
  # @return [Fixnum]
  attr_accessor :slow_draw_count

  # Total "slow UI thread" events.
  # Corresponds to the JSON property `slowUiThreadCount`.
  # @return [Fixnum]
  attr_accessor :slow_ui_thread_count

  # Total frames rendered by package.
  # Corresponds to the JSON property `totalFrames`.
  # @return [Fixnum]
  attr_accessor :total_frames

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[buckets high_input_latency_count janky_frames missed_vsync_count
       p50_millis p90_millis p95_millis p99_millis slow_bitmap_upload_count
       slow_draw_count slow_ui_thread_count total_frames].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# One bucket of the frame render-time histogram in GraphicsStats.
class GraphicsStatsBucket
  include Google::Apis::Core::Hashable

  # Number of frames in the bucket.
  # Corresponds to the JSON property `frameCount`.
  # @return [Fixnum]
  attr_accessor :frame_count

  # Lower bound of render time in milliseconds.
  # Corresponds to the JSON property `renderMillis`.
  # @return [Fixnum]
  attr_accessor :render_millis

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[frame_count render_millis].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# A History represents a sorted list of Executions, ordered by the
# start_timestamp_millis field (descending). It can be used to group all
# the Executions of a continuous build.
# Note that the ordering only operates on one dimension: a repository with
# multiple branches needs one history per branch to order Executions per
# branch.
class History
  include Google::Apis::Core::Hashable

  # A short human-readable (plain text) name to display in the UI.
  # Maximum of 100 characters.
  # - In response: present if set during create. - In create request: optional.
  # Corresponds to the JSON property `displayName`.
  # @return [String]
  attr_accessor :display_name

  # A unique identifier within a project for this History.
  # Returns INVALID_ARGUMENT if this field is set or overwritten by the caller.
  # - In response: always set. - In create request: never set.
  # Corresponds to the JSON property `historyId`.
  # @return [String]
  attr_accessor :history_id

  # A name to uniquely identify a history within a project. Maximum of 200
  # characters.
  # - In response: always set. - In create request: always set.
  # Corresponds to the JSON property `name`.
  # @return [String]
  attr_accessor :name

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[display_name history_id name].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# An image, with a link to the main image and a thumbnail.
class Image
  include Google::Apis::Core::Hashable

  # The `Status` type defines a logical error model suitable for different
  # programming environments, including REST APIs and RPC APIs (used by
  # gRPC: https://github.com/grpc). A `Status` carries three pieces of
  # data: an error code (a [google.rpc.Code][] enum value), a
  # developer-facing English error message, and optional error details
  # (arbitrary information; a predefined set of detail types lives in the
  # `google.rpc` package).
  # Corresponds to the JSON property `error`.
  # @return [Google::Apis::ToolresultsV1beta3::Status]
  attr_accessor :error

  # A reference to a ToolExecution output file.
  # Corresponds to the JSON property `sourceImage`.
  # @return [Google::Apis::ToolresultsV1beta3::ToolOutputReference]
  attr_accessor :source_image

  # The step to which the image is attached. Always set.
  # Corresponds to the JSON property `stepId`.
  # @return [String]
  attr_accessor :step_id

  # A single thumbnail, with its size and format.
  # Corresponds to the JSON property `thumbnail`.
  # @return [Google::Apis::ToolresultsV1beta3::Thumbnail]
  attr_accessor :thumbnail

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[error source_image step_id thumbnail].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Detailed reasons for an inconclusive test outcome.
class InconclusiveDetail
  include Google::Apis::Core::Hashable

  # If the end user aborted the test execution before a pass or fail could
  # be determined — for example, by pressing ctrl-c, which sent a kill
  # signal to the test runner while the test was running.
  # Corresponds to the JSON property `abortedByUser`.
  # @return [Boolean]
  attr_accessor :aborted_by_user
  alias_method :aborted_by_user?, :aborted_by_user

  # If the test runner could not determine success or failure because the
  # test depends on a component other than the system under test which
  # failed — for example, a mobile test requires provisioning a device
  # where the test executes, and that provisioning can fail.
  # Corresponds to the JSON property `infrastructureFailure`.
  # @return [Boolean]
  attr_accessor :infrastructure_failure
  alias_method :infrastructure_failure?, :infrastructure_failure

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[aborted_by_user infrastructure_failure].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Step Id and outcome of each individual step that was run as a group with
# other steps with the same configuration.
class IndividualOutcome
  include Google::Apis::Core::Hashable

  # Corresponds to the JSON property `outcomeSummary`.
  # @return [String]
  attr_accessor :outcome_summary

  # Corresponds to the JSON property `stepId`.
  # @return [String]
  attr_accessor :step_id

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object from keyword arguments.
  def update!(**args)
    %i[outcome_summary step_id].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Response for listing Executions.
class ListExecutionsResponse
  include Google::Apis::Core::Hashable

  # The Executions in this page of results. Always set.
  # (JSON property: `executions`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::Execution>]
  attr_accessor :executions

  # Continuation token for the next page; only present when more
  # Executions remain to be fetched.
  # (JSON property: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[executions next_page_token].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response message for HistoryService.List
class ListHistoriesResponse
  include Google::Apis::Core::Hashable

  # The Histories in this page of results.
  # (JSON property: `histories`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::History>]
  attr_accessor :histories

  # Continuation token for the next page; only present when more histories
  # remain. Tokens expire one hour after the FIRST list request — e.g. a
  # token obtained at 1PM and used 10 minutes later yields a response whose
  # own token is only valid for the remaining 50 minutes.
  # (JSON property: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[histories next_page_token].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response for listing PerfSampleSeries.
class ListPerfSampleSeriesResponse
  include Google::Apis::Core::Hashable

  # The matching PerfSampleSeries, sorted by id.
  # (JSON property: `perfSampleSeries`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::PerfSampleSeries>]
  attr_accessor :perf_sample_series

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    instance_variable_set(:@perf_sample_series, args[:perf_sample_series]) if args.key?(:perf_sample_series)
  end
end
# Response for listing PerfSamples.
class ListPerfSamplesResponse
  include Google::Apis::Core::Hashable

  # Optional; returned when the result size exceeds the requested page size
  # (default 500). Holds the last sample timestamp, to be passed back as
  # page_token in the next request.
  # (JSON property: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # (JSON property: `perfSamples`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::PerfSample>]
  attr_accessor :perf_samples

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[next_page_token perf_samples].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response for listing ScreenshotClusters.
class ListScreenshotClustersResponse
  include Google::Apis::Core::Hashable

  # The clusters associated with an execution. Always set.
  # (JSON property: `clusters`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::ScreenshotCluster>]
  attr_accessor :clusters

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    instance_variable_set(:@clusters, args[:clusters]) if args.key?(:clusters)
  end
end
# A response containing the thumbnails in a step.
class ListStepThumbnailsResponse
  include Google::Apis::Core::Hashable

  # Continuation token for the next page. When set, more thumbnails can be
  # read by calling list again with this value in the page_token field.
  # (JSON property: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # Image data, returned in a deterministic order keyed by, in decreasing
  # importance: associated test case (images without one sort last), then
  # creation time (missing times sort last), then the order in which they
  # were added to the step (CreateStep/UpdateStep calls).
  # (JSON property: `thumbnails`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::Image>]
  attr_accessor :thumbnails

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[next_page_token thumbnails].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response message for StepService.List.
class ListStepsResponse
  include Google::Apis::Core::Hashable

  # Continuation token for the next page. When set, more steps can be read
  # by calling list again with this value in the page_token field.
  # (JSON property: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # The Steps in this page of results.
  # (JSON property: `steps`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::Step>]
  attr_accessor :steps

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[next_page_token steps].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Memory-related environment information for a device.
class MemoryInfo
  include Google::Apis::Core::Hashable

  # Maximum memory allocatable to the process, in KiB.
  # (JSON property: `memoryCapInKibibyte`)
  # @return [Fixnum]
  attr_accessor :memory_cap_in_kibibyte

  # Total memory available on the device, in KiB.
  # (JSON property: `memoryTotalInKibibyte`)
  # @return [Fixnum]
  attr_accessor :memory_total_in_kibibyte

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[memory_cap_in_kibibyte memory_total_in_kibibyte].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Details for steps run as a group under one shared configuration.
class MultiStep
  include Google::Apis::Core::Hashable

  # Unique integer for each step, in [0, total step count); the primary
  # step is number 0.
  # (JSON property: `multistepNumber`)
  # @return [Fixnum]
  attr_accessor :multistep_number

  # Rollup test status of the grouped steps plus each individual outcome.
  # (JSON property: `primaryStep`)
  # @return [Google::Apis::ToolresultsV1beta3::PrimaryStep]
  attr_accessor :primary_step

  # Step id of the primary (original) step — possibly this very step.
  # (JSON property: `primaryStepId`)
  # @return [String]
  attr_accessor :primary_step_id

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[multistep_number primary_step primary_step_id].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Interprets a result so that humans and machines can act on it.
class Outcome
  include Google::Apis::Core::Hashable

  # Extra detail for a FAILURE outcome. Optional; the service returns
  # INVALID_ARGUMENT if set while summary is not FAILURE.
  # (JSON property: `failureDetail`)
  # @return [Google::Apis::ToolresultsV1beta3::FailureDetail]
  attr_accessor :failure_detail

  # Extra detail for an INCONCLUSIVE outcome. Optional; the service returns
  # INVALID_ARGUMENT if set while summary is not INCONCLUSIVE.
  # (JSON property: `inconclusiveDetail`)
  # @return [Google::Apis::ToolresultsV1beta3::InconclusiveDetail]
  attr_accessor :inconclusive_detail

  # Extra detail for a SKIPPED outcome. Optional; the service returns
  # INVALID_ARGUMENT if set while summary is not SKIPPED.
  # (JSON property: `skippedDetail`)
  # @return [Google::Apis::ToolresultsV1beta3::SkippedDetail]
  attr_accessor :skipped_detail

  # Extra detail for a SUCCESS outcome. Optional; the service returns
  # INVALID_ARGUMENT if set while summary is not SUCCESS.
  # (JSON property: `successDetail`)
  # @return [Google::Apis::ToolresultsV1beta3::SuccessDetail]
  attr_accessor :success_detail

  # The simplest interpretation of the result. Required.
  # (JSON property: `summary`)
  # @return [String]
  attr_accessor :summary

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[failure_detail inconclusive_detail skipped_detail success_detail summary].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Encapsulates performance environment info
class PerfEnvironment
  include Google::Apis::Core::Hashable

  # CPU-related environment info.
  # (JSON property: `cpuInfo`)
  # @return [Google::Apis::ToolresultsV1beta3::CpuInfo]
  attr_accessor :cpu_info

  # Memory-related environment info.
  # (JSON property: `memoryInfo`)
  # @return [Google::Apis::ToolresultsV1beta3::MemoryInfo]
  attr_accessor :memory_info

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[cpu_info memory_info].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A summary of the perf metrics collected plus performance environment info.
class PerfMetricsSummary
  include Google::Apis::Core::Hashable

  # (JSON property: `appStartTime`)
  # @return [Google::Apis::ToolresultsV1beta3::AppStartTime]
  attr_accessor :app_start_time

  # A Tool Results execution id.
  # (JSON property: `executionId`)
  # @return [String]
  attr_accessor :execution_id

  # App graphics statistics, collected via 'adb shell dumpsys graphicsstats'
  # (see https://developer.android.com/training/testing/performance.html).
  # Only present for API 23+.
  # (JSON property: `graphicsStats`)
  # @return [Google::Apis::ToolresultsV1beta3::GraphicsStats]
  attr_accessor :graphics_stats

  # A Tool Results history id.
  # (JSON property: `historyId`)
  # @return [String]
  attr_accessor :history_id

  # Performance environment info.
  # (JSON property: `perfEnvironment`)
  # @return [Google::Apis::ToolresultsV1beta3::PerfEnvironment]
  attr_accessor :perf_environment

  # The set of resources collected.
  # (JSON property: `perfMetrics`)
  # @return [Array<String>]
  attr_accessor :perf_metrics

  # The cloud project.
  # (JSON property: `projectId`)
  # @return [String]
  attr_accessor :project_id

  # A Tool Results step id.
  # (JSON property: `stepId`)
  # @return [String]
  attr_accessor :step_id

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[app_start_time execution_id graphics_stats history_id
       perf_environment perf_metrics project_id step_id].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Resource representing a single performance measure or data point.
class PerfSample
  include Google::Apis::Core::Hashable

  # When the sample was taken, as a protobuf Timestamp: a timezone-independent
  # point in time encoded as seconds plus nanoseconds since the Unix epoch
  # (proleptic Gregorian calendar, leap seconds smeared). In JSON this is an
  # RFC 3339 string such as "2017-01-15T01:30:15.01Z" — always UTC ("Z"),
  # with optional fractional seconds up to nanosecond resolution; the valid
  # range is 0001-01-01T00:00:00Z through 9999-12-31T23:59:59.999999999Z.
  # (JSON property: `sampleTime`)
  # @return [Google::Apis::ToolresultsV1beta3::Timestamp]
  attr_accessor :sample_time

  # The observed value.
  # (JSON property: `value`)
  # @return [Float]
  attr_accessor :value

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[sample_time value].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Resource representing a collection of performance samples (or data points).
class PerfSampleSeries
  include Google::Apis::Core::Hashable

  # Metadata for a basic sample series represented by a line chart.
  # (JSON property: `basicPerfSampleSeries`)
  # @return [Google::Apis::ToolresultsV1beta3::BasicPerfSampleSeries]
  attr_accessor :basic_perf_sample_series

  # A Tool Results execution id.
  # (JSON property: `executionId`)
  # @return [String]
  attr_accessor :execution_id

  # A Tool Results history id.
  # (JSON property: `historyId`)
  # @return [String]
  attr_accessor :history_id

  # The cloud project.
  # (JSON property: `projectId`)
  # @return [String]
  attr_accessor :project_id

  # A sample series id.
  # (JSON property: `sampleSeriesId`)
  # @return [String]
  attr_accessor :sample_series_id

  # A Tool Results step id.
  # (JSON property: `stepId`)
  # @return [String]
  attr_accessor :step_id

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[basic_perf_sample_series execution_id history_id
       project_id sample_series_id step_id].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Rollup test status of multiple steps run as a group, plus the outcome of
# each individual step.
class PrimaryStep
  include Google::Apis::Core::Hashable

  # Step id and outcome of each individual step.
  # (JSON property: `individualOutcome`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::IndividualOutcome>]
  attr_accessor :individual_outcome

  # Rollup test status of the steps that ran with the same configuration
  # as a group.
  # (JSON property: `rollUp`)
  # @return [String]
  attr_accessor :roll_up

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[individual_outcome roll_up].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Per-project settings for the Tool Results service.
class ProjectSettings
  include Google::Apis::Core::Hashable

  # The Google Cloud Storage bucket results are written to; unset by default.
  # In update request: optional. In response: optional.
  # (JSON property: `defaultBucket`)
  # @return [String]
  attr_accessor :default_bucket

  # The settings resource name, always of the form
  # projects/`project-id`/settings.
  # In update request: never set. In response: always set.
  # (JSON property: `name`)
  # @return [String]
  attr_accessor :name

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[default_bucket name].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Request message for StepService.PublishXunitXmlFiles.
class PublishXunitXmlFilesRequest
  include Google::Apis::Core::Hashable

  # URIs of the Xunit XML files to publish. Required. Each referenced file
  # may be at most 50MB.
  # (JSON property: `xunitXmlFiles`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::FileReference>]
  attr_accessor :xunit_xml_files

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    instance_variable_set(:@xunit_xml_files, args[:xunit_xml_files]) if args.key?(:xunit_xml_files)
  end
end
# A single screenshot and the device context it was captured in.
class Screen
  include Google::Apis::Core::Hashable

  # File reference of the png file. Required.
  # (JSON property: `fileReference`)
  # @return [String]
  attr_accessor :file_reference

  # Locale of the device the screenshot was taken on. Required.
  # (JSON property: `locale`)
  # @return [String]
  attr_accessor :locale

  # Model of the device the screenshot was taken on. Required.
  # (JSON property: `model`)
  # @return [String]
  attr_accessor :model

  # OS version of the device the screenshot was taken on. Required.
  # (JSON property: `version`)
  # @return [String]
  attr_accessor :version

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[file_reference locale model version].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A group of similar screenshots from an execution.
class ScreenshotCluster
  include Google::Apis::Core::Hashable

  # Describes the activity of every screen in the cluster.
  # (JSON property: `activity`)
  # @return [String]
  attr_accessor :activity

  # Unique identifier for the cluster.
  # (JSON property: `clusterId`)
  # @return [String]
  attr_accessor :cluster_id

  # The single screen acting as the "cover" of the whole cluster — the only
  # screen shown when users browse clusters. Which screen is the key screen
  # is decided by the ClusteringAlgorithm.
  # (JSON property: `keyScreen`)
  # @return [Google::Apis::ToolresultsV1beta3::Screen]
  attr_accessor :key_screen

  # The full list of screens.
  # (JSON property: `screens`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::Screen>]
  attr_accessor :screens

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[activity cluster_id key_screen screens].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Details explaining why a test outcome was SKIPPED.
class SkippedDetail
  include Google::Apis::Core::Hashable

  # True when the app does not support the specific API level.
  # (JSON property: `incompatibleAppVersion`)
  # @return [Boolean]
  attr_accessor :incompatible_app_version
  alias_method :incompatible_app_version?, :incompatible_app_version

  # True when the app does not run on the specific architecture, e.g. x86.
  # (JSON property: `incompatibleArchitecture`)
  # @return [Boolean]
  attr_accessor :incompatible_architecture
  alias_method :incompatible_architecture?, :incompatible_architecture

  # True when the requested OS version does not run on the specific
  # device model.
  # (JSON property: `incompatibleDevice`)
  # @return [Boolean]
  attr_accessor :incompatible_device
  alias_method :incompatible_device?, :incompatible_device

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[incompatible_app_version incompatible_architecture incompatible_device].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# The details about how to run the execution.
class Specification
  include Google::Apis::Core::Hashable

  # An Android mobile test specification.
  # (JSON property: `androidTest`)
  # @return [Google::Apis::ToolresultsV1beta3::AndroidTest]
  attr_accessor :android_test

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    instance_variable_set(:@android_test, args[:android_test]) if args.key?(:android_test)
  end
end
# A stacktrace.
class StackTrace
  include Google::Apis::Core::Hashable

  # The stack trace message. Required.
  # (JSON property: `exception`)
  # @return [String]
  attr_accessor :exception

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    instance_variable_set(:@exception, args[:exception]) if args.key?(:exception)
  end
end
# The `Status` type defines a logical error model suitable for REST and RPC
# APIs; it is the error model used by [gRPC](https://github.com/grpc).
#
# A `Status` carries three pieces of data: an error code (normally an enum
# value of [google.rpc.Code][], though additional codes may be accepted), a
# developer-facing English message that helps developers understand and
# resolve the error (localized user-facing text belongs in the error details
# or is localized by the client), and optional error details — arbitrary
# information about the error, with a predefined set of detail types
# available in the `google.rpc` package for common conditions.
#
# `Status` is the logical representation, not necessarily the wire format;
# client libraries may map it differently (exceptions in Java, error codes
# in C). Typical uses include partial errors embedded in a normal response,
# per-step workflow errors, one `Status` per sub-response in batch replies,
# statuses of asynchronous operations embedded in a response, and direct
# logging (after any stripping needed for security/privacy).
class Status
  include Google::Apis::Core::Hashable

  # The status code, which should be an enum value of [google.rpc.Code][].
  # (JSON property: `code`)
  # @return [Fixnum]
  attr_accessor :code

  # Messages carrying the error details; a common set of message types is
  # available for APIs to use.
  # (JSON property: `details`)
  # @return [Array<Google::Apis::ToolresultsV1beta3::Any>]
  attr_accessor :details

  # A developer-facing error message, in English. User-facing messages
  # should be localized and sent in [google.rpc.Status.details][] or
  # localized by the client.
  # (JSON property: `message`)
  # @return [String]
  attr_accessor :message

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied properties onto this object; absent keys are left untouched.
  def update!(**args)
    %i[code details message].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A Step represents a single operation performed as part of an Execution —
# for example the run of a test runner, or one invocation of a compiler.
# Steps may overlap in time (two steps can share a start time when work is
# done in parallel). Typical workflow for a continuous build running a test
# runner per iteration: create an Execution (id 1), create a
# TestExecutionStep (id 100) under it, update that step with a raw xml log
# (the service parses it into TestResults), then update its status to
# COMPLETE. Once a Step's state is set to COMPLETE it becomes immutable.
class Step
  include Google::Apis::Core::Hashable

  # When the step completed. Timestamps are encoded as seconds plus
  # nanosecond fractions relative to the Unix epoch (UTC), serialized in
  # JSON as an RFC 3339 string.
  # Corresponds to the JSON property `completionTime`
  # @return [Google::Apis::ToolresultsV1beta3::Timestamp]
  attr_accessor :completion_time

  # When the step was created, encoded as described for `completionTime`.
  # Corresponds to the JSON property `creationTime`
  # @return [Google::Apis::ToolresultsV1beta3::Timestamp]
  attr_accessor :creation_time

  # A description of this tool. For example: mvn clean package -D skipTests=true
  # - In response: present if set by create/update request
  # - In create/update request: optional
  # Corresponds to the JSON property `description`
  # @return [String]
  attr_accessor :description

  # How long the device was in use, as a signed span of seconds and
  # nanosecond fractions (serialized in JSON as a string such as "3.000001s").
  # Corresponds to the JSON property `deviceUsageDuration`
  # @return [Google::Apis::ToolresultsV1beta3::Duration]
  attr_accessor :device_usage_duration

  # Values for the dimensions declared in the containing execution's
  # dimension_definition; keys must exactly match that definition. Example:
  # with `dimension_definition = ['attempt', 'device']` a step defines
  # `dimension_value = ['attempt': '1', 'device': 'Nexus 6']`. Use an empty
  # string for a dimension the step does not participate in (e.g. a runner
  # without retry support sets `'attempt': ''`), or leave the field unset if
  # it participates in none. PRECONDITION_FAILED is returned if a key is not
  # in the execution's dimension_definition, if a defined dimension is
  # missing from the keys, or if another step in the execution has the same
  # name and dimension_value but differs in other fields.
  # - In response: present if set by create
  # - In create request: optional - In update request: never set
  # Corresponds to the JSON property `dimensionValue`
  # @return [Array<Google::Apis::ToolresultsV1beta3::StepDimensionValueEntry>]
  attr_accessor :dimension_value

  # Whether any of this step's outputs are images whose thumbnails can be
  # fetched with ListThumbnails.
  # - In response: always set - In create/update request: never set
  # Corresponds to the JSON property `hasImages`
  # @return [Boolean]
  attr_accessor :has_images
  alias_method :has_images?, :has_images

  # Arbitrary user-supplied key/value pairs associated with the step. Users
  # manage the key namespace to avoid accidental collisions.
  # INVALID_ARGUMENT is returned if there are more than 100 labels or any
  # key or value exceeds 100 characters.
  # - In response: always set - In create request: optional
  # - In update request: optional; new pairs are added to the map and new
  #   values for existing keys replace the old values
  # Corresponds to the JSON property `labels`
  # @return [Array<Google::Apis::ToolresultsV1beta3::StepLabelsEntry>]
  attr_accessor :labels

  # Details when multiple steps are run with the same configuration as a group.
  # Corresponds to the JSON property `multiStep`
  # @return [Google::Apis::ToolresultsV1beta3::MultiStep]
  attr_accessor :multi_step

  # A short human-readable name to display in the UI, at most 100
  # characters. For example: Clean build. Steps representing a similar
  # action with different dimension values should share the same name (e.g.
  # the same test set run on two platforms); creating a step that shares
  # both name and dimension_value with an existing step yields
  # PRECONDITION_FAILED.
  # - In response: always set - In create request: always set
  # - In update request: never set
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

  # Interprets a result so that humans and machines can act on it.
  # Corresponds to the JSON property `outcome`
  # @return [Google::Apis::ToolresultsV1beta3::Outcome]
  attr_accessor :outcome

  # How long the step ran, as a signed span of seconds and nanosecond
  # fractions (serialized in JSON as a string such as "3.000001s").
  # Corresponds to the JSON property `runDuration`
  # @return [Google::Apis::ToolresultsV1beta3::Duration]
  attr_accessor :run_duration

  # The step state. The initial state is IN_PROGRESS and the only legal
  # transition is IN_PROGRESS -> COMPLETE; an invalid transition yields
  # PRECONDITION_FAILED. A Step may be created directly in COMPLETE, but the
  # state can be set to COMPLETE only once (further attempts also yield
  # PRECONDITION_FAILED).
  # - In response: always set - In create/update request: optional
  # Corresponds to the JSON property `state`
  # @return [String]
  attr_accessor :state

  # A unique identifier for this Step within its Execution. Returns
  # INVALID_ARGUMENT if this field is set or overwritten by the caller.
  # - In response: always set - In create/update request: never set
  # Corresponds to the JSON property `stepId`
  # @return [String]
  attr_accessor :step_id

  # A step that represents running tests. It accepts ant-junit xml files,
  # which the service parses into structured test results. Xml file paths
  # may be appended to but never deleted; results can also be added manually
  # via the test_result field.
  # Corresponds to the JSON property `testExecutionStep`
  # @return [Google::Apis::ToolresultsV1beta3::TestExecutionStep]
  attr_accessor :test_execution_step

  # Generic tool step for binaries not explicitly supported — for example
  # running cp to copy artifacts from one location to another.
  # Corresponds to the JSON property `toolExecutionStep`
  # @return [Google::Apis::ToolresultsV1beta3::ToolExecutionStep]
  attr_accessor :tool_execution_step

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[completion_time creation_time description device_usage_duration
       dimension_value has_images labels multi_step name outcome
       run_duration state step_id test_execution_step
       tool_execution_step].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A single key/value pair as used in Step#dimension_value.
class StepDimensionValueEntry
  include Google::Apis::Core::Hashable

  # Corresponds to the JSON property `key`
  # @return [String]
  attr_accessor :key

  # Corresponds to the JSON property `value`
  # @return [String]
  attr_accessor :value

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[key value].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A single key/value pair as used in Step#labels.
class StepLabelsEntry
  include Google::Apis::Core::Hashable

  # Corresponds to the JSON property `key`
  # @return [String]
  attr_accessor :key

  # Corresponds to the JSON property `value`
  # @return [String]
  attr_accessor :value

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[key value].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# NOTE(review): class doc absent upstream; carries detail flags accompanying
# a success result.
class SuccessDetail
  include Google::Apis::Core::Hashable

  # If a native process other than the app crashed.
  # Corresponds to the JSON property `otherNativeCrash`
  # @return [Boolean]
  attr_accessor :other_native_crash
  alias_method :other_native_crash?, :other_native_crash

  def initialize(**args)
    update!(**args)
  end

  # Copy the recognized keyword argument into its matching property.
  def update!(**args)
    @other_native_crash = args[:other_native_crash] if args.key?(:other_native_crash)
  end
end
# A reference to a test case.
# Test case references are canonically ordered lexicographically by three
# factors: first by test_suite_name, second by class_name, third by name.
class TestCaseReference
  include Google::Apis::Core::Hashable

  # The name of the class.
  # Corresponds to the JSON property `className`
  # @return [String]
  attr_accessor :class_name

  # The name of the test case. Required.
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

  # The name of the test suite to which this test case belongs.
  # Corresponds to the JSON property `testSuiteName`
  # @return [String]
  attr_accessor :test_suite_name

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[class_name name test_suite_name].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A step that represents running tests.
# It accepts ant-junit xml files, which the service parses into structured
# test results. Xml file paths may be appended to but never deleted.
# Users can also add test results manually by using the test_result field.
class TestExecutionStep
  include Google::Apis::Core::Hashable

  # Issues observed during the test execution — for example, if the mobile
  # app under test crashed, the error message and stack trace can be
  # recorded here to assist debugging.
  # - In response: present if set by create or update
  # - In create/update request: optional
  # Corresponds to the JSON property `testIssues`
  # @return [Array<Google::Apis::ToolresultsV1beta3::TestIssue>]
  attr_accessor :test_issues

  # Test suite overview contents, parsed from xUnit XML by the server or
  # uploaded directly by the user; should only be referenced once the suites
  # are fully parsed or uploaded. At most 1000 overviews per step.
  # - In response: always set - In create request: optional
  # - In update request: never (use the publishXunitXmlFiles custom method)
  # Corresponds to the JSON property `testSuiteOverviews`
  # @return [Array<Google::Apis::ToolresultsV1beta3::TestSuiteOverview>]
  attr_accessor :test_suite_overviews

  # Testing timing broken down into known phases.
  # Corresponds to the JSON property `testTiming`
  # @return [Google::Apis::ToolresultsV1beta3::TestTiming]
  attr_accessor :test_timing

  # An execution of an arbitrary tool — a test runner, an artifact copier,
  # a code deployer, etc.
  # Corresponds to the JSON property `toolExecution`
  # @return [Google::Apis::ToolresultsV1beta3::ToolExecution]
  attr_accessor :tool_execution

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[test_issues test_suite_overviews test_timing tool_execution].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# An issue detected occurring during a test execution.
class TestIssue
  include Google::Apis::Core::Hashable

  # Category of issue. Required.
  # Corresponds to the JSON property `category`
  # @return [String]
  attr_accessor :category

  # A brief human-readable message describing the issue. Required.
  # Corresponds to the JSON property `errorMessage`
  # @return [String]
  attr_accessor :error_message

  # Severity of issue. Required.
  # Corresponds to the JSON property `severity`
  # @return [String]
  attr_accessor :severity

  # A stacktrace.
  # Corresponds to the JSON property `stackTrace`
  # @return [Google::Apis::ToolresultsV1beta3::StackTrace]
  attr_accessor :stack_trace

  # Type of issue. Required.
  # Corresponds to the JSON property `type`
  # @return [String]
  attr_accessor :type

  # An `Any` payload: an arbitrary serialized protocol buffer message plus a
  # type URL describing the serialized message's type. In JSON it is the
  # regular representation of the embedded message with an extra `@type`
  # field holding the type URL (well-known types add a `value` field with
  # their custom JSON instead).
  # Corresponds to the JSON property `warning`
  # @return [Google::Apis::ToolresultsV1beta3::Any]
  attr_accessor :warning

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[category error_message severity stack_trace type warning].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A summary of a test suite result, either parsed from XML or uploaded
# directly by a user.
# Note: the API-related comments are for StepService only. This message is
# also used in ExecutionService in a read-only mode for the corresponding
# step.
class TestSuiteOverview
  include Google::Apis::Core::Hashable

  # Number of test cases in error, typically set by the service by parsing
  # the xml_source.
  # - In create/response: always set - In update request: never
  # Corresponds to the JSON property `errorCount`
  # @return [Fixnum]
  attr_accessor :error_count

  # Number of failed test cases, typically set by the service by parsing the
  # xml_source. May also be set by the user.
  # - In create/response: always set - In update request: never
  # Corresponds to the JSON property `failureCount`
  # @return [Fixnum]
  attr_accessor :failure_count

  # The name of the test suite.
  # - In create/response: always set - In update request: never
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

  # Number of test cases not run, typically set by the service by parsing
  # the xml_source.
  # - In create/response: always set - In update request: never
  # Corresponds to the JSON property `skippedCount`
  # @return [Fixnum]
  attr_accessor :skipped_count

  # Number of test cases, typically set by the service by parsing the
  # xml_source.
  # - In create/response: always set - In update request: never
  # Corresponds to the JSON property `totalCount`
  # @return [Fixnum]
  attr_accessor :total_count

  # A reference to a file.
  # Corresponds to the JSON property `xmlSource`
  # @return [Google::Apis::ToolresultsV1beta3::FileReference]
  attr_accessor :xml_source

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[error_count failure_count name skipped_count total_count
       xml_source].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Testing timing broken down into known phases.
class TestTiming
  include Google::Apis::Core::Hashable

  # How long the test process ran, as a signed span of seconds and
  # nanosecond fractions (serialized in JSON as a string such as
  # "3.000001s").
  # Corresponds to the JSON property `testProcessDuration`
  # @return [Google::Apis::ToolresultsV1beta3::Duration]
  attr_accessor :test_process_duration

  def initialize(**args)
    update!(**args)
  end

  # Copy the recognized keyword argument into its matching property.
  def update!(**args)
    @test_process_duration = args[:test_process_duration] if args.key?(:test_process_duration)
  end
end
# A single thumbnail, with its size and format.
class Thumbnail
  include Google::Apis::Core::Hashable

  # The thumbnail's content type, i.e. "image/png". Always set.
  # Corresponds to the JSON property `contentType`
  # @return [String]
  attr_accessor :content_type

  # The thumbnail file itself: precisely the bytes that make up the
  # thumbnail file, servable as an image as-is (with the appropriate content
  # type). Always set.
  # Corresponds to the JSON property `data`
  # NOTE: Values are automatically base64 encoded/decoded in the client library.
  # @return [String]
  attr_accessor :data

  # The height of the thumbnail, in pixels. Always set.
  # Corresponds to the JSON property `heightPx`
  # @return [Fixnum]
  attr_accessor :height_px

  # The width of the thumbnail, in pixels. Always set.
  # Corresponds to the JSON property `widthPx`
  # @return [Fixnum]
  attr_accessor :width_px

  def initialize(**args)
    update!(**args)
  end

  # Copy each recognized keyword argument into its matching property.
  def update!(**args)
    %i[content_type data height_px width_px].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A Timestamp represents a point in time independent of any time zone or local
# calendar, encoded as a count of seconds and fractions of seconds at nanosecond
# resolution. The count is relative to an epoch at UTC midnight on January 1,
# 1970, in the proleptic Gregorian calendar which extends the Gregorian calendar
# backwards to year one.
# All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
# second table is needed for interpretation, using a [24-hour linear smear](
# https://developers.google.com/time/smear).
# The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
# restricting to that range, we ensure that we can convert to and from [RFC 3339]
# (https://www.ietf.org/rfc/rfc3339.txt) date strings.
# # Examples
# Example 1: Compute Timestamp from POSIX `time()`.
# Timestamp timestamp; timestamp.set_seconds(time(NULL)); timestamp.set_nanos(0);
# Example 2: Compute Timestamp from POSIX `gettimeofday()`.
# struct timeval tv; gettimeofday(&tv, NULL);
# Timestamp timestamp; timestamp.set_seconds(tv.tv_sec); timestamp.set_nanos(tv.
# tv_usec * 1000);
# Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
# FILETIME ft; GetSystemTimeAsFileTime(&ft); UINT64 ticks = (((UINT64)ft.
# dwHighDateTime) << 32) | ft.dwLowDateTime;
# // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // is
# 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. Timestamp
# timestamp; timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
# timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
# Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
# long millis = System.currentTimeMillis();
# Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) .
# setNanos((int) ((millis % 1000) * 1000000)).build();
# Example 5: Compute Timestamp from current time in Python.
# timestamp = Timestamp() timestamp.GetCurrentTime()
# # JSON Mapping
# In JSON format, the Timestamp type is encoded as a string in the [RFC 3339](
# https://www.ietf.org/rfc/rfc3339.txt) format. That is, the format is "`year`-`
# month`-`day`T`hour`:`min`:`sec`[.`frac_sec`]Z" where `year` is always
# expressed using four digits while `month`, `day`, `hour`, `min`, and `sec` are
# zero-padded to two digits each. The fractional seconds, which can go up to 9
# digits (i.e. up to 1 nanosecond resolution), are optional. The "Z" suffix
# indicates the timezone ("UTC"); the timezone is required. A proto3 JSON
# serializer should always use UTC (as indicated by "Z") when printing the
# Timestamp type and a proto3 JSON parser should be able to accept both UTC and
# other timezones (as indicated by an offset).
# For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 UTC on
# January 15, 2017.
# In JavaScript, one can convert a Date object to this format using the standard
# [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/
# Reference/Global_Objects/Date/toISOString) method. In Python, a standard `
# datetime.datetime` object can be converted to this format using [`strftime`](
# https://docs.python.org/2/library/time.html#time.strftime) with the time
# format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use the Joda
# Time's [`ISODateTimeFormat.dateTime()`]( http://www.joda.org/joda-time/apidocs/
# org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D ) to obtain a
# formatter capable of generating timestamps in this format.
class Timestamp
include Google::Apis::Core::Hashable
# Non-negative fractions of a second at nanosecond resolution. Negative second
# values with fractions must still have non-negative nanos values that count
# forward in time. Must be from 0 to 999,999,999 inclusive.
# Corresponds to the JSON property `nanos`
# @return [Fixnum]
attr_accessor :nanos
# Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be
# from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.
# Corresponds to the JSON property `seconds`
# @return [Fixnum]
attr_accessor :seconds
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@nanos = args[:nanos] if args.key?(:nanos)
@seconds = args[:seconds] if args.key?(:seconds)
end
end
# An execution of an arbitrary tool. It could be a test runner or a tool copying
# artifacts or deploying code.
class ToolExecution
include Google::Apis::Core::Hashable
# The full tokenized command line including the program name (equivalent to argv
# in a C program).
# - In response: present if set by create request - In create request: optional -
# In update request: never set
# Corresponds to the JSON property `commandLineArguments`
# @return [Array<String>]
attr_accessor :command_line_arguments
# Exit code from a tool execution.
# Corresponds to the JSON property `exitCode`
# @return [Google::Apis::ToolresultsV1beta3::ToolExitCode]
attr_accessor :exit_code
# References to any plain text logs output the tool execution.
# This field can be set before the tool has exited in order to be able to have
# access to a live view of the logs while the tool is running.
# The maximum allowed number of tool logs per step is 1000.
# - In response: present if set by create/update request - In create request:
# optional - In update request: optional, any value provided will be appended to
# the existing list
# Corresponds to the JSON property `toolLogs`
# @return [Array<Google::Apis::ToolresultsV1beta3::FileReference>]
attr_accessor :tool_logs
# References to opaque files of any format output by the tool execution.
# The maximum allowed number of tool outputs per step is 1000.
# - In response: present if set by create/update request - In create request:
# optional - In update request: optional, any value provided will be appended to
# the existing list
# Corresponds to the JSON property `toolOutputs`
# @return [Array<Google::Apis::ToolresultsV1beta3::ToolOutputReference>]
attr_accessor :tool_outputs
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@command_line_arguments = args[:command_line_arguments] if args.key?(:command_line_arguments)
@exit_code = args[:exit_code] if args.key?(:exit_code)
@tool_logs = args[:tool_logs] if args.key?(:tool_logs)
@tool_outputs = args[:tool_outputs] if args.key?(:tool_outputs)
end
end
# Generic tool step to be used for binaries we do not explicitly support. For
# example: running cp to copy artifacts from one location to another.
class ToolExecutionStep
include Google::Apis::Core::Hashable
# An execution of an arbitrary tool. It could be a test runner or a tool copying
# artifacts or deploying code.
# Corresponds to the JSON property `toolExecution`
# @return [Google::Apis::ToolresultsV1beta3::ToolExecution]
attr_accessor :tool_execution
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@tool_execution = args[:tool_execution] if args.key?(:tool_execution)
end
end
# Exit code from a tool execution.
class ToolExitCode
include Google::Apis::Core::Hashable
# Tool execution exit code. A value of 0 means that the execution was successful.
# - In response: always set - In create/update request: always set
# Corresponds to the JSON property `number`
# @return [Fixnum]
attr_accessor :number
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@number = args[:number] if args.key?(:number)
end
end
# A reference to a ToolExecution output file.
class ToolOutputReference
include Google::Apis::Core::Hashable
# A Timestamp represents a point in time independent of any time zone or local
# calendar, encoded as a count of seconds and fractions of seconds at nanosecond
# resolution. The count is relative to an epoch at UTC midnight on January 1,
# 1970, in the proleptic Gregorian calendar which extends the Gregorian calendar
# backwards to year one.
# All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
# second table is needed for interpretation, using a [24-hour linear smear](
# https://developers.google.com/time/smear).
# The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
# restricting to that range, we ensure that we can convert to and from [RFC 3339]
# (https://www.ietf.org/rfc/rfc3339.txt) date strings.
# # Examples
# Example 1: Compute Timestamp from POSIX `time()`.
# Timestamp timestamp; timestamp.set_seconds(time(NULL)); timestamp.set_nanos(0);
# Example 2: Compute Timestamp from POSIX `gettimeofday()`.
# struct timeval tv; gettimeofday(&tv, NULL);
# Timestamp timestamp; timestamp.set_seconds(tv.tv_sec); timestamp.set_nanos(tv.
# tv_usec * 1000);
# Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
# FILETIME ft; GetSystemTimeAsFileTime(&ft); UINT64 ticks = (((UINT64)ft.
# dwHighDateTime) << 32) | ft.dwLowDateTime;
# // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // is
# 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. Timestamp
# timestamp; timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
# timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
# Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
# long millis = System.currentTimeMillis();
# Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) .
# setNanos((int) ((millis % 1000) * 1000000)).build();
# Example 5: Compute Timestamp from current time in Python.
# timestamp = Timestamp() timestamp.GetCurrentTime()
# # JSON Mapping
# In JSON format, the Timestamp type is encoded as a string in the [RFC 3339](
# https://www.ietf.org/rfc/rfc3339.txt) format. That is, the format is "`year`-`
# month`-`day`T`hour`:`min`:`sec`[.`frac_sec`]Z" where `year` is always
# expressed using four digits while `month`, `day`, `hour`, `min`, and `sec` are
# zero-padded to two digits each. The fractional seconds, which can go up to 9
# digits (i.e. up to 1 nanosecond resolution), are optional. The "Z" suffix
# indicates the timezone ("UTC"); the timezone is required. A proto3 JSON
# serializer should always use UTC (as indicated by "Z") when printing the
# Timestamp type and a proto3 JSON parser should be able to accept both UTC and
# other timezones (as indicated by an offset).
# For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 UTC on
# January 15, 2017.
# In JavaScript, one can convert a Date object to this format using the standard
# [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/
# Reference/Global_Objects/Date/toISOString) method. In Python, a standard `
# datetime.datetime` object can be converted to this format using [`strftime`](
# https://docs.python.org/2/library/time.html#time.strftime) with the time
# format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use the Joda
# Time's [`ISODateTimeFormat.dateTime()`]( http://www.joda.org/joda-time/apidocs/
# org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D ) to obtain a
# formatter capable of generating timestamps in this format.
# Corresponds to the JSON property `creationTime`
# @return [Google::Apis::ToolresultsV1beta3::Timestamp]
attr_accessor :creation_time
# A reference to a file.
# Corresponds to the JSON property `output`
# @return [Google::Apis::ToolresultsV1beta3::FileReference]
attr_accessor :output
# A reference to a test case.
# Test case references are canonically ordered lexicographically by these three
# factors: * First, by test_suite_name. * Second, by class_name. * Third, by
# name.
# Corresponds to the JSON property `testCase`
# @return [Google::Apis::ToolresultsV1beta3::TestCaseReference]
attr_accessor :test_case
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@creation_time = args[:creation_time] if args.key?(:creation_time)
@output = args[:output] if args.key?(:output)
@test_case = args[:test_case] if args.key?(:test_case)
end
end
end
end
end
| 48.074799 | 121 | 0.639477 |
33841b5d905ecf4c31578bdef2bd26dba60356c0 | 30,006 | #
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'fileutils'
require 'open3'
require 'fluent/config'
require 'fluent/counter'
require 'fluent/env'
require 'fluent/engine'
require 'fluent/error'
require 'fluent/log'
require 'fluent/plugin'
require 'fluent/rpc'
require 'fluent/system_config'
require 'fluent/msgpack_factory'
require 'fluent/variable_store'
require 'serverengine'
if Fluent.windows?
require 'windows/library'
require 'windows/synchronize'
require 'windows/system_info'
include Windows::Library
include Windows::Synchronize
include Windows::SystemInfo
require 'win32/ipc'
require 'win32/event'
end
module Fluent
module ServerModule
def before_run
@fluentd_conf = config[:fluentd_conf]
@rpc_server = nil
@counter = nil
if config[:rpc_endpoint]
@rpc_endpoint = config[:rpc_endpoint]
@enable_get_dump = config[:enable_get_dump]
run_rpc_server
end
install_supervisor_signal_handlers
if config[:signame]
@signame = config[:signame]
install_windows_event_handler
end
if counter = config[:counter_server]
run_counter_server(counter)
end
socket_manager_path = ServerEngine::SocketManager::Server.generate_path
ServerEngine::SocketManager::Server.open(socket_manager_path)
ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = socket_manager_path.to_s
end
def after_run
stop_rpc_server if @rpc_endpoint
stop_counter_server if @counter
Fluent::Supervisor.cleanup_resources
end
def run_rpc_server
@rpc_server = RPC::Server.new(@rpc_endpoint, $log)
# built-in RPC for signals
@rpc_server.mount_proc('/api/processes.interruptWorkers') { |req, res|
$log.debug "fluentd RPC got /api/processes.interruptWorkers request"
Process.kill :INT, $$
nil
}
@rpc_server.mount_proc('/api/processes.killWorkers') { |req, res|
$log.debug "fluentd RPC got /api/processes.killWorkers request"
Process.kill :TERM, $$
nil
}
@rpc_server.mount_proc('/api/processes.flushBuffersAndKillWorkers') { |req, res|
$log.debug "fluentd RPC got /api/processes.flushBuffersAndKillWorkers request"
if Fluent.windows?
$log.warn "operation 'flushBuffersAndKillWorkers' is not supported on Windows now."
else
Process.kill :USR1, $$
Process.kill :TERM, $$
end
nil
}
@rpc_server.mount_proc('/api/plugins.flushBuffers') { |req, res|
$log.debug "fluentd RPC got /api/plugins.flushBuffers request"
unless Fluent.windows?
Process.kill :USR1, $$
end
nil
}
@rpc_server.mount_proc('/api/config.reload') { |req, res|
$log.debug "fluentd RPC got /api/config.reload request"
if Fluent.windows?
# restart worker with auto restarting by killing
kill_worker
else
Process.kill :HUP, $$
end
nil
}
@rpc_server.mount_proc('/api/config.dump') { |req, res|
$log.debug "fluentd RPC got /api/config.dump request"
$log.info "dump in-memory config"
supervisor_dump_config_handler
nil
}
@rpc_server.mount_proc('/api/config.gracefulReload') { |req, res|
$log.debug "fluentd RPC got /api/config.gracefulReload request"
unless Fluent.windows?
Process.kill :USR2, $$
end
nil
}
@rpc_server.mount_proc('/api/config.getDump') { |req, res|
$log.debug "fluentd RPC got /api/config.getDump request"
$log.info "get dump in-memory config via HTTP"
res.body = supervisor_get_dump_config_handler
[nil, nil, res]
} if @enable_get_dump
@rpc_server.start
end
def stop_rpc_server
@rpc_server.shutdown
end
def run_counter_server(counter_conf)
@counter = Fluent::Counter::Server.new(
counter_conf.scope,
{host: counter_conf.bind, port: counter_conf.port, log: $log, path: counter_conf.backup_path}
)
@counter.start
end
def stop_counter_server
@counter.stop
end
def install_supervisor_signal_handlers
trap :HUP do
$log.debug "fluentd supervisor process get SIGHUP"
supervisor_sighup_handler
end unless Fluent.windows?
trap :USR1 do
$log.debug "fluentd supervisor process get SIGUSR1"
supervisor_sigusr1_handler
end unless Fluent.windows?
trap :USR2 do
$log.debug 'fluentd supervisor process got SIGUSR2'
supervisor_sigusr2_handler
end unless Fluent.windows?
end
def install_windows_event_handler
Thread.new do
ev = Win32::Event.new(@signame)
begin
ev.reset
until WaitForSingleObject(ev.handle, 0) == WAIT_OBJECT_0
sleep 1
end
stop(true)
ensure
ev.close
end
end
end
def supervisor_sighup_handler
kill_worker
end
def supervisor_sigusr1_handler
reopen_log
send_signal_to_workers(:USR1)
end
def supervisor_sigusr2_handler
conf = nil
t = Thread.new do
$log.info 'Reloading new config'
# Validate that loading config is valid at first
conf = Fluent::Config.build(
config_path: config[:config_path],
encoding: config[:conf_encoding],
additional_config: config[:inline_config],
use_v1_config: config[:use_v1_config],
)
Fluent::VariableStore.try_to_reset do
Fluent::Engine.reload_config(conf, supervisor: true)
end
end
t.report_on_exception = false # Error is handled by myself
t.join
reopen_log
send_signal_to_workers(:USR2)
@fluentd_conf = conf.to_s
rescue => e
$log.error "Failed to reload config file: #{e}"
end
def kill_worker
if config[:worker_pid]
pids = config[:worker_pid].clone
config[:worker_pid].clear
pids.each_value do |pid|
if Fluent.windows?
Process.kill :KILL, pid
else
Process.kill :TERM, pid
end
end
end
end
def supervisor_dump_config_handler
$log.info @fluentd_conf
end
def supervisor_get_dump_config_handler
{ conf: @fluentd_conf }
end
private
def reopen_log
if (log = config[:logger_initializer])
# Creating new thread due to mutex can't lock
# in main thread during trap context
Thread.new do
log.reopen!
end
end
end
def send_signal_to_workers(signal)
return unless config[:worker_pid]
config[:worker_pid].each_value do |pid|
# don't rescue Errno::ESRCH here (invalid status)
Process.kill(signal, pid)
end
end
end
module WorkerModule
def spawn(process_manager)
main_cmd = config[:main_cmd]
env = {
'SERVERENGINE_WORKER_ID' => @worker_id.to_i.to_s,
}
@pm = process_manager.spawn(env, *main_cmd)
end
def after_start
(config[:worker_pid] ||= {})[@worker_id] = @pm.pid
end
end
class Supervisor
def self.load_config(path, params = {})
pre_loadtime = 0
pre_loadtime = params['pre_loadtime'].to_i if params['pre_loadtime']
pre_config_mtime = nil
pre_config_mtime = params['pre_config_mtime'] if params['pre_config_mtime']
config_mtime = File.mtime(path)
# reuse previous config if last load time is within 5 seconds and mtime of the config file is not changed
if Time.now - Time.at(pre_loadtime) < 5 and config_mtime == pre_config_mtime
return params['pre_conf']
end
log_level = params['log_level']
suppress_repeated_stacktrace = params['suppress_repeated_stacktrace']
ignore_repeated_log_interval = params['ignore_repeated_log_interval']
ignore_same_log_interval = params['ignore_same_log_interval']
log_path = params['log_path']
chuser = params['chuser']
chgroup = params['chgroup']
log_rotate_age = params['log_rotate_age']
log_rotate_size = params['log_rotate_size']
log_opts = {suppress_repeated_stacktrace: suppress_repeated_stacktrace, ignore_repeated_log_interval: ignore_repeated_log_interval,
ignore_same_log_interval: ignore_same_log_interval}
logger_initializer = Supervisor::LoggerInitializer.new(
log_path, log_level, chuser, chgroup, log_opts,
log_rotate_age: log_rotate_age,
log_rotate_size: log_rotate_size
)
# this #init sets initialized logger to $log
logger_initializer.init(:supervisor, 0)
logger_initializer.apply_options(format: params['log_format'], time_format: params['log_time_format'])
logger = $log
command_sender = Fluent.windows? ? "pipe" : "signal"
# ServerEngine's "daemonize" option is boolean, and path of pid file is brought by "pid_path"
pid_path = params['daemonize']
daemonize = !!params['daemonize']
se_config = {
worker_type: 'spawn',
workers: params['workers'],
log_stdin: false,
log_stdout: false,
log_stderr: false,
enable_heartbeat: true,
auto_heartbeat: false,
unrecoverable_exit_codes: [2],
stop_immediately_at_unrecoverable_exit: true,
root_dir: params['root_dir'],
logger: logger,
log: logger.out,
log_path: log_path,
log_level: log_level,
logger_initializer: logger_initializer,
chuser: chuser,
chgroup: chgroup,
chumask: 0,
suppress_repeated_stacktrace: suppress_repeated_stacktrace,
ignore_repeated_log_interval: ignore_repeated_log_interval,
ignore_same_log_interval: ignore_same_log_interval,
daemonize: daemonize,
rpc_endpoint: params['rpc_endpoint'],
counter_server: params['counter_server'],
enable_get_dump: params['enable_get_dump'],
windows_daemon_cmdline: [ServerEngine.ruby_bin_path,
File.join(File.dirname(__FILE__), 'daemon.rb'),
ServerModule.name,
WorkerModule.name,
path,
JSON.dump(params)],
command_sender: command_sender,
fluentd_conf: params['fluentd_conf'],
conf_encoding: params['conf_encoding'],
inline_config: params['inline_config'],
config_path: path,
main_cmd: params['main_cmd'],
signame: params['signame'],
}
if daemonize
se_config[:pid_path] = pid_path
end
pre_params = params.dup
params['pre_loadtime'] = Time.now.to_i
params['pre_config_mtime'] = config_mtime
params['pre_conf'] = se_config
# prevent pre_conf from being too big by reloading many times.
pre_params['pre_conf'] = nil
params['pre_conf'][:windows_daemon_cmdline][5] = JSON.dump(pre_params)
se_config
end
class LoggerInitializer
def initialize(path, level, chuser, chgroup, opts, log_rotate_age: nil, log_rotate_size: nil)
@path = path
@level = level
@chuser = chuser
@chgroup = chgroup
@opts = opts
@log_rotate_age = log_rotate_age
@log_rotate_size = log_rotate_size
end
def worker_id_suffixed_path(worker_id, path)
require 'pathname'
Pathname(path).sub_ext("-#{worker_id}#{Pathname(path).extname}").to_s
end
def init(process_type, worker_id)
@opts[:process_type] = process_type
@opts[:worker_id] = worker_id
if @path && @path != "-"
unless File.exist?(@path)
FileUtils.mkdir_p(File.dirname(@path))
end
@logdev = if @log_rotate_age || @log_rotate_size
Fluent::LogDeviceIO.new(Fluent.windows? ?
worker_id_suffixed_path(worker_id, @path) : @path,
shift_age: @log_rotate_age, shift_size: @log_rotate_size)
else
File.open(@path, "a")
end
if @chuser || @chgroup
chuid = @chuser ? ServerEngine::Privilege.get_etc_passwd(@chuser).uid : nil
chgid = @chgroup ? ServerEngine::Privilege.get_etc_group(@chgroup).gid : nil
File.chown(chuid, chgid, @path)
end
else
@logdev = STDOUT
end
dl_opts = {}
# subtract 1 to match serverengine daemon logger side logging severity.
dl_opts[:log_level] = @level - 1
logger = ServerEngine::DaemonLogger.new(@logdev, dl_opts)
$log = Fluent::Log.new(logger, @opts)
$log.enable_color(false) if @path
$log.enable_debug if @level <= Fluent::Log::LEVEL_DEBUG
end
def stdout?
@logdev == STDOUT
end
def reopen!
if @path && @path != "-"
@logdev.reopen(@path, "a")
end
self
end
def apply_options(format: nil, time_format: nil, log_dir_perm: nil, ignore_repeated_log_interval: nil, ignore_same_log_interval: nil)
$log.format = format if format
$log.time_format = time_format if time_format
$log.ignore_repeated_log_interval = ignore_repeated_log_interval if ignore_repeated_log_interval
$log.ignore_same_log_interval = ignore_same_log_interval if ignore_same_log_interval
if @path && log_dir_perm
File.chmod(log_dir_perm || 0755, File.dirname(@path))
end
end
def level=(level)
@level = level
$log.level = level
end
end
def self.default_options
{
config_path: Fluent::DEFAULT_CONFIG_PATH,
plugin_dirs: [Fluent::DEFAULT_PLUGIN_DIR],
log_level: Fluent::Log::LEVEL_INFO,
log_path: nil,
daemonize: nil,
libs: [],
setup_path: nil,
chuser: nil,
chgroup: nil,
root_dir: nil,
suppress_interval: 0,
suppress_repeated_stacktrace: true,
ignore_repeated_log_interval: nil,
without_source: nil,
use_v1_config: true,
strict_config_value: nil,
supervise: true,
standalone_worker: false,
signame: nil,
conf_encoding: 'utf-8'
}
end
def self.cleanup_resources
unless Fluent.windows?
if ENV.has_key?('SERVERENGINE_SOCKETMANAGER_PATH')
FileUtils.rm_f(ENV['SERVERENGINE_SOCKETMANAGER_PATH'])
end
end
end
def initialize(opt)
@daemonize = opt[:daemonize]
@standalone_worker= opt[:standalone_worker]
@config_path = opt[:config_path]
@inline_config = opt[:inline_config]
@use_v1_config = opt[:use_v1_config]
@conf_encoding = opt[:conf_encoding]
@log_path = opt[:log_path]
@show_plugin_config = opt[:show_plugin_config]
@libs = opt[:libs]
@plugin_dirs = opt[:plugin_dirs]
@chgroup = opt[:chgroup]
@chuser = opt[:chuser]
@log_rotate_age = opt[:log_rotate_age]
@log_rotate_size = opt[:log_rotate_size]
@signame = opt[:signame]
@cl_opt = opt
@conf = nil
log_opts = {suppress_repeated_stacktrace: opt[:suppress_repeated_stacktrace], ignore_repeated_log_interval: opt[:ignore_repeated_log_interval],
ignore_same_log_interval: opt[:ignore_same_log_interval]}
@log = LoggerInitializer.new(
@log_path, opt[:log_level], @chuser, @chgroup, log_opts,
log_rotate_age: @log_rotate_age,
log_rotate_size: @log_rotate_size
)
@finished = false
end
def run_supervisor(dry_run: false)
if dry_run
$log.info "starting fluentd-#{Fluent::VERSION} as dry run mode", ruby: RUBY_VERSION
end
if @system_config.workers < 1
raise Fluent::ConfigError, "invalid number of workers (must be > 0):#{@system_config.workers}"
end
root_dir = @system_config.root_dir
if root_dir
if File.exist?(root_dir)
unless Dir.exist?(root_dir)
raise Fluent::InvalidRootDirectory, "non directory entry exists:#{root_dir}"
end
else
begin
FileUtils.mkdir_p(root_dir, mode: @system_config.dir_permission || 0755)
rescue => e
raise Fluent::InvalidRootDirectory, "failed to create root directory:#{root_dir}, #{e.inspect}"
end
end
end
begin
ServerEngine::Privilege.change(@chuser, @chgroup)
MessagePackFactory.init(enable_time_support: @system_config.enable_msgpack_time_support)
Fluent::Engine.init(@system_config, supervisor_mode: true)
Fluent::Engine.run_configure(@conf, dry_run: dry_run)
rescue Fluent::ConfigError => e
$log.error 'config error', file: @config_path, error: e
$log.debug_backtrace
exit!(1)
end
if dry_run
$log.info 'finished dry run mode'
exit 0
else
supervise
end
end
def options
{
'config_path' => @config_path,
'pid_file' => @daemonize,
'plugin_dirs' => @plugin_dirs,
'log_path' => @log_path,
'root_dir' => @system_config.root_dir,
}
end
def run_worker
begin
require 'sigdump/setup'
rescue Exception
# ignore LoadError and others (related with signals): it may raise these errors in Windows
end
Process.setproctitle("worker:#{@system_config.process_name}") if @process_name
if @standalone_worker && @system_config.workers != 1
raise Fluent::ConfigError, "invalid number of workers (must be 1 or unspecified) with --no-supervisor: #{@system_config.workers}"
end
install_main_process_signal_handlers
# This is the only log messsage for @standalone_worker
$log.info "starting fluentd-#{Fluent::VERSION} without supervision", pid: Process.pid, ruby: RUBY_VERSION if @standalone_worker
main_process do
create_socket_manager if @standalone_worker
if @standalone_worker
ServerEngine::Privilege.change(@chuser, @chgroup)
File.umask(0)
end
MessagePackFactory.init(enable_time_support: @system_config.enable_msgpack_time_support)
Fluent::Engine.init(@system_config)
Fluent::Engine.run_configure(@conf)
Fluent::Engine.run
self.class.cleanup_resources if @standalone_worker
exit 0
end
end
def configure(supervisor: false)
if supervisor
@log.init(:supervisor, 0)
else
worker_id = ENV['SERVERENGINE_WORKER_ID'].to_i
process_type = case
when @standalone_worker then :standalone
when worker_id == 0 then :worker0
else :workers
end
@log.init(process_type, worker_id)
end
if @show_plugin_config
show_plugin_config
end
if @inline_config == '-'
$log.warn('the value "-" for `inline_config` is deprecated. See https://github.com/fluent/fluentd/issues/2711')
@inline_config = STDIN.read
end
@conf = Fluent::Config.build(config_path: @config_path, encoding: @conf_encoding, additional_config: @inline_config, use_v1_config: @use_v1_config)
@system_config = build_system_config(@conf)
@log.level = @system_config.log_level
@log.apply_options(
format: @system_config.log.format,
time_format: @system_config.log.time_format,
log_dir_perm: @system_config.dir_permission,
ignore_repeated_log_interval: @system_config.ignore_repeated_log_interval,
ignore_same_log_interval: @system_config.ignore_same_log_interval
)
$log.info :supervisor, 'parsing config file is succeeded', path: @config_path
@libs.each do |lib|
require lib
end
@plugin_dirs.each do |dir|
if Dir.exist?(dir)
dir = File.expand_path(dir)
Fluent::Plugin.add_plugin_dir(dir)
end
end
if supervisor
# plugins / configuration dumps
Gem::Specification.find_all.select { |x| x.name =~ /^fluent(d|-(plugin|mixin)-.*)$/ }.each do |spec|
$log.info("gem '#{spec.name}' version '#{spec.version}'")
end
end
end
private
def create_socket_manager
socket_manager_path = ServerEngine::SocketManager::Server.generate_path
ServerEngine::SocketManager::Server.open(socket_manager_path)
ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = socket_manager_path.to_s
end
def show_plugin_config
name, type = @show_plugin_config.split(":") # input:tail
$log.info "show_plugin_config option is deprecated. Use fluent-plugin-config-format --format=txt #{name} #{type}"
exit 0
end
def supervise
Process.setproctitle("supervisor:#{@system_config.process_name}") if @system_config.process_name
$log.info "starting fluentd-#{Fluent::VERSION}", pid: Process.pid, ruby: RUBY_VERSION
fluentd_spawn_cmd = build_spawn_command
$log.info "spawn command to main: ", cmdline: fluentd_spawn_cmd
params = {
'main_cmd' => fluentd_spawn_cmd,
'daemonize' => @daemonize,
'inline_config' => @inline_config,
'log_path' => @log_path,
'log_rotate_age' => @log_rotate_age,
'log_rotate_size' => @log_rotate_size,
'chuser' => @chuser,
'chgroup' => @chgroup,
'use_v1_config' => @use_v1_config,
'conf_encoding' => @conf_encoding,
'signame' => @signame,
'fluentd_conf' => @conf.to_s,
'workers' => @system_config.workers,
'root_dir' => @system_config.root_dir,
'log_level' => @system_config.log_level,
'suppress_repeated_stacktrace' => @system_config.suppress_repeated_stacktrace,
'ignore_repeated_log_interval' => @system_config.ignore_repeated_log_interval,
'rpc_endpoint' => @system_config.rpc_endpoint,
'enable_get_dump' => @system_config.enable_get_dump,
'counter_server' => @system_config.counter_server,
'log_format' => @system_config.log.format,
'log_time_format' => @system_config.log.time_format,
}
se = ServerEngine.create(ServerModule, WorkerModule){
Fluent::Supervisor.load_config(@config_path, params)
}
se.run
end
# Installs all signal handlers used by the Fluentd worker process.
#
# Fluentd worker process (worker of ServerEngine) doesn't use code in
# serverengine to set signal handlers, because it does almost nothing.
# This method is the only place signal handlers are set in the worker.
def install_main_process_signal_handlers
  # When the user presses Ctrl + C, SIGINT is sent to every process in the
  # same process group. The ServerEngine server process will send SIGTERM to
  # child (spawned) processes in response to that SIGINT, so the worker
  # SHOULD NOT do anything with SIGINT itself — it should just ignore it.
  trap :INT do
    $log.debug "fluentd main process get SIGINT"

    # When Fluentd is launched without supervisor, worker should handle ctrl-c by itself
    if @standalone_worker
      @finished = true
      $log.debug "getting start to shutdown main process"
      Fluent::Engine.stop
    end
  end

  trap :TERM do
    $log.debug "fluentd main process get SIGTERM"
    unless @finished
      @finished = true
      $log.debug "getting start to shutdown main process"
      Fluent::Engine.stop
    end
  end

  # USR1 = flush buffers, USR2 = live config reload. Neither signal exists
  # on Windows, where the supervisor sends commands over STDIN instead.
  trap :USR1 do
    flush_buffer
  end unless Fluent.windows?

  trap :USR2 do
    reload_config
  end unless Fluent.windows?

  if Fluent.windows?
    command_pipe = STDIN.dup
    STDIN.reopen(File::NULL, "rb")
    command_pipe.binmode
    command_pipe.sync = true

    Thread.new do
      loop do
        # FIX: IO#gets returns nil at EOF (e.g. the supervisor closed the
        # pipe); the original called `.chomp` directly on that nil and the
        # command thread died with NoMethodError. Exit the loop instead.
        raw = command_pipe.gets
        break if raw.nil?

        cmd = raw.chomp
        case cmd
        when "GRACEFUL_STOP", "IMMEDIATE_STOP"
          $log.debug "fluentd main process get #{cmd} command"
          @finished = true
          $log.debug "getting start to shutdown main process"
          Fluent::Engine.stop
          break
        else
          $log.warn "fluentd main process get unknown command [#{cmd}]"
        end
      end
    end
  end
end
# SIGUSR1 handler body: force-flush all buffered events.
def flush_buffer
  # Creating new thread due to mutex can't lock
  # in main thread during trap context
  Thread.new do
    begin
      $log.debug "fluentd main process get SIGUSR1"
      $log.info "force flushing buffered events"
      # Reopen the log first so flush output goes to the current
      # (possibly rotated) log file.
      @log.reopen!
      Fluent::Engine.flush!
      $log.debug "flushing thread: flushed"
    rescue Exception => e
      # Deliberately broad rescue: a failed flush must never take the
      # worker down, only emit a warning.
      $log.warn "flushing thread error: #{e}"
    end
  end
end
# SIGUSR2 handler body: rebuild the configuration from disk and apply it
# to the running engine without restarting the worker process.
def reload_config
  # Runs in a separate thread because this is invoked from a trap context,
  # where taking locks (config building, logging) is not allowed.
  Thread.new do
    $log.debug('worker got SIGUSR2')

    begin
      conf = Fluent::Config.build(
        config_path: @config_path,
        encoding: @conf_encoding,
        additional_config: @inline_config,
        use_v1_config: @use_v1_config,
      )

      Fluent::VariableStore.try_to_reset do
        Fluent::Engine.reload_config(conf)
      end
    rescue => e
      # The supervisor validates the config file before signalling, but the
      # signal-based handshake between supervisor and worker is not atomic,
      # so the reload can still fail here — log and keep the old config.
      $log.error("failed to reload config: #{e}")
      next
    end

    # Only remember the new config once the engine has accepted it.
    @conf = conf
  end
end
# Yields the primary logger and, when the main log is not already writing
# to STDOUT, additionally yields a debug-level console logger so that
# fatal startup errors are visible on the terminal as well.
def logging_with_console_output
  yield $log
  return if @log.stdout?

  console_logger = Fluent::Log.new(ServerEngine::DaemonLogger.new(STDOUT))
  console_logger.level = @system_config.log_level
  yield console_logger.enable_debug
end
def main_process(&block)
if @system_config.process_name
if @system_config.workers > 1
Process.setproctitle("worker:#{@system_config.process_name}#{ENV['SERVERENGINE_WORKER_ID']}")
else
Process.setproctitle("worker:#{@system_config.process_name}")
end
end
unrecoverable_error = false
begin
block.call
rescue Fluent::ConfigError => e
logging_with_console_output do |log|
log.error "config error", file: @config_path, error: e
log.debug_backtrace
end
unrecoverable_error = true
rescue Fluent::UnrecoverableError => e
logging_with_console_output do |log|
log.error e.message, error: e
log.error_backtrace
end
unrecoverable_error = true
rescue ScriptError => e # LoadError, NotImplementedError, SyntaxError
logging_with_console_output do |log|
if e.respond_to?(:path)
log.error e.message, path: e.path, error: e
else
log.error e.message, error: e
end
log.error_backtrace
end
unrecoverable_error = true
rescue => e
logging_with_console_output do |log|
log.error "unexpected error", error: e
log.error_backtrace
end
end
exit!(unrecoverable_error ? 2 : 1)
end
# Builds the effective SystemConfig by merging the <system> section of the
# parsed configuration with explicit command-line overrides (CLI wins,
# except for the implicit log_level default).
def build_system_config(conf)
  system_config = SystemConfig.create(conf, @cl_opt[:strict_config_value])

  overrides = {}
  Fluent::SystemConfig::SYSTEM_CONFIG_PARAMETERS.each do |param|
    next unless @cl_opt.key?(param) && !@cl_opt[param].nil?
    # info level can't be specified via command line option, so when
    # log_level is info it is the default value and the <system> section's
    # log_level should be applied instead if present.
    next if param == :log_level && @cl_opt[:log_level] == Fluent::Log::LEVEL_INFO

    overrides[param] = @cl_opt[param]
  end

  system_config.overwrite_variables(**overrides)
  system_config
end
# Ruby CLI switches that select encodings; used by build_spawn_command to
# separate encoding options out of RUBYOPT.
RUBY_ENCODING_OPTIONS_REGEX = %r{\A(-E|--encoding=|--internal-encoding=|--external-encoding=)}.freeze
# Assembles the argv used to re-spawn fluentd under the supervisor:
# the current Ruby, a sanitized RUBYOPT, this script, and the original
# command-line arguments plus the --under-supervisor marker.
def build_spawn_command
  fluentd_spawn_cmd = [ENV['TEST_RUBY_PATH'] || ServerEngine.ruby_bin_path]

  rubyopt = ENV['RUBYOPT']
  if rubyopt
    encoding_opts, other_opts = rubyopt.split(' ').partition { |opt| opt.match?(RUBY_ENCODING_OPTIONS_REGEX) }
    fluentd_spawn_cmd.concat(other_opts)
    # Default to binary encoding unless RUBYOPT already pins one.
    fluentd_spawn_cmd.concat(encoding_opts.empty? ? ['-Eascii-8bit:ascii-8bit'] : encoding_opts)
  else
    fluentd_spawn_cmd << '-Eascii-8bit:ascii-8bit'
  end

  # Adding `-h` so that it can avoid ruby's command blocking
  # e.g. `ruby -Eascii-8bit:ascii-8bit` will block. but `ruby -Eascii-8bit:ascii-8bit -h` won't.
  _stdout, stderr_str, status = Open3.capture3(*fluentd_spawn_cmd, "-h")
  if status.exitstatus != 0
    $log.error('Invalid option is passed to RUBYOPT', command: fluentd_spawn_cmd, error: stderr_str)
    exit status.exitstatus
  end

  fluentd_spawn_cmd << $0
  fluentd_spawn_cmd += $fluentdargv
  fluentd_spawn_cmd << '--under-supervisor'

  fluentd_spawn_cmd
end
end
end
| 32.195279 | 158 | 0.629841 |
916c827e7b76dfcd02eb7963ef479dd2d965bba4 | 1,133 | class Asciinema < Formula
desc "Record and share terminal sessions"
homepage "https://asciinema.org"
url "https://github.com/asciinema/asciinema/archive/v2.0.1.tar.gz"
sha256 "7087b247dae36d04821197bc14ebd4248049592b299c9878d8953c025ac802e4"
head "https://github.com/asciinema/asciinema.git"
bottle do
cellar :any_skip_relocation
sha256 "e312010e2476d8e9544a6f7e11290f6188a51b71b4da5afafb043cb4655c9fcd" => :high_sierra
sha256 "e312010e2476d8e9544a6f7e11290f6188a51b71b4da5afafb043cb4655c9fcd" => :sierra
sha256 "e312010e2476d8e9544a6f7e11290f6188a51b71b4da5afafb043cb4655c9fcd" => :el_capitan
end
depends_on "python"
def install
  # Install into the formula's private libexec prefix so the Python
  # package does not pollute the global site-packages.
  xy = Language::Python.major_minor_version "python3"
  ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python#{xy}/site-packages"
  system "python3", *Language::Python.setup_install_args(libexec)
  bin.install Dir[libexec/"bin/*"]
  # Wrap the installed scripts so they see the libexec PYTHONPATH at runtime.
  bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
  # asciinema refuses to start without a UTF-8 locale.
  ENV["LC_ALL"] = "en_US.UTF-8"
  system "#{bin}/asciinema", "--version"
  system "#{bin}/asciinema", "--help"
end
end
| 35.40625 | 93 | 0.753751 |
1d49fb416e3e416f7e0eaa40b0dd8f6a704c413a | 76 | module Elasticsearch
module Transport
  # Gem version string for the elasticsearch-transport library.
  VERSION = '7.0.0.pre'
end
end
| 12.666667 | 25 | 0.697368 |
ac4e5118ac6bcaca2e30366929e6e3badfaf2bdf | 878 | RSpec.describe Genius::Search::Tracks do
subject { described_class }
describe 'successful processing' do
  context 'when query present' do
    let(:output) do
      # Replays the recorded Genius API response for this query via VCR.
      VCR.use_cassette 'services/genius/search/tracks/success' do
        subject.call(query: 'molly nilsson', limit: 5, page: 2, profile_id: 1)
      end
    end

    it { expect(output).to eq(Helpers::Genius::Search.tracks_data) }
  end
end

describe 'no processing' do
  context 'when no query given' do
    let(:output) { subject.call }

    it { expect(output).to eq(Helpers::Base.bad_request_error) }
  end

  context 'when wrong query' do
    let(:output) do
      # `random` comes from the spec helpers and yields a nonsense query
      # that the recorded cassette answers with a 404.
      VCR.use_cassette 'services/genius/search/tracks/wrong_query' do
        subject.call(query: random)
      end
    end

    it { expect(output).to eq(Helpers::Base.not_found_error) }
  end
end
| 25.823529 | 80 | 0.644647 |
e2a87758b7cfefc5d169cabd5713752533320675 | 279 | module Cyr
module Views
  # Since editing and selecting are the same interaction, just different
  # flow states, the select state only needs a class whose name matches
  # the flow state; it inherits the edit state's behavior wholesale.
  class SelectCompany < Cyr::Views::EditCompany; end
end
end
| 25.363636 | 76 | 0.724014 |
214d7eec1ccdbd9f1ea9aad72abcb9875dc02a71 | 2,302 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Portfolio
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Ensure that Rails throws errors on invalid mass assignment.
    # REMOVED DUE TO UPGRADE TO 4.0
    # config.active_record.whitelist_attributes = true

    # Change form field error display: wrap invalid fields in a span instead
    # of Rails' default wrapping div, which breaks inline form layouts.
    config.action_view.field_error_proc = Proc.new { |html_tag, instance| ActionController::Base.helpers.content_tag(:span, html_tag, class: "field_with_errors") }
  end
end
| 45.137255 | 163 | 0.725891 |
916dbffe7e528c2e134d0750f3a0a32d4099510f | 18,086 | #
# Copyright:: Copyright (c) Chef Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../resource"
require "chef-utils/dist" unless defined?(ChefUtils::Dist)
class Chef
class Resource
class ChefClientConfig < Chef::Resource
provides :chef_client_config
description "Use the **chef_client_config** resource to create a client.rb file in the #{ChefUtils::Dist::Infra::PRODUCT} configuration directory. See the [client.rb docs](https://docs.chef.io/config_rb_client/) for more details on options available in the client.rb configuration file."
introduced "16.6"
examples <<~DOC
**Bare minimum #{ChefUtils::Dist::Infra::PRODUCT} client.rb**:
The absolute minimum configuration necessary for a node to communicate with the #{ChefUtils::Dist::Server::PRODUCT} is the URL of the #{ChefUtils::Dist::Server::PRODUCT}. All other configuration options either have values at the server side (Policyfiles, Roles, Environments, etc) or have default values determined at client startup.
```ruby
chef_client_config 'Create client.rb' do
chef_server_url 'https://chef.example.dmz'
end
```
**More complex #{ChefUtils::Dist::Infra::PRODUCT} client.rb**:
```ruby
chef_client_config 'Create client.rb' do
chef_server_url 'https://chef.example.dmz'
log_level :info
log_location :syslog
http_proxy 'proxy.example.dmz'
https_proxy 'proxy.example.dmz'
no_proxy %w(internal.example.dmz)
end
```
**Adding additional config content to the client.rb**:
This resource aims to provide common configuration options. Some configuration options are missing and some users may want to use arbitrary Ruby code within their configuration. For this we offer an `additional_config` property that can be used to add any configuration or code to the bottom of the `client.rb` file. Also keep in mind that within the configuration directory is a `client.d` directory where you can put additional `.rb` files containing configuration options. These can be created using `file` or `template` resources within your cookbooks as necessary.
```ruby
chef_client_config 'Create client.rb' do
chef_server_url 'https://chef.example.dmz'
additional_config <<~CONFIG
# Extra config code to safely load a gem into the client run.
# Since the config is Ruby you can run any Ruby code you want via the client.rb.
# It's a great way to break things, so be careful
begin
require 'aws-sdk'
rescue LoadError
Chef::Log.warn "Failed to load aws-sdk."
end
CONFIG
end
```
**Setup two report handlers in the client.rb**:
```ruby
chef_client_config 'Create client.rb' do
chef_server_url 'https://chef.example.dmz'
report_handlers [
{
'class' => 'ReportHandler1Class',
'arguments' => ["'FirstArgument'", "'SecondArgument'"],
},
{
'class' => 'ReportHandler2Class',
'arguments' => ["'FirstArgument'", "'SecondArgument'"],
},
]
end
```
**Report directly to the [Chef Automate data collector endpoint](/automate/data_collection/#configure-chef-infra-client-to-use-the-data-collector-endpoint-in-chef-automate).**
```ruby
chef_client_config 'Create client.rb' do
chef_server_url 'https://chef.example.dmz'
data_collector_server_url 'https://automate.example.dmz'
data_collector_token 'TEST_TOKEN_TEST'
end
```
DOC
# @todo policy_file or policy_group being set requires the other to be set so enforce that.
# @todo all properties for automate report
# @todo add all descriptions
# @todo validate handler hash structure
#
# @param [String, Symbol] prop_val the value from the property
#
# @return [Symbol] The symbol form of the symbol-like string, string, or symbol value
#
# Normalizes a property value into a Symbol, accepting a Symbol, a plain
# String, or a symbol-like String with a leading colon (":verify_peer").
#
# @param [String, Symbol] prop_val the value from the property
#
# @return [Symbol] the symbol form of the value
#
def string_to_symbol(prop_val)
  if prop_val.is_a?(String) && prop_val.start_with?(":")
    prop_val.delete_prefix(":").to_sym
  else
    prop_val.to_sym
  end
end
property :config_directory, String,
description: "The directory to store the client.rb in.",
default: ChefConfig::Config.etc_chef_dir,
default_description: "`/etc/chef/` on *nix-like systems and `C:\\chef\\` on Windows"
property :user, String,
description: "The user that should own the client.rb file and the configuration directory if it needs to be created. Note: The configuration directory will not be created if it already exists, which allows you to further control the setup of that directory outside of this resource."
property :group, String,
description: "The group that should own the client.rb file and the configuration directory if it needs to be created. Note: The configuration directory will not be created if it already exists, which allows you to further control the setup of that directory outside of this resource."
property :node_name, [String, NilClass], # this accepts nil so people can disable the default
description: "The name of the node. This configuration sets the `node.name` value used in cookbooks and the `client_name` value used when authenticating to a #{ChefUtils::Dist::Server::PRODUCT} to determine what configuration to apply. Note: By default this configuration uses the `node.name` value which would be set during bootstrap. Hard coding this value in the `client.rb` config avoids logic within #{ChefUtils::Dist::Server::PRODUCT} that performs DNS lookups and may fail in the event of a DNS outage. To skip this default value and instead use the built-in #{ChefUtils::Dist::Server::PRODUCT} logic, set this property to `nil`",
default: lazy { node.name },
default_description: "The `node.name` value reported by #{ChefUtils::Dist::Infra::PRODUCT}."
property :chef_server_url, String,
description: "The URL for the #{ChefUtils::Dist::Server::PRODUCT}.",
required: true
# @todo Allow passing this as a string and convert it to the symbol
property :ssl_verify_mode, [Symbol, String],
equal_to: %i{verify_none verify_peer},
coerce: proc { |x| string_to_symbol(x) },
description: <<~DESC
Set the verify mode for HTTPS requests.
* Use :verify_none for no validation of SSL certificates.
* Use :verify_peer for validation of all SSL certificates, including the #{ChefUtils::Dist::Server::PRODUCT} connections, S3 connections, and any HTTPS remote_file resource URLs used in #{ChefUtils::Dist::Infra::PRODUCT} runs. This is the recommended setting.
DESC
property :formatters, Array,
description: "Client logging formatters to load.",
default: []
property :event_loggers, Array,
description: "",
default: []
property :log_level, Symbol,
description: "The level of logging performed by the #{ChefUtils::Dist::Infra::PRODUCT}.",
equal_to: %i{auto trace debug info warn fatal}
property :log_location, [String, Symbol],
description: "The location to save logs to. This can either by a path to a log file on disk `:syslog` to log to Syslog, `:win_evt` to log to the Windows Event Log, or `'STDERR'`/`'STDOUT'` to log to the *nix text streams.",
callbacks: {
"accepts Symbol values of ':win_evt' for Windows Event Log or ':syslog' for Syslog" => lambda { |p|
p.is_a?(Symbol) ? %i{win_evt syslog}.include?(p) : true
},
}
property :http_proxy, String,
description: "The proxy server to use for HTTP connections."
property :https_proxy, String,
description: "The proxy server to use for HTTPS connections."
property :ftp_proxy, String,
description: "The proxy server to use for FTP connections."
property :no_proxy, [String, Array],
description: "A comma-separated list or an array of URLs that do not need a proxy.",
coerce: proc { |x| x.is_a?(Array) ? x.join(",") : x },
default: []
# @todo we need to fixup bad plugin naming inputs here
property :ohai_disabled_plugins, Array,
description: "Ohai plugins that should be disabled in order to speed up the #{ChefUtils::Dist::Infra::PRODUCT} run and reduce the size of node data sent to #{ChefUtils::Dist::Infra::PRODUCT}",
coerce: proc { |x| x.map { |v| string_to_symbol(v).capitalize } },
default: []
# @todo we need to fixup bad plugin naming inputs here
property :ohai_optional_plugins, Array,
description: "Optional Ohai plugins that should be enabled to provide additional Ohai data for use in cookbooks.",
coerce: proc { |x| x.map { |v| string_to_symbol(v).capitalize } },
default: []
property :policy_persist_run_list, [true, false],
description: "Override run lists defined in a Policyfile with the `run_list` defined on the #{ChefUtils::Dist::Server::PRODUCT}.",
introduced: "17.3"
property :minimal_ohai, [true, false],
description: "Run a minimal set of Ohai plugins providing data necessary for the execution of #{ChefUtils::Dist::Infra::PRODUCT}'s built-in resources. Setting this to true will skip many large and time consuming data sets such as `cloud` or `packages`. Setting this this to true may break cookbooks that assume all Ohai data will be present."
property :start_handlers, Array,
description: %q(An array of hashes that contain a report handler class and the arguments to pass to that class on initialization. The hash should include `class` and `argument` keys where `class` is a String and `argument` is an array of quoted String values. For example: `[{'class' => 'MyHandler', %w('"argument1"', '"argument2"')}]`),
default: []
property :report_handlers, Array,
description: %q(An array of hashes that contain a report handler class and the arguments to pass to that class on initialization. The hash should include `class` and `argument` keys where `class` is a String and `argument` is an array of quoted String values. For example: `[{'class' => 'MyHandler', %w('"argument1"', '"argument2"')}]`),
default: []
property :rubygems_url, [String, Array],
description: "The location to source rubygems. It can be set to a string or array of strings for URIs to set as rubygems sources. This allows individuals to setup an internal mirror of rubygems for “airgapped” environments.",
introduced: "17.11"
property :exception_handlers, Array,
description: %q(An array of hashes that contain a exception handler class and the arguments to pass to that class on initialization. The hash should include `class` and `argument` keys where `class` is a String and `argument` is an array of quoted String values. For example: `[{'class' => 'MyHandler', %w('"argument1"', '"argument2"')}]`),
default: []
property :chef_license, String,
description: "Accept the [Chef EULA](https://www.chef.io/end-user-license-agreement/)",
equal_to: %w{accept accept-no-persist accept-silent}
property :policy_name, String,
description: "The name of a policy, as identified by the `name` setting in a Policyfile.rb file. `policy_group` when setting this property."
property :policy_group, String,
description: "The name of a `policy group` that exists on the #{ChefUtils::Dist::Server::PRODUCT}. `policy_name` must also be specified when setting this property."
property :named_run_list, String,
description: "A specific named runlist defined in the node's applied Policyfile, which the should be used when running #{ChefUtils::Dist::Infra::PRODUCT}."
property :pid_file, String,
description: "The location in which a process identification number (pid) is saved. An executable, when started as a daemon, writes the pid to the specified file. "
property :file_cache_path, String,
description: "The location in which cookbooks (and other transient data) files are stored when they are synchronized. This value can also be used in recipes to download files with the `remote_file` resource."
property :file_backup_path, String,
description: "The location in which backup files are stored. If this value is empty, backup files are stored in the directory of the target file"
property :file_staging_uses_destdir, String,
description: "How file staging (via temporary files) is done. When `true`, temporary files are created in the directory in which files will reside. When `false`, temporary files are created under `ENV['TMP']`"
property :additional_config, String,
description: "Additional text to add at the bottom of the client.rb config. This can be used to run custom Ruby or to add less common config options"
property :data_collector_server_url, String,
description: "The data collector URL (typically automate) to send node, converge, and compliance data. Note: If possible, use Chef Infra Server to do all data collection reporting, as this removes the need to distribute tokens to individual nodes.",
introduced: "17.8"
property :data_collector_token, String,
description: "The data collector token to interact with the data collector server URL (Automate). Note: If possible, use Chef Infra Server to do all data collection reporting, as this removes the need to distribute tokens to individual nodes.",
introduced: "17.8"
action :create, description: "Create a client.rb config file and folders for configuring #{ChefUtils::Dist::Infra::PRODUCT}." do
  # Parent directory of the log file, when log_location is a file path.
  # FIX: the original evaluated ::File.dirname(new_resource.log_location)
  # inside the `mode` expression below for every directory, which raised
  # TypeError whenever log_location was unset (nil) or a Symbol such as
  # :syslog / :win_evt. Compute it once, only for String paths.
  log_dir = ::File.dirname(new_resource.log_location) if new_resource.log_location.is_a?(String)

  # Create every directory the client needs; nil entries (unset optional
  # paths) are compacted away.
  [
    new_resource.config_directory,
    log_dir,
    new_resource.file_backup_path,
    new_resource.file_cache_path,
    ::File.join(new_resource.config_directory, "client.d"),
    (::File.dirname(new_resource.pid_file) unless new_resource.pid_file.nil?),
  ].compact.each do |dir_path|
    directory dir_path do
      user new_resource.user unless new_resource.user.nil?
      group new_resource.group unless new_resource.group.nil?
      # The log directory is world-traversable (0755); everything else is
      # locked down to owner/group (0750).
      mode dir_path == log_dir ? "0755" : "0750"
      recursive true
    end
  end

  # Render the client.rb from the bundled template, exposing each resource
  # property to the ERB. Handler arrays are pre-formatted into constructor
  # call strings by format_handler.
  template ::File.join(new_resource.config_directory, "client.rb") do
    source ::File.expand_path("support/client.erb", __dir__)
    user new_resource.user unless new_resource.user.nil?
    group new_resource.group unless new_resource.group.nil?
    local true
    variables(
      chef_license: new_resource.chef_license,
      chef_server_url: new_resource.chef_server_url,
      event_loggers: new_resource.event_loggers,
      exception_handlers: format_handler(new_resource.exception_handlers),
      file_backup_path: new_resource.file_backup_path,
      file_cache_path: new_resource.file_cache_path,
      file_staging_uses_destdir: new_resource.file_staging_uses_destdir,
      formatters: new_resource.formatters,
      http_proxy: new_resource.http_proxy,
      https_proxy: new_resource.https_proxy,
      ftp_proxy: new_resource.ftp_proxy,
      log_level: new_resource.log_level,
      log_location: new_resource.log_location,
      minimal_ohai: new_resource.minimal_ohai,
      named_run_list: new_resource.named_run_list,
      no_proxy: new_resource.no_proxy,
      node_name: new_resource.node_name,
      ohai_disabled_plugins: new_resource.ohai_disabled_plugins,
      ohai_optional_plugins: new_resource.ohai_optional_plugins,
      pid_file: new_resource.pid_file,
      policy_group: new_resource.policy_group,
      policy_name: new_resource.policy_name,
      report_handlers: format_handler(new_resource.report_handlers),
      rubygems_url: new_resource.rubygems_url,
      ssl_verify_mode: new_resource.ssl_verify_mode,
      start_handlers: format_handler(new_resource.start_handlers),
      additional_config: new_resource.additional_config,
      policy_persist_run_list: new_resource.policy_persist_run_list,
      data_collector_server_url: new_resource.data_collector_server_url,
      data_collector_token: new_resource.data_collector_token
    )
    mode "0640"
    action :create
  end
end
action :remove, description: "Remove a client.rb config file for configuring #{ChefUtils::Dist::Infra::PRODUCT}." do
  # Only the rendered client.rb is deleted; the configuration directories
  # and any client.d drop-in files are intentionally left in place.
  file ::File.join(new_resource.config_directory, "client.rb") do
    action :delete
  end
end
action_class do
#
# Renders each handler declaration as the Ruby constructor call that will
# be written into client.rb, e.g.
# {"class" => "MyHandler", "arguments" => ["'a'"]} becomes "MyHandler.new('a')".
#
# @param [Array<Hash>] handler_property handler declarations from a property
#
# @return [Array<String>] constructor call snippets for the template
#
def format_handler(handler_property)
  handler_property.map do |handler|
    "#{handler["class"]}.new(#{handler["arguments"].join(",")})"
  end
end
end
end
end
end
| 52.883041 | 645 | 0.685005 |
01bb4be74d7b63c8604e7de0b888e3146842922c | 2,283 | #!/usr/bin/env ruby
# ANSI color helpers used by this release script's console output.
class String
  # Returns the string wrapped in bright-red ANSI escape codes.
  def brightRed
    "\033[1;31m#{self}\033[0m"
  end

  # Returns the string wrapped in bright-green ANSI escape codes.
  def brightGreen
    "\033[1;32m#{self}\033[0m"
  end
end
# Echoes +command+ in green, optionally prompts for confirmation when the
# global $step flag is on, then runs it with backticks and prints its
# output. Aborts the whole script with the command's exit status when the
# command fails.
def doCommand( command )
  puts command.brightGreen
  if $step
    do_it = false
    print "Execute? [y]es, [n]o, yes to [a]ll "; $stdout.flush
    input = $stdin.gets.strip.downcase
    case input
    when 'y'
      do_it = true
    when 'a'
      do_it = true
      $step = false # stop prompting for the remaining commands
    end
  else
    do_it = true
  end
  if do_it
    puts `#{command}`
    if !$?.nil? && $?.exitstatus > 0
      # FIX: interpolate the numeric exit status (the whole Process::Status
      # prints as "pid NNN exit N", not an exit code) ...
      puts "'#{command}' failed with exit code #{$?.exitstatus}".brightRed
      # FIX: ... and pass an Integer to exit — `exit $?` handed Kernel#exit
      # a Process::Status, which raises TypeError instead of exiting.
      exit $?.exitstatus
    end
  else
    puts "(skipping)"
  end
end
# Prints the command-line usage of this release script ($0) to stdout.
def printUsage
  puts "#{$0} <version number> [--work-dir <dir>] [--step]"
end
# --------------- main script ---------------

if ARGV.length < 1
  printUsage
  exit 1
end

# Defaults; overridable from the command line below.
version = nil
work_dir = '/misc/pistos/unpack' # NOTE(review): parsed but never used below — confirm intent
scp_port = 22
$step = false

# Hand-rolled argument parsing: any non-flag argument is the version.
args = ARGV.dup
while args.length > 0
  arg = args.shift
  case arg
  when '-h', '--help'
    printUsage
    exit 1
  when '-p', '-P', '--port'
    scp_port = args.shift
  when '--step'
    $step = true
  when '--work-dir'
    work_dir = args.shift
  else
    version = arg
  end
end

# Tag the release (reusing the tag if it already exists) and export
# bzip2/gzip tarballs from it.
puts "git tag and export..."
doCommand "git co v#{version} || git tag -a v#{version} -m 'Tagged Diakonos version #{version}.'"
doCommand "git archive --format=tar --prefix=diakonos-#{version}/ refs/tags/v#{version} | bzip2 > diakonos-#{version}.tar.bz2"
doCommand "git archive --format=tar --prefix=diakonos-#{version}/ refs/tags/v#{version} | gzip > diakonos-#{version}.tar.gz"

puts "MD5 sums:"
doCommand( "md5sum diakonos-#{version}.tar.gz" )
doCommand( "md5sum diakonos-#{version}.tar.bz2" )

# Upload the artifacts and docs to the project website.
puts "Copying files to website..."
doCommand( "scp -P #{scp_port} diakonos-#{version}.tar.bz2 diakonos-#{version}.tar.gz CHANGELOG README.rdoc [email protected]:/var/www/diakonos.pist0s.ca/archives" )

puts "Release complete."
puts
puts "Release/announce on sites:"
puts "1) diakonos.pist0s.ca"
puts "2) rubyforge.org"
puts "3) RAA"
puts "4) http://en.wikipedia.org/wiki/Diakonos"
e206bf8749f205fba7d272b8851a8cca3aaafd19 | 792 | require 'field_def'
require 'type_name_utils'
# Represents the definition of a field that is based on an NHibernate User-defined type mapper
class UserTypeFieldDef < FieldDef
  # The .NET type name extracted from the hbm mapping's `type` attribute.
  attr_reader :dataType

  def initialize(model, fieldNode)
    super(model, fieldNode)
    @dataType = TypeNameUtils.getTypeNameFromHbm(fieldNode.attributes['type'])
  end

  # Field-kind discriminator used by the code generator.
  def kind
    :userType
  end

  # C# expression used to initialize a fresh instance of this field's type.
  def initialValue
    "new #{dataType}()"
  end

  # A usertype field is mandatory when the mapped column is not nullable.
  # NOTE(review): relies on FieldDef#nullable — confirm it is defined there.
  def isMandatory
    !nullable
  end

  # Usertypes are not searchable unless a corresponding C# SearchCriteria
  # object is defined; none is wired up, so this is always false.
  def isSearchable
    false
  end
end
| 22 | 99 | 0.690657 |
b90c2fc0e28ef8f18af777996a773b8d753b0021 | 432 | class ApplicationController < ActionController::Base
helper_method :p_logged_in?,:s_logged_in?, :current_user
# True when a professor id is present in the session; the double-bang
# coerces the stored id (or nil) to a strict boolean.
def p_logged_in?
  !!session[:professor_id]
end

# True when a student id is present in the session.
def s_logged_in?
  !!session[:student_id]
end
# Returns the logged-in Professor or Student record, or nil when neither
# id is in the session. Professor sessions take precedence if both exist.
def current_user
  if p_logged_in?
    Professor.find(session[:professor_id])
  elsif s_logged_in?
    Student.find(session[:student_id])
  end
end
end
| 16.615385 | 58 | 0.696759 |
032eafbc73aed6a207582135f02f91c1c281c9da | 275 | module Gitlab
module SlashCommands
  module Presenters
    # Presents a plain error message for a slash-command reply.
    class Error < Presenters::Base
      # @param message [String] the text shown to the invoking user
      def initialize(message)
        @message = message
      end

      # Builds an ephemeral response, visible only to the sender.
      def message
        ephemeral_response(text: @message)
      end
    end
  end
end
end
| 17.1875 | 44 | 0.589091 |
010528e6aea052f66b1e6333d3cc31ef8ceeabba | 1,588 | # frozen_string_literal: true
# Application user: a Patient, Doctor, or Admin depending on the associated
# Role. Authentication is handled by Devise, keyed on mobile number.
class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :trackable, :validatable

  belongs_to :role, optional: true
  belongs_to :category, optional: true

  # Visits where this user is the patient / the doctor, respectively.
  has_many :created_visits, class_name: 'Visit', foreign_key: 'pat_id'
  has_many :requested_visits, class_name: 'Visit', foreign_key: 'doc_id'

  validates :fullname, presence: true, uniqueness: true
  validates :email, uniqueness: true
  validates :mobile_no, uniqueness: true

  before_save :assign_role

  # Devise hook: authenticate users by mobile number instead of email.
  # NOTE(review): other warden conditions are ignored — confirm intended.
  def self.find_first_by_auth_conditions(warden_conditions)
    conditions = warden_conditions.dup
    where(mobile_no: conditions[:mobile_no]).first
  end

  # Stop using email as authentication key.
  def email_required?
    false
  end

  def email_changed?
    false
  end

  def will_save_change_to_email?
    false
  end

  # Defaults new records to the 'Patient' role unless one was assigned.
  def assign_role
    self.role = Role.find_by name: 'Patient' if role.nil?
  end

  def show_user_profile; end

  def admin?
    role.name == 'Admin'
  end

  def doctor?
    role.name == 'Doctor'
  end

  def patient?
    role.name == 'Patient'
  end

  # NOTE(review): collection_select is an ActionView helper, not available
  # on models, and its String result has no #gets — this method raises at
  # runtime as written. Left untouched pending clarification of the
  # intended console flow.
  def choose_category
    collection_select(
      :user, :category_id, Category.all, :id, :speciality, { prompt: true }
    ).gets.chomp.to_i
  end

  # Reads a category id from stdin and iterates the doctors in it.
  # FIX: the original `User.where category_id gets.chomp.to_i` parsed as a
  # call to an undefined `category_id` method (NoMethodError), and the next
  # line referenced an undefined local `category_id`.
  def choose_doctor_from_category
    chosen_category_id = gets.chomp.to_i
    User.where(category_id: chosen_category_id).each(&:fullname)
  end
end
| 23.352941 | 75 | 0.725441 |
ff97c5ebd9e57a1609076eaa0c9b01ea48304be6 | 1,482 | # -*- encoding: utf-8 -*-
# Auto-generated gemspec for yajl-ruby (Ruby C bindings to the YAJL
# stream-based JSON parser). The respond_to?/specification_version
# branching mirrors the format emitted by `gem build` for compatibility
# with old RubyGems versions — edit with care.
Gem::Specification.new do |s|
  s.name = "yajl-ruby"
  s.version = "1.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Brian Lopez", "Lloyd Hilaiel"]
  s.date = "2011-11-09"
  s.email = "[email protected]"
  s.extensions = ["ext/yajl/extconf.rb"]
  s.files = ["ext/yajl/extconf.rb"]
  s.homepage = "http://github.com/brianmario/yajl-ruby"
  s.require_paths = ["lib"]
  s.required_ruby_version = Gem::Requirement.new(">= 1.8.6")
  s.rubygems_version = "2.0.14"
  s.summary = "Ruby C bindings to the excellent Yajl JSON stream-based parser library."

  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rake-compiler>, [">= 0.7.5"])
      s.add_development_dependency(%q<rspec>, [">= 2.0.0"])
      s.add_development_dependency(%q<activesupport>, [">= 0"])
      s.add_development_dependency(%q<json>, [">= 0"])
    else
      s.add_dependency(%q<rake-compiler>, [">= 0.7.5"])
      s.add_dependency(%q<rspec>, [">= 2.0.0"])
      s.add_dependency(%q<activesupport>, [">= 0"])
      s.add_dependency(%q<json>, [">= 0"])
    end
  else
    s.add_dependency(%q<rake-compiler>, [">= 0.7.5"])
    s.add_dependency(%q<rspec>, [">= 2.0.0"])
    s.add_dependency(%q<activesupport>, [">= 0"])
    s.add_dependency(%q<json>, [">= 0"])
  end
end
9151bf49d3566b5b77c8fca53396f2ee035d4c2c | 1,934 | require 'spec_helper'
# Specs for Gmailish::Account: construction, the .process convenience
# wrapper, #process (IMAP connect + Actions dispatch) and #messages
# (UNSEEN uid search mapped to Message objects).
# NOTE(review): uses RSpec 2-era syntax (`should_receive`, `stub`,
# `any_instance`) — would need the rspec-mocks `should` syntax enabled.
module Gmailish
  describe Account do
    let(:instance) { Gmailish::Account.new(username, password) }
    let(:username) { 'venkmanapp' }
    let(:password) { 'ABC123456' }
    let(:account) {
      instance_double(
        'Net::IMAP'
      )
    }
    describe '#initialize' do
      it 'take arguments' do
        instance
      end
    end
    describe '.process' do
      subject { described_class.process(username, password) }
      it 'calls off to process' do
        described_class.any_instance.should_receive(:process)
        subject
      end
    end
    describe '#process' do
      subject { instance.process }
      let(:actions) {
        instance_double(
          'Gmailish::Actions'
        )
      }
      let(:messages) { lambda{} }
      let(:address) { 'imap.gmail.com' }
      let(:port) { 993 }
      before { instance.stub(:messages => messages) }
      it 'calls off to actions and return self' do
        Net::IMAP.should_receive(:new).
          with(address, port, true, nil, false).
          and_return(account)
        Actions.should_receive(:new).
          with(account, username, password).
          and_return(actions)
        actions.should_receive(:process)
        expect(subject).to be_an_instance_of(Gmailish::Account)
      end
    end
    describe '#messages' do
      subject { instance.messages }
      let(:label) { 'UNSEEN' }
      let(:uid) { 37 }
      let(:uids) { [ uid ] }
      let(:message) {
        instance_double(
          'Gmailish::Message'
        )
      }
      before { instance.stub(:account => account) }
      it 'returns array of message objects' do
        account.should_receive(:uid_search).
          with([label]).
          and_return(uids)
        Message.should_receive(:process).
          with(account, uid).
          and_return(message)
        expect(subject).to eq([message])
      end
    end
  end
end
| 21.488889 | 64 | 0.569286 |
b94a775958cbb05214f1fb42a1c5baba0afa538f | 1,531 | cask "tunnelblick-beta" do
version "3.8.6beta03,5700"
sha256 "de0f6d24cbc45650c1789f6963f7b454099e6cd9a00ec067178977614415d256"
url "https://github.com/Tunnelblick/Tunnelblick/releases/download/v#{version.before_comma}/Tunnelblick_#{version.before_comma}_build_#{version.after_comma}.dmg",
verified: "github.com/Tunnelblick/Tunnelblick/"
name "Tunnelblick"
desc "Free and open source graphic user interface for OpenVPN"
homepage "https://www.tunnelblick.net/"
livecheck do
url "https://github.com/Tunnelblick/Tunnelblick/releases"
strategy :page_match do |page|
match = page.match(%r{href=.*?/Tunnelblick_(\d+(?:\.\d+)*beta(?:\d+))_build_(\d+)\.dmg}i)
"#{match[1]},#{match[2]}"
end
end
auto_updates true
app "Tunnelblick.app"
uninstall_preflight do
set_ownership "#{appdir}/Tunnelblick.app"
end
uninstall launchctl: [
"net.tunnelblick.tunnelblick.LaunchAtLogin",
"net.tunnelblick.tunnelblick.tunnelblickd",
],
quit: "net.tunnelblick.tunnelblick"
zap trash: [
"/Library/Application Support/Tunnelblick",
"~/Library/Application Support/Tunnelblick",
"~/Library/Caches/com.apple.helpd/SDMHelpData/Other/English/HelpSDMIndexFile/net.tunnelblick.tunnelblick.help*",
"~/Library/Caches/net.tunnelblick.tunnelblick",
"~/Library/Preferences/net.tunnelblick.tunnelblick.plist",
]
caveats <<~EOS
For security reasons, #{token} must be installed to /Applications,
and will request to be moved at launch.
EOS
end
| 33.282609 | 163 | 0.719138 |
f8c037a48ea58edcbdde45ff1603e23c6547f0b0 | 1,330 | class Yj < Formula
desc "CLI to convert between YAML, TOML, JSON and HCL"
homepage "https://github.com/sclevine/yj"
url "https://github.com/sclevine/yj/archive/v5.0.0.tar.gz"
sha256 "df9a4f5b6d067842ea3da68ff92c374b98560dce1086337d39963a1346120574"
license "Apache-2.0"
head "https://github.com/sclevine/yj.git"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_big_sur: "9093ad6c4366b3d6cd6d37b1300f1e80fbd30b051e3d934a498db64c46bed6d6"
sha256 cellar: :any_skip_relocation, big_sur: "6c1c51234dc8c8590af22184be13472c9939426c98db9c9bcd58fade1cbb1840"
sha256 cellar: :any_skip_relocation, catalina: "918450aaf162067fe6fa7979518a7fc998853a4ab215c01f2c69e756739fb710"
sha256 cellar: :any_skip_relocation, mojave: "918450aaf162067fe6fa7979518a7fc998853a4ab215c01f2c69e756739fb710"
sha256 cellar: :any_skip_relocation, high_sierra: "918450aaf162067fe6fa7979518a7fc998853a4ab215c01f2c69e756739fb710"
sha256 cellar: :any_skip_relocation, x86_64_linux: "78961aa6b8d2b179ab6d436baad5d862fdbeb2604dfbe366085c9e42ebe3516e" # linuxbrew-core
end
depends_on "go" => :build
def install
system "go", "build", "-ldflags", "-X main.Version=#{version}", *std_go_args
end
test do
assert_match '{"a":1}', shell_output("echo a=1|#{bin}/yj -t")
end
end
| 45.862069 | 139 | 0.778195 |
628ce3643b2bad8c7072e6e10eeea451edb03fa1 | 569 | $:.push File.expand_path("lib", __dir__)
require "omniauth/irma/version"
# Gem specification for the omniauth-irma OmniAuth strategy.
Gem::Specification.new do |spec|
  spec.name = "omniauth-irma"
  spec.version = Omniauth::Irma::VERSION
  spec.authors = ["Sietse Ringers"]
  spec.email = ["[email protected]"]
  spec.summary = %q{IRMA strategy for OmniAuth}
  spec.homepage = "https://irma.app/docs"
  spec.license = "MIT"
  spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
  # Ships the Rails-engine style directories plus top-level docs.
  spec.files = Dir["{app,config,db,lib}/**/*", "LICENSE", "Rakefile", "README.md"]
end
| 31.611111 | 82 | 0.627417 |
bf4bc99f048e9f78be0ea556c848fa483feb9b30 | 1,563 | module Builder
include Test::Unit::Assertions
def I(size)
self.eye(size)
end
def eye(size)
#pre
assert size.is_a? Integer
assert size >= 0
result = SMatrix.new(Dok.new(size, size))
result.each_diagonal_index do |i, j|
result[i, j] = 1
end
#post
assert result.identity?
result
end
def zero(rows, cols = rows)
#pre
assert rows.is_a? Integer
assert cols.is_a? Integer
assert rows >= 0
assert cols >= 0
result = SMatrix.new(Dok.new(rows, cols))
#post
assert result.zero?
result
end
def random(rows, cols = rows, non_zero_factor = 0.3, spread = 1000)
#pre
assert (rows.is_a? Integer), "rows not an integer"
assert (cols.is_a? Integer), "cols not an integer"
assert rows >= 0
assert cols >= 0
result = SMatrix.zero(rows, cols)
result.each_index do |i, j|
if rand <= non_zero_factor
result[i, j] = rand * spread
end
end
#post
result
end
def tridiagonal(upper, middle, lower)
#pre
assert upper.is_a? Array and middle.is_a? Array and lower.is_a? Array
assert (upper.length == middle.length - 1) and (upper.length == lower.length)
assert middle.length >= 3
result = SMatrix.zero(middle.length)
result.each_diagonal_index(1) do |i, j|
result[i, j] = upper.shift
end
result.each_diagonal_index(0) do |i, j|
result[i, j] = middle.shift
end
result.each_diagonal_index(-1) do |i, j|
result[i, j] = lower.shift
end
#post
assert result.tridiagonal?
result
end
end | 19.5375 | 80 | 0.632118 |
e8b0300f4f7fa451710d4a9ec6dca49b324132d0 | 121 | class AddExistToAmbulance < ActiveRecord::Migration
def change
add_column :ambulances, :exist, :integer
end
end
| 20.166667 | 51 | 0.760331 |
e9fff6f90c0143c3474b7c0572c26d4fbc052db8 | 2,294 | # server-based syntax
# ======================
# Defines a single server with a list of roles and multiple properties.
# You can define all roles on a single server, or split them:
# server 'example.com', user: 'deploy', roles: %w{app db web}, my_property: :my_value
# server 'example.com', user: 'deploy', roles: %w{app web}, other_property: :other_value
# server 'db.example.com', user: 'deploy', roles: %w{db}
# Single-host deploy: the EC2 instance (IP supplied via APP_SERVER_IP)
# plays all three roles.
server ENV['APP_SERVER_IP'], user: 'ec2-user', roles: [:web, :app, :db]
# NOTE(review): the key path is hard-coded to one developer's machine —
# consider reading it from an environment variable instead.
set :ssh_options, {
  keys: %w(/Users/d-tasaki/.ssh/nslides01.pem),
  forward_agent: false,
  auth_methods: %w(publickey password)
}
# role-based syntax
# ==================
# Defines a role with one or multiple servers. The primary server in each
# group is considered to be the first unless any hosts have the primary
# property set. Specify the username and a domain or IP for the server.
# Don't use `:all`, it's a meta role.
# role :app, %w{[email protected]}, my_property: :my_value
# role :web, %w{[email protected] [email protected]}, other_property: :other_value
# role :db, %w{[email protected]}
# Configuration
# =============
# You can set any configuration variable like in config/deploy.rb
# These variables are then only loaded and set in this stage.
# For available Capistrano configuration variables see the documentation page.
# http://capistranorb.com/documentation/getting-started/configuration/
# Feel free to add new variables to customise your setup.
# Custom SSH Options
# ==================
# You may pass any option but keep in mind that net/ssh understands a
# limited set of options, consult the Net::SSH documentation.
# http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start
#
# Global options
# --------------
# set :ssh_options, {
# keys: %w(/home/rlisowski/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(password)
# }
#
# The server-based syntax can be used to override options:
# ------------------------------------
# server 'example.com',
# user: 'user_name',
# roles: %w{web app},
# ssh_options: {
# user: 'user_name', # overrides user setting above
# keys: %w(/home/user_name/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(publickey password)
# # password: 'please use keys'
# }
| 33.735294 | 88 | 0.670445 |
abdb5337fe22ed4411e980296330024475820348 | 2,493 | require 'rails_helper'
# View specs for the meme-generator "new gend image" page: title/h1,
# caption inputs, negative-captcha field, private checkbox state, the
# text positioner data attribute, and the default-captions button's
# visibility depending on src-image edit permission.
describe 'gend_images/new.html.erb', type: :view do
  let(:caption1) { FactoryGirl.create(:caption) }
  let(:gend_image) { FactoryGirl.create(:gend_image, captions: [caption1]) }
  before do
    assign(:gend_image, gend_image)
    assign(:caption_defaults, [{}, {}])
    assign(:src_image_url_with_extension, 'src image url with extension')
    def view.current_user
      nil
    end
  end
  it 'renders' do
    render
  end
  it 'uses the src image name as an h1' do
    render
    expect(rendered).to have_xpath('//h1/span[text()="src image name"]')
  end
  it 'has the src image name in the title' do
    render
    expect(view.content_for(:title)).to eq 'src image name meme generator'
  end
  it 'has text inputs with the form control class' do
    render
    expect(rendered).to have_selector(
      '#gend_image_captions_attributes_0_text'
    ) do |s|
      expect(s.first['class']).to_not be_nil
      expect(s.first['class']).to include 'form-control'
    end
  end
  # Honeypot field for spam bots; hidden from real users.
  it 'has a hidden negative captcha field called email' do
    render
    expect(rendered).to have_selector('#gend_image_email', visible: false)
  end
  context 'when the gend image is not private' do
    let(:gend_image) { FactoryGirl.create(:gend_image, private: false) }
    it 'does not check the private checkbox' do
      expect(render).to_not have_selector(
        'input[checked=checked][type="checkbox"][name="gend_image[private]"]'
      )
    end
  end
  context 'when the gend image is private' do
    let(:gend_image) { FactoryGirl.create(:gend_image, private: true) }
    it 'checks the private checkbox' do
      expect(render).to have_selector(
        'input[checked=checked][type="checkbox"][name="gend_image[private]"]'
      )
    end
  end
  describe 'text positioner' do
    it 'sets the data-img-url to the src image url' do
      expect(render).to have_selector(
        'div.text-positioner[data-img-url="src image url with extension"]'
      )
    end
  end
  context 'when the user can edit the src image' do
    before { assign(:can_edit_src_image, true) }
    it 'shows the set default captions button' do
      expect(render).to have_text('Set current captions as default')
    end
  end
  context 'when the user cannot edit the src image' do
    before { assign(:can_edit_src_image, false) }
    it 'does not show the set default captions button' do
      expect(render).to_not have_text('Set current captions as default')
    end
  end
end
| 26.806452 | 77 | 0.678299 |
7aaad57f5cd9a0897c1a42488a399ad9be68064a | 2,457 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the CI `allow_failure` config entry: accepts booleans or a
# hash with integer exit_codes (scalar codes are normalized to arrays);
# rejects wrong types, string exit codes, and unknown keys.
RSpec.describe Gitlab::Ci::Config::Entry::AllowFailure do
  let(:entry) { described_class.new(config.deep_dup) }
  let(:expected_config) { config }
  describe 'validations' do
    context 'when entry config value is valid' do
      shared_examples 'valid entry' do
        describe '#value' do
          it 'returns key value' do
            expect(entry.value).to eq(expected_config)
          end
        end
        describe '#valid?' do
          it 'is valid' do
            expect(entry).to be_valid
          end
        end
      end
      context 'with boolean values' do
        it_behaves_like 'valid entry' do
          let(:config) { true }
        end
        it_behaves_like 'valid entry' do
          let(:config) { false }
        end
      end
      context 'with hash values' do
        # A scalar exit code is wrapped into a one-element array.
        it_behaves_like 'valid entry' do
          let(:config) { { exit_codes: 137 } }
          let(:expected_config) { { exit_codes: [137] } }
        end
        it_behaves_like 'valid entry' do
          let(:config) { { exit_codes: [42, 137] } }
        end
      end
    end
    context 'when entry value is not valid' do
      shared_examples 'invalid entry' do
        describe '#valid?' do
          it { expect(entry).not_to be_valid }
          it { expect(entry.errors).to include(error_message) }
        end
      end
      context 'when it has a wrong type' do
        let(:config) { [1] }
        let(:error_message) do
          'allow failure config should be a hash or a boolean value'
        end
        it_behaves_like 'invalid entry'
      end
      context 'with string exit codes' do
        let(:config) { { exit_codes: 'string' } }
        let(:error_message) do
          'allow failure exit codes should be an array of integers or an integer'
        end
        it_behaves_like 'invalid entry'
      end
      context 'with array of strings as exit codes' do
        let(:config) { { exit_codes: ['string 1', 'string 2'] } }
        let(:error_message) do
          'allow failure exit codes should be an array of integers or an integer'
        end
        it_behaves_like 'invalid entry'
      end
      context 'when it has an extra keys' do
        let(:config) { { extra: true } }
        let(:error_message) do
          'allow failure config contains unknown keys: extra'
        end
        it_behaves_like 'invalid entry'
      end
    end
  end
end
| 26.419355 | 81 | 0.579976 |
aca34f43d56f3a2f5762ce014fec0417943205db | 22,701 | # -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'CloudServer'
require 'OpenNebulaJSON'
include OpenNebulaJSON
require 'OpenNebulaVNC'
require 'opennebula_guac'
require 'opennebula_vmrc'
require 'OpenNebulaAddons'
require 'OpenNebulaJSON/JSONUtils'
#include JSONUtils
require 'net/http'
class SunstoneServer < CloudServer
# Secs to sleep between checks to see if image upload to repo is finished
IMAGE_POLL_SLEEP_TIME = 5
def initialize(client, config, logger)
super(config, logger)
@client = client
end
############################################################################
#
############################################################################
def get_pool(kind,user_flag, client=nil)
client = @client if !client
user_flag = Integer(user_flag)
pool = case kind
when "group" then GroupPoolJSON.new(client)
when "cluster" then ClusterPoolJSON.new(client)
when "host" then HostPoolJSON.new(client)
when "image" then ImagePoolJSON.new(client, user_flag)
when "vmtemplate" then TemplatePoolJSON.new(client, user_flag)
when "vm_group" then VMGroupPoolJSON.new(client, user_flag)
when "vm" then VirtualMachinePoolJSON.new(client, user_flag)
when "vnet" then VirtualNetworkPoolJSON.new(client, user_flag)
when "vntemplate" then VNTemplatePoolJSON.new(client, user_flag)
when "user" then UserPoolJSON.new(client)
when "acl" then AclPoolJSON.new(client)
when "datastore" then DatastorePoolJSON.new(client)
when "zone" then ZonePoolJSON.new(client)
when "security_group" then SecurityGroupPoolJSON.new(client, user_flag)
when "vdc" then VdcPoolJSON.new(client)
when "vrouter" then VirtualRouterPoolJSON.new(client, user_flag)
when "marketplace" then MarketPlacePoolJSON.new(client)
when "marketplaceapp" then MarketPlaceAppPoolJSON.new(client, user_flag)
else
error = Error.new("Error: #{kind} resource not supported")
return [404, error.to_json]
end
if kind == "vm" && $conf[:get_extended_vm_info]
rc = pool.get_hash_extended
else
rc = pool.get_hash
end
if OpenNebula.is_error?(rc)
return [500, rc.to_json]
else
return [200, rc.to_json]
end
end
############################################################################
#
############################################################################
def get_resource(kind, id, extended=false)
resource = retrieve_resource(kind, id, extended)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
else
return [200, resource.to_json]
end
end
############################################################################
#
############################################################################
def get_template(kind,id)
resource = retrieve_resource(kind,id)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
else
template_str = resource.template_str(true)
return [200, {:template => template_str}.to_json]
end
end
############################################################################
#
############################################################################
def create_resource(kind, template)
resource = case kind
when "group" then GroupJSON.new(Group.build_xml, @client)
when "cluster" then ClusterJSON.new(Group.build_xml, @client)
when "host" then HostJSON.new(Host.build_xml, @client)
when "image" then ImageJSON.new(Image.build_xml, @client)
when "vmtemplate" then TemplateJSON.new(Template.build_xml, @client)
when "vm_group" then VMGroupJSON.new(VMGroup.build_xml,@client)
when "vm" then VirtualMachineJSON.new(VirtualMachine.build_xml,@client)
when "vnet" then VirtualNetworkJSON.new(VirtualNetwork.build_xml, @client)
when "vntemplate" then VNTemplateJSON.new(VNTemplate.build_xml, @client)
when "user" then UserJSON.new(User.build_xml, @client)
when "acl" then AclJSON.new(Acl.build_xml, @client)
when "datastore" then DatastoreJSON.new(Datastore.build_xml, @client)
when "zone" then ZoneJSON.new(Zone.build_xml, @client)
when "security_group" then SecurityGroupJSON.new(SecurityGroup.build_xml, @client)
when "vdc" then VdcJSON.new(Vdc.build_xml, @client)
when "vrouter" then VirtualRouterJSON.new(VirtualRouter.build_xml, @client)
when "marketplace" then MarketPlaceJSON.new(MarketPlace.build_xml, @client)
when "marketplaceapp" then MarketPlaceAppJSON.new(MarketPlaceApp.build_xml, @client)
else
error = Error.new("Error: #{kind} resource not supported")
return [404, error.to_json]
end
rc = resource.create(template)
if OpenNebula.is_error?(rc)
return [500, rc.to_json]
else
rc = resource.info
if OpenNebula.is_error?(rc)
return [201, "{\"#{kind.upcase}\": {\"ID\": \"#{resource.id}\"}}"]
else
return [201, resource.to_json]
end
end
end
############################################################################
#
############################################################################
def upload(template, file_path)
image_hash = JSONUtils.parse_json(template, 'image')
if OpenNebula.is_error?(image_hash)
return [500, image_hash.to_json]
end
image_hash['PATH'] = file_path
ds_id = JSONUtils.parse_json(template, 'ds_id')
if OpenNebula.is_error?(ds_id)
return [500, ds_id.to_json]
end
new_template = {
:image => image_hash,
:ds_id => ds_id,
}.to_json
image = ImageJSON.new(Image.build_xml, @client)
rc = image.create(new_template)
if OpenNebula.is_error?(rc)
return [500, rc.to_json]
end
rc = image.info
#wait until image is ready to return
while ( !OpenNebula.is_error?(rc) &&
(image.state_str == 'LOCKED' ||
image.state_str == 'LOCKED_USED' ||
image.state_str == 'LOCKED_USED_PERS') ) do
sleep IMAGE_POLL_SLEEP_TIME
rc = image.info
end
if OpenNebula.is_error?(rc)
return [404, rc.to_json]
end
return [201, image.to_json]
end
############################################################################
#
############################################################################
def delete_resource(kind, id)
resource = retrieve_resource(kind, id)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
end
rc = resource.delete
if OpenNebula.is_error?(rc)
return [500, rc.to_json]
else
return [204, resource.to_json]
end
end
############################################################################
#
############################################################################
def perform_action(kind, id, action_json)
resource = retrieve_resource(kind, id)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
end
rc = resource.perform_action(action_json)
if OpenNebula.is_error?(rc)
return [500, rc.to_json]
else
if rc.nil?
return [204, resource.to_json]
else
return [201, rc.to_json]
end
end
end
############################################################################
#
############################################################################
def download_marketplaceapp(id)
# Get MarketPlaceApp
marketapp = MarketPlaceApp.new(MarketPlaceApp.build_xml(id.to_i), @client)
rc = marketapp.info
return [500, rc.message] if OpenNebula.is_error?(rc)
# Get Datastore
market_id = marketapp['MARKETPLACE_ID']
market = MarketPlace.new(MarketPlace.build_xml(market_id), @client)
rc = market.info
return [500, rc.message] if OpenNebula.is_error?(rc)
# Build Driver message
drv_message = "<DS_DRIVER_ACTION_DATA>" <<
"#{market.to_xml}" <<
"</DS_DRIVER_ACTION_DATA>"
drv_message_64 = Base64::strict_encode64(drv_message)
source = marketapp['SOURCE']
download_cmd = "DRV_ACTION=#{drv_message_64}; "<<
"#{VAR_LOCATION}/remotes/datastore/downloader.sh " <<
"#{source} -"
filename = "one-marketplaceapp-#{id}"
return [download_cmd, filename]
end
############################################################################
# Unused
############################################################################
def get_vm_log(id)
resource = retrieve_resource("vm", id)
if OpenNebula.is_error?(resource)
return [404, nil]
else
if !ONE_LOCATION
vm_log_file = LOG_LOCATION + "/#{id}.log"
else
vm_log_file = LOG_LOCATION + "/vms/#{id}/vm.log"
end
begin
log = File.read(vm_log_file)
rescue Exception => e
msg = "Log for VM #{id} not available"
return [200, {:vm_log => msg}.to_json]
end
return [200, {:vm_log => log}.to_json]
end
end
########################################################################
# VNC
########################################################################
def startvnc(id, vnc)
resource = retrieve_resource("vm", id)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
end
return vnc.proxy(resource)
end
########################################################################
# Guacamole
########################################################################
def startguac(id, type_connection, guac)
resource = retrieve_resource("vm", id)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
end
client = @client
vm_pool = VirtualMachinePool.new(client, -1)
user_pool = UserPool.new(client)
rc = user_pool.info
if OpenNebula.is_error?(rc)
puts rc.message
exit -1
end
rc = vm_pool.info
if OpenNebula.is_error?(rc)
puts rc.message
exit -1
end
return guac.proxy(resource, type_connection)
end
########################################################################
# VMRC
########################################################################
def startvmrc(id, vmrc)
resource = retrieve_resource("vm", id)
if OpenNebula.is_error?(resource)
return [404, resource.to_json]
end
client = @client
vm_pool = VirtualMachinePool.new(client, -1)
user_pool = UserPool.new(client)
rc = user_pool.info
if OpenNebula.is_error?(rc)
puts rc.message
exit -1
end
rc = vm_pool.info
if OpenNebula.is_error?(rc)
puts rc.message
exit -1
end
return vmrc.proxy(resource, client)
end
########################################################################
# Accounting & Monitoring
########################################################################
def get_pool_monitoring(resource, meters)
#pool_element
pool = case resource
when "vm", "VM"
VirtualMachinePool.new(@client)
when "host", "HOST"
HostPool.new(@client)
else
error = Error.new("Monitoring not supported for #{resource}")
return [200, error.to_json]
end
meters_a = meters.split(',')
rc = pool.monitoring(meters_a)
if OpenNebula.is_error?(rc)
error = Error.new(rc.message)
return [500, error.to_json]
end
rc[:resource] = resource
return [200, rc.to_json]
end
def get_resource_monitoring(id, resource, meters)
pool_element = case resource
when "vm", "VM"
VirtualMachine.new_with_id(id, @client)
when "host", "HOST"
Host.new_with_id(id, @client)
else
error = Error.new("Monitoring not supported for #{resource}")
return [403, error.to_json]
end
meters_a = meters.split(',')
rc = pool_element.monitoring(meters_a)
if OpenNebula.is_error?(rc)
error = Error.new(rc.message)
return [500, error.to_json]
end
meters_h = Hash.new
meters_h[:resource] = resource
meters_h[:id] = id
meters_h[:monitoring] = rc
return [200, meters_h.to_json]
end
# returns a { monitoring : meter1 : [[ts1, agg_value],[ts2, agg_value]...]
# meter2 : [[ts1, agg_value],[ts2, agg_value]...]}
# with this information we can paint historical graphs of usage
def get_user_accounting(options)
uid = options[:id].to_i
tstart = options[:start].to_i
tend = options[:end].to_i
interval = options[:interval].to_i
meters = options[:monitor_resources]
gid = options[:gid].to_i
acct_options = {:start_time => tstart,
:end_time => tend}
# If we want acct per group, we ask for all VMs visible to user
# and then filter by group.
if gid
uid = Pool::INFO_ALL
acct_options[:group] = gid
end
# Init results and request accounting
result = {}
meters_a = meters.split(',')
meters_a.each do | meter |
result[meter] = []
end
pool = VirtualMachinePool.new(@client)
acct_xml = pool.accounting_xml(uid, acct_options)
if OpenNebula.is_error?(acct_xml)
error = Error.new(acct_xml.message)
return [500, error.to_json]
end
xml = XMLElement.new
xml.initialize_xml(acct_xml, 'HISTORY_RECORDS')
# We aggregate the accounting values for each interval withing
# the given timeframe
while tstart < tend
tstep = tstart + interval
count = Hash.new
# We count machines which have started before the end of
# this interval AND have not finished yet OR machines which
# have started before the end of this interval AND
# have finished anytime after the start of this interval
xml.each("HISTORY[STIME<=#{tstep} and ETIME=0 or STIME<=#{tstep} and ETIME>=#{tstart}]") do |hr|
meters_a.each do | meter |
count[meter] ||= 0
count[meter] += hr["VM/#{meter}"].to_i if hr["VM/#{meter}"]
end
end
# We have aggregated values for this interval
# Then we just add them to the results along with a timestamp
count.each do | mname, mcount |
result[mname] << [tstart, mcount]
end
tstart = tstep
end
return [200, {:monitoring => result}.to_json]
end
def get_vm_accounting(options)
pool = VirtualMachinePool.new(@client)
filter_flag = options[:userfilter] ? options[:userfilter].to_i : VirtualMachinePool::INFO_ALL
start_time = options[:start_time] ? options[:start_time].to_i : -1
end_time = options[:end_time] ? options[:end_time].to_i : -1
acct_options = {
:start_time => start_time,
:end_time => end_time,
:group => options[:group]
}
rc = pool.accounting(filter_flag, acct_options)
if OpenNebula.is_error?(rc)
error = Error.new(rc.message)
return [500, error.to_json]
end
return [200, rc.to_json]
end
def get_vm_showback(options)
pool = VirtualMachinePool.new(@client)
filter_flag = options[:userfilter] ? options[:userfilter].to_i : VirtualMachinePool::INFO_ALL
start_month = options[:start_month] ? options[:start_month].to_i : -1
start_year = options[:start_year] ? options[:start_year].to_i : -1
end_month = options[:end_month] ? options[:end_month].to_i : -1
end_year = options[:end_year] ? options[:end_year].to_i : -1
acct_options = {
:start_month => start_month,
:start_year => start_year,
:end_month => end_month,
:end_year => end_year,
:group => options[:group]
}
rc = pool.showback(filter_flag, acct_options)
if OpenNebula.is_error?(rc)
error = Error.new(rc.message)
return [500, error.to_json]
end
return [200, rc.to_json]
end
def get_docker_tags(app_id)
# Get MarketPlaceApp
marketapp = retrieve_resource("marketplaceapp", app_id)
if OpenNebula.is_error?(marketapp)
return [404, marketapp.to_json]
end
# Get MarketPlace
market_id = marketapp["MARKETPLACE_ID"]
market = retrieve_resource("marketplace", market_id)
if OpenNebula.is_error?(market)
return [404, market.to_json]
end
# Check market_mad
# TODO Change message
return [400, "Invalid MARKET_MAD"] if market["MARKET_MAD"] != "dockerhub"
# Get dockerhub tags
url = "https://hub.docker.com/v2/repositories/library/#{marketapp["NAME"]}/tags/?page_size=100"
tags_names = []
loop do
uri = URI(url)
req = Net::HTTP::Get.new(uri.request_uri)
req['User-Agent'] = "OpenNebula"
opts = { :use_ssl => true }
rc = Net::HTTP.start(uri.hostname, uri.port, nil, nil, opts) do |http|
http.request(req)
end
return [rc.code.to_i, rc.msg] unless rc.is_a? Net::HTTPSuccess
body = JSON.parse(rc.body)
(tags_names << body["results"].map { |values| {
name: values["name"],
last_updated: values["last_updated"]
} }).flatten!
break if body["next"].nil? || body["next"].empty?
url = body["next"]
end
return [200, tags_names.to_json]
end
private
############################################################################
#
############################################################################
def retrieve_resource(kind, id, extended=false)
resource = case kind
when "group" then GroupJSON.new_with_id(id, @client)
when "cluster" then ClusterJSON.new_with_id(id, @client)
when "host" then HostJSON.new_with_id(id, @client)
when "image" then ImageJSON.new_with_id(id, @client)
when "vmtemplate" then TemplateJSON.new_with_id(id, @client)
when "vm_group" then VMGroupJSON.new_with_id(id, @client)
when "vm" then VirtualMachineJSON.new_with_id(id, @client)
when "vnet" then VirtualNetworkJSON.new_with_id(id, @client)
when "vntemplate" then VNTemplateJSON.new_with_id(id, @client)
when "user" then UserJSON.new_with_id(id, @client)
when "acl" then AclJSON.new_with_id(id, @client)
when "datastore" then DatastoreJSON.new_with_id(id, @client)
when "zone" then ZoneJSON.new_with_id(id, @client)
when "security_group" then SecurityGroupJSON.new_with_id(id, @client)
when "vdc" then VdcJSON.new_with_id(id, @client)
when "vrouter" then VirtualRouterJSON.new_with_id(id, @client)
when "marketplace" then MarketPlaceJSON.new_with_id(id, @client)
when "marketplaceapp" then MarketPlaceAppJSON.new_with_id(id, @client)
else
error = Error.new("Error: #{kind} resource not supported")
return error
end
rc = extended ? resource.info(true) : resource.info
if OpenNebula.is_error?(rc)
return rc
else
return resource
end
end
end
| 36.033333 | 108 | 0.499978 |
ffd2efe6938b692f2c86415b52b204b7eba2e6e9 | 132 | module RankmiExcelReview
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
end
end
| 22 | 54 | 0.818182 |
7956c84e987b924efe3bc871787a68aebb568f35 | 2,458 | class CLI
def welcome
puts "Welcome to 4x calculation tool"
API.currency_types
self.menu
end
def menu
puts "Would you like to use the tool, look through different types of currency or stop the program?"
puts "Type 1 to use the tool, 2 to stop program."
loop {
new_input = gets.strip
if new_input == "1"
x = "1"
while x == "1"
puts "Enter the date in the YYYY-MM-DD format"
three_digit_codes
date_choice
puts "Please enter the index number in reference to the three digit currency code that you wish to use."
sleep 2
display_list_of_currencies
currency_choice
display_list_of_rates
puts "Would you like to continue? Press 1 for yes and anything else to leave the program."
x = gets.strip
end
puts "Have a nice day!"
break
elsif new_input == "2"
puts "Have a nice day!"
break
else
puts "That was invalid let me ask ya again. Remember to press 1 to use the tool or 2 to exit the program."
sleep 1
end
}
end
def date_choice
new_regex = /\A[0-9]{4}-[0-1][0-9]-[0-3][0-9]\z/
date_input = gets.strip
unless (date_input =~ new_regex) == 0
puts "Please attempt the date again. Remember (YYYY-MM-DD) format."
date_input = gets.strip
end
@date = date_input
end
def currency_choice
rate_choice = gets.strip.to_i - 1
max_number = three_digit_codes.length - 1
until rate_choice.between?(0, max_number)
puts "Sorry, that was not an option. Please try again."
rate_choice = gets.strip.to_i - 1
end
@rate = three_digit_codes[rate_choice]
end
def three_digit_codes
FourX.all.first.xchange.keys
end
def display_list_of_currencies
f = FourX.all.first.xchange.keys.each_with_index {|val, index| puts "#{index + 1}: #{val}"}
f
end
def display_list_of_rates
@current_instance = API.currency_types(base_currency:@rate, base_date:@date)
@current_instance.xchange.each {|key, value| puts "#{key}: #{value}"}
end
end
| 32.773333 | 121 | 0.552075 |
01c07c107f1ed67a1e19e369c2bb5fd56001c5fc | 177 | class Cmsino::Post < Cmsino::Content
validates_presence_of :umbrella
validates_presence_of :name
before_save :set_date
def set_date
self.date = Time.now
end
end
| 16.090909 | 36 | 0.751412 |
edd0fee4280de10bb4e469c991da034677777834 | 786 | # frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!
require 'ansible/ruby/modules/base'
module Ansible
module Ruby
module Modules
# This module can be used to manage (create, delete) VMware vSphere Datacenters.
class Vmware_datacenter < Base
# @return [String] The name of the datacenter the cluster will be created in.
attribute :datacenter_name
validates :datacenter_name, presence: true, type: String
# @return [:present, :absent, nil] If the datacenter should be present or absent.
attribute :state
validates :state, expression_inclusion: {:in=>[:present, :absent], :message=>"%{value} needs to be :present, :absent"}, allow_nil: true
end
end
end
end
| 35.727273 | 143 | 0.69084 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.