hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
21c1f6343c8d3dc23d7ebd118d152ba028933c58 | 7,254 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Monitor::Mgmt::V2015_05_01
module Models
#
# An Application Insights workbook definition.
#
class Workbook < WorkbookResource
include MsRestAzure
# @return [SharedTypeKind] The kind of workbook. Choices are user and
# shared. Possible values include: 'user', 'shared'
attr_accessor :kind
# @return [String] The user-defined name of the workbook.
attr_accessor :workbook_name
# @return [String] Configuration of this particular workbook.
# Configuration data is a string containing valid JSON
attr_accessor :serialized_data
# @return [String] This instance's version of the data model. This can
# change as new features are added to the workbook.
attr_accessor :version
# @return [String] Internally assigned unique id of the workbook
# definition.
attr_accessor :workbook_id
# @return [SharedTypeKind] Enum indicating if this workbook definition is
# owned by a specific user or is shared between all users with access to
# the Application Insights component. Possible values include: 'user',
# 'shared'. Default value: 'shared' .
attr_accessor :shared_type_kind
# @return [String] Date and time in UTC of the last modification that was
# made to this workbook definition.
attr_accessor :time_modified
# @return [String] Workbook category, as defined by the user at creation
# time.
attr_accessor :category
# @return [Array<String>] A list of 0 or more tags that are associated
# with this workbook definition
attr_accessor :workbook_tags
# @return [String] Unique user id of the specific user that owns this
# workbook.
attr_accessor :user_id
# @return [String] Optional resourceId for a source resource.
attr_accessor :source_resource_id
#
# Mapper for Workbook class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Workbook',
type: {
name: 'Composite',
class_name: 'Workbook',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: false,
serialized_name: 'location',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
kind: {
client_side_validation: true,
required: false,
serialized_name: 'kind',
type: {
name: 'String'
}
},
workbook_name: {
client_side_validation: true,
required: true,
serialized_name: 'properties.name',
type: {
name: 'String'
}
},
serialized_data: {
client_side_validation: true,
required: true,
serialized_name: 'properties.serializedData',
type: {
name: 'String'
}
},
version: {
client_side_validation: true,
required: false,
serialized_name: 'properties.version',
type: {
name: 'String'
}
},
workbook_id: {
client_side_validation: true,
required: true,
serialized_name: 'properties.workbookId',
type: {
name: 'String'
}
},
shared_type_kind: {
client_side_validation: true,
required: true,
serialized_name: 'properties.kind',
default_value: 'shared',
type: {
name: 'String'
}
},
time_modified: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.timeModified',
type: {
name: 'String'
}
},
category: {
client_side_validation: true,
required: true,
serialized_name: 'properties.category',
type: {
name: 'String'
}
},
workbook_tags: {
client_side_validation: true,
required: false,
serialized_name: 'properties.tags',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
user_id: {
client_side_validation: true,
required: true,
serialized_name: 'properties.userId',
type: {
name: 'String'
}
},
source_resource_id: {
client_side_validation: true,
required: false,
serialized_name: 'properties.sourceResourceId',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 31.676856 | 79 | 0.462917 |
187d91b6bd1251aeb1b5f4a23606ae1a062fa94f | 1,105 | # Helper to determine the version of Puppet
require 'fileutils'
require 'open3'
require 'shellwords'
module OctocatalogDiff
module Util
# This is a utility class to determine the version of Puppet.
class PuppetVersion
# Determine the version of Puppet.
# @param puppet [String] Path to Puppet binary
# @return [String] Puppet version number
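# @example Determining the version (binary path is illustrative)
#   OctocatalogDiff::Util::PuppetVersion.puppet_version('/usr/bin/puppet') # => e.g. "5.5.22"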
def self.puppet_version(puppet)
raise ArgumentError, 'Puppet binary was not supplied' if puppet.nil?
raise Errno::ENOENT, "Puppet binary #{puppet} doesn't exist" unless File.file?(puppet)
cmdline = [Shellwords.escape(puppet), '--version'].join(' ')
# This is the environment provided to the puppet command.
env = {
'HOME' => ENV['HOME'],
'PATH' => ENV['PATH'],
'PWD' => File.dirname(puppet)
}
out, err, _status = Open3.capture3(env, cmdline, unsetenv_others: true, chdir: env['PWD'])
return Regexp.last_match(1) if out =~ /^([\d\.]+)\s*$/
raise "Unable to determine Puppet version: #{out} #{err}"
end
end
end
end
| 34.53125 | 98 | 0.630769 |
e85e0f609e3941b906d573a70b3b3ccfd3612341 | 3,577 | #!/usr/bin/ruby
require_relative 'runscripts'
include RunScripts
module RunScripts
def rerun
gem5home = Dir.new(Dir.pwd)
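# Names of previously generated experiment scripts, expected under
# scriptgen/; each one is re-submitted to the batch scheduler via qsub below.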
%w[
run_2tc_tp_4cpus_hhn
run_2tc_tp_4cpus_hhnr
run_2tc_tp_4cpus_hmd
run_2tc_tp_4cpus_hmdr
run_2tc_tp_4cpus_lld
run_2tc_tp_4cpus_mmi
run_2tc_tp_6cpus_hhd
run_2tc_tp_6cpus_hhdr
run_2tc_tp_6cpus_hhn
run_2tc_tp_6cpus_hhnr
run_2tc_tp_6cpus_hmd
run_2tc_tp_6cpus_hmdr
run_2tc_tp_6cpus_mmi
run_2tc_tp_8cpus_hhd
run_2tc_tp_8cpus_hhdr
run_2tc_tp_8cpus_hhn
run_l2miss_max_nocwf_tp_2cpus_hhn
run_l2miss_max_nocwf_tp_2cpus_hhnr
run_l2miss_max_nocwf_tp_2cpus_hmd
run_l2miss_max_nocwf_tp_2cpus_hmdr
run_l2miss_max_nocwf_tp_2cpus_mmi
run_l2miss_max_tp_2cpus_hhn
run_l2miss_max_tp_2cpus_hhnr
run_l2miss_max_tp_2cpus_hmd
run_l2miss_max_tp_2cpus_hmdr
run_l2miss_max_tp_2cpus_mmi
run_l3hit_max_nocwf_tp_2cpus_hhi
run_l3hit_max_nocwf_tp_2cpus_hhn
run_l3hit_max_nocwf_tp_2cpus_hhnr
run_l3hit_max_nocwf_tp_2cpus_hlir
run_l3hit_max_nocwf_tp_2cpus_hmd
run_l3hit_max_nocwf_tp_2cpus_hmdr
run_l3hit_max_nocwf_tp_2cpus_hmir
run_l3hit_max_nocwf_tp_2cpus_lld
run_l3hit_max_nocwf_tp_2cpus_lli
run_l3hit_max_nocwf_tp_2cpus_mmd
run_l3hit_max_nocwf_tp_2cpus_mmi
run_l3hit_max_tp_2cpus_hhn
run_l3hit_max_tp_2cpus_hhnr
run_l3hit_max_tp_2cpus_hmd
run_l3hit_max_tp_2cpus_hmdr
run_l3hit_max_tp_2cpus_lld
run_l3hit_max_tp_2cpus_mmi
run_l3hit_opt_nocwf_tp_2cpus_hhn
run_l3hit_opt_nocwf_tp_2cpus_hhnr
run_l3hit_opt_nocwf_tp_2cpus_hmd
run_l3hit_opt_nocwf_tp_2cpus_hmdr
run_l3hit_opt_nocwf_tp_2cpus_lld
run_l3hit_opt_nocwf_tp_2cpus_mmi
run_l3hit_opt_tp_2cpus_hhn
run_l3hit_opt_tp_2cpus_hhnr
run_l3hit_opt_tp_2cpus_hmd
run_l3hit_opt_tp_2cpus_hmdr
run_l3hit_opt_tp_2cpus_lld
run_l3hit_opt_tp_2cpus_mmi
run_l3miss_opt_nocwf_tp_2cpus_hhn
run_l3miss_opt_tp_2cpus_hhn
run_l3miss_opt_tp_2cpus_hldr
run_l3miss_opt_tp_2cpus_hlir
run_l3miss_opt_tp_2cpus_hmd
run_none_6cpus_hmir
run_none_6cpus_llir
run_only_mc_tp_2cpus_hmd
run_only_rrbus_none_2cpus_hhn
run_only_rrbus_none_2cpus_hhnr
run_only_rrbus_none_2cpus_hld
run_only_rrbus_none_2cpus_hmd
run_only_rrbus_none_2cpus_hmdr
run_only_rrbus_none_2cpus_lld
run_only_rrbus_none_2cpus_mmi
run_only_waypart_none_2cpus_hhn
run_only_waypart_none_2cpus_hhnr
run_only_waypart_none_2cpus_hld
run_only_waypart_none_2cpus_hmd
run_only_waypart_none_2cpus_hmdr
run_only_waypart_none_2cpus_lld
run_only_waypart_none_2cpus_mmi
run_tp_2cpus_hhn
run_tp_2cpus_hhnr
run_tp_2cpus_hld
run_tp_2cpus_hmd
run_tp_2cpus_hmdr
run_tp_2cpus_lld
run_tp_2cpus_mmi
run_tp_4cpus_hhn
run_tp_4cpus_hhnr
run_tp_4cpus_hmd
run_tp_4cpus_hmdr
run_tp_4cpus_mmi
run_tp_6cpus_hhn
run_tp_6cpus_hhnr
run_tp_6cpus_hmd
].each do |experiment|
File.open(Dir.pwd+"/scriptgen/"+experiment) {|file|
exp_abspath = File.expand_path file
system "qsub -wd #{gem5home.path} -e stderr/ -o stdout/ #{exp_abspath}"
}
end
end
end
| 32.816514 | 83 | 0.729382 |
01efdb37d4acc190873a293e1b6333f095215bf4 | 1,818 | # encoding: UTF-8
control 'VCPG-70-000011' do
title 'VMware Postgres must be configured to use TLS.'
desc "The DoD standard for authentication is DoD-approved PKI certificates.
Authentication based on User ID and Password may be used only when it is not
possible to employ a PKI certificate.
In such cases, passwords need to be protected at all times, and encryption
is the standard method for protecting passwords during transmission.
VMware Postgres is configured out of the box to require TLS connections
with remote clients. As an embedded database available only on localhost for
a standalone VCSA, TLS connections are used only in high availability
deployments for connections between a primary and a standby. This configuration
must be verified and maintained.
"
desc 'rationale', ''
desc 'check', "
At the command prompt, execute the following command:
# /opt/vmware/vpostgres/current/bin/psql -U postgres -c \"SHOW ssl;\"|sed
-n 3p|sed -e 's/^[ ]*//'
Expected result:
on
If the output does not match the expected result, this is a finding.
"
desc 'fix', "
At the command prompt, execute the following commands:
# /opt/vmware/vpostgres/current/bin/psql -U postgres -c \"ALTER SYSTEM SET
ssl TO 'on';\"
# /opt/vmware/vpostgres/current/bin/psql -U postgres -c \"SELECT
pg_reload_conf();\"
"
impact 0.5
tag severity: 'medium'
tag gtitle: 'SRG-APP-000172-DB-000075'
tag gid: nil
tag rid: nil
tag stig_id: 'VCPG-70-000011'
tag fix_id: nil
tag cci: 'CCI-000197'
tag nist: ['IA-5 (1) (c)']
sql = postgres_session("#{input('postgres_user')}","#{input('postgres_pass')}","#{input('postgres_host')}")
sqlquery = "SHOW ssl;"
describe sql.query(sqlquery) do
its('output') {should cmp "#{input('pg_ssl')}" }
end
end
| 30.813559 | 109 | 0.707371 |
ff45da3f58028c56299a3e22fa75a1b4ed9198cb | 1,114 | class Ec2AmiTools < Formula
desc "Amazon EC2 AMI Tools (helps bundle Amazon Machine Images)"
homepage "https://aws.amazon.com/developertools/368"
url "https://ec2-downloads.s3.amazonaws.com/ec2-ami-tools-1.5.7.zip"
sha256 "5a45d9f393d2e144124d23d2312b3a8918c5a3f7463b48d55f8db3d56a3fb29f"
bottle :unneeded
depends_on :java
def install
env = Language::Java.java_home_env.merge(:EC2_AMITOOL_HOME => libexec)
rm Dir["bin/*.cmd"] # Remove Windows versions
libexec.install Dir["*"]
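# Wrap each installed script so it runs with JAVA_HOME and EC2_AMITOOL_HOME preset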
Pathname.glob("#{libexec}/bin/*") do |file|
next if file.directory?
basename = file.basename
next if basename.to_s == "service"
(bin/basename).write_env_script file, env
end
end
def caveats
<<~EOS
Before you can use these tools you must export some variables to your $SHELL.
export AWS_ACCESS_KEY="<Your AWS Access ID>"
export AWS_SECRET_KEY="<Your AWS Secret Key>"
export AWS_CREDENTIAL_FILE="<Path to the credentials file>"
EOS
end
test do
assert_match version.to_s, shell_output("#{bin}/ec2-ami-tools-version")
end
end
| 30.944444 | 83 | 0.701077 |
ac1b0a9da04182f91260cb691f7ba0591c824ba9 | 96 | # desc "Explaining what the task does"
# task :json_autocompleter do
# # Task goes here
# end
| 19.2 | 38 | 0.708333 |
b924924c34325efd7a4bd8c0f5d643714736c47a | 663 | # Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
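# Usage sketch (assumes callers supply the :intellectual_object association
# or attribute referenced by the blocks below):
#   create(:generic_file_tech_metadata, intellectual_object: obj)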
sequence(:uri) { |n| "file:///#{intellectual_object.identifier}/data/#{n}filename.xml" }
factory :generic_file_tech_metadata, :class => 'GenericFileMetadata' do
file_format { 'application/xml' }
identifier { "virginia.edu.#{self.intellectual_object.identifier}/data/filename.xml" } # sequence counter isn't available in this block
uri
size { rand(20000..500000000) }
created { "#{Time.now}" }
modified { "#{Time.now}" }
checksum_attributes {
[{
algorithm: 'sha256',
datetime: Time.now.to_s,
digest: SecureRandom.hex
}]
}
end
end | 28.826087 | 94 | 0.639517 |
6119ed37d87c4c0ce9136040611f54bd5ab84418 | 94 | require "rose/test/version"
module Rose
module Test
# Your code goes here...
end
end
| 11.75 | 28 | 0.680851 |
f80337a5e072c694e3b6ab94722d22499584bc1b | 521 | require 'bio-ucsc'
describe "Bio::Ucsc::Hg19::WgEncodeUwHistoneHct116H3k4me3StdPkRep1" do
describe "#find_by_interval" do
context "given range chr1:1-800,000" do
it 'returns a record (r.chrom == "chr1")' do
Bio::Ucsc::Hg19::DBConnection.default
Bio::Ucsc::Hg19::DBConnection.connect
i = Bio::GenomicInterval.parse("chr1:1-800,000")
r = Bio::Ucsc::Hg19::WgEncodeUwHistoneHct116H3k4me3StdPkRep1.find_by_interval(i)
r.chrom.should == "chr1"
end
end
end
end
| 30.647059 | 88 | 0.669866 |
79f27af91a86ae5393dff53774f76eadbf2d74dc | 707 | module ApplicationHelper
include Pagy::Frontend
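# Maps an audit action key to a Font Awesome icon, e.g. (illustrative):
#   crud_label('update') # => "<i class='fa fa-pen'></i>" (html_safe)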
def crud_label(key)
case key
when 'create'
"<i class='fa fa-plus'></i>".html_safe
when 'update'
"<i class='fa fa-pen'></i>".html_safe
when 'destroy'
"<i class='fa fa-trash'></i>".html_safe
else
# type code here
end
end
def model_label(model)
case model
when 'Course'
"<i class='fa fa-graduation-cap'></i>".html_safe
when 'Lesson'
"<i class='fa fa-check-square'></i>".html_safe
when 'Enrollment'
"<i class='fa fa-lock-open'></i>".html_safe
when 'Comment'
"<i class='fa fa-comment'></i>".html_safe
else
# type code here
end
end
end
| 21.424242 | 56 | 0.575672 |
b9c28f623803f35c4809e361937f6776e06d34f6 | 2,707 | # frozen_string_literal: true
require 'spec_helper'
describe NamespaceStatistics do
it { is_expected.to belong_to(:namespace) }
it { is_expected.to validate_presence_of(:namespace) }
describe '#shared_runners_minutes' do
let(:namespace_statistics) { build(:namespace_statistics, shared_runners_seconds: 120) }
it { expect(namespace_statistics.shared_runners_minutes).to eq(2) }
end
describe '#extra_shared_runners_minutes' do
subject { namespace_statistics.extra_shared_runners_minutes }
let(:namespace) { create(:namespace, shared_runners_minutes_limit: 100) }
let(:namespace_statistics) { create(:namespace_statistics, namespace: namespace) }
context 'when limit is defined' do
before do
namespace.update_attribute(:extra_shared_runners_minutes_limit, 50)
end
context 'when usage is above the main quota' do
before do
namespace_statistics.update_attribute(:shared_runners_seconds, 101 * 60)
end
it { is_expected.to eq(1) }
end
context 'when usage is below the main quota' do
before do
namespace_statistics.update_attribute(:shared_runners_seconds, 99 * 60)
end
it { is_expected.to eq(0) }
end
end
context 'without limit' do
before do
namespace.update_attribute(:extra_shared_runners_minutes_limit, nil)
end
it { is_expected.to eq(0) }
end
context 'when limit is defined globally' do
before do
namespace.update_attribute(:shared_runners_minutes_limit, nil)
stub_application_setting(shared_runners_minutes: 100)
end
context 'when usage is above the main quota' do
before do
namespace_statistics.update_attribute(:shared_runners_seconds, 101 * 60)
end
context 'and extra CI minutes have been assigned' do
before do
namespace.update_attribute(:extra_shared_runners_minutes_limit, 50)
end
it { is_expected.to eq(1) }
end
context 'and extra CI minutes have not been assigned' do
before do
namespace.update_attribute(:extra_shared_runners_minutes_limit, nil)
end
it { is_expected.to eq(0) }
end
end
context 'when usage is below the main quota' do
before do
namespace_statistics.update_attribute(:shared_runners_seconds, 90 * 60)
end
context 'and extra CI minutes have been assigned' do
before do
namespace.update_attribute(:extra_shared_runners_minutes_limit, 50)
end
it { is_expected.to eq(0) }
end
end
end
end
end
| 27.907216 | 92 | 0.667159 |
6aab6f73efb2581e794bb3f7724c6ba981030916 | 3,619 | # frozen_string_literal: true
# This example shows has_one and has_many working together
require 'spec_helper'
require 'csv'
# Examples
# ========
RSpec.describe 'Fluxor', type: :model do
before(:each) do
Widget.destroy_all
end
it 'should be able to import from CSV data' do
csv_in = <<~CSV
Name,Widget Name,Widget A Text,Widget An Integer,Widget A Float,Widget A Decimal,Widget A Datetime,Widget A Time,Widget A Date,Widget A Boolean,Widget Wotsit Name,Widget Foo Habtms Name, Widget Foo Hmts Name
Flux Capacitor,Squidget Widget,Widge Text,42,0.7734,847.63,2020-04-01 23:59,04:20:00 AM,2020-12-02,T,Mr. Wotsit,Habtm 1,Hmt 1
Flex Resistor,Budget Widget,Budge Text,100,7.734,243.26,2019-04-01 23:59,16:20:00 PM,2019-12-02,F,Mr. Budgit,Habtm 1,Hmt 2
Flex Resistor,Budget Widget,Budge Text,100,7.734,243.26,2019-04-01 23:59,16:20:00 PM,2019-12-02,F,Mr. Fixit,Habtm 2,Hmt 3
Nix Resistor,Budget Widget,Budge Text,420,7.734,243.26,2019-04-01 23:59,16:20:00 PM,2019-12-02,F,Mr. Budgit,Habtm 2,Hmt 1
CSV
# %%% TODO: Change the third line to this: Flex Resistor,Squidget Widget,Widge Text,42,0.7734,847.63,2020-04-01 23:59,04:20:00 AM,2020-12-02,T,Mr. Fixit
# It builds 3 widgets instead of 2, and meanwhile a fluxor should simply have its foreign key updated to point to the
# known widget Squidget Widget.
child_info_csv = CSV.new(csv_in)
# Import CSV data
# ---------------
expect { Fluxor.df_import(child_info_csv) }.not_to raise_error
expect([Fluxor.count, Widget.count, Wotsit.count, FooHabtm.count, FooHmt.count]).to eq([3, 2, 3, 2, 3])
widgets = Widget.order(:id).pluck(:name, :a_text, :an_integer, :a_float, :a_decimal, :a_datetime, :a_time, :a_date, :a_boolean)
# Take out just the time column and test it
expect(widgets.map do |w|
t = w.slice!(6)
[t.hour, t.min]
end).to eq([[4, 20], [16, 20]])
# Now test all the rest
expect(widgets).to eq(
[
['Squidget Widget', 'Widge Text', 42, 0.7734, BigDecimal(847.63, 5),
DateTime.new(2020, 4, 1, 23, 59).in_time_zone - widgets.first[5].utc_offset.seconds,
Date.new(2020, 12, 2), true],
['Budget Widget', 'Budge Text', 420, 7.734, BigDecimal(243.26, 5),
DateTime.new(2019, 4, 1, 23, 59).in_time_zone - widgets.first[5].utc_offset.seconds,
Date.new(2019, 12, 2), false]
]
)
# The joins or something in here doesn't work in AR 3.0 and dies during the .pluck :(
unless ActiveRecord.version < ::Gem::Version.new('3.1')
flux_widg_foo = Fluxor.joins(widget: :foo_habtms)
.order('fluxors.id', 'widgets.id', 'foo_habtms.id')
.pluck('fluxors.name', Arel.sql('widgets.name AS name2'), Arel.sql('foo_habtms.name AS name3'))
expect(flux_widg_foo).to eq([['Flux Capacitor', 'Squidget Widget', 'Habtm 1'],
['Flex Resistor', 'Budget Widget', 'Habtm 1'],
['Flex Resistor', 'Budget Widget', 'Habtm 2'],
['Nix Resistor', 'Budget Widget', 'Habtm 1'],
['Nix Resistor', 'Budget Widget', 'Habtm 2']])
end
widg_wots = Widget.joins(:wotsit)
.order('widgets.id', 'wotsits.id')
.pluck('widgets.name', Arel.sql('wotsits.name AS name2'))
expect(widg_wots).to eq([['Squidget Widget', 'Mr. Wotsit'],
['Budget Widget', 'Mr. Budgit'],
['Budget Widget', 'Mr. Fixit']])
end
end
| 46.397436 | 213 | 0.610666 |
7a11eab908e67759c4ae569681dde69bd8f66b5b | 137 | class CurrentPhone < ActiveRecord::Migration[6.0]
def change
add_column :companies, :current_phone, :integer, default: 0
end
end
| 22.833333 | 63 | 0.744526 |
ab5ef3196182e995f791a66463441a8c6e99cd41 | 677 | class CreateCiBuildTraceSections < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_build_trace_sections do |t|
t.references :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.datetime_with_timezone :date_start, null: false
t.datetime_with_timezone :date_end, null: false
t.integer :byte_start, limit: 8, null: false
t.integer :byte_end, limit: 8, null: false
t.integer :build_id, null: false
t.integer :section_name_id, null: false
end
add_index :ci_build_trace_sections, [:build_id, :section_name_id], unique: true
end
end
| 33.85 | 91 | 0.722304 |
bbd1280857626643f345cc7a4b0e550e5cd0ce84 | 1,200 | require 'csv'
require 'faker'
require 'json'
require 'yaml'
module Fake
def self.data
collection_data = {}
['.csv', '.json', '.yml'].each do |i|
name = slug(Faker::RuPaul.unique.queen)
data = generate_data(name, i, collection_data)
path = '_data/' + name + i
case i
when '.csv' then write_csv(path, data)
when '.json' then File.open(path, 'w') { |f| f.write(data.to_json) }
when '.yml' then File.open(path, 'w') { |f| f.write(data.to_yaml) }
end
Faker::Dune.unique.clear
Faker::Lovecraft.unique.clear
end
collection_data
end
def self.generate_data(name, type, collection_data)
data = []
keys = ['pid']
5.times { keys << slug(Faker::Lovecraft.unique.word) } # keys = pid + 5
5.times do # with 5 records
record = {
keys[0] => slug(Faker::Dune.unique.character),
keys[1] => Faker::Lorem.sentence,
keys[2] => Faker::TwinPeaks.quote,
keys[3] => Faker::Name.name,
keys[4] => Faker::Space.star,
keys[5] => Faker::Lovecraft.sentence
}
data << record
collection_data[name] = { 'keys' => keys, 'type' => type }
end
data
end
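# NOTE: `slug` and `write_csv` are assumed to be defined elsewhere in this
# module; a minimal slug helper might look like:
#   def self.slug(str)
#     str.downcase.strip.gsub(/[^a-z0-9]+/, '-')
#   end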
end
| 27.906977 | 75 | 0.5775 |
ed90ebb6107df32742c6d3cae9d79a277911977a | 8,934 | =begin
#Argo Workflows API
#Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/
The version of the OpenAPI document: VERSION
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.2.1
=end
require 'date'
require 'time'
module ArgoWorkflows
# PubSubEventSource refers to event-source for GCP PubSub related events.
class IoArgoprojEventsV1alpha1PubSubEventSource
attr_accessor :credential_secret
attr_accessor :delete_subscription_on_finish
attr_accessor :json_body
attr_accessor :metadata
attr_accessor :project_id
attr_accessor :subscription_id
attr_accessor :topic
attr_accessor :topic_project_id
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'credential_secret' => :'credentialSecret',
:'delete_subscription_on_finish' => :'deleteSubscriptionOnFinish',
:'json_body' => :'jsonBody',
:'metadata' => :'metadata',
:'project_id' => :'projectID',
:'subscription_id' => :'subscriptionID',
:'topic' => :'topic',
:'topic_project_id' => :'topicProjectID'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'credential_secret' => :'SecretKeySelector',
:'delete_subscription_on_finish' => :'Boolean',
:'json_body' => :'Boolean',
:'metadata' => :'Hash<String, String>',
:'project_id' => :'String',
:'subscription_id' => :'String',
:'topic' => :'String',
:'topic_project_id' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `ArgoWorkflows::IoArgoprojEventsV1alpha1PubSubEventSource` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `ArgoWorkflows::IoArgoprojEventsV1alpha1PubSubEventSource`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'credential_secret')
self.credential_secret = attributes[:'credential_secret']
end
if attributes.key?(:'delete_subscription_on_finish')
self.delete_subscription_on_finish = attributes[:'delete_subscription_on_finish']
end
if attributes.key?(:'json_body')
self.json_body = attributes[:'json_body']
end
if attributes.key?(:'metadata')
if (value = attributes[:'metadata']).is_a?(Hash)
self.metadata = value
end
end
if attributes.key?(:'project_id')
self.project_id = attributes[:'project_id']
end
if attributes.key?(:'subscription_id')
self.subscription_id = attributes[:'subscription_id']
end
if attributes.key?(:'topic')
self.topic = attributes[:'topic']
end
if attributes.key?(:'topic_project_id')
self.topic_project_id = attributes[:'topic_project_id']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
credential_secret == o.credential_secret &&
delete_subscription_on_finish == o.delete_subscription_on_finish &&
json_body == o.json_body &&
metadata == o.metadata &&
project_id == o.project_id &&
subscription_id == o.subscription_id &&
topic == o.topic &&
topic_project_id == o.topic_project_id
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[credential_secret, delete_subscription_on_finish, json_body, metadata, project_id, subscription_id, topic, topic_project_id].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = ArgoWorkflows.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 31.347368 | 234 | 0.636109 |
ab7f88fc3f2ffd0b55af3fbf495380538be59723 | 5,711 | module Statsample
# Class to create crosstab of data
# With this, you can create reports and do chi-square tests.
# The first vector will be the rows and the second will be the columns.
#
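# A minimal usage sketch (data values are illustrative):
#   ct = Statsample::Crosstab.new(%w[a a b b], %w[x y x y])
#   ct.to_matrix   # => Matrix of observed cell frequencies (rows x columns)
#   ct.chi_square  # => a chi-square test for the table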
class Crosstab
include Summarizable
attr_reader :v_rows, :v_cols
attr_accessor :row_label, :column_label, :name, :percentage_row, :percentage_column, :percentage_total
def initialize(v1, v2, opts=Hash.new)
raise ArgumentError, "Vectors should be the same size" unless v1.size==v2.size
@v_rows, @v_cols = Statsample.only_valid_clone(
Daru::Vector.new(v1),
Daru::Vector.new(v2))
@cases = @v_rows.size
@row_label = v1.name
@column_label = v2.name
@name = nil
@percentage_row = @percentage_column = @percentage_total=false
opts.each do |k,v|
self.send("#{k}=",v) if self.respond_to? k
end
@name ||= _("Crosstab %s - %s") % [@row_label, @column_label]
end
def rows_names
@v_rows.factors.sort.reset_index!
end
def cols_names
@v_cols.factors.sort.reset_index!
end
def rows_total
@v_rows.frequencies.to_h
end
def cols_total
@v_cols.frequencies.to_h
end
def frequencies
base = rows_names.inject([]) do |s,row|
s += cols_names.collect { |col| [row,col] }
end.inject({}) do |s,par|
s[par]=0
s
end
base.update(Daru::Vector.new(Statsample::vector_cols_matrix(@v_rows,@v_cols).to_a).frequencies.to_h)
end
def to_matrix
f = frequencies
rn = rows_names
cn = cols_names
Matrix.rows(rn.collect{|row|
cn.collect{|col| f[[row,col]]}
})
end
def frequencies_by_row
f=frequencies
rows_names.inject({}){|sr,row|
sr[row]=cols_names.inject({}) {|sc,col| sc[col]=f[[row,col]]; sc}
sr
}
end
def frequencies_by_col
f=frequencies
cols_names.inject({}){|sc,col|
sc[col]=rows_names.inject({}) {|sr,row| sr[row]=f[[row,col]]; sr}
sc
}
end
# Chi square, based on the observed and expected matrices
def chi_square
require 'statsample/test'
Statsample::Test.chi_square(self.to_matrix, matrix_expected)
end
# Useful to obtain chi square
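# Each expected cell frequency is (row total * column total) / N.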
def matrix_expected
rn=rows_names
cn=cols_names
rt=rows_total
ct=cols_total
t=@v_rows.size
m=rn.collect{|row|
cn.collect{|col|
(rt[row]*ct[col]).quo(t)
}
}
Matrix.rows(m)
end
def cols_empty_hash
cols_names.inject({}) {|a,x| a[x]=0;a}
end
def report_building(builder)
# builder.section(:name=>@name) do |generator|
# fq=frequencies
# rn=rows_names
# cn=cols_names
# total=0
# total_cols=cols_empty_hash
# generator.text "Chi Square: #{chi_square}"
# generator.text(_("Rows: %s") % @row_label) unless @row_label.nil?
# generator.text(_("Columns: %s") % @column_label) unless @column_label.nil?
# t=ReportBuilder::Table.new(:name=>@name+" - "+_("Raw"), :header=>[""]+cols_names.collect {|c| @v_cols.index_of(c)}+[_("Total")])
# rn.each do |row|
# total_row=0
# t_row=[@v_rows.index_of(row)]
# cn.each do |col|
# data=fq[[row,col]]
# total_row+=fq[[row,col]]
# total+=fq[[row,col]]
# total_cols[col]+=fq[[row,col]]
# t_row.push(data)
# end
# t_row.push(total_row)
# t.row(t_row)
# end
# t.hr
# t_row=[_("Total")]
# cn.each do |v|
# t_row.push(total_cols[v])
# end
# t_row.push(total)
# t.row(t_row)
# generator.parse_element(t)
# if(@percentage_row)
# table_percentage(generator,:row)
# end
# if(@percentage_column)
# table_percentage(generator,:column)
# end
# if(@percentage_total)
# table_percentage(generator,:total)
# end
# end
end
def table_percentage(generator,type)
# fq=frequencies
# cn=cols_names
# rn=rows_names
# rt=rows_total
# ct=cols_total
# type_name=case type
# when :row then _("% Row")
# when :column then _("% Column")
# when :total then _("% Total")
# end
# t=ReportBuilder::Table.new(:name=>@name+" - "+_(type_name), :header=>[""]+cols_names.collect {|c| @v_cols.index_of(c) } + [_("Total")])
# rn.each do |row|
# t_row=[@v_rows.index_of(row)]
# cn.each do |col|
# total=case type
# when :row then rt[row]
# when :column then ct[col]
# when :total then @cases
# end
# data = sprintf("%0.2f%%", fq[[row,col]]*100.0/ total )
# t_row.push(data)
# end
# total=case type
# when :row then rt[row]
# when :column then @cases
# when :total then @cases
# end
# t_row.push(sprintf("%0.2f%%", rt[row]*100.0/total))
# t.row(t_row)
# end
# t.hr
# t_row=[_("Total")]
# cn.each{|col|
# total=case type
# when :row then @cases
# when :column then ct[col]
# when :total then @cases
# end
# t_row.push(sprintf("%0.2f%%", ct[col]*100.0/total))
# }
# t_row.push("100%")
# t.row(t_row)
# generator.parse_element(t)
end
end
end
| 30.540107 | 143 | 0.531255 |
e8a37452f198afe85cfc88f5ed82cdbd2bb131b3 | 1,868 | #
# Be sure to run `pod lib lint Repository.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'SwiftRepository'
s.version = '0.5.4'
s.summary = 'Essentially, provides an abstraction of data access'
s.description = 'The simplest approach, especially with an existing system, is to create a new Repository implementation for each business object you need to store to or retrieve from your persistence layer. Further, you should only implement the specific methods you are calling in your application. Avoid the trap of creating a “standard” repository class, base class, or default interface that you must implement for all repositories. Yes, if you need to have an Update or a Delete method, you should strive to make its interface consistent (does Delete take an ID, or does it take the object itself?), but don’t implement a Delete method on your LookupTableRepository that you’re only ever going to be calling List() on. The biggest benefit of this approach is YAGNI – you won’t waste any time implementing methods that never get called.'
s.homepage = 'https://github.com/dsay/Repository'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Dima Sai' => '[email protected]' }
s.source = { :git => 'https://github.com/dsay/Repository.git', :tag => s.version.to_s }
s.ios.deployment_target = '11.0'
s.source_files = "Sources/**/*.{swift}"
s.swift_version = '5.0'
s.frameworks = 'UIKit'
s.dependency 'PromiseKit/CorePromise'
s.dependency 'Alamofire'
s.dependency 'ObjectMapper'
end
| 56.606061 | 851 | 0.688437 |
ed1918d42d564dd09107192d1493aebeb45fbdef | 306 | # frozen_string_literal: true
require_relative '../../g_1846/step/track_and_token'
require_relative 'tracker'
module Engine
module Game
module G18LosAngeles
module Step
class TrackAndToken < G1846::Step::TrackAndToken
include Tracker
end
end
end
end
end
| 18 | 56 | 0.683007 |
7aa9d61c7e1ee3e435649dd729d683fa04079d8b | 1,808 | module DatabaseRewinder
module Compatibility
def clean_with(*args)
cleaners.each {|c| c.clean_with(*args)}
end
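# Runs the block, then rewinds whatever was inserted during it, e.g. (sketch):
#   DatabaseRewinder.cleaning { User.create!(name: 'tmp') }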
def cleaning
yield
ensure
clean
end
def start; end
def strategy=(args)
options = args.is_a?(Array) ? args.extract_options! : {}
@only, @except = options[:only], options[:except]
cleaners.each {|c| c.strategy = nil, options}
end
# In order to add another database to cleanup, you can give its connection name in one of the forms below:
#
# # the simplest form
# DatabaseRewinder['the_db_name']
#
# or
#
# # with connection: key
# DatabaseRewinder[connection: 'the_db_name']
#
# or
#
# # DatabaseCleaner compatible
# DatabaseRewinder[:active_record, connection: 'the_db_name']
#
# You can cleanup multiple databases for each test using this configuration.
def [](orm, connection: nil, **)
if connection.nil?
if orm.is_a? String
connection = orm
elsif orm.is_a?(Hash) && orm.has_key?(:connection)
connection = orm[:connection]
end
end
super connection
end
end
class << self
prepend Compatibility
end
class Cleaner
module Compatibility
def clean_with(_strategy, only: nil, except: nil, **)
originals = @only, @except
self.only, self.except = Array(only), Array(except)
clean_all
ensure
self.only, self.except = originals
end
def strategy=(args)
options = args.is_a?(Array) ? args.extract_options! : {}
self.only = Array(options[:only]) if options.key?(:only)
self.except = Array(options[:except]) if options.key?(:except)
end
end
include Compatibility
end
end
| 25.111111 | 110 | 0.606748 |
b94a835b581915405c4d74d197c9d640b8be9387 | 57 | def remove_duplicates(str)
str.split("").uniq.join
end
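# Example: remove_duplicates("committee") # => "comite"
# (keeps the first occurrence of each character)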
| 14.25 | 26 | 0.736842 |
ac71894116916ddc31b367855d00e78f46beeaa3 | 161 | class CreateGames < ActiveRecord::Migration
def change
create_table :games do |t|
t.string :name, null: false
t.timestamps
end
end
end
| 14.636364 | 43 | 0.658385 |
f786e5ab20052fbef69cd3e6daa8a9597320259b | 8,211 | #-- copyright
# OpenProject Meeting Plugin
#
# Copyright (C) 2011-2014 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.md for more details.
#++
class Meeting < ActiveRecord::Base
self.table_name = 'meetings'
belongs_to :project
#bbm(
belongs_to :work_package
# )
belongs_to :author, class_name: 'User', foreign_key: 'author_id'
belongs_to :chairman, class_name: 'User', foreign_key: 'chairman_id'
has_one :agenda, dependent: :destroy, class_name: 'MeetingAgenda'
has_one :minutes, dependent: :destroy, class_name: 'MeetingMinutes'
has_many :contents, -> { readonly }, class_name: 'MeetingContent'
has_many :participants, dependent: :destroy, class_name: 'MeetingParticipant'
#(iag
has_many :protocols, -> { order(id: :asc) }, class_name: 'MeetingProtocol', foreign_key: 'meeting_contents_id'
#)
default_scope {
order("#{Meeting.table_name}.start_time DESC")
}
scope :from_tomorrow, -> { where(['start_time >= ?', Date.tomorrow.beginning_of_day]) }
scope :with_users_by_date, -> {
order("#{Meeting.table_name}.title ASC")
.includes({ participants: :user }, :author)
}
acts_as_watchable
acts_as_searchable columns: ["#{table_name}.title", "#{MeetingContent.table_name}.text"],
include: [:contents, :project],
references: :meeting_contents,
date_column: "#{table_name}.created_at"
acts_as_journalized
acts_as_event title: Proc.new {|o|
"#{l :label_meeting}: #{o.title} \
(#{format_date o.start_time} \
#{format_time o.start_time, false}-#{format_time o.end_time, false})"
},
url: Proc.new { |o| { controller: '/meetings', action: 'show', id: o } },
author: Proc.new(&:user),
description: ''
register_on_journal_formatter(:plaintext, 'title')
register_on_journal_formatter(:fraction, 'duration')
register_on_journal_formatter(:datetime, 'start_time')
register_on_journal_formatter(:plaintext, 'location')
accepts_nested_attributes_for :participants, allow_destroy: true
validates_presence_of :title, :duration
# We only save start_time as an aggregated value of start_date and hour,
# but still need start_date and _hour for validation purposes
attr_reader :start_date, :start_time_hour
validate :validate_date_and_time
before_save :update_start_time!
before_save :add_new_participants_as_watcher
after_initialize :set_initial_values
User.before_destroy do |user|
Meeting.where(['author_id = ?', user.id]).update_all ['author_id = ?', DeletedUser.first.id]
end
##
# Assign a date string without validation
# The actual aggregated start_time is derived after validation
def start_date=(value)
attribute_will_change! :start_date
@start_date = value
end
##
# Assign a HH:MM hour string without validation
# The actual aggregated start_time is derived after validation
def start_time_hour=(value)
attribute_will_change! :start_time_hour
@start_time_hour = value
end
#bbm(
def selected_vals
[13]
end
# )
##
# Return the computed start_time when changed
def start_time
if parse_start_time?
parsed_start_time
else
super
end
end
def start_month
start_time.month
end
def start_year
start_time.year
end
def end_time
start_time + duration.hours
end
def to_s
title
end
def text
agenda.text if agenda.present?
end
def author=(user)
super
# Don't add the author as participant if we already have some through nested attributes
participants.build(user: user, invited: true) if self.new_record? && participants.empty? && user
end
# Returns true if usr or current user is allowed to view the meeting
def visible?(user = nil)
(user || User.current).allowed_to?(:view_meetings, project)
end
# iag(
def visible_meeting?(user = nil)
((user || User.current) == author) || participants.detect {|p| p.user == (user || User.current)}
end
#)
def all_changeable_participants
changeable_participants = participants.select(&:invited).collect(&:user)
changeable_participants = changeable_participants + participants.select(&:attended).collect(&:user)
changeable_participants = changeable_participants + \
User.allowed_members(:view_meetings, project)
changeable_participants.uniq(&:id)
end
def copy(attrs)
copy = dup
copy.author = attrs.delete(:author)
copy.attributes = attrs
copy.send(:set_initial_values)
copy.participants.clear
copy.participants_attributes = participants.collect(&:copy_attributes)
copy
end
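# Groups meetings by start year, then month, then start time; sketch of the
# resulting shape (values illustrative):
#   Meeting.group_by_time(meetings)
#   # => { 2014 => { 6 => { <start_time> => [meeting, ...] } } }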
def self.group_by_time(meetings)
by_start_year_month_date = ActiveSupport::OrderedHash.new do |hy, year|
hy[year] = ActiveSupport::OrderedHash.new do |hm, month|
hm[month] = ActiveSupport::OrderedHash.new
end
end
meetings.group_by(&:start_year).each do |year, objs|
objs.group_by(&:start_month).each do |month, objs|
objs.group_by(&:start_time).each do |date, objs|
by_start_year_month_date[year][month][date] = objs
end
end
end
by_start_year_month_date
end
def close_agenda_and_copy_to_minutes!
agenda.lock!
create_minutes(text: agenda.text, comment: 'Minutes created')
end
alias :original_participants_attributes= :participants_attributes=
def participants_attributes=(attrs)
attrs.each do |participant|
participant['_destroy'] = true if !(participant['attended'] || participant['invited'])
end
self.original_participants_attributes = attrs
end
protected
def set_initial_values
# set defaults
write_attribute(:start_time, Date.tomorrow + 10.hours) if start_time.nil?
self.duration ||= 1
@start_date = start_time.to_date.iso8601
@start_time_hour = start_time.strftime('%H:%M')
end
private
##
# Validate date and time setters.
# If start_time has been changed, check that value.
# Otherwise start_{date, time_hour} was used, then validate those
def validate_date_and_time
if parse_start_time?
errors.add :start_date, :not_an_iso_date if parsed_start_date.nil?
errors.add :start_time_hour, :invalid_time_format if parsed_start_time_hour.nil?
else
errors.add :start_time, :invalid if start_time.nil?
end
end
##
# Actually sets the aggregated start_time attribute.
def update_start_time!
write_attribute(:start_time, start_time)
end
##
# Determines whether new raw values were provided.
def parse_start_time?
!(changed & %w(start_date start_time_hour)).empty?
end
##
# Returns the parse result of both start_date and start_time_hour
def parsed_start_time
date = parsed_start_date
time = parsed_start_time_hour
if date.nil? || time.nil?
raise ArgumentError, 'Provided composite start_time is invalid.'
end
Time.zone.local(
date.year,
date.month,
date.day,
time.hour,
time.min
)
end
##
# Enforce ISO 8601 date parsing for the given input string
# This avoids weird parsing of dates due to malformed input.
def parsed_start_date
Date.iso8601(@start_date)
rescue ArgumentError
nil
end
##
# Enforce HH:MM time parsing for the given input string
def parsed_start_time_hour
Time.strptime(@start_time_hour, '%H:%M')
rescue ArgumentError
nil
end
def add_new_participants_as_watcher
participants.select(&:new_record?).each do |p|
add_watcher(p.user)
end
end
end
| 27.833898 | 113 | 0.70162 |
0125153fdcc2b82f77d0353eaa0118973d66783a | 2,023 | class CategoriesController < ApplicationController
get '/categories' do
if logged_in?
erb :'categories/index'
else
redirect 'users/login'
end
end
get '/categories/new' do
if logged_in?
erb :'categories/new'
else
redirect 'users/login'
end
end
post '/categories' do
if !params[:name].empty?
@category = Category.find_or_create_by(params)
redirect "/categories/#{@category.id}"
else
redirect 'categories/new'
end
end
get '/categories/:id' do
if logged_in?
@category = Category.find_by_id(params[:id])
@user_cat = current_user.categories.any?{|cat| cat == @category}
if @category && @user_cat
erb :'categories/show'
else
redirect '/categories'
end
else
redirect 'users/login'
end
end
get '/categories/:id/edit' do
if logged_in?
@category = Category.find_by_id(params[:id])
@user_cat = current_user.categories.any?{|cat| cat == @category}
if @category && @user_cat
erb :'categories/edit'
else
redirect '/categories'
end
else
redirect 'users/login'
end
end
patch '/categories/:id' do
@category = Category.find(params[:id])
if !params[:name].empty?
@category.update(name: params[:name])
redirect "/categories/#{@category.id}"
else
redirect "categories/#{@category.id}/edit"
end
end
delete '/categories/:id' do
if logged_in?
@category = Category.find(params[:id])
@category.destroy
redirect '/categories'
else
redirect 'users/login'
end
end
get '/categories/' do
redirect '/categories'
end
end | 25.607595 | 76 | 0.514582 |
6a55c0587b6350d148e94ed6c3ca7bd898318131 | 1,870 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "major_tom/version"
Gem::Specification.new do |spec|
spec.name = "major_tom"
spec.version = MajorTom::VERSION
spec.authors = ["Andrew Cantino"]
spec.email = ["[email protected]"]
spec.summary = %q{Unofficial Ruby client for Major Tom}
spec.description = %q{Unofficial Ruby client for Major Tom}
spec.homepage = "https://github.com/cantino/major-tom-ruby"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
# spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/cantino/major-tom-ruby"
# spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.17"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_dependency "faye-websocket", "~> 0.10"
end
| 41.555556 | 96 | 0.675401 |
1d82b5ade740f4f8c113b3f48e2d8257f7fbba2d | 861 | module Spree
class Subscription < Spree::Base
include RoleSubscriber
include RestrictiveDestroyer
include ApiHandler
acts_as_restrictive_destroyer column: :unsubscribed_at
attr_accessor :token
self.whitelisted_ransackable_attributes = %w[email subscribed_at]
belongs_to :plan
belongs_to :user
has_many :events, class_name: 'Spree::SubscriptionEvent'
validates :plan_id, :email, :user_id, presence: true
validates :plan_id, uniqueness: { scope: [:user_id, :unsubscribed_at] }
validates :user_id, uniqueness: { scope: :unsubscribed_at }
before_validation :set_email, on: :create
validate :verify_plan, on: :create
private
def set_email
self.email = user.try(:email)
end
def verify_plan
errors.add :plan_id, "is not active." unless plan.try(:visible?)
end
end
end | 25.323529 | 75 | 0.713124 |
acc93350d606b60a806370279d858569ad2b8076 | 711 | namespace :redis do
desc 'Start redis'
base_exec = File.exists?("../local/bin/redis-server") ? "../local/bin/" : ""
conf = File.exists?("../cdl/conf/redis.conf") ? "../cdl/conf/redis.conf" : File.exists?("#{Dir.pwd}/conf/redis.conf") ? "#{Dir.pwd}/conf/redis.conf" : "../shortcake-cdl/conf/redis.conf"
task :start do
config_file = File.join(Dir.pwd, "conf/redis.conf")
command = ("#{base_exec}redis-server #{config_file}") if File.exists?(config_file)
STDOUT.puts "Starting Redis using: #{command} (#{config_file})"
exec command || "redis-server"
end
desc 'Stop redis'
task :stop do
STDOUT.puts "Shutting down Redis"
exec "#{base_exec}redis-cli SHUTDOWN"
end
end
| 35.55 | 188 | 0.649789 |
bff727f5fffb83a252eb50a7945c0d9a2f150315 | 1,345 | class NavigationController < ApplicationController
# caches_page :flash_tree_view
def show_tree_view
# set the users default hierarchy if they haven't done so already
@selected_hierarchy_entry = HierarchyEntry.find_by_id(params[:selected_hierarchy_entry_id].to_i)
@taxon_page = TaxonPage.new(@taxon_concept, current_user, @selected_hierarchy_entry)
@session_hierarchy = @taxon_page.hierarchy
load_taxon_for_tree_view
render layout: false, partial: 'root_nodes'
end
def show_tree_view_for_selection
load_taxon_for_tree_view
render layout: false, partial: 'tree_view_for_selection'
end
def browse
@hierarchy_entry = HierarchyEntry.find_by_id(params[:id])
expand = params[:expand] == "1"
if @hierarchy_entry.blank?
return
end
@hierarchy = @hierarchy_entry.hierarchy
render layout: false, partial: 'browse', locals: { expand: expand }
end
def browse_stats
@hierarchy_entry = HierarchyEntry.find_by_id(params[:id])
expand = params[:expand] == "1"
if @hierarchy_entry.blank?
return
end
@hierarchy = @hierarchy_entry.hierarchy
render partial: 'browse_stats', layout: false, locals: { expand: expand }
end
protected
def load_taxon_for_tree_view
@hierarchy_entry = HierarchyEntry.find(params[:id].to_i)
end
end
| 28.617021 | 100 | 0.727138 |
333efe80b283510a3c58d0eb206b0743f3a5de3c | 409 | require_relative '../../automated_init'
context "Schema" do
context "Transform Write" do
data_structure = Schema::Controls::DataStructure::ReadAndWrite.example
refute(data_structure.some_attribute == 'some written value')
data = data_structure.to_h
test "The output data has been intercepted and modified" do
assert(data[:some_attribute] == 'some written value')
end
end
end
| 25.5625 | 74 | 0.726161 |
ac71e5ee335f262d449b1128055e8a2bf02e5f69 | 77 | require "landrover/version"
module Landrover
# Your code goes here...
end
| 12.833333 | 27 | 0.74026 |
08716e09863e77d78086755ff39a4d306559df75 | 2,921 | require 'spec_helper_acceptance'
require 'serverspec_type_zabbixapi'
# rubocop:disable RSpec/LetBeforeExamples
describe 'zabbix_template_host type', unless: default[:platform] =~ %r{(ubuntu-16.04|debian-9|debian-10)-amd64} do
context 'create zabbix_template_host resources' do
it 'runs successfully' do
# This will deploy a running Zabbix setup (server, web, db) which we can
# use for custom type tests
pp = <<-EOS
class { 'apache':
mpm_module => 'prefork',
}
include apache::mod::php
include postgresql::server
class { 'zabbix':
zabbix_version => '5.0',
zabbix_url => 'localhost',
zabbix_api_user => 'Admin',
zabbix_api_pass => 'zabbix',
apache_use_ssl => false,
manage_resources => true,
require => [ Class['postgresql::server'], Class['apache'], ],
}
zabbix_host { 'test1.example.com':
ipaddress => '127.0.0.1',
use_ip => true,
port => 10050,
group => 'TestgroupOne',
group_create => true,
templates => [ 'Template OS Linux by Zabbix agent', ],
require => [ Service['zabbix-server'], Package['zabbixapi'], ],
}
zabbix_template { 'TestTemplate1':
template_source => '/root/TestTemplate1.xml',
require => [ Service['zabbix-server'], Package['zabbixapi'], ],
}
zabbix_template_host{"TestTemplate1@test1.example.com":
require => [ Service['zabbix-server'], Package['zabbixapi'], ],
}
EOS
shell("echo '<?xml version=\"1.0\" encoding=\"UTF-8\"?><zabbix_export><version>4.0</version><date>2018-12-13T15:00:46Z</date><groups><group><name>Templates/Applications</name></group></groups><templates><template><template>TestTemplate1</template><name>TestTemplate1</name><description/><groups><group><name>Templates/Applications</name></group></groups><applications/><items/><discovery_rules/><macros/><templates/><screens/></template></templates></zabbix_export>' > /root/TestTemplate1.xml")
# Cleanup old database
prepare_host
apply_manifest(pp, catch_failures: true)
end
let(:result_hosts) do
zabbixapi('localhost', 'Admin', 'zabbix', 'host.get', selectParentTemplates: ['host'],
selectInterfaces: %w[dns ip main port type useip],
selectGroups: ['name'], output: ['host', '']).result
end
context 'test1.example.com' do
let(:test1) { result_hosts.select { |h| h['host'] == 'test1.example.com' }.first }
it 'has template TestTemplate1 attached' do
expect(test1['parentTemplates'].map { |t| t['host'] }.sort).to include('TestTemplate1')
end
end
end
end
| 41.728571 | 500 | 0.584047 |
5dab41d3ed92b8134a19e4bf5ebfd39dc5c3f504 | 632 | require 'rails_helper'
describe 'models/submitted.html.erb' do
context 'for submitted models' do
it "should show the correct number of models" do
user = create(:user)
3.times do
create(:accepted_model, submitting_author: user)
end
create(:model, state: "submitted", submitting_author: user)
assign(:models, Model.submitted.paginate(page: 1, per_page: 10))
render template: "models/index", formats: :html
expect(rendered).to have_selector('.model-title', count: 0)
expect(rendered).to have_content(:visible, "Active Models 1", normalize_ws: true)
end
end
end
| 27.478261 | 87 | 0.681962 |
f8e661d825fe29ff0348dd6dbd0e569c82388c99 | 2,616 | require 'rails_helper'
describe ApplicationHelper do
before(:each) do
@user = create(:user)
allow(helper).to receive(:current_user) {
@user
}
allow(@user).to receive(:organizations) {
[
OpenStruct.new(
organization: OpenStruct.new({
login: "org1",
avatar_url: "http://www.example.org/avatar1.png"
})
),
OpenStruct.new(
organization: OpenStruct.new({
login: "org2",
avatar_url: "http://www.example.org/avatar2.png"
})
),
OpenStruct.new(
organization: OpenStruct.new({
login: "org3",
avatar_url: "http://www.example.org/avatar3.png"
})
)
]
}
allow(@user).to receive(:github_user) {
OpenStruct.new(
avatar_url: "http://www.example.org/avatar2.png"
)
}
end
it 'gets organization options' do
expect(helper.organization_options).to eq(
[
[
'org1',
'org1',
{ 'data-content' => "<img src='http://www.example.org/avatar1.png' height='20' width='20' /> org1" }
],
[
'org2',
'org2',
{ 'data-content' => "<img src='http://www.example.org/avatar2.png' height='20' width='20' /> org2" }
],
[
'org3',
'org3',
{ 'data-content' => "<img src='http://www.example.org/avatar3.png' height='20' width='20' /> org3" }
]
]
)
end
it 'gets user organization option' do
expect(helper.user_organization_option).to eq(
[
@user.github_username,
@user.github_username,
{ 'data-content' => "<img src='http://www.example.org/avatar2.png' height='20' width='20' /> #{@user.github_username}" }
]
)
end
it 'gets all options' do
expect(helper.organization_select_options).to eq([
[
@user.github_username,
@user.github_username,
{ 'data-content' => "<img src='http://www.example.org/avatar2.png' height='20' width='20' /> #{@user.github_username}" }
],
[
'org1',
'org1',
{ 'data-content' => "<img src='http://www.example.org/avatar1.png' height='20' width='20' /> org1" }
],
[
'org2',
'org2',
{ 'data-content' => "<img src='http://www.example.org/avatar2.png' height='20' width='20' /> org2" }
],
[
'org3',
'org3',
{ 'data-content' => "<img src='http://www.example.org/avatar3.png' height='20' width='20' /> org3" }
]
])
end
end
| 26.16 | 128 | 0.511086 |
4ac33ff4a458aa97fb04b713d2688a385fbf8090 | 10,339 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.1-SNAPSHOT
=end
require 'cgi'
module Petstore
class StoreApi
attr_accessor :api_client
def initialize(api_client = ApiClient.default)
@api_client = api_client
end
# Delete purchase order by ID
# For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
# @param order_id [String] ID of the order that needs to be deleted
# @param [Hash] opts the optional parameters
# @return [nil]
def delete_order(order_id, opts = {})
delete_order_with_http_info(order_id, opts)
nil
end
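# Example usage (a minimal sketch; the order id shown is hypothetical):
#
#   api = Petstore::StoreApi.new
#   api.delete_order('42') # => nil on success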
# Delete purchase order by ID
# For a valid response try integer IDs with value < 1000. Anything above 1000 or non-integer values will generate API errors
# @param order_id [String] ID of the order that needs to be deleted
# @param [Hash] opts the optional parameters
# @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
def delete_order_with_http_info(order_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.delete_order ...'
end
# verify the required parameter 'order_id' is set
if @api_client.config.client_side_validation && order_id.nil?
fail ArgumentError, "Missing the required parameter 'order_id' when calling StoreApi.delete_order"
end
# resource path
local_var_path = '/store/order/{order_id}'.sub('{' + 'order_id' + '}', CGI.escape(order_id.to_s))
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type]
# auth_names
auth_names = opts[:debug_auth_names] || []
new_options = opts.merge(
:operation => :"StoreApi.delete_order",
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#delete_order\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Returns pet inventories by status
# Returns a map of status codes to quantities
# @param [Hash] opts the optional parameters
# @return [Hash<String, Integer>]
def get_inventory(opts = {})
data, _status_code, _headers = get_inventory_with_http_info(opts)
data
end
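# Example usage (a sketch; the returned map depends on the server's data):
#
#   api = Petstore::StoreApi.new
#   api.get_inventory # => e.g. { "available" => 7, "sold" => 3 }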
# Returns pet inventories by status
# Returns a map of status codes to quantities
# @param [Hash] opts the optional parameters
# @return [Array<(Hash<String, Integer>, Integer, Hash)>] Hash<String, Integer> data, response status code and response headers
def get_inventory_with_http_info(opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.get_inventory ...'
end
# resource path
local_var_path = '/store/inventory'
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type] || 'Hash<String, Integer>'
# auth_names
auth_names = opts[:debug_auth_names] || ['api_key']
new_options = opts.merge(
:operation => :"StoreApi.get_inventory",
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#get_inventory\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Find purchase order by ID
# For a valid response try integer IDs with value <= 5 or > 10. Other values will generate exceptions
# @param order_id [Integer] ID of pet that needs to be fetched
# @param [Hash] opts the optional parameters
# @return [Order]
def get_order_by_id(order_id, opts = {})
data, _status_code, _headers = get_order_by_id_with_http_info(order_id, opts)
data
end
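# Example usage (a sketch; client-side validation restricts order_id to 1..5):
#
#   api = Petstore::StoreApi.new
#   api.get_order_by_id(3) # => Petstore::Order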
# Find purchase order by ID
# For a valid response try integer IDs with value <= 5 or > 10. Other values will generate exceptions
# @param order_id [Integer] ID of pet that needs to be fetched
# @param [Hash] opts the optional parameters
# @return [Array<(Order, Integer, Hash)>] Order data, response status code and response headers
def get_order_by_id_with_http_info(order_id, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.get_order_by_id ...'
end
# verify the required parameter 'order_id' is set
if @api_client.config.client_side_validation && order_id.nil?
fail ArgumentError, "Missing the required parameter 'order_id' when calling StoreApi.get_order_by_id"
end
if @api_client.config.client_side_validation && order_id > 5
fail ArgumentError, 'invalid value for "order_id" when calling StoreApi.get_order_by_id, must be smaller than or equal to 5.'
end
if @api_client.config.client_side_validation && order_id < 1
fail ArgumentError, 'invalid value for "order_id" when calling StoreApi.get_order_by_id, must be greater than or equal to 1.'
end
# resource path
local_var_path = '/store/order/{order_id}'.sub('{' + 'order_id' + '}', CGI.escape(order_id.to_s))
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/xml', 'application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body]
# return_type
return_type = opts[:debug_return_type] || 'Order'
# auth_names
auth_names = opts[:debug_auth_names] || []
new_options = opts.merge(
:operation => :"StoreApi.get_order_by_id",
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#get_order_by_id\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Place an order for a pet
# @param order [Order] order placed for purchasing the pet
# @param [Hash] opts the optional parameters
# @return [Order]
def place_order(order, opts = {})
data, _status_code, _headers = place_order_with_http_info(order, opts)
data
end
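# Example usage (a sketch; the Order attribute names are assumptions):
#
#   order = Petstore::Order.new(id: 1, pet_id: 1, quantity: 1)
#   api = Petstore::StoreApi.new
#   api.place_order(order) # => Petstore::Order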
# Place an order for a pet
# @param order [Order] order placed for purchasing the pet
# @param [Hash] opts the optional parameters
# @return [Array<(Order, Integer, Hash)>] Order data, response status code and response headers
def place_order_with_http_info(order, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: StoreApi.place_order ...'
end
# verify the required parameter 'order' is set
if @api_client.config.client_side_validation && order.nil?
fail ArgumentError, "Missing the required parameter 'order' when calling StoreApi.place_order"
end
# resource path
local_var_path = '/store/order'
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/xml', 'application/json'])
# HTTP header 'Content-Type'
header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:debug_body] || @api_client.object_to_http_body(order)
# return_type
return_type = opts[:debug_return_type] || 'Order'
# auth_names
auth_names = opts[:debug_auth_names] || []
new_options = opts.merge(
:operation => :"StoreApi.place_order",
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: StoreApi#place_order\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
end
end
| 37.596364 | 157 | 0.669794 |
33fb6a4e8500bcb9c970f49e5aa9d2ae04dafad3 | 197 | class CreateAdministrations < ActiveRecord::Migration[5.2]
def change
create_table :administrations do |t|
t.string :name
t.references :city
t.timestamps
end
end
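# A matching association sketch (model names assumed from the table):
#
#   class Administration < ApplicationRecord
#     belongs_to :city
#   end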
end
| 17.909091 | 58 | 0.680203 |
79d8834ca6a87b4f211a8ac4405b275e7de2b220 | 157 | class AwesomeExplain::PgSeqScan < ActiveRecord::Base
establish_connection AwesomeExplain::Config.instance.db_config
self.table_name = 'pg_seq_scans'
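# Once the connection is established, any ActiveRecord reader works, e.g.:
#
#   AwesomeExplain::PgSeqScan.count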
end
| 31.4 | 64 | 0.828025 |
6a2270b0ab32e4c736adc2b6d5b3b2f48e3e9929 | 2,555 | require File.expand_path("../../Strategies/cache-download", Pathname.new(__FILE__).realpath)
ECWJP2_SDK = "/Hexagon/ERDASEcwJpeg2000SDK5.4.0/Desktop_Read-Only".freeze
class EcwJpeg2000SDK < Requirement
fatal true
satisfy(:build_env => false) { File.exist? ECWJP2_SDK }
def message; <<~EOS
ERDAS ECW/JP2 SDK was not found at:
#{ECWJP2_SDK}
Download the SDK and install 'Desktop Read-Only' to the default location from:
http://download.intergraph.com/?ProductName=ERDAS%20ECW/JPEG2000%20SDK
EOS
end
end
class Ecwjp2Sdk < Formula
desc "Decompression library for ECW- and JPEG2000-compressed imagery"
homepage "http://www.hexagongeospatial.com/products/provider-suite/erdas-ecw-jp2-sdk"
url "https://osgeo4mac.s3.amazonaws.com/src/dummy.tar.gz"
version "5.4.0"
sha256 "e7776e2ff278d6460300bd69a26d7383e6c5e2fbeb17ff12998255e7fc4c9511"
depends_on :macos => :lion # as per SDK docs
depends_on EcwJpeg2000SDK
def install
lib.mkpath
(include/"ECWJP2").mkpath
cd ECWJP2_SDK do
# vendor Desktop Read-Only libs, etc
# suffix only the older stdc++
cp "redistributable/libc++/libNCSEcw.dylib", "#{lib}/"
# libstdc++ is no longer bundled in the distribution
# cp "redistributable/libstdc++/libNCSEcw.dylib", "#{lib}/libNCSEcw-stdcxx.dylib"
# Calling install_name_tool is deprecated, so we're switching to using the MachO tools
# system "install_name_tool", "-id", opt_lib/"libNCSEcw-stdcxx.dylib", lib/"libNCSEcw-stdcxx.dylib"
# MachO::Tools.change_dylib_id(opt_lib/"libNCSEcw-stdcxx.dylib", lib/"libNCSEcw-stdcxx.dylib")
%w[etc Licenses].each { |f| cp_r f.to_s, "#{prefix}/" }
cp_r Dir["include/*"], "#{include}/ECWJP2/"
# for test
(prefix/"test").mkpath
cp "Examples/decompression/example1/dexample1.c", prefix/"test/"
%w[ecw jp2].each { |f| cp "TestData/RGB_8bit.#{f}", prefix/"test/" }
end
end
def caveats; <<~EOS
Once the formula is installed, the ERDAS ECW/JP2 SDK can be deleted from its
default install location of:
#{Pathname.new(ECWJP2_SDK).dirname}
Headers installed to:
#{opt_include}/ECWJP2
EOS
end
test do
cp prefix/"test/dexample1.c", testpath
system ENV.cc, "-I#{opt_include}/ECWJP2", "-L#{opt_lib}", "-lNCSEcw",
"-o", "test", "dexample1.c"
%w[ecw jp2].each do |f|
out = `./test #{prefix}/test/RGB_8bit.#{f}`
assert_match "Region 99", out
assert_match "Region 0", out
assert_match "ALL time", out
end
end
end
| 33.181818 | 104 | 0.678669 |
01ac5673e565a034d2c5741a53f92f9c2b9a9f91 | 6,584 | # frozen_string_literal: true
require 'ipaddr'
module Facter
module Resolvers
class Networking < BaseResolver
@log = Facter::Log.new
@semaphore = Mutex.new
@fact_list ||= {}
class << self
def resolve(fact_name)
@semaphore.synchronize do
result ||= @fact_list[fact_name]
subscribe_to_manager
result || read_network_information(fact_name)
end
end
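# Usage sketch (fact names are symbols resolved from the cached list,
# e.g. :interfaces, :domain or :primary):
#
#   Facter::Resolvers::Networking.resolve(:interfaces)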
private
def read_network_information(fact_name)
size_ptr = FFI::MemoryPointer.new(NetworkingFFI::BUFFER_LENGTH)
adapter_addresses = FFI::MemoryPointer.new(IpAdapterAddressesLh.size, NetworkingFFI::BUFFER_LENGTH)
flags = NetworkingFFI::GAA_FLAG_SKIP_ANYCAST |
NetworkingFFI::GAA_FLAG_SKIP_MULTICAST | NetworkingFFI::GAA_FLAG_SKIP_DNS_SERVER
return unless (adapter_addresses = get_adapter_addresses(size_ptr, adapter_addresses, flags))
iterate_list(adapter_addresses)
set_interfaces_other_facts if @fact_list[:interfaces]
@fact_list[fact_name]
end
def get_adapter_addresses(size_ptr, adapter_addresses, flags)
error = nil
3.times do
error = NetworkingFFI::GetAdaptersAddresses(NetworkingFFI::AF_UNSPEC, flags,
FFI::Pointer::NULL, adapter_addresses, size_ptr)
break if error == NetworkingFFI::ERROR_SUCCES
if error == NetworkingFFI::ERROR_BUFFER_OVERFLOW
adapter_addresses = FFI::MemoryPointer.new(IpAdapterAddressesLh.size, NetworkingFFI::BUFFER_LENGTH)
else
@log.info 'Unable to retrieve networking facts!'
return nil
end
end
return nil unless error.zero?
adapter_addresses
end
def adapter_down?(adapter)
adapter[:OperStatus] != NetworkingFFI::IF_OPER_STATUS_UP ||
![NetworkingFFI::IF_TYPE_ETHERNET_CSMACD, NetworkingFFI::IF_TYPE_IEEE80211].include?(adapter[:IfType])
end
def retrieve_dhcp_server(adapter)
if !(adapter[:Flags] & NetworkingFFI::IP_ADAPTER_DHCP_ENABLED).zero? &&
adapter[:Union][:Struct][:Length] >= IpAdapterAddressesLh.size
NetworkUtils.address_to_string(adapter[:Dhcpv4Server])
end
end
def iterate_list(adapter_addresses)
net_interface = {}
IpAdapterAddressesLh.read_list(adapter_addresses) do |adapter_address|
if adapter_down?(adapter_address)
adapter_address = IpAdapterAddressesLh.new(adapter_address[:Next])
next
end
@fact_list[:domain] ||= adapter_address[:DnsSuffix].read_wide_string_without_length
name = adapter_address[:FriendlyName].read_wide_string_without_length.to_sym
net_interface[name] = build_interface_info(adapter_address, name)
end
@fact_list[:interfaces] = net_interface unless net_interface.empty?
end
def build_interface_info(adapter_address, name)
hash = {}
hash[:dhcp] = retrieve_dhcp_server(adapter_address)
hash[:mtu] = adapter_address[:Mtu]
bindings = find_ip_addresses(adapter_address[:FirstUnicastAddress], name)
hash[:bindings] = bindings[:ipv4] unless bindings[:ipv4].empty?
hash[:bindings6] = bindings[:ipv6] unless bindings[:ipv6].empty?
hash[:mac] = NetworkUtils.find_mac_address(adapter_address)
hash
end
def find_ip_addresses(unicast_addresses, name)
bindings = {}
bindings[:ipv6] = []
bindings[:ipv4] = []
IpAdapterUnicastAddressLH.read_list(unicast_addresses) do |unicast|
addr = NetworkUtils.address_to_string(unicast[:Address])
unless addr
unicast = IpAdapterUnicastAddressLH.new(unicast[:Next])
next
end
sock_addr = SockAddr.new(unicast[:Address][:lpSockaddr])
add_ip_data(addr, unicast, sock_addr, bindings)
find_primary_interface(sock_addr, name, addr)
end
bindings
end
def add_ip_data(addr, unicast, sock_addr, bindings)
result = find_bindings(sock_addr, unicast, addr)
return unless result
bindings[:ipv6] << result if result[:network].ipv6?
bindings[:ipv4] << result if result[:network].ipv4?
end
def find_bindings(sock_addr, unicast, addr)
return unless [NetworkingFFI::AF_INET, NetworkingFFI::AF_INET6].include?(sock_addr[:sa_family])
NetworkUtils.build_binding(addr, unicast[:OnLinkPrefixLength])
end
def find_primary_interface(sock_addr, name, addr)
if !@fact_list[:primary_interface] &&
([NetworkingFFI::AF_INET, NetworkingFFI::AF_INET6].include?(sock_addr[:sa_family]) &&
!NetworkUtils.ignored_ip_address(addr))
@fact_list[:primary] = name
end
end
def set_interfaces_other_facts
@fact_list[:interfaces].each do |interface_name, value|
if value[:bindings]
binding = find_valid_binding(value[:bindings])
populate_interface(binding, value)
end
if value[:bindings6]
binding = find_valid_binding(value[:bindings6])
populate_interface(binding, value)
end
set_networking_other_facts(value, interface_name)
end
end
def find_valid_binding(bindings)
bindings.each do |binding|
return binding unless NetworkUtils.ignored_ip_address(binding[:address])
end
nil
end
def populate_interface(bind, interface)
return if !bind || bind.empty?
if bind[:network].ipv6?
interface[:ip6] = bind[:address]
interface[:netmask6] = bind[:netmask]
interface[:network6] = bind[:network]
else
interface[:network] = bind[:network]
interface[:netmask] = bind[:netmask]
interface[:ip] = bind[:address]
end
end
def set_networking_other_facts(value, interface_name)
return unless @fact_list[:primary] == interface_name
%i[mtu dhcp mac ip ip6 netmask netmask6 network network6].each do |key|
@fact_list[key] = value[key]
end
end
end
end
end
end
| 36.375691 | 114 | 0.618773 |
ac4fa280e42e1f0b5f310bc6fc1afa04ece2674e | 67 | require "snag/version"
module Snag
# Your code goes here...
end
| 11.166667 | 26 | 0.701493 |
6af8ec3aacb8c3520726dafafea899b573efc9a3 | 750 | # -*- mode: ruby -*-
# vi: set ft=ruby :
# Docker specific configurations go here
def config_docker(config, i, total, name, version)
if version == "Local"
config.vm.synced_folder "../../", "/tachyon"
end
config.vm.synced_folder "./", "/vagrant"
config.ssh.username = "root"
config.ssh.password = "vagrant"
config.ssh.private_key_path = "files/id_rsa"
config.vm.provider "docker" do |d|
d.build_dir = "."
config.ssh.port ="22"
d.has_ssh = true
d.create_args = ["--privileged"]
d.remains_running = true
end
config.vm.host_name = "#{name}"
if i == total # last VM starts tachyon
config.vm.provision "shell", path: Post
config.vm.provision "shell", path: "core/start_tachyon_cluster.sh"
end
end
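# Usage sketch: called once per machine from the Vagrantfile's configure
# block (the machine name and counts here are assumptions):
#
#   Vagrant.configure("2") do |config|
#     config_docker(config, 1, 1, "tachyon-master", "Local")
#   end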
| 26.785714 | 70 | 0.653333 |
18e69e0d920d00a80c91e7d8f1ea0b6cf51d0bed | 364 | require 'rubygems'
require 'test/unit'
require 'shoulda'
require 'open-uri'
require 'yaml'
begin; require 'turn'; rescue LoadError; end
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'pismo'
class Test::Unit::TestCase
include Pismo
HTML_DIRECTORY = File.dirname(__FILE__) + "/corpus"
end
| 24.266667 | 66 | 0.744505 |
4aa42cbb5694c73287737f4dc64e46d9f8b2712c | 262 | class Twindocs < Cask
url 'https://www.twindocs.com/plugins/es/tools_mac/Twindocs%20tools.pkg.zip'
homepage 'https://www.twindocs.com'
version 'latest'
sha256 :no_check
install 'Twindocs tools.pkg'
uninstall :pkgutil => 'com.twindocs.ambassador'
end
| 29.111111 | 78 | 0.744275 |
281e74d9bf0cf3706a07be5bf5dda0dfc7b4f101 | 476 | module Prototok
module Formatters
class Default < Base
def encode(*args)
raise Errors::FormatError if args.size != 2
args.map { |part| RbNaCl::Util.bin2hex(part) }
.join(Prototok.config[:token_delimiter])
end
def decode(str)
parts = str.split(Prototok.config[:token_delimiter])
raise Errors::FormatError if parts.size != 2
parts.map { |part| RbNaCl::Util.hex2bin(part) }
end
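# Round-trip sketch (assuming "." is the configured token_delimiter):
#
#   token = encode("\x01\x02", "\x03\x04") # => "0102.0304"
#   decode(token) # => ["\x01\x02", "\x03\x04"]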
end
end
end
| 26.444444 | 60 | 0.615546 |
38496757e80dba2d3fe611d0eee48a098b91919c | 510 | class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
def verify_token
if(cookies[:session_token])
begin
# Assumes the ruby-jwt gem: JWT.decode returns [payload, header] and
# the payload hash is string-keyed.
payload, _header = JWT.decode(cookies[:session_token], "secret")
@current_user = User.find(payload["user_id"])
rescue JWT::DecodeError => e
redirect_to root_path
end
else
redirect_to root_path
end
end
end
| 22.173913 | 56 | 0.694118 |
5ddfc7330774eb86ecb3414a8ece388db82d52bd | 35,614 | # coding: utf-8
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Add some helper methods to standard classes.
module Google
module Protobuf
Any.class_eval do
# TODO(igorpeshansky): Remove this once
# https://github.com/google/protobuf/pull/4719 gets released.
def self.pack(msg, type_url_prefix = 'type.googleapis.com/')
any = Google::Protobuf::Any.new
any.pack(msg, type_url_prefix)
any
end
end
end
end
String.class_eval do
def inspect_octal
specials = {
'a' => '\\007',
'b' => '\\010',
'v' => '\\013',
'f' => '\\014',
'r' => '\\015'
}.freeze
inspect.gsub(/\\([abvfr])/) { specials[Regexp.last_match(1)] } \
.gsub(/\\x([0-9A-F][0-9A-F])/) do
format('\\%03o', Regexp.last_match(1).to_i(16))
end
end
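# For example (0x92 is 0222 in octal):
#
#   [0x92].pack('C*').inspect_octal # => "\"\\222\""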
end
# Constants used by unit tests for Google Cloud Logging plugin.
module Constants
include Fluent::GoogleCloudOutput::ServiceConstants
include Fluent::GoogleCloudOutput::ConfigConstants
include Fluent::GoogleCloudOutput::InternalConstants
# Generic attributes.
HOSTNAME = Socket.gethostname
CUSTOM_LOGGING_API_URL = 'http://localhost:52000'.freeze
CUSTOM_METADATA_AGENT_URL = 'http://localhost:12345'.freeze
METADATA_AGENT_URL_FROM_ENV = 'http://localhost:54321'.freeze
# TODO(qingling128) Separate constants into different submodules.
# Attributes used for the GCE metadata service.
PROJECT_ID = 'test-project-id'.freeze
ZONE = 'us-central1-b'.freeze
FULLY_QUALIFIED_ZONE = "projects/#{PROJECT_ID}/zones/#{ZONE}".freeze
VM_ID = '9876543210'.freeze
RANDOM_LOCAL_RESOURCE_ID = 'ehb.jjk.poq.ll'.freeze
# Attributes used for the Metadata Agent resources.
METADATA_ZONE = 'us-central1-c'.freeze
METADATA_VM_ID = '0123456789'.freeze
# Attributes used for custom (overridden) configs.
CUSTOM_PROJECT_ID = 'test-custom-project-id'.freeze
CUSTOM_ZONE = 'us-custom-central1-b'.freeze
CUSTOM_FULLY_QUALIFIED_ZONE = "projects/#{PROJECT_ID}/zones/#{ZONE}".freeze
CUSTOM_VM_ID = 'C9876543210'.freeze
CUSTOM_HOSTNAME = 'custom.hostname.org'.freeze
# Kubernetes-specific attributes.
CUSTOM_K8S_CLUSTER_NAME = 'kubernetes-cluster'.freeze
CUSTOM_K8S_LOCATION = 'kubernetes-location'.freeze
# Attributes used for the EC2 metadata service.
EC2_PROJECT_ID = 'test-ec2-project-id'.freeze
EC2_ZONE = 'us-west-2b'.freeze
EC2_PREFIXED_ZONE = "aws:#{EC2_ZONE}".freeze
EC2_REGION = 'us-west-2'.freeze
EC2_PREFIXED_REGION = "aws:#{EC2_REGION}".freeze
EC2_VM_ID = 'i-81c16767'.freeze
EC2_ACCOUNT_ID = '123456789012'.freeze
# The formatting here matches the format used on the VM.
EC2_IDENTITY_DOCUMENT = %({
"accountId" : "#{EC2_ACCOUNT_ID}",
"availabilityZone" : "#{EC2_ZONE}",
"region" : "#{EC2_REGION}",
"instanceId" : "#{EC2_VM_ID}"
}).freeze
# Managed VMs specific labels.
MANAGED_VM_BACKEND_NAME = 'default'.freeze
MANAGED_VM_BACKEND_VERSION = 'guestbook2.0'.freeze
# LogEntry fields for extraction.
INSERT_ID = 'fah7yr7iw64tg857y'.freeze
INSERT_ID2 = 'fah7yr7iw64tgaeuf'.freeze
SPAN_ID = '000000000000004a'.freeze
SPAN_ID2 = '000000000000007e'.freeze
TRACE = 'projects/proj1/traces/1234567890abcdef1234567890abcdef'.freeze
TRACE2 = 'projects/proj1/traces/1234567890abcdef1234567890fedcba'.freeze
TRACE_SAMPLED = true
TRACE_SAMPLED2 = false
STACKDRIVER_TRACE_ID = '1234567890abcdef1234567890abcdef'.freeze
FULL_STACKDRIVER_TRACE = \
"projects/#{PROJECT_ID}/traces/#{STACKDRIVER_TRACE_ID}".freeze
# Invalid trace id for stackdriver.
EMPTY_STRING = ''.freeze
INVALID_SHORT_STACKDRIVER_TRACE_ID = '1234567890abcdef'.freeze
INVALID_LONG_STACKDRIVER_TRACE_ID = \
'1234567890abcdef1234567890abcdef123'.freeze
INVALID_NON_HEX_STACKDRIVER_TRACE_ID = \
'1234567890abcdef1234567890abcdeZ'.freeze
# Invalid full format of stackdriver trace.
INVALID_TRACE_NO_TRACE_ID = "projects/#{PROJECT_ID}/traces/".freeze
INVALID_TRACE_NO_PROJECT_ID = \
"projects//traces/#{STACKDRIVER_TRACE_ID}".freeze
INVALID_TRACE_WITH_SHORT_TRACE_ID = \
"projects/#{PROJECT_ID}/traces/#{INVALID_SHORT_STACKDRIVER_TRACE_ID}".freeze
INVALID_TRACE_WITH_LONG_TRACE_ID = \
"projects/#{PROJECT_ID}/traces/#{INVALID_LONG_STACKDRIVER_TRACE_ID}".freeze
INVALID_TRACE_WITH_NON_HEX_TRACE_ID = \
"projects/#{PROJECT_ID}/" \
"traces/#{INVALID_NON_HEX_STACKDRIVER_TRACE_ID}".freeze
# Docker Container labels.
DOCKER_CONTAINER_ID =
'0d0f03ff8d3c42688692536d1af77a28cd135c0a5c531f25a31'.freeze
DOCKER_CONTAINER_NAME = 'happy_hippo'.freeze
DOCKER_CONTAINER_STREAM_STDOUT = 'stdout'.freeze
DOCKER_CONTAINER_STREAM_STDERR = 'stderr'.freeze
# Timestamp for 1234567890 seconds and 987654321 nanoseconds since epoch.
DOCKER_CONTAINER_TIMESTAMP = '2009-02-13T23:31:30.987654321Z'.freeze
DOCKER_CONTAINER_SECONDS_EPOCH = 1_234_567_890
DOCKER_CONTAINER_NANOS = 987_654_321
DOCKER_CONTAINER_LOCAL_RESOURCE_ID_PREFIX = 'container'.freeze
# New K8s resource constants.
K8S_LOCATION = 'us-central1-b'.freeze
K8S_LOCATION2 = 'us-central1-c'.freeze
K8S_CLUSTER_NAME = 'cluster-1'.freeze
K8S_NAMESPACE_NAME = 'kube-system'.freeze
K8S_NODE_NAME = 'performance--default-pool-cabf1342-08jc'.freeze
K8S_POD_NAME = 'redis-master-c0l82.foo.bar'.freeze
K8S_CONTAINER_NAME = 'redis'.freeze
K8S_STREAM = 'stdout'.freeze
# Timestamp for 1234567890 seconds and 987654321 nanoseconds since epoch.
K8S_TIMESTAMP = '2009-02-13T23:31:30.987654321Z'.freeze
K8S_SECONDS_EPOCH = 1_234_567_890
K8S_NANOS = 987_654_321
K8S_CONTAINER_LOCAL_RESOURCE_ID_PREFIX = 'k8s_container'.freeze
K8S_POD_LOCAL_RESOURCE_ID_PREFIX = 'k8s_pod'.freeze
K8S_NODE_LOCAL_RESOURCE_ID_PREFIX = 'k8s_node'.freeze
K8S_TAG =
"var.log.containers.#{K8S_NAMESPACE_NAME}_#{K8S_POD_NAME}_" \
"#{K8S_CONTAINER_NAME}.log".freeze
K8S_LOCAL_RESOURCE_ID =
"#{K8S_CONTAINER_LOCAL_RESOURCE_ID_PREFIX}" \
".#{K8S_NAMESPACE_NAME}" \
".#{K8S_POD_NAME}" \
".#{K8S_CONTAINER_NAME}".freeze
# Container Engine / Kubernetes specific labels.
CONTAINER_NAMESPACE_ID = '898268c8-4a36-11e5-9d81-42010af0194c'.freeze
CONTAINER_POD_ID = 'cad3c3c4-4b9c-11e5-9d81-42010af0194c'.freeze
CONTAINER_LABEL_KEY = 'component'.freeze
CONTAINER_LABEL_VALUE = 'redis-component'.freeze
CONTAINER_SEVERITY = 'INFO'.freeze
CONTAINER_LOCAL_RESOURCE_ID_PREFIX = 'gke_container'.freeze
# Dataflow specific labels.
DATAFLOW_REGION = 'us-central1'.freeze
DATAFLOW_JOB_NAME = 'job_name_1'.freeze
DATAFLOW_JOB_ID = 'job_id_1'.freeze
DATAFLOW_STEP_ID = 'step_1'.freeze
DATAFLOW_TAG = 'dataflow-worker'.freeze
# Dataproc specific labels.
DATAPROC_CLUSTER_NAME = 'test-cluster'.freeze
DATAPROC_CLUSTER_UUID = '00000000-0000-0000-0000-000000000000'.freeze
DATAPROC_REGION = 'unittest'.freeze
# ML specific labels.
ML_REGION = 'us-central1'.freeze
ML_JOB_ID = 'job_name_1'.freeze
ML_TASK_NAME = 'task_name_1'.freeze
ML_TRIAL_ID = 'trial_id_1'.freeze
ML_LOG_AREA = 'log_area_1'.freeze
ML_TAG = 'master-replica-0'.freeze
# Parameters used for authentication.
AUTH_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer'.freeze
FAKE_AUTH_TOKEN = 'abc123'.freeze
# Information about test credentials files.
# path: Path to the credentials file.
# project_id: ID of the project, which must correspond to the file contents.
IAM_CREDENTIALS = {
path: 'test/plugin/data/iam-credentials.json',
project_id: 'fluent-test-project'
}.freeze
NEW_STYLE_CREDENTIALS = {
path: 'test/plugin/data/new-style-credentials.json',
project_id: 'fluent-test-project'
}.freeze
LEGACY_CREDENTIALS = {
path: 'test/plugin/data/credentials.json',
project_id: '847859579879'
}.freeze
INVALID_CREDENTIALS = {
path: 'test/plugin/data/invalid_credentials.json',
project_id: ''
}.freeze
# Special googleauth environment variables.
PROJECT_ID_VAR = 'GOOGLE_PROJECT_ID'.freeze
PRIVATE_KEY_VAR = 'GOOGLE_PRIVATE_KEY'.freeze
CLIENT_EMAIL_VAR = 'GOOGLE_CLIENT_EMAIL'.freeze
CLIENT_ID_VAR = 'GOOGLE_CLIENT_ID'.freeze
CLIENT_SECRET_VAR = 'GOOGLE_CLIENT_SECRET'.freeze
REFRESH_TOKEN_VAR = 'GOOGLE_REFRESH_TOKEN'.freeze
# Configuration files for various test scenarios.
APPLICATION_DEFAULT_CONFIG = %(
).freeze
CUSTOM_LOGGING_API_URL_CONFIG = %(
logging_api_url #{CUSTOM_LOGGING_API_URL}
).freeze
CUSTOM_METADATA_AGENT_URL_CONFIG = %(
metadata_agent_url #{CUSTOM_METADATA_AGENT_URL}
).freeze
DETECT_JSON_CONFIG = %(
detect_json true
).freeze
PARTIAL_SUCCESS_DISABLED_CONFIG = %(
partial_success false
).freeze
# rubocop:disable Metrics/LineLength
PRIVATE_KEY_CONFIG = %(
auth_method private_key
private_key_email 271661262351-ft99kc9kjro9rrihq3k2n3s2inbplu0q@developer.gserviceaccount.com
private_key_path test/plugin/data/c31e573fd7f62ed495c9ca3821a5a85cb036dee1-privatekey.p12
).freeze
# rubocop:enable Metrics/LineLength
REQUIRE_VALID_TAGS_CONFIG = %(
require_valid_tags true
).freeze
NO_METADATA_SERVICE_CONFIG = %(
use_metadata_service false
).freeze
NO_DETECT_SUBSERVICE_CONFIG = %(
detect_subservice false
).freeze
ENABLE_SPLIT_LOGS_BY_TAG_CONFIG = %(
split_logs_by_tag true
).freeze
NO_ADJUST_TIMESTAMPS_CONFIG = %(
adjust_invalid_timestamps false
).freeze
ENABLE_PROMETHEUS_CONFIG = %(
enable_monitoring true
monitoring_type prometheus
).freeze
ENABLE_METADATA_AGENT_CONFIG = %(
enable_metadata_agent true
).freeze
DISABLE_METADATA_AGENT_CONFIG = %(
enable_metadata_agent false
).freeze
ENABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG = %(
autoformat_stackdriver_trace true
).freeze
DISABLE_AUTOFORMAT_STACKDRIVER_TRACE_CONFIG = %(
autoformat_stackdriver_trace false
).freeze
DOCKER_CONTAINER_CONFIG = %(
enable_metadata_agent true
label_map { "source": "#{DOCKER_CONSTANTS[:service]}/stream" }
detect_json true
).freeze
CUSTOM_METADATA_CONFIG = %(
project_id #{CUSTOM_PROJECT_ID}
zone #{CUSTOM_ZONE}
vm_id #{CUSTOM_VM_ID}
vm_name #{CUSTOM_HOSTNAME}
).freeze
CONFIG_MISSING_METADATA_PROJECT_ID = %(
zone #{CUSTOM_ZONE}
vm_id #{CUSTOM_VM_ID}
).freeze
CONFIG_MISSING_METADATA_ZONE = %(
project_id #{CUSTOM_PROJECT_ID}
vm_id #{CUSTOM_VM_ID}
).freeze
CONFIG_MISSING_METADATA_VM_ID = %(
project_id #{CUSTOM_PROJECT_ID}
zone #{CUSTOM_ZONE}
).freeze
CONFIG_MISSING_METADATA_ALL = %(
).freeze
CUSTOM_K8S_ENABLE_METADATA_AGENT_CONFIG = %(
enable_metadata_agent true
k8s_cluster_name #{CUSTOM_K8S_CLUSTER_NAME}
k8s_cluster_location #{CUSTOM_K8S_LOCATION}
).freeze
EMPTY_K8S_ENABLE_METADATA_AGENT_CONFIG = %(
enable_metadata_agent true
k8s_cluster_name ""
k8s_cluster_location ""
).freeze
CONFIG_EC2_PROJECT_ID = %(
project_id #{EC2_PROJECT_ID}
).freeze
CONFIG_EC2_PROJECT_ID_AND_CUSTOM_VM_ID = %(
project_id #{EC2_PROJECT_ID}
vm_id #{CUSTOM_VM_ID}
).freeze
CONFIG_EC2_PROJECT_ID_USE_REGION = %(
project_id #{EC2_PROJECT_ID}
use_aws_availability_zone false
).freeze
CONFIG_DATAFLOW = %(
subservice_name "#{DATAFLOW_CONSTANTS[:service]}"
labels {
"#{DATAFLOW_CONSTANTS[:service]}/region" : "#{DATAFLOW_REGION}",
"#{DATAFLOW_CONSTANTS[:service]}/job_name" : "#{DATAFLOW_JOB_NAME}",
"#{DATAFLOW_CONSTANTS[:service]}/job_id" : "#{DATAFLOW_JOB_ID}"
}
label_map { "step": "#{DATAFLOW_CONSTANTS[:service]}/step_id" }
).freeze
CONFIG_ML = %(
subservice_name "#{ML_CONSTANTS[:service]}"
labels {
"#{ML_CONSTANTS[:service]}/job_id" : "#{ML_JOB_ID}",
"#{ML_CONSTANTS[:service]}/task_name" : "#{ML_TASK_NAME}",
"#{ML_CONSTANTS[:service]}/trial_id" : "#{ML_TRIAL_ID}"
}
label_map { "name": "#{ML_CONSTANTS[:service]}/job_id/log_area" }
).freeze
CONFIG_CUSTOM_INSERT_ID_KEY_SPECIFIED = %(
insert_id_key custom_insert_id_key
).freeze
CONFIG_CUSTOM_LABELS_KEY_SPECIFIED = %(
labels_key custom_labels_key
).freeze
CONFIG_CUSTOM_OPERATION_KEY_SPECIFIED = %(
operation_key custom_operation_key
).freeze
CONFIG_CUSTOM_SOURCE_LOCATION_KEY_SPECIFIED = %(
source_location_key custom_source_location_key
).freeze
CONFIG_CUSTOM_SPAN_ID_KEY_SPECIFIED = %(
span_id_key custom_span_id_key
).freeze
CONFIG_CUSTOM_TRACE_KEY_SPECIFIED = %(
trace_key custom_trace_key
).freeze
CONFIG_CUSTOM_TRACE_SAMPLED_KEY_SPECIFIED = %(
trace_sampled_key custom_trace_sampled_key
).freeze
# For 'labels' config.
LABELS_FROM_LABELS_CONFIG = {
'a_label_from_labels_config' => 'some_value',
'another_label_from_labels_config' => 'some_value'
}.freeze
CONFIG_LABELS = %(
labels #{LABELS_FROM_LABELS_CONFIG.to_json}
).freeze
# For 'label_map' config.
LABEL_MAP_HASH = {
'target_field_from_payload' => 'a_label_from_label_map_config',
'another_target_field_from_payload' => 'another_label_from_label_map_config'
}.freeze
PAYLOAD_FOR_LABEL_MAP = {
'target_field_from_payload' => 'a_value',
'another_target_field_from_payload' => 'b_value'
}.freeze
LABELS_FROM_LABEL_MAP_CONFIG = {
'a_label_from_label_map_config' => 'a_value',
'another_label_from_label_map_config' => 'b_value'
}.freeze
CONFIG_LABEL_MAP = %(
label_map #{LABEL_MAP_HASH.to_json}
).freeze
CONFIG_LABLES_AND_LABLE_MAP = %(
#{CONFIG_LABELS}
#{CONFIG_LABEL_MAP}
).freeze
# For conflicting labels.
CONFLICTING_LABEL_NAME = 'conflicting_label_key'.freeze
CONFLICTING_LABEL_VALUE1 = 'conflicting_value_1'.freeze
CONFLICTING_LABEL_VALUE2 = 'conflicting_value_2'.freeze
CONFLICTING_LABEL_VALUE3 = 'conflicting_value_3'.freeze
LABELS_FROM_PAYLOAD_CONFLICTING = {
CONFLICTING_LABEL_NAME => CONFLICTING_LABEL_VALUE1
}.freeze
LABELS_FROM_LABEL_MAP_CONFIG_CONFLICTING = {
CONFLICTING_LABEL_NAME => CONFLICTING_LABEL_VALUE2
}.freeze
LABELS_FROM_LABELS_CONFIG_CONFLICTING = {
CONFLICTING_LABEL_NAME => CONFLICTING_LABEL_VALUE3
}.freeze
LABEL_MAP_HASH_CONFLICTING = {
'target_field_from_payload' => CONFLICTING_LABEL_NAME
}.freeze
PAYLOAD_FOR_LABEL_MAP_CONFLICTING = {
'target_field_from_payload' => CONFLICTING_LABEL_VALUE2
}.freeze
CONFIG_LABEL_MAP_CONFLICTING = %(
label_map #{LABEL_MAP_HASH_CONFLICTING.to_json}
).freeze
CONFIG_LABELS_CONFLICTING = %(
labels #{LABELS_FROM_LABELS_CONFIG_CONFLICTING.to_json}
).freeze
CONFIG_LABLES_AND_LABLE_MAP_CONFLICTING = %(
#{CONFIG_LABELS_CONFLICTING}
#{CONFIG_LABEL_MAP_CONFLICTING}
).freeze
# For monitoring config.
CONFIG_UNKNOWN_MONITORING_TYPE = %(
enable_monitoring true
monitoring_type not_prometheus
).freeze
# For statusz.
CONFIG_STATUSZ = %(
statusz_port 5678
adjust_invalid_timestamps false
autoformat_stackdriver_trace false
coerce_to_utf8 false
detect_json true
detect_subservice false
enable_metadata_agent true
enable_monitoring true
http_request_key test_http_request_key
insert_id_key test_insert_id_key
k8s_cluster_location test-k8s-cluster-location
k8s_cluster_name test-k8s-cluster-name
kubernetes_tag_regexp .*test-regexp.*
label_map { "label_map_key": "label_map_value" }
labels_key test_labels_key
labels { "labels_key": "labels_value" }
logging_api_url http://localhost:52000
metadata_agent_url http://localhost:12345
monitoring_type not_prometheus
non_utf8_replacement_string zzz
operation_key test_operation_key
partial_success false
project_id test-project-id-123
require_valid_tags true
source_location_key test_source_location_key
span_id_key test_span_id_key
split_logs_by_tag true
subservice_name test_subservice_name
trace_key test_trace_key
trace_sampled_key test_trace_sampled_key
use_aws_availability_zone false
use_grpc true
use_metadata_service false
vm_id 12345
vm_name test.hostname.org
zone asia-east2
).freeze
# Service configurations for various services.
# GCE.
COMPUTE_PARAMS_NO_LOG_NAME = {
resource: {
type: COMPUTE_CONSTANTS[:resource_type],
labels: {
'instance_id' => VM_ID,
'zone' => ZONE
}
},
project_id: PROJECT_ID,
labels: {
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME
}
}.freeze
COMPUTE_PARAMS = COMPUTE_PARAMS_NO_LOG_NAME.merge(
log_name: 'test'
).freeze
COMPUTE_PARAMS_WITH_METADATA_VM_ID_AND_ZONE = COMPUTE_PARAMS.merge(
resource: COMPUTE_PARAMS[:resource].merge(
labels: {
'instance_id' => METADATA_VM_ID,
'zone' => METADATA_ZONE
}
)
).freeze
# GAE.
VMENGINE_PARAMS = {
resource: {
type: APPENGINE_CONSTANTS[:resource_type],
labels: {
'module_id' => MANAGED_VM_BACKEND_NAME,
'version_id' => MANAGED_VM_BACKEND_VERSION
}
},
log_name: "#{APPENGINE_CONSTANTS[:service]}%2Ftest",
project_id: PROJECT_ID,
labels: {
"#{COMPUTE_CONSTANTS[:service]}/resource_id" => VM_ID,
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME,
"#{COMPUTE_CONSTANTS[:service]}/zone" => ZONE
}
}.freeze
# GKE Container.
CONTAINER_TAG =
"kubernetes.#{K8S_POD_NAME}_#{K8S_NAMESPACE_NAME}_" \
"#{K8S_CONTAINER_NAME}".freeze
CONTAINER_FROM_METADATA_PARAMS = {
resource: {
type: GKE_CONSTANTS[:resource_type],
labels: {
'cluster_name' => K8S_CLUSTER_NAME,
'namespace_id' => CONTAINER_NAMESPACE_ID,
'instance_id' => VM_ID,
'pod_id' => CONTAINER_POD_ID,
'container_name' => K8S_CONTAINER_NAME,
'zone' => ZONE
}
},
log_name: K8S_CONTAINER_NAME,
project_id: PROJECT_ID,
labels: {
"#{GKE_CONSTANTS[:service]}/namespace_name" => K8S_NAMESPACE_NAME,
"#{GKE_CONSTANTS[:service]}/pod_name" => K8S_POD_NAME,
"#{GKE_CONSTANTS[:service]}/stream" => K8S_STREAM,
"label/#{CONTAINER_LABEL_KEY}" => CONTAINER_LABEL_VALUE,
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME
}
}.freeze
# Almost the same as from metadata, but namespace_id and pod_id come from
# namespace and pod names.
CONTAINER_FROM_TAG_PARAMS = {
resource: {
type: GKE_CONSTANTS[:resource_type],
labels: {
'cluster_name' => K8S_CLUSTER_NAME,
'namespace_id' => K8S_NAMESPACE_NAME,
'instance_id' => VM_ID,
'pod_id' => K8S_POD_NAME,
'container_name' => K8S_CONTAINER_NAME,
'zone' => ZONE
}
},
log_name: K8S_CONTAINER_NAME,
project_id: PROJECT_ID,
labels: {
"#{GKE_CONSTANTS[:service]}/namespace_name" => K8S_NAMESPACE_NAME,
"#{GKE_CONSTANTS[:service]}/pod_name" => K8S_POD_NAME,
"#{GKE_CONSTANTS[:service]}/stream" => K8S_STREAM,
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME
}
}.freeze
CONTAINER_FROM_APPLICATION_PARAMS = {
resource: {
type: GKE_CONSTANTS[:resource_type],
labels: {
'cluster_name' => K8S_CLUSTER_NAME,
'namespace_id' => CONTAINER_NAMESPACE_ID,
'instance_id' => VM_ID,
'pod_id' => CONTAINER_POD_ID,
'container_name' => K8S_CONTAINER_NAME,
'zone' => ZONE
}
},
log_name: 'redis',
project_id: PROJECT_ID,
labels: {
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME
}
}.freeze
# K8s Container.
K8S_CONTAINER_PARAMS = {
resource: {
type: K8S_CONTAINER_CONSTANTS[:resource_type],
labels: {
'namespace_name' => K8S_NAMESPACE_NAME,
'pod_name' => K8S_POD_NAME,
'container_name' => K8S_CONTAINER_NAME,
'cluster_name' => K8S_CLUSTER_NAME,
'location' => K8S_LOCATION
}
},
project_id: PROJECT_ID,
labels: {}
}.freeze
K8S_CONTAINER_PARAMS_FROM_LOCAL = K8S_CONTAINER_PARAMS.merge(
resource: K8S_CONTAINER_PARAMS[:resource].merge(
labels: K8S_CONTAINER_PARAMS[:resource][:labels].merge(
'location' => K8S_LOCATION2
)
)
).freeze
K8S_CONTAINER_PARAMS_CUSTOM = K8S_CONTAINER_PARAMS.merge(
resource: K8S_CONTAINER_PARAMS[:resource].merge(
labels: K8S_CONTAINER_PARAMS[:resource][:labels].merge(
'cluster_name' => CUSTOM_K8S_CLUSTER_NAME,
'location' => CUSTOM_K8S_LOCATION
)
)
).freeze
# Used in k8s fallback tests.
K8S_CONTAINER_PARAMS_FROM_FALLBACK = COMPUTE_PARAMS_NO_LOG_NAME.merge(
log_name: CONTAINER_TAG
).freeze
# K8s Pod.
K8S_POD_PARAMS = {
resource: {
type: K8S_POD_CONSTANTS[:resource_type],
labels: {
'namespace_name' => K8S_NAMESPACE_NAME,
'pod_name' => K8S_POD_NAME,
'cluster_name' => K8S_CLUSTER_NAME,
'location' => K8S_LOCATION
}
},
project_id: PROJECT_ID,
labels: {}
}.freeze
K8S_POD_PARAMS_FROM_LOCAL = K8S_POD_PARAMS.merge(
resource: K8S_POD_PARAMS[:resource].merge(
labels: K8S_POD_PARAMS[:resource][:labels].merge(
'location' => K8S_LOCATION2
)
)
).freeze
K8S_POD_PARAMS_CUSTOM = K8S_POD_PARAMS.merge(
resource: K8S_POD_PARAMS[:resource].merge(
labels: K8S_POD_PARAMS[:resource][:labels].merge(
'cluster_name' => CUSTOM_K8S_CLUSTER_NAME,
'location' => CUSTOM_K8S_LOCATION
)
)
).freeze
# K8s Node.
K8S_NODE_PARAMS = {
resource: {
type: K8S_NODE_CONSTANTS[:resource_type],
labels: {
'node_name' => K8S_NODE_NAME,
'cluster_name' => K8S_CLUSTER_NAME,
'location' => K8S_LOCATION
}
},
log_name: 'test',
project_id: PROJECT_ID,
labels: {}
}.freeze
K8S_NODE_PARAMS_FROM_LOCAL = K8S_NODE_PARAMS.merge(
resource: K8S_NODE_PARAMS[:resource].merge(
labels: K8S_NODE_PARAMS[:resource][:labels].merge(
'location' => K8S_LOCATION2
)
)
).freeze
K8S_NODE_PARAMS_CUSTOM = K8S_NODE_PARAMS.merge(
resource: K8S_NODE_PARAMS[:resource].merge(
labels: K8S_NODE_PARAMS[:resource][:labels].merge(
'cluster_name' => CUSTOM_K8S_CLUSTER_NAME,
'location' => CUSTOM_K8S_LOCATION
)
)
).freeze
# Docker Container.
DOCKER_CONTAINER_PARAMS = {
resource: {
type: DOCKER_CONSTANTS[:resource_type],
labels: {
'container_id' => DOCKER_CONTAINER_ID,
'location' => ZONE
}
},
log_name: 'test',
project_id: PROJECT_ID,
labels: {
"#{DOCKER_CONSTANTS[:service]}/stream" => DOCKER_CONTAINER_STREAM_STDOUT
}
}.freeze
DOCKER_CONTAINER_PARAMS_STREAM_STDERR = DOCKER_CONTAINER_PARAMS.merge(
labels: DOCKER_CONTAINER_PARAMS[:labels].merge(
"#{DOCKER_CONSTANTS[:service]}/stream" => DOCKER_CONTAINER_STREAM_STDERR
)
).freeze
DOCKER_CONTAINER_PARAMS_NO_STREAM =
DOCKER_CONTAINER_PARAMS.merge(labels: {}).freeze
# Cloud Dataflow.
DATAFLOW_PARAMS = {
resource: {
type: DATAFLOW_CONSTANTS[:resource_type],
labels: {
'job_name' => DATAFLOW_JOB_NAME,
'job_id' => DATAFLOW_JOB_ID,
'step_id' => DATAFLOW_STEP_ID,
'region' => DATAFLOW_REGION
}
},
log_name: DATAFLOW_TAG,
project_id: PROJECT_ID,
labels: {
"#{COMPUTE_CONSTANTS[:service]}/resource_id" => VM_ID,
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME,
"#{COMPUTE_CONSTANTS[:service]}/zone" => ZONE
}
}.freeze
# Cloud Dataproc.
DATAPROC_PARAMS = {
resource: {
type: DATAPROC_CONSTANTS[:resource_type],
labels: {
'cluster_name' => DATAPROC_CLUSTER_NAME,
'cluster_uuid' => DATAPROC_CLUSTER_UUID,
'region' => DATAPROC_REGION
}
},
log_name: 'test',
project_id: PROJECT_ID,
labels: {
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME,
"#{COMPUTE_CONSTANTS[:service]}/resource_id" => VM_ID,
"#{COMPUTE_CONSTANTS[:service]}/zone" => ZONE
}
}.freeze
# Cloud ML.
ML_PARAMS = {
resource: {
type: ML_CONSTANTS[:resource_type],
labels: {
'job_id' => ML_JOB_ID,
'task_name' => ML_TASK_NAME
}
},
log_name: ML_TAG,
project_id: PROJECT_ID,
labels: {
"#{ML_CONSTANTS[:service]}/trial_id" => ML_TRIAL_ID,
"#{ML_CONSTANTS[:service]}/job_id/log_area" => ML_LOG_AREA,
"#{COMPUTE_CONSTANTS[:service]}/resource_id" => VM_ID,
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => HOSTNAME,
"#{COMPUTE_CONSTANTS[:service]}/zone" => ZONE
}
}.freeze
CUSTOM_PARAMS = {
resource: {
type: COMPUTE_CONSTANTS[:resource_type],
labels: {
'instance_id' => CUSTOM_VM_ID,
'zone' => CUSTOM_ZONE
}
},
log_name: 'test',
project_id: CUSTOM_PROJECT_ID,
labels: {
"#{COMPUTE_CONSTANTS[:service]}/resource_name" => CUSTOM_HOSTNAME
}
}.freeze
EC2_REGION_PARAMS = {
resource: {
type: EC2_CONSTANTS[:resource_type],
labels: {
'instance_id' => EC2_VM_ID,
'region' => EC2_PREFIXED_REGION,
'aws_account' => EC2_ACCOUNT_ID
}
},
log_name: 'test',
project_id: EC2_PROJECT_ID,
labels: {
"#{EC2_CONSTANTS[:service]}/resource_name" => HOSTNAME
}
}.freeze
EC2_ZONE_PARAMS = EC2_REGION_PARAMS.merge(
resource: EC2_REGION_PARAMS[:resource].merge(
labels: EC2_REGION_PARAMS[:resource][:labels].merge(
'region' => EC2_PREFIXED_ZONE
)
)
).freeze
HTTP_REQUEST_MESSAGE = {
'cacheFillBytes' => 6653,
'cacheHit' => true,
'cacheLookup' => true,
'cacheValidatedWithOriginServer' => true,
'protocol' => 'HTTP/1.1',
'referer' => 'http://referer/',
'remoteIp' => '55.55.55.55',
'responseSize' => 65,
'requestMethod' => 'POST',
'requestSize' => 210,
'requestUrl' => 'http://example/',
'serverIp' => '66.66.66.66',
'status' => 200,
'userAgent' => 'USER AGENT 1.0'
}.freeze
SOURCE_LOCATION_MESSAGE = {
'file' => 'source/file',
'function' => 'my_function',
'line' => 18
}.freeze
SOURCE_LOCATION_MESSAGE2 = {
'file' => 'src/file',
'function' => 'my_func',
'line' => 8
}.freeze
OPERATION_MESSAGE = {
'id' => 'op_id',
'producer' => 'my/app',
'last' => true
}.freeze
OPERATION_MESSAGE2 = {
'id' => 'op_id2',
'producer' => 'my/app2',
'last' => false
}.freeze
LABELS_MESSAGE = {
'component' => 'front-end',
'source' => 'user',
'app' => 'request-router'
}.freeze
LABELS_MESSAGE2 = {
'component' => 'front-end',
'source' => 'system',
'app' => 'request-router'
}.freeze
CUSTOM_LABELS_MESSAGE = {
'customKey' => 'value'
}.freeze
CONFLICTING_LABEL_KEY = "#{COMPUTE_CONSTANTS[:service]}/resource_name".freeze
# Tags and their sanitized and encoded version.
VALID_TAGS = {
'test' => 'test',
'germanß' => 'german%C3%9F',
'chinese中' => 'chinese%E4%B8%AD',
'specialCharacter/_-.' => 'specialCharacter%2F_-.',
'abc@&^$*' => 'abc%40%26%5E%24%2A',
'@&^$*' => '%40%26%5E%24%2A'
}.freeze
INVALID_TAGS = {
# Non-string tags.
123 => '123',
1.23 => '1.23',
[1, 2, 3] => '%5B1%2C%202%2C%203%5D',
{ key: 'value' } => '%7B%22key%22%3D%3E%22value%22%7D',
# Non-utf8 string tags.
"nonutf8#{[0x92].pack('C*')}" => 'nonutf8%20',
"abc#{[0x92].pack('C*')}" => 'abc%20',
[0x92].pack('C*') => '%20',
# Empty string tag.
'' => '_'
}.freeze
ALL_TAGS = VALID_TAGS.merge(INVALID_TAGS)
# Stub value for Monitored resources from Metadata Agent.
# Map from the local_resource_id to the retrieved monitored resource.
MONITORED_RESOURCE_STUBS = {
# Docker container stderr / stdout logs.
"#{DOCKER_CONTAINER_LOCAL_RESOURCE_ID_PREFIX}.#{DOCKER_CONTAINER_ID}" =>
{
'type' => DOCKER_CONSTANTS[:resource_type],
'labels' => {
'location' => ZONE,
'container_id' => DOCKER_CONTAINER_ID
}
}.to_json,
# Docker container application logs.
"#{DOCKER_CONTAINER_LOCAL_RESOURCE_ID_PREFIX}.#{DOCKER_CONTAINER_NAME}" =>
{
'type' => DOCKER_CONSTANTS[:resource_type],
'labels' => {
'location' => ZONE,
'container_id' => DOCKER_CONTAINER_ID
}
}.to_json,
# GKE container logs.
"#{CONTAINER_LOCAL_RESOURCE_ID_PREFIX}.#{CONTAINER_NAMESPACE_ID}" \
".#{K8S_POD_NAME}.#{K8S_CONTAINER_NAME}" =>
{
'type' => GKE_CONSTANTS[:resource_type],
'labels' => {
'cluster_name' => K8S_CLUSTER_NAME,
'container_name' => K8S_CONTAINER_NAME,
'instance_id' => VM_ID,
'namespace_id' => CONTAINER_NAMESPACE_ID,
'pod_id' => CONTAINER_POD_ID,
'zone' => ZONE
}
}.to_json,
# K8s container logs.
"#{K8S_CONTAINER_LOCAL_RESOURCE_ID_PREFIX}.#{K8S_NAMESPACE_NAME}" \
".#{K8S_POD_NAME}.#{K8S_CONTAINER_NAME}" =>
{
'type' => K8S_CONTAINER_CONSTANTS[:resource_type],
'labels' => {
'namespace_name' => K8S_NAMESPACE_NAME,
'pod_name' => K8S_POD_NAME,
'container_name' => K8S_CONTAINER_NAME,
'cluster_name' => K8S_CLUSTER_NAME,
'location' => K8S_LOCATION
}
}.to_json,
# K8s pod logs.
"#{K8S_POD_LOCAL_RESOURCE_ID_PREFIX}.#{K8S_NAMESPACE_NAME}" \
".#{K8S_POD_NAME}" =>
{
'type' => K8S_POD_CONSTANTS[:resource_type],
'labels' => {
'namespace_name' => K8S_NAMESPACE_NAME,
'pod_name' => K8S_POD_NAME,
'cluster_name' => K8S_CLUSTER_NAME,
'location' => K8S_LOCATION
}
}.to_json,
# K8s node logs.
"#{K8S_NODE_LOCAL_RESOURCE_ID_PREFIX}.#{K8S_NODE_NAME}" =>
{
'type' => K8S_NODE_CONSTANTS[:resource_type],
'labels' => {
'node_name' => K8S_NODE_NAME,
'cluster_name' => K8S_CLUSTER_NAME,
'location' => K8S_LOCATION
}
}.to_json
}.freeze
PARTIAL_SUCCESS_RESPONSE_BODY = {
'error' => {
'code' => 403,
'message' => 'User not authorized.',
'status' => 'PERMISSION_DENIED',
'details' => [
{
'@type' => 'type.googleapis.com/google.logging.v2.WriteLogEntriesPa' \
'rtialErrors',
'logEntryErrors' => {
'0' => {
'code' => 7,
'message' => 'User not authorized.'
},
'1' => {
'code' => 3,
'message' => 'Log name contains illegal character :'
},
'2' => {
'code' => 3,
'message' => 'Log name contains illegal character :'
}
}
},
{
'@type' => 'type.googleapis.com/google.rpc.DebugInfo',
'detail' => '[ORIGINAL ERROR] generic::permission_denied: User not ' \
'authorized. [google.rpc.error_details_ext] { message: \"User not' \
' authorized.\" details { type_url: \"type.googleapis.com/google.' \
'logging.v2.WriteLogEntriesPartialErrors\" value: \"\\n\\034\\010' \
'\\000\\022\\030\\010\\007\\022\\024User not authorized.\\n-\\010' \
'\\001\\022)\\010\\003\\022%Log name contains illegal character :' \
'\\n-\\010\\002\\022)\\010\\003\\022%Log name contains illegal ch' \
'aracter :\" } }'
}
]
}
}.freeze
PARTIAL_SUCCESS_GRPC_METADATA = begin
partial_errors = Google::Logging::V2::WriteLogEntriesPartialErrors.new(
log_entry_errors: {
0 => Google::Rpc::Status.new(
code: GRPC::Core::StatusCodes::PERMISSION_DENIED,
message: 'User not authorized.',
details: []),
1 => Google::Rpc::Status.new(
code: GRPC::Core::StatusCodes::INVALID_ARGUMENT,
message: 'Log name contains illegal character :',
details: []),
3 => Google::Rpc::Status.new(
code: GRPC::Core::StatusCodes::INVALID_ARGUMENT,
message: 'Log name contains illegal character :',
details: [])
})
status = Google::Rpc::Status.new(
message: 'User not authorized.',
details: [Google::Protobuf::Any.pack(partial_errors)])
debug_info = Google::Rpc::DebugInfo.new(
detail: '[ORIGINAL ERROR] generic::permission_denied: User not' \
' authorized. [google.rpc.error_details_ext] { message:' \
" #{status.message.inspect} details { type_url:" \
" #{status.details[0].type_url.inspect} value:" \
" #{status.details[0].value.inspect_octal} } }")
status_details = Google::Rpc::Status.new(
code: 7, message: 'User not authorized.',
details: [Google::Protobuf::Any.pack(partial_errors),
Google::Protobuf::Any.pack(debug_info)])
{
'google.logging.v2.writelogentriespartialerrors-bin' =>
partial_errors.to_proto,
'google.rpc.debuginfo-bin' => debug_info.to_proto,
'grpc-status-details-bin' => status_details.to_proto
}.freeze
end
PARSE_ERROR_RESPONSE_BODY = {
'error' => {
'code' => 400,
'message' => 'Request contains an invalid argument.',
'status' => 'INVALID_ARGUMENT',
'details' => [
{
'@type' => 'type.googleapis.com/google.rpc.DebugInfo',
'detail' =>
'[ORIGINAL ERROR] RPC::CLIENT_ERROR: server could not parse' \
" request sent by client; initialization error is: ''"
}
]
}
}.freeze
PARSE_ERROR_GRPC_METADATA = begin
debug_info = Google::Rpc::DebugInfo.new(
detail: '[ORIGINAL ERROR] RPC::CLIENT_ERROR: server could not parse' \
" request sent by client; initialization error is: ''")
status_details = Google::Rpc::Status.new(
code: 3, message: 'internal client error',
details: [Google::Protobuf::Any.pack(debug_info)])
{
'google.rpc.debuginfo-bin' => debug_info.to_proto,
'grpc-status-details-bin' => status_details.to_proto
}.freeze
end
PRESERVED_KEYS_MAP = {
'time' => K8S_TIMESTAMP,
'severity' => CONTAINER_SEVERITY,
DEFAULT_HTTP_REQUEST_KEY => HTTP_REQUEST_MESSAGE,
DEFAULT_INSERT_ID_KEY => INSERT_ID,
DEFAULT_LABELS_KEY => LABELS_MESSAGE,
DEFAULT_OPERATION_KEY => OPERATION_MESSAGE,
DEFAULT_SOURCE_LOCATION_KEY => SOURCE_LOCATION_MESSAGE,
DEFAULT_SPAN_ID_KEY => SPAN_ID,
DEFAULT_TRACE_KEY => TRACE,
DEFAULT_TRACE_SAMPLED_KEY => TRACE_SAMPLED
}.freeze
end
| 31.826631 | 98 | 0.663475 |
26f871e0a2c1cfe1e0b7f568026e05198c70d525 | 4,265 | # -----------------------------------------------------------------------------
#
# Various Geos-related internal utilities
#
# -----------------------------------------------------------------------------
# Copyright 2010-2012 Daniel Azuma
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder, nor the names of any other
# contributors to this software, may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
;
module RGeo
module Geos
module Utils # :nodoc:
class << self
def ffi_coord_seqs_equal?(cs1_, cs2_, check_z_)
len1_ = cs1_.length
len2_ = cs2_.length
if len1_ == len2_
(0...len1_).each do |i_|
return false unless cs1_.get_x(i_) == cs2_.get_x(i_) &&
cs1_.get_y(i_) == cs2_.get_y(i_) &&
(!check_z_ || cs1_.get_z(i_) == cs2_.get_z(i_))
end
true
else
false
end
end
def ffi_compute_dimension(geom_)
result_ = -1
case geom_.type_id
when ::Geos::GeomTypes::GEOS_POINT
result_ = 0
when ::Geos::GeomTypes::GEOS_MULTIPOINT
result_ = 0 unless geom_.empty?
when ::Geos::GeomTypes::GEOS_LINESTRING, ::Geos::GeomTypes::GEOS_LINEARRING
result_ = 1
when ::Geos::GeomTypes::GEOS_MULTILINESTRING
result_ = 1 unless geom_.empty?
when ::Geos::GeomTypes::GEOS_POLYGON
result_ = 2
when ::Geos::GeomTypes::GEOS_MULTIPOLYGON
result_ = 2 unless geom_.empty?
when ::Geos::GeomTypes::GEOS_GEOMETRYCOLLECTION
geom_.each do |g_|
dim_ = ffi_compute_dimension(g_)
result_ = dim_ if result_ < dim_
end
end
result_
end
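# For example, a POINT yields 0, a LINESTRING 1 and a POLYGON 2, while a
# non-empty collection takes the maximum dimension of its members.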
def ffi_coord_seq_hash(cs_, hash_=0)
(0...cs_.length).inject(hash_) do |h_, i_|
[h_, cs_.get_x(i_), cs_.get_y(i_), cs_.get_z(i_)].hash
end
end
def _init
if FFI_SUPPORTED
@ffi_supports_prepared_level_1 = ::Geos::FFIGeos.respond_to?(:GEOSPreparedContains_r)
@ffi_supports_prepared_level_2 = ::Geos::FFIGeos.respond_to?(:GEOSPreparedDisjoint_r)
@ffi_supports_set_output_dimension = ::Geos::FFIGeos.respond_to?(:GEOSWKTWriter_setOutputDimension_r)
end
@psych_wkt_generator = WKRep::WKTGenerator.new(:convert_case => :upper)
@marshal_wkb_generator = WKRep::WKBGenerator.new
end
attr_reader :ffi_supports_prepared_level_1
attr_reader :ffi_supports_prepared_level_2
attr_reader :ffi_supports_set_output_dimension
attr_reader :psych_wkt_generator
attr_reader :marshal_wkb_generator
end
end
end
end
| 35.541667 | 113 | 0.624619 |
b9d665e9356611e9296a7d191fbde21a301b7d61 | 4,916 | # encoding: utf-8
# frozen_string_literal: true
module RuboCop
# The CLI is the class responsible for handling all the command line
# interface logic.
class CLI
include Formatter::TextUtil
class Finished < Exception; end
attr_reader :options, :config_store
def initialize
@options = {}
@config_store = ConfigStore.new
end
# Entry point for the application logic. Here we process the
# command line arguments and inspect the target files.
# @return [Fixnum] UNIX exit code
def run(args = ARGV)
@options, paths = Options.new.parse(args)
act_on_options
apply_default_formatter
runner = Runner.new(@options, @config_store)
trap_interrupt(runner)
all_passed = runner.run(paths)
display_warning_summary(runner.warnings)
display_error_summary(runner.errors)
maybe_print_corrected_source
all_passed && !runner.aborting? && runner.errors.empty? ? 0 : 1
rescue RuboCop::Error => e
$stderr.puts Rainbow("Error: #{e.message}").red
return 2
rescue Finished
return 0
rescue StandardError, SyntaxError => e
$stderr.puts e.message
$stderr.puts e.backtrace
return 2
end
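# Minimal invocation sketch (as an executable wrapper would call it):
#
#   exit RuboCop::CLI.new.run(ARGV)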
def trap_interrupt(runner)
Signal.trap('INT') do
exit!(1) if runner.aborting?
runner.abort
$stderr.puts
$stderr.puts 'Exiting... Interrupt again to exit immediately.'
end
end
private
def act_on_options
handle_exiting_options
ConfigLoader.debug = @options[:debug]
ConfigLoader.auto_gen_config = @options[:auto_gen_config]
@config_store.options_config = @options[:config] if @options[:config]
if @options[:color]
# color output explicitly forced on
Rainbow.enabled = true
elsif @options[:color] == false
# color output explicitly forced off
Rainbow.enabled = false
end
end
def handle_exiting_options
return unless Options::EXITING_OPTIONS.any? { |o| @options.key? o }
puts RuboCop::Version.version(false) if @options[:version]
puts RuboCop::Version.version(true) if @options[:verbose_version]
print_available_cops if @options[:show_cops]
raise Finished
end
def apply_default_formatter
# This must be done after the options have already been processed,
# because they can affect how ConfigStore behaves
@options[:formatters] ||= begin
cfg = @config_store.for(Dir.pwd)['AllCops']
formatter = (cfg && cfg['DefaultFormatter']) || 'progress'
[[formatter, @options[:output_path]]]
end
if @options[:auto_gen_config]
@options[:formatters] << [Formatter::DisabledConfigFormatter,
ConfigLoader::AUTO_GENERATED_FILE]
end
end
def print_available_cops
cops = Cop::Cop.all
show_all = @options[:show_cops].empty?
if show_all
puts "# Available cops (#{cops.length}) + config for #{Dir.pwd}: "
end
cops.types.sort!.each { |type| print_cops_of_type(cops, type, show_all) }
end
def print_cops_of_type(cops, type, show_all)
cops_of_this_type = cops.with_type(type).sort_by!(&:cop_name)
if show_all
puts "# Type '#{type.to_s.capitalize}' (#{cops_of_this_type.size}):"
end
selected_cops = cops_of_this_type.select do |cop|
show_all || @options[:show_cops].include?(cop.cop_name)
end
selected_cops.each do |cop|
puts '# Supports --auto-correct' if cop.new.support_autocorrect?
puts "#{cop.cop_name}:"
cnf = @config_store.for(Dir.pwd).for_cop(cop)
puts cnf.to_yaml.lines.to_a.butfirst.map { |line| ' ' + line }
puts
end
end
def display_warning_summary(warnings)
return if warnings.empty?
warn Rainbow("\n#{pluralize(warnings.size, 'warning')}:").yellow
warnings.each { |warning| warn warning }
end
def display_error_summary(errors)
return if errors.empty?
warn Rainbow("\n#{pluralize(errors.size, 'error')} occurred:").red
errors.each { |error| warn error }
warn <<-END.strip_indent
Errors are usually caused by RuboCop bugs.
Please, report your problems to RuboCop's issue tracker.
Mention the following information in the issue report:
#{RuboCop::Version.version(true)}
END
end
def maybe_print_corrected_source
# If we are asked to autocorrect source code read from stdin, the only
# reasonable place to write it is to stdout.
# Unfortunately, we also write other information to stdout,
# so a delimiter is needed for tools to easily identify where the
# autocorrected source begins.
return unless @options[:stdin] && @options[:auto_correct]
puts '=' * 20
print @options[:stdin]
end
end
end
| 29.793939 | 79 | 0.650122 |
08fb1ad3b4ddc948e75395571aeb686222e02836 | 3,796 | require 'open-uri'
module Lipsiadmin
module View
module Helpers
module FrontendHelper
# Set the title of the page and append the name of the project at the end.
# Useful for Google etc.
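# Example (assuming AppConfig.project returns "MySite"):
#   title "About us"   # <title> becomes "About us - MySite"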
def title(text)
content_for(:title) { text + " - #{AppConfig.project}" }
end
# Set the meta description of the page.
# Useful for Google etc.
def description(text)
content_for(:description) { text }
end
# Set the meta keywords of the page.
# Useful for Google etc.
def keywords(text)
content_for(:keywords) { text }
end
# Override the default image tag with a special option
# <tt>resize</tt> that crops/resizes the image on the fly
# and stores the result in the <tt>uploads/thumbs</tt> directory.
#
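# A minimal usage sketch (geometry and file name are illustrative):
#
#   image_tag "products/cover.jpg", :resize => "200x100"
#   # => <img src="/uploads/thumbs/200x100_cover.jpg" ... >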
def image_tag(source, options = {})
options.symbolize_keys!
# Set the upload path here
upload_path = "uploads/thumbs"
# Now we can create a thumb on the fly
if options[:resize]
begin
geometry = options.delete(:resize)
filename = File.basename(source)
new_filename = "#{geometry}_#{filename}".downcase.gsub(/#/, '')
# Check whether we have already processed it (we don't want to do the same job twice)
if File.exist?("#{Rails.root}/public/#{upload_path}/#{new_filename}")
options[:src] = "/#{upload_path}/#{new_filename}"
else # We need to create the thumb
FileUtils.mkdir("#{Rails.root}/tmp") unless File.exist?("#{Rails.root}/tmp")
# We create a temp file from the original file.
# Notice that we can download it from a URL, so the image can reside anywhere on the web
if source =~ /#{URI.regexp}/
tmp = File.new("#{Rails.root}/tmp/#{filename}", "w")
tmp.write open(source).read
tmp.close
else # If the image is local
tmp = File.open(File.join("#{Rails.root}/public", path_to_image(source).gsub(/\?+\d*/, "")))
end
# Now we generate a thumb with our Thumbnail Processor (based on Paperclip)
thumb = Lipsiadmin::Attachment::Thumbnail.new(tmp, :geometry => geometry).make
# We check if our dir exists
FileUtils.mkdir_p("#{Rails.root}/public/#{upload_path}") unless File.exist?("#{Rails.root}/public/#{upload_path}")
# Now we put the image in our public path
File.open("#{Rails.root}/public/#{upload_path}/#{new_filename}", "w") do |f|
f.write thumb.read
end
# Finally we return the new image path
options[:src] = "/#{upload_path}/#{new_filename}"
end
rescue Exception => e
options[:src] = path_to_image(source)
ensure
File.delete(tmp.path) if tmp && tmp.path =~ /#{Rails.root}\/tmp/
File.delete(thumb.path) if thumb
end
end
if size = options.delete(:size)
options[:width], options[:height] = size.split("x") if size =~ %r{^\d+x\d+$}
end
options[:src] ||= path_to_image(source)
options[:alt] ||= File.basename(options[:src], '.*').
split('.').first.to_s.capitalize
if mouseover = options.delete(:mouseover)
options[:onmouseover] = "this.src='#{image_path(mouseover)}'"
options[:onmouseout] = "this.src='#{image_path(options[:src])}'"
end
tag("img", options)
end
end
end
end
end
| 42.177778 | 130 | 0.538462 |
1d46adf0662d672b6d4328cea9c3fd35483d7c63 | 99 | class Setting < ApplicationRecord
has_one_attached :avatar
def self.get
find(1)
end
end
| 12.375 | 33 | 0.727273 |
61003c2dd604b638dd8c29f0e4b5b0beaf5e8979 | 448 | def log_in_as(user, password: 'password', remember_me: true)
visit login_path
fill_in 'Email', with: user.email
fill_in 'Password', with: password
check 'session_remember_me' if remember_me
click_button 'Log in'
end
def logout
click_link 'Log out'
end
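# Bypasses the UI and issues the DELETE request directly through the Rack driver.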
def force_logout
page.driver.submit :delete, logout_path, {}
end
def set_current_user(user)
allow_any_instance_of(SessionsHelper).to receive(:current_user).and_return(user)
end
| 22.4 | 82 | 0.772321 |
0872b1c59092d1bcd8a34b285f565a56e2150ce1 | 1,660 | Rails.application.routes.draw do
mount EffectiveMemberships::Engine => '/', as: 'effective_memberships'
end
EffectiveMemberships::Engine.routes.draw do
# Public routes
scope module: 'effective' do
resources :applicants, only: [:new, :show, :destroy] do
resources :build, controller: :applicants, only: [:show, :update]
end
resources :applicant_references, only: [:new, :create, :show, :update] do
post :notify, on: :member
end
resources :fee_payments, only: [:new, :show] do
resources :build, controller: :fee_payments, only: [:show, :update]
end
get '/directory', to: 'memberships_directory#index'
resources :membership_cards, only: :index
resources :memberships, only: [] do
get :membership_card, on: :member, to: 'membership_cards#show'
end
resources :organizations, except: [:show, :destroy]
resources :representatives, except: [:show]
end
namespace :admin do
resources :applicants, except: [:new, :create, :show]
resources :applicant_references do
post :notify, on: :member
end
resources :fees
resources :categories, except: [:show]
resources :applicant_course_areas, except: [:show]
resources :applicant_course_names, except: [:show]
resources :fee_payments, only: [:index, :show]
resources :memberships, only: [:index, :update]
resources :membership_histories, except: [:show]
resources :registrar_actions, only: [:create]
resources :organizations, except: [:show] do
post :archive, on: :member
post :unarchive, on: :member
end
resources :representatives, except: [:show]
end
end
| 28.135593 | 77 | 0.678916 |
874c2570bc3c0af7e637f4ead2d790b81d2123f2 | 2,923 | require 'pact/mock_service/app'
require 'rack/test'
describe Pact::Consumer::MockService do
include Rack::Test::Methods
let(:log) { StringIO.new }
let(:app) do
Pact::MockService.new(log_file: log)
end
# NOTE: the admin_headers are Rack env headers; the framework converts them
# to X-Pact-Mock-Service and Content-Type
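# (Rack env convention: header names are upcased, dashes become underscores,
# and an HTTP_ prefix is added -- except for CONTENT_TYPE and CONTENT_LENGTH.)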
let(:admin_headers) { {'HTTP_X_PACT_MOCK_SERVICE' => 'true', 'CONTENT_TYPE' => 'application/json'} }
let(:expected_interaction) do
{
description: "a request for alligators",
provider_state: "alligators exist",
request: {
method: :get,
path: '/alligators',
headers: { 'Accept' => 'application/json' },
},
response: {
status: 200,
headers: { 'Content-Type' => 'application/json' },
body: [{ name: 'Mary' }]
}
}
end
let(:another_expected_interaction) do
{
description: "a request for zebras",
provider_state: "there are zebras",
request: {
method: :get,
path: '/zebras',
headers: { 'Accept' => 'application/json' },
},
response: {
status: 200,
headers: { 'Content-Type' => 'application/json' },
body: [{ name: 'Xena Zebra' }]
}
}
end
let(:interactions) do
{
example_description: 'example_description',
interactions: [expected_interaction, another_expected_interaction]
}.to_json
end
context "when more than one response has been mocked" do
context "when the actual request matches one expected request" do
it "returns the expected response" do | example |
# Set up expected interaction - this would be done by the Pact DSL
put "/interactions", interactions, admin_headers
# Invoke the actual request - this would be done by the class under test
get "/alligators", nil, { 'HTTP_ACCEPT' => 'application/json' }
# Ensure that the response we get back was the one we expected
# A test using pact would normally check the object returned from the class under test
# eg. expect(client.alligators).to eq [Alligator.new(name: 'Mary')]
expect(last_response.status).to eq 200
expect(last_response.headers['Content-Type']).to eq 'application/json'
expect(JSON.parse(last_response.body)).to eq([{ 'name' => 'Mary' }])
# Invoke the /zebras request - this would be done by the class under test
get "/zebras", nil, { 'HTTP_ACCEPT' => 'application/json' }
# Ensure we got the zebra response back
expect(JSON.parse(last_response.body)).to eq([{ 'name' => 'Xena Zebra' }])
# Verify
# This would typically be done in an after hook
get "/interactions/verification?example_description=#{CGI::escape(example.full_description)}", nil, admin_headers
expect(last_response.status).to eq 200
end
end
end
end
| 32.477778 | 121 | 0.632569 |
2883a000749b460c18a697f5eea9c0847fecb9af | 1,734 | # frozen_string_literal: true
class Fisk
module Instructions
# Instruction PMULDQ: Multiply Packed Signed Doubleword Integers and Store Quadword Result
PMULDQ = Instruction.new("PMULDQ", [
# pmuldq: xmm, xmm
Form.new([
OPERAND_TYPES[23],
OPERAND_TYPES[24],
].freeze, [
Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix(buffer, operands, 0x66, true) +
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
0,
operands[1].rex_value) +
add_opcode(buffer, 0x0F, 0) +
add_opcode(buffer, 0x38, 0) +
add_opcode(buffer, 0x28, 0) +
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands) +
0
end
}.new.freeze,
].freeze).freeze,
# pmuldq: xmm, m128
Form.new([
OPERAND_TYPES[23],
OPERAND_TYPES[25],
].freeze, [
Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix(buffer, operands, 0x66, true) +
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
operands[1].rex_value,
operands[1].rex_value) +
add_opcode(buffer, 0x0F, 0) +
add_opcode(buffer, 0x38, 0) +
add_opcode(buffer, 0x28, 0) +
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands) +
0
end
}.new.freeze,
].freeze).freeze,
].freeze).freeze
end
end
| 28.9 | 94 | 0.502307 |
28ac045b78c4429ac8dd146af055bd88fb4afbd2 | 662 | include_recipe 'datadog::dd-agent'
# Integrate rabbitmq metrics into Datadog
#
# Set up attributes following this example.
# If you are running multiple rabbitmq instances on the same machine
# list them all as hashes.
#
# node.datadog.rabbitmq.instances = [
# {
# "api_url" => "http://localhost:15672/api/",
# "user" => "guest",
# "pass" => "guest"
# }
# ]
datadog_monitor 'rabbitmq' do
instances node['datadog']['rabbitmq']['instances']
end
| 33.1 | 82 | 0.471299 |
edaf4eafc065c4c2e0947497c3a7cb548866f0be | 1,987 | # frozen_string_literal: true
class User < ApplicationRecord
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :validatable
devise :omniauthable, omniauth_providers: [:facebook]
before_save { self.email = email.downcase }
validates :name, presence: true, length: { maximum: 20 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i.freeze
validates :email, presence: true, length: { maximum: 255 },
format: { with: VALID_EMAIL_REGEX },
uniqueness: { case_sensitive: false }
validates :password, presence: true, length: { minimum: 6 }
validates :password_confirmation, presence: true, length: { minimum: 6 }
has_many :posts
has_many :comments
has_many :likes
has_many :friendships
has_many :accepted_friendships, -> { accepted }, class_name: 'Friendship'
has_many :unaccepted_friendships, -> { unaccepted }, class_name: 'Friendship'
has_many :friends, through: :accepted_friendships
has_many :received_requests, -> { unaccepted }, foreign_key: :friend_id, class_name: 'Friendship'
has_many :received_requests_users, through: :received_requests, source: :user
has_many :sent_requests, through: :unaccepted_friendships, source: :friend
def friend?(user)
friends.include?(user)
end
def request_sent?(user)
(received_requests_users + sent_requests).include?(user)
end
def self.new_with_session(params, session)
super.tap do |user|
if (data = session['devise.facebook_data'] && session['devise.facebook_data']['extra']['raw_info'])
user.email = data['email'] if user.email.blank?
end
end
end
def self.from_omniauth(auth)
where(provider: auth.provider, uid: auth.uid).first_or_create do |user|
user.email = auth.info.email
user.password = Devise.friendly_token[0, 20]
user.password_confirmation = user.password
user.name = auth.info.name # assuming the user model has a name
end
end
end
| 36.796296 | 103 | 0.700554 |
616809d972fdc9678e3135100474add7570ce38c | 276 | require "bundler/setup"
require "hangman/version"
module Hangman
# Your code goes here...
end
require_relative "hangman/cell.rb"
require_relative "hangman/word.rb"
require_relative "hangman/row.rb"
require_relative "hangman/game.rb"
require_relative "hangman/game_json.rb" | 23 | 39 | 0.800725 |
ffb5306916e2ba9057a367494a68d86052b692ed | 1,723 | class Libxlsxwriter < Formula
desc "C library for creating Excel XLSX files"
homepage "https://libxlsxwriter.github.io/"
url "https://github.com/jmcnamara/libxlsxwriter/archive/RELEASE_1.0.6.tar.gz"
sha256 "6217d2940a44c2eac3b48942e83e1320a871e47aabdb4047484426539e45e930"
license "BSD-2-Clause"
head "https://github.com/jmcnamara/libxlsxwriter.git"
bottle do
sha256 cellar: :any, arm64_big_sur: "98dc5b126bb7384519f4653c17ee9c42ce8d20bd71fc10fae15aac346e0db7c1"
sha256 cellar: :any, big_sur: "2e4a46d19d9b8a2845d4bbe7e182210050940932bd4cc32204129086353e8806"
sha256 cellar: :any, catalina: "c62b8c975d8f53a9841f34fb456647bb43e31db66c535cc2fb2740ccafb44aaa"
sha256 cellar: :any, mojave: "5bf4db3205b2bc60dd8ad5735210064d45274bebd3ea6e65754a446bd848773a"
sha256 cellar: :any_skip_relocation, x86_64_linux: "f29d53bc3da5ca10eddf7bc8e3dbd0a905c499043c789dc780389934cbc09f22"
end
uses_from_macos "zlib"
def install
system "make", "install", "PREFIX=#{prefix}", "V=1"
end
test do
(testpath/"test.c").write <<~EOS
#include "xlsxwriter.h"
int main() {
lxw_workbook *workbook = workbook_new("myexcel.xlsx");
lxw_worksheet *worksheet = workbook_add_worksheet(workbook, NULL);
int row = 0;
int col = 0;
worksheet_write_string(worksheet, row, col, "Hello me!", NULL);
return workbook_close(workbook);
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-I#{include}", "-lxlsxwriter", "-o", "test"
system "./test"
assert_predicate testpath/"myexcel.xlsx", :exist?, "Failed to create xlsx file"
end
end
| 39.159091 | 122 | 0.687754 |
339b1f456e78e6e488e2e62172445049d5a4deee | 565 | # typed: false
# frozen_string_literal: true
# This file was generated by GoReleaser. DO NOT EDIT.
class Lporg < Formula
desc "Organize Your macOS Launchpad Apps"
homepage "https://github.com/blacktop/lporg"
version "20.4.7"
depends_on :macos
on_macos do
url "https://github.com/blacktop/lporg/releases/download/v20.4.7/lporg_20.4.7_macOS_universal.tar.gz"
sha256 "4a13ee5ead8b14e9609949c465858cbbb4aa24c96afd23599fb04c5a528cf0c8"
def install
bin.install "lporg"
end
end
test do
system "#{bin}/lporg --version"
end
end
| 23.541667 | 105 | 0.732743 |
38b2103d5bcb6d56e0a49712accc99f6da6e7e6b | 9,631 | module Braintree
# See http://www.braintreepayments.com/docs/ruby
class CreditCard
include BaseModule # :nodoc:
module CardType
AmEx = "American Express"
CarteBlanche = "Carte Blanche"
ChinaUnionPay = "China UnionPay"
DinersClubInternational = "Diners Club"
Discover = "Discover"
JCB = "JCB"
Laser = "Laser"
Maestro = "Maestro"
MasterCard = "MasterCard"
Solo = "Solo"
Switch = "Switch"
Visa = "Visa"
Unknown = "Unknown"
All = constants.map { |c| const_get(c) }
end
module CustomerLocation
International = "international"
US = "us"
end
module CardTypeIndicator
Yes = "Yes"
No = "No"
Unknown = "Unknown"
end
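# The card-attribute constants below are aliases of CardTypeIndicator and
# share its Yes/No/Unknown values.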
Commercial = Debit = DurbinRegulated = Healthcare = Payroll = Prepaid =
IssuingBank = CountryOfIssuance = CardTypeIndicator
attr_reader :billing_address, :bin, :card_type, :cardholder_name, :commercial, :country_of_issuance,
:created_at, :customer_id, :debit, :durbin_regulated, :expiration_month, :expiration_year, :healthcare,
:issuing_bank, :last_4, :payroll, :prepaid, :subscriptions, :token, :unique_number_identifier, :updated_at,
:image_url
# See http://www.braintreepayments.com/docs/ruby/credit_cards/create
def self.create(attributes)
Configuration.gateway.credit_card.create(attributes)
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/create
def self.create!(attributes)
return_object_or_raise(:credit_card) { create(attributes) }
end
# Deprecated. Use Braintree::TransparentRedirect.url
# See http://www.braintreepayments.com/docs/ruby/credit_cards/create_tr
def self.create_credit_card_url
warn "[DEPRECATED] CreditCard.create_credit_card_url is deprecated. Please use TransparentRedirect.url"
Configuration.gateway.credit_card.create_credit_card_url
end
# Deprecated. Use Braintree::TransparentRedirect.confirm
# See http://www.braintreepayments.com/docs/ruby/credit_cards/create_tr
def self.create_from_transparent_redirect(query_string)
warn "[DEPRECATED] CreditCard.create_from_transparent_redirect is deprecated. Please use TransparentRedirect.confirm"
Configuration.gateway.credit_card.create_from_transparent_redirect(query_string)
end
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def self.credit(token, transaction_attributes)
Transaction.credit(transaction_attributes.merge(:payment_method_token => token))
end
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def self.credit!(token, transaction_attributes)
return_object_or_raise(:transaction) { credit(token, transaction_attributes) }
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/delete
def self.delete(token)
Configuration.gateway.credit_card.delete(token)
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/search
def self.expired(options = {})
Configuration.gateway.credit_card.expired(options)
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/search
def self.expiring_between(start_date, end_date, options = {})
Configuration.gateway.credit_card.expiring_between(start_date, end_date, options)
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/search
def self.find(token)
Configuration.gateway.credit_card.find(token)
end
def self.from_nonce(nonce)
Configuration.gateway.credit_card.from_nonce(nonce)
end
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def self.sale(token, transaction_attributes)
Configuration.gateway.transaction.sale(transaction_attributes.merge(:payment_method_token => token))
end
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def self.sale!(token, transaction_attributes)
return_object_or_raise(:transaction) { sale(token, transaction_attributes) }
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/update
def self.update(token, attributes)
Configuration.gateway.credit_card.update(token, attributes)
end
# See http://www.braintreepayments.com/docs/ruby/credit_cards/update
def self.update!(token, attributes)
return_object_or_raise(:credit_card) { update(token, attributes) }
end
# Deprecated. Use Braintree::TransparentRedirect.confirm
#
# See http://www.braintreepayments.com/docs/ruby/credit_cards/update_tr
def self.update_from_transparent_redirect(query_string)
warn "[DEPRECATED] CreditCard.update_via_transparent_redirect_request is deprecated. Please use TransparentRedirect.confirm"
Configuration.gateway.credit_card.update_from_transparent_redirect(query_string)
end
# Deprecated. Use Braintree::TransparentRedirect.url
#
# See http://www.braintreepayments.com/docs/ruby/credit_cards/update_tr
def self.update_credit_card_url
warn "[DEPRECATED] CreditCard.update_credit_card_url is deprecated. Please use TransparentRedirect.url"
Configuration.gateway.credit_card.update_credit_card_url
end
def initialize(gateway, attributes) # :nodoc:
@gateway = gateway
set_instance_variables_from_hash(attributes)
@billing_address = attributes[:billing_address] ? Address._new(@gateway, attributes[:billing_address]) : nil
@subscriptions = (@subscriptions || []).map { |subscription_hash| Subscription._new(@gateway, subscription_hash) }
end
# Deprecated. Use Braintree::CreditCard.credit
#
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def credit(transaction_attributes)
warn "[DEPRECATED] credit as an instance method is deprecated. Please use CreditCard.credit"
@gateway.transaction.credit(transaction_attributes.merge(:payment_method_token => token))
end
# Deprecated. Use Braintree::CreditCard.credit!
#
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def credit!(transaction_attributes)
warn "[DEPRECATED] credit! as an instance method is deprecated. Please use CreditCard.credit!"
return_object_or_raise(:transaction) { credit(transaction_attributes) }
end
# Deprecated. Use Braintree::CreditCard.delete
#
# http://www.braintreepayments.com/docs/ruby/credit_cards/delete
def delete
warn "[DEPRECATED] delete as an instance method is deprecated. Please use CreditCard.delete"
@gateway.credit_card.delete(token)
end
# Returns true if this credit card is the customer's default payment method.
def default?
@default
end
# Expiration date formatted as MM/YYYY
def expiration_date
"#{expiration_month}/#{expiration_year}"
end
# Returns true if the credit card is expired.
def expired?
@expired
end
def inspect # :nodoc:
first = [:token]
order = first + (self.class._attributes - first)
nice_attributes = order.map do |attr|
"#{attr}: #{send(attr).inspect}"
end
"#<#{self.class} #{nice_attributes.join(', ')}>"
end
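# Returns the card number with the middle digits masked,
# e.g. "510510******5100" (illustrative bin/last-4 values).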
def masked_number
"#{bin}******#{last_4}"
end
# Deprecated. Use Braintree::CreditCard.sale
#
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def sale(transaction_attributes)
warn "[DEPRECATED] sale as an instance method is deprecated. Please use CreditCard.sale"
@gateway.transaction.sale(transaction_attributes.merge(:payment_method_token => token))
end
# Deprecated. Use Braintree::CreditCard.sale!
#
# See http://www.braintreepayments.com/docs/ruby/transactions/create_from_vault
def sale!(transaction_attributes)
warn "[DEPRECATED] sale! as an instance method is deprecated. Please use CreditCard.sale!"
return_object_or_raise(:transaction) { sale(transaction_attributes) }
end
# Deprecated. Use Braintree::CreditCard.update
#
# See http://www.braintreepayments.com/docs/ruby/credit_cards/update
def update(attributes)
warn "[DEPRECATED] update as an instance method is deprecated. Please use CreditCard.update"
result = @gateway.credit_card.update(token, attributes)
if result.success?
copy_instance_variables_from_object result.credit_card
end
result
end
# Deprecated. Use Braintree::CreditCard.update!
#
# See http://www.braintreepayments.com/docs/ruby/credit_cards/update
def update!(attributes)
warn "[DEPRECATED] update! as an instance method is deprecated. Please use CreditCard.update!"
return_object_or_raise(:credit_card) { update(attributes) }
end
# Returns true if the card is associated with Venmo SDK
def venmo_sdk?
@venmo_sdk
end
# Returns true if +other+ is a +CreditCard+ with the same token.
def ==(other)
return false unless other.is_a?(CreditCard)
token == other.token
end
class << self
protected :new
end
def self._attributes # :nodoc:
[
:billing_address, :bin, :card_type, :cardholder_name, :created_at, :customer_id, :expiration_month,
:expiration_year, :last_4, :token, :updated_at, :prepaid, :payroll, :commercial, :debit, :durbin_regulated,
:healthcare, :country_of_issuance, :issuing_bank, :image_url
]
end
def self._new(*args) # :nodoc:
self.new *args
end
end
end
| 37.042308 | 130 | 0.718098 |
088bc6dafb79ec2d036dd2daf09510485f757ed4 | 27,086 | # frozen_string_literal: true
# == Schema Information
#
# Table name: courses
#
# id :integer not null, primary key
# title :string(255)
# created_at :datetime
# updated_at :datetime
# start :datetime
# end :datetime
# school :string(255)
# term :string(255)
# character_sum :integer default(0)
# view_sum :bigint(8) default(0)
# user_count :integer default(0)
# article_count :integer default(0)
# revision_count :integer default(0)
# slug :string(255)
# subject :string(255)
# expected_students :integer
# description :text(65535)
# submitted :boolean default(FALSE)
# passcode :string(255)
# timeline_start :datetime
# timeline_end :datetime
# day_exceptions :string(2000) default("")
# weekdays :string(255) default("0000000")
# new_article_count :integer default(0)
# no_day_exceptions :boolean default(FALSE)
# trained_count :integer default(0)
# cloned_status :integer
# type :string(255) default("ClassroomProgramCourse")
# upload_count :integer default(0)
# uploads_in_use_count :integer default(0)
# upload_usages_count :integer default(0)
# syllabus_file_name :string(255)
# syllabus_content_type :string(255)
# syllabus_file_size :integer
# syllabus_updated_at :datetime
# home_wiki_id :integer
# recent_revision_count :integer default(0)
# needs_update :boolean default(FALSE)
# chatroom_id :string(255)
# flags :text(65535)
# level :string(255)
# private :boolean default(FALSE)
# withdrawn :boolean default(FALSE)
#
require 'rails_helper'
describe Course, type: :model do
before { TrainingModule.load_all }
describe '.update_all_caches_concurrently' do
before do
create(:course, needs_update: true)
create(:course, needs_update: true, slug: 'foo/2')
end
it 'runs without error for multiple courses' do
Course.update_all_caches_concurrently
end
end
it 'caches revision data for students' do
build(:user,
id: 1,
username: 'Ragesoss').save
build(:course,
id: 1,
start: Time.zone.today - 1.month,
end: Time.zone.today + 1.month,
passcode: 'pizza',
title: 'Underwater basket-weaving').save
build(:article,
id: 1,
title: 'Selfie',
namespace: 0).save
build(:revision,
id: 1,
user_id: 1,
article_id: 1,
date: Time.zone.today,
characters: 9000,
views: 1234).save
# Assign the article to the user.
build(:assignment,
course_id: 1,
user_id: 1,
article_id: 1,
article_title: 'Selfie').save
# Make a course-user and save it.
build(:courses_user,
id: 1,
course_id: 1,
user_id: 1,
assigned_article_title: 'Selfie').save
# Make an article-course.
build(:articles_course,
id: 1,
article_id: 1,
course_id: 1).save
# Update caches
ArticlesCourses.update_all_caches(ArticlesCourses.all)
CoursesUsers.update_all_caches(CoursesUsers.ready_for_update)
Course.update_all_caches
# Fetch the created CoursesUsers entry
course = Course.all.first
expect(course.character_sum).to eq(9000)
expect(course.view_sum).to eq(1234)
expect(course.revision_count).to eq(1)
expect(course.article_count).to eq(1)
end
it 'returns a valid course slug for ActiveRecord' do
course = build(:course,
title: 'History Class',
slug: 'History_Class')
expect(course.to_param).to eq('History_Class')
end
it 'updates start/end times when changing course type' do
course = create(:basic_course,
start: Time.zone.local(2016, 1, 1, 12, 45, 0),
end: Time.zone.local(2016, 1, 10, 15, 30, 0),
title: 'History Class')
expect(course.end).to eq(Time.zone.local(2016, 1, 10, 15, 30, 0))
course = course.becomes!(ClassroomProgramCourse)
course.save!
expect(course.end).to eq(Time.zone.local(2016, 1, 10, 23, 59, 59, '+00:00'))
course = course.becomes!(BasicCourse)
course.save!
expect(course.end).to eq(Time.zone.local(2016, 1, 10, 23, 59, 59, '+00:00'))
course.end = Time.zone.local(2016, 1, 10, 15, 30, 0)
course.save!
expect(course.end).to eq(Time.zone.local(2016, 1, 10, 15, 30, 0))
end
it 'updates end time to equal start time if the times are invalid' do
course = build(:course,
start: Time.zone.now,
end: Time.zone.now - 2.months)
course.save
expect(course.end).to eq(course.start)
end
describe '#url' do
it 'returns the url of a course page' do
# A legacy course
lang = Figaro.env.wiki_language
prefix = Figaro.env.course_prefix
course = build(:legacy_course,
id: 618,
slug: 'UW Bothell/Conservation Biology (Winter 2015)',
submitted: true)
url = course.url
expect(url).to be_nil
# A new course
new_course = build(:course,
id: 10618,
slug: 'UW Bothell/Conservation Biology (Winter 2016)',
submitted: true)
url = new_course.url
# rubocop:disable Metrics/LineLength
expect(url).to eq("https://#{lang}.wikipedia.org/wiki/#{prefix}/UW_Bothell/Conservation_Biology_(Winter_2016)")
# rubocop:enable Metrics/LineLength
# A course that hasn't been submitted so has no on-wiki course page yet
new_course = build(:course, submitted: false)
expect(new_course.url).to be_nil
# A course type without edits enabled
new_course = build(:editathon)
expect(new_course.url).to be_nil
end
end
describe 'validation' do
subject { course.valid? }
let(:course) do
Course.new(passcode: passcode,
type: type,
start: '2013-01-01',
end: '2013-07-01',
home_wiki_id: 1)
end
context 'non-legacy course' do
let(:type) { 'ClassroomProgramCourse' }
context 'passcode nil' do
let(:passcode) { nil }
it "doesn't save" do
expect(subject).to eq(false)
end
end
context 'passcode empty string' do
let(:passcode) { '' }
it "doesn't save" do
expect(subject).to eq(false)
end
end
context 'valid passcode' do
let(:passcode) { 'Peanut Butter' }
it 'saves' do
expect(subject).to eq(true)
end
end
end
context 'legacy course' do
it 'saves nil passcode' do
passcode = nil
course = build(:legacy_course,
passcode: passcode)
expect(course.valid?).to eq(true)
end
end
end
describe '#user_count' do
subject { course.user_count }
let!(:course) { create(:course) }
let!(:user1) { create(:test_user, username: 'user1') }
let!(:user2) { create(:test_user, username: 'user2') }
let!(:cu1) { create(:courses_user, course_id: course.id, user_id: user1.id, role: role1) }
let!(:cu2) { create(:courses_user, course_id: course.id, user_id: user2.id, role: role2) }
let!(:cu3) { create(:courses_user, course_id: course.id, user_id: user3, role: role3) }
before { course.update_cache }
context 'students in course, no instructor-students' do
let(:role1) { CoursesUsers::Roles::STUDENT_ROLE }
let(:role2) { CoursesUsers::Roles::STUDENT_ROLE }
let(:user3) { nil }
let(:role3) { nil }
it 'returns 2' do
expect(subject).to eq(2)
end
end
context 'one student, one instructor, one instructor-student' do
let(:role1) { CoursesUsers::Roles::STUDENT_ROLE }
let(:role2) { CoursesUsers::Roles::STUDENT_ROLE }
let(:user3) { user1.id }
let(:role3) { CoursesUsers::Roles::INSTRUCTOR_ROLE }
it 'returns 2' do
expect(subject).to eq(2)
end
end
end
describe '#article_count' do
let(:course) { create(:course) }
it 'counts mainspace articles edited by students' do
student = create(:user)
create(:courses_user, course_id: course.id, user_id: student.id,
role: CoursesUsers::Roles::STUDENT_ROLE)
# mainspace article
article = create(:article, namespace: Article::Namespaces::MAINSPACE)
create(:revision, article_id: article.id, user_id: student.id)
create(:articles_course, article_id: article.id, course_id: course.id)
# non-mainspace page
sandbox = create(:article, namespace: Article::Namespaces::TALK)
create(:revision, article_id: sandbox.id, user_id: student.id)
create(:articles_course, article_id: sandbox.id, course_id: course.id)
course.update_cache
expect(course.article_count).to eq(1)
end
end
describe '#new_article_count' do
let(:course) { create(:course, end: '2015-01-01') }
it 'counts newly created mainspace articles' do
student = create(:user)
create(:courses_user, course_id: course.id, user_id: student.id,
role: CoursesUsers::Roles::STUDENT_ROLE)
# mainspace article
article = create(:article, namespace: Article::Namespaces::MAINSPACE)
create(:revision, article_id: article.id, user_id: student.id)
create(:articles_course, article_id: article.id, course_id: course.id,
new_article: true)
# non-mainspace page
sandbox = create(:article, namespace: Article::Namespaces::TALK)
create(:revision, article_id: sandbox.id, user_id: student.id)
create(:articles_course, article_id: sandbox.id, course_id: course.id,
new_article: true)
course.update_cache
expect(course.new_article_count).to eq(1)
end
end
describe '#trained_count' do
before do
create(:user, id: 1, trained: 0)
create(:courses_user, user_id: 1, course_id: 1, role: CoursesUsers::Roles::STUDENT_ROLE)
create(:user, username: 'user2', id: 2, trained: 1)
create(:courses_user, user_id: 2, course_id: 1, role: CoursesUsers::Roles::STUDENT_ROLE)
create(:user, username: 'user3', id: 3, trained: 1)
create(:courses_user, user_id: 3, course_id: 1, role: CoursesUsers::Roles::STUDENT_ROLE)
end
context 'after the introduction of in-dashboard training modules' do
let(:course) do
create(:course, id: 1, start: '2016-01-01'.to_date, end: '2016-06-01'.to_date,
timeline_start: '2016-01-01'.to_date, timeline_end: '2016-06-01'.to_date)
end
after do
Timecop.return
end
it 'returns the whole student count if no training modules are assigned' do
course.update_cache
expect(course.trained_count).to eq(3)
end
it 'returns the whole student count before assigned trainings are due' do
create(:week, id: 1, course_id: 1)
create(:block, week_id: 1, training_module_ids: [1, 2])
Timecop.freeze('2016-01-02'.to_date)
course.update_cache
expect(course.trained_count).to eq(3)
end
it 'returns the count of students who are not overdue on trainings' do
create(:week, id: 1, course_id: 1)
create(:block, week_id: 1, training_module_ids: [1, 2])
# User who completed all assigned modules
create(:training_modules_users, training_module_id: 1, user_id: 1,
completed_at: '2016-01-09'.to_date)
create(:training_modules_users, training_module_id: 2, user_id: 1,
completed_at: '2016-01-09'.to_date)
# User who completed only 1 of 2 modules
create(:training_modules_users, training_module_id: 1, user_id: 2,
completed_at: '2016-01-09'.to_date)
create(:training_modules_users, training_module_id: 2, user_id: 2,
completed_at: nil)
Timecop.freeze('2016-01-10'.to_date)
course.update_cache
expect(course.trained_count).to eq(1)
end
end
context 'before in-dashboard training modules' do
let(:course) do
create(:course, id: 1, start: '2015-01-01'.to_date, end: '2015-06-01'.to_date)
end
it 'returns the number of students who have completed on-wiki training' do
course.update_cache
expect(course.trained_count).to eq(2)
end
end
end
describe '.uploads' do
before do
create(:course, id: 1, start: 1.year.ago, end: 1.week.ago)
create(:user, id: 1)
create(:courses_user, user_id: 1, course_id: 1, role: CoursesUsers::Roles::STUDENT_ROLE)
create(:commons_upload, id: 1, user_id: 1, uploaded_at: 2.weeks.ago)
create(:commons_upload, id: 2, user_id: 1, uploaded_at: 2.years.ago)
create(:commons_upload, id: 3, user_id: 1, uploaded_at: 1.day.ago)
end
it 'includes uploads by students during the course' do
course = Course.find(1)
expect(course.uploads).to include(CommonsUpload.find(1))
end
it 'excludes uploads from before or after the course' do
course = Course.find(1)
expect(course.uploads).not_to include(CommonsUpload.find(2))
expect(course.uploads).not_to include(CommonsUpload.find(3))
end
end
describe '#wiki_edits_enabled?' do
let(:course) { build(:basic_course, flags: flags) }
let(:subject) { course.wiki_edits_enabled? }
context 'when the :wiki_edits_enabled flag is set false' do
let(:flags) { { wiki_edits_enabled: false } }
it 'returns false' do
expect(subject).to be false
end
end
context 'when the :wiki_edits_enabled flag is set true' do
let(:flags) { { wiki_edits_enabled: true } }
it 'returns true' do
expect(subject).to be true
end
end
context 'when the :wiki_edits_enabled flag is not set' do
let(:flags) { nil }
it 'returns true' do
expect(subject).to be true
end
end
end
describe '#cloneable?' do
let(:subject) { course.cloneable? }
context 'for a LegacyCourse' do
let(:course) { build(:legacy_course) }
it 'returns false' do
expect(subject).to be false
end
end
context 'for a BasicCourse without the no_clone tag' do
let(:course) { build(:basic_course) }
it 'returns true' do
expect(subject).to be true
end
end
context 'for a BasicCourse with the no_clone tag' do
let(:course) { build(:basic_course) }
let!(:tag) { create(:tag, tag: 'no_clone', course: course) }
it 'returns false' do
expect(subject).to be false
end
end
context 'for a ClassroomProgramCourse with the cloneable tag' do
let(:course) { build(:course) }
let!(:tag) { create(:tag, tag: 'cloneable', course: course) }
it 'returns true' do
expect(subject).to be true
end
end
context 'for a ClassroomProgramCourse without the cloneable tag' do
let(:course) { build(:course) }
it 'returns false' do
expect(subject).to be false
end
end
end
describe 'callbacks' do
let(:course) { create(:course) }
describe '#before_save' do
subject { course.update_attributes(course_attrs) }
context 'params are legit' do
let(:course_attrs) { { end: 1.year.from_now } }
it 'succeeds' do
expect(subject).to eq(true)
end
end
context 'slug is nil' do
let(:course_attrs) { { slug: nil } }
it 'fails' do
expect(subject).to eq(false)
end
end
context 'title is nil' do
let(:course_attrs) { { title: nil } }
it 'fails' do
expect(subject).to eq(false)
end
end
context 'school is nil' do
let(:course_attrs) { { school: nil } }
it 'fails' do
expect(subject).to eq(false)
end
end
context 'term is nil' do
let(:course_attrs) { { term: nil } }
it 'fails' do
expect(subject).to eq(false)
end
end
end
describe '#set_default_times' do
subject do
course.update_attributes(course_attrs)
course
end
context 'end is at the beginning of day' do
let(:course_attrs) { { end: 1.year.from_now.beginning_of_day } }
it 'converts to end of day' do
expect(subject.end).to be_within(1.second).of(1.year.from_now.end_of_day)
end
end
context 'timeline_end is at the beginning of day' do
let(:course_attrs) { { timeline_end: 1.year.from_now.beginning_of_day } }
it 'converts to end of day' do
expect(subject.timeline_end).to be_within(1.second).of(1.year.from_now.end_of_day)
end
end
end
end
describe 'typing and validation' do
let(:course) { create(:course) }
let(:arbitrary_course_type) { create(:course, type: 'Foo') }
it 'creates ClassroomProgramCourse type by default' do
expect(course.class).to eq(ClassroomProgramCourse)
end
it 'allows BasicCourse type' do
course.update_attributes(type: 'BasicCourse')
expect(Course.last.class).to eq(BasicCourse)
end
it 'allows VisitingScholarship type' do
course.update_attributes(type: 'VisitingScholarship')
expect(Course.last.class).to eq(VisitingScholarship)
end
it 'allows Editathon type' do
course.update_attributes(type: 'Editathon')
expect(Course.last.class).to eq(Editathon)
end
it 'allows FellowsCohort type' do
course.update_attributes(type: 'FellowsCohort')
expect(Course.last.class).to eq(FellowsCohort)
end
it 'does not allow creation of arbitrary types' do
expect { arbitrary_course_type }.to raise_error(ActiveRecord::RecordInvalid)
end
it 'does not allow updating to arbitrary types' do
invalid_update = course.update_attributes(type: 'Bar')
expect(invalid_update).to eq(false)
expect(Course.last.class).to eq(ClassroomProgramCourse)
end
it 'implements required methods for every course type' do
Course::COURSE_TYPES.each do |type|
create(:course, type: type, slug: "foo/#{type}")
course = Course.last
expect(course.type).to eq(type)
# #string_prefix
expect(course.string_prefix).to be_a(String)
# #wiki_edits_enabled?
expect(course.wiki_edits_enabled?).to be_in([true, false])
# #wiki_course_page_enabled?
expect(course.wiki_course_page_enabled?).to be_in([true, false])
# #enrollment_edits_enabled?
expect(course.enrollment_edits_enabled?).to be_in([true, false])
# #assignment_edits_enabled?
expect(course.assignment_edits_enabled?).to be_in([true, false])
# #multiple_roles_allowed?
expect(course.multiple_roles_allowed?).to be_in([true, false])
# #passcode_required?
expect(course.passcode_required?).to be_in([true, false])
# #use_start_and_end_times
expect(course.use_start_and_end_times).to be_in([true, false])
# #wiki_title
expect(course).to respond_to(:wiki_title)
# #training_library_slug
expect(course.training_library_slug).to be_a(String).or be_nil
end
end
context 'with edit_settings flag' do
let(:flags) do
{
'edit_settings' => {
'assignment_edits_enabled' => true,
'wiki_course_page_enabled' => true,
'enrollment_edits_enabled' => true
}
}
end
it 'implements required methods for every course type that has edit_settings' do
Course::COURSE_TYPES.each do |type|
create(:course, type: type, flags: flags, slug: "foo/#{type}")
course = Course.last
expect(course.type).to eq(type)
# #wiki_edits_enabled?
expect(course.wiki_edits_enabled?).to be_in([true, false])
# #wiki_course_page_enabled?
expect(course.wiki_course_page_enabled?).to be_in([true, false])
# #enrollment_edits_enabled?
expect(course.enrollment_edits_enabled?).to be_in([true, false])
# #assignment_edits_enabled?
expect(course.assignment_edits_enabled?).to be_in([true, false])
end
end
end
end
describe '#ready_for_survey' do
let(:survey) { create(:survey) }
let(:campaign) { create(:campaign, title: 'Test', slug: 'test') }
let(:survey_assignment) { create(:survey_assignment, survey_id: survey.id, published: true) }
let(:course) { create(:course, start: course_start, end: course_end) }
let(:course_start) { Time.zone.today - 1.month }
let(:course_end) { Time.zone.today + 1.month }
before do
survey_assignment.campaigns << campaign
end
let(:n) { 7 }
let(:course_scope) do
survey_assignment.campaigns.first.courses.ready_for_survey(
days: n,
before: before,
relative_to: relative_to
)
end
context 'when `n` days before their course end is Today' do
let(:course_end) { Time.zone.today - n.days }
let(:before) { true }
let(:relative_to) { 'end' }
it 'include the Course' do
course.campaigns << campaign
course.save
expect(course_scope.length).to eq(1)
end
end
context 'when `n` days after their course end is Today' do
# By default, course end dates are end-of-day. So we shift by 1 day to test
# the case where the course ended within the last 24 hours.
let(:course_end) { Time.zone.today - n.days - 1.day }
let(:before) { false }
let(:relative_to) { 'end' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_scope.length).to eq(1)
end
end
context 'when `n` days `before` their course `start` is Today' do
let(:course_start) { Time.zone.today + n.days }
let(:before) { true }
let(:relative_to) { 'start' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_scope.length).to eq(1)
end
end
context 'when `n` days `after` their course `start` is Today' do
let(:course_start) { Time.zone.today - n.days }
let(:before) { false }
let(:relative_to) { 'start' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_scope.length).to eq(1)
end
end
context 'when `n` days `after` their course `start` is tomorrow' do
let(:course_start) { Time.zone.tomorrow - n.days }
let(:before) { false }
let(:relative_to) { 'start' }
it 'does not include the Course' do
course.campaigns << campaign
course.save
expect(course_scope.length).to eq(0)
end
end
end
describe '#will_be_ready_for_survey' do
let(:survey) { create(:survey) }
let(:campaign) { create(:campaign, title: 'Test', slug: 'test') }
let(:survey_assignment) { create(:survey_assignment, survey_id: survey.id, published: true) }
let(:course) { create(:course, start: course_start, end: course_end) }
let(:course_start) { Time.zone.today - 1.month }
let(:course_end) { Time.zone.today + 1.month }
before do
survey_assignment.campaigns << campaign
end
let(:n) { 7 }
let(:course_will_be_ready_scope) do
survey_assignment.campaigns.first.courses.will_be_ready_for_survey(
days: n,
before: before,
relative_to: relative_to
)
end
context 'when `n` days before the course end is after Today' do
let(:course_end) { Time.zone.today + n.days + 1.day }
let(:before) { true }
let(:relative_to) { 'end' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_will_be_ready_scope.length).to eq(1)
end
end
context 'when `n` days before the course start is after Today' do
let(:course_start) { Time.zone.today + n.days + 1.day }
let(:before) { true }
let(:relative_to) { 'start' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_will_be_ready_scope.length).to eq(1)
end
end
context 'when `n` days after their course end is after Today' do
let(:course_end) { Time.zone.today - n.days + 1.day }
let(:before) { false }
let(:relative_to) { 'end' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_will_be_ready_scope.length).to eq(1)
end
end
context 'when `n` days after their course start is after Today' do
let(:course_start) { Time.zone.today - n.days + 1.day }
let(:before) { false }
let(:relative_to) { 'start' }
it 'includes the Course' do
course.campaigns << campaign
course.save
expect(course_will_be_ready_scope.length).to eq(1)
end
end
context 'when `n` days after their course start is exactly Today' do
let(:course_start) { Time.zone.today - n.days }
let(:before) { false }
let(:relative_to) { 'start' }
it 'does not include the Course' do
course.campaigns << campaign
course.save
expect(course_will_be_ready_scope.length).to eq(0)
end
end
end
describe '#pages_edited' do
let(:course) { create(:course) }
let(:user) { create(:user) }
# Article edited during the course
let(:article) { create(:article, namespace: 1) }
let!(:revision) do
create(:revision, date: course.start + 1.minute,
article_id: article.id,
user_id: user.id)
end
# Article edited outside the course
let(:article2) { create(:article) }
let!(:revision2) do
create(:revision, date: course.start - 1.week,
article_id: article2.id,
user_id: user.id)
end
before do
course.students << user
end
it 'returns Article records via course revisions' do
expect(course.pages_edited).to include(article)
expect(course.pages_edited).not_to include(article2)
end
end
end
| 32.322196 | 117 | 0.607694 |
1c4907abc0d0436426e931883f493967c4a2ffe9 | 531 | class BusinessPartnerDrop < Liquid::Drop
def initialize(business_partner)
@business_partner = business_partner
end
def title
@business_partner.partner_title
end
def url
@business_partner.partner_url
end
def full_url
@business_partner.link_url
end
def description
@business_partner.partner_description
end
def asset_link
"/assets/1/original/#{@business_partner.site_asset.asset_file_name}"
end
def has_asset?
!@business_partner.site_asset.nil?
end
end | 18.310345 | 72 | 0.728814 |
1db82957d95e4220641bcd52d9a478080cdeb89d | 498 | class RelationshipsController < ApplicationController
before_action :logged_in_user, only: %i[create destroy]
def create
@user = User.find params[:followed_id]
current_user.follow(@user)
respond_to do |format|
format.html { redirect_to @user }
format.js
end
end
def destroy
@user = Relationship.find(params[:id]).followed
current_user.unfollow(@user)
respond_to do |format|
format.html { redirect_to @user }
format.js
end
end
end
| 22.636364 | 57 | 0.684739 |
7989628b69d7c448fae5833f1d63d527d2cf4f05 | 20,027 | # HTTPClient - HTTP client library.
# Copyright (C) 2000-2009 NAKAMURA, Hiroshi <[email protected]>.
#
# This program is copyrighted free software by NAKAMURA, Hiroshi. You can
# redistribute it and/or modify it under the same terms of Ruby's license;
# either the dual license version in 2003, or any later version.
# httpclient/session.rb is based on http-access.rb in http-access/0.0.4.
# Some part of code in http-access.rb was recycled in httpclient.rb.
# Those part is copyrighted by Maehashi-san.
require 'socket'
require 'thread'
require 'stringio'
require 'httpclient/timeout'
require 'httpclient/ssl_config'
require 'httpclient/http'
class HTTPClient
# Represents a Site: protocol scheme, host String and port Number.
class Site
# Protocol scheme.
attr_accessor :scheme
# Host String.
attr_reader :host
# Port number.
attr_reader :port
# Creates a new Site based on the given URI.
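# Example (hypothetical URI):
#   Site.new(URI.parse('https://example.com:8443')).addr
#   # => "https://example.com:8443"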
def initialize(uri = nil)
if uri
@scheme = uri.scheme
@host = uri.host
@port = uri.port.to_i
else
@scheme = 'tcp'
@host = '0.0.0.0'
@port = 0
end
end
# Returns address String.
def addr
"#{@scheme}://#{@host}:#{@port.to_s}"
end
# Returns true if scheme, host and port are '=='
def ==(rhs)
(@scheme == rhs.scheme) and (@host == rhs.host) and (@port == rhs.port)
end
# Same as ==.
def eql?(rhs)
self == rhs
end
def hash # :nodoc:
[@scheme, @host, @port].hash
end
def to_s # :nodoc:
addr
end
# Returns true if the scheme, host and port of the given URI match this site.
def match(uri)
(@scheme == uri.scheme) and (@host == uri.host) and (@port == uri.port.to_i)
end
def inspect # :nodoc:
sprintf("#<%s:0x%x %s>", self.class.name, __id__, addr)
end
end
# Manages sessions for an HTTPClient instance.
class SessionManager
# Name of this client. Used for 'User-Agent' header in HTTP request.
attr_accessor :agent_name
# Owner of this client. Used for 'From' header in HTTP request.
attr_accessor :from
# Requested protocol version
attr_accessor :protocol_version
# Chunk size for chunked request
attr_accessor :chunk_size
# Device for dumping log for debugging
attr_accessor :debug_dev
# Boolean value for Socket#sync
attr_accessor :socket_sync
attr_accessor :connect_timeout
# Maximum retry count. 0 for infinite.
attr_accessor :connect_retry
attr_accessor :send_timeout
attr_accessor :receive_timeout
attr_accessor :read_block_size
attr_accessor :protocol_retry_count
attr_accessor :ssl_config
attr_reader :test_loopback_http_response
def initialize(client)
@client = client
@proxy = client.proxy
@agent_name = nil
@from = nil
@protocol_version = nil
@debug_dev = client.debug_dev
@socket_sync = true
@chunk_size = 4096
@connect_timeout = 60
@connect_retry = 1
@send_timeout = 120
@receive_timeout = 60 # For each read_block_size bytes
@read_block_size = 1024 * 16 # follows net/http change in 1.8.7
@protocol_retry_count = 5
@ssl_config = nil
@test_loopback_http_response = []
@sess_pool = []
@sess_pool_mutex = Mutex.new
end
def proxy=(proxy)
if proxy.nil?
@proxy = nil
else
@proxy = Site.new(proxy)
end
end
def query(req, via_proxy)
req.body.chunk_size = @chunk_size
sess = open(req.header.request_uri, via_proxy)
begin
sess.query(req)
rescue
sess.close
raise
end
sess
end
def reset(uri)
site = Site.new(uri)
close(site)
end
def reset_all
close_all
end
def keep(sess)
add_cached_session(sess)
end
private
def open(uri, via_proxy = false)
sess = nil
if cached = get_cached_session(uri)
sess = cached
else
sess = Session.new(@client, Site.new(uri), @agent_name, @from)
sess.proxy = via_proxy ? @proxy : nil
sess.socket_sync = @socket_sync
sess.requested_version = @protocol_version if @protocol_version
sess.connect_timeout = @connect_timeout
sess.connect_retry = @connect_retry
sess.send_timeout = @send_timeout
sess.receive_timeout = @receive_timeout
sess.read_block_size = @read_block_size
sess.protocol_retry_count = @protocol_retry_count
sess.ssl_config = @ssl_config
sess.debug_dev = @debug_dev
sess.test_loopback_http_response = @test_loopback_http_response
end
sess
end
def close_all
@sess_pool_mutex.synchronize do
@sess_pool.each do |sess|
sess.close
end
end
@sess_pool.clear
end
def close(dest)
if cached = get_cached_session(dest)
cached.close
true
else
false
end
end
def get_cached_session(uri)
cached = nil
@sess_pool_mutex.synchronize do
new_pool = []
@sess_pool.each do |s|
if s.dest.match(uri)
cached = s
else
new_pool << s
end
end
@sess_pool = new_pool
end
cached
end
def add_cached_session(sess)
@sess_pool_mutex.synchronize do
@sess_pool << sess
end
end
end
# Wraps up OpenSSL::SSL::SSLSocket and offers debugging features.
class SSLSocketWrap
def initialize(socket, context, debug_dev = nil)
unless SSLEnabled
raise ConfigurationError.new('Ruby/OpenSSL module is required')
end
@context = context
@socket = socket
@ssl_socket = create_openssl_socket(@socket)
@debug_dev = debug_dev
end
def ssl_connect
@ssl_socket.connect
end
def post_connection_check(host)
verify_mode = @context.verify_mode || OpenSSL::SSL::VERIFY_NONE
if verify_mode == OpenSSL::SSL::VERIFY_NONE
return
elsif @ssl_socket.peer_cert.nil? and
check_mask(verify_mode, OpenSSL::SSL::VERIFY_FAIL_IF_NO_PEER_CERT)
raise OpenSSL::SSL::SSLError.new('no peer cert')
end
hostname = host.host
if @ssl_socket.respond_to?(:post_connection_check) and RUBY_VERSION > "1.8.4"
@ssl_socket.post_connection_check(hostname)
else
@context.post_connection_check(@ssl_socket.peer_cert, hostname)
end
end
def peer_cert
@ssl_socket.peer_cert
end
def close
@ssl_socket.close
@socket.close
end
def closed?
@socket.closed?
end
def eof?
@ssl_socket.eof?
end
def gets(*args)
str = @ssl_socket.gets(*args)
debug(str)
str
end
def read(*args)
str = @ssl_socket.read(*args)
debug(str)
str
end
def readpartial(*args)
str = @ssl_socket.readpartial(*args)
debug(str)
str
end
def <<(str)
rv = @ssl_socket.write(str)
debug(str)
rv
end
def flush
@ssl_socket.flush
end
def sync
@ssl_socket.sync
end
def sync=(sync)
@ssl_socket.sync = sync
end
private
def check_mask(value, mask)
value & mask == mask
end
def create_openssl_socket(socket)
ssl_socket = nil
if OpenSSL::SSL.const_defined?("SSLContext")
ctx = OpenSSL::SSL::SSLContext.new
@context.set_context(ctx)
ssl_socket = OpenSSL::SSL::SSLSocket.new(socket, ctx)
else
ssl_socket = OpenSSL::SSL::SSLSocket.new(socket)
@context.set_context(ssl_socket)
end
ssl_socket
end
def debug(str)
@debug_dev << str if @debug_dev && str
end
end
# Wraps up a Socket for method interception.
module SocketWrap
def initialize(socket, *args)
super(*args)
@socket = socket
end
def close
@socket.close
end
def closed?
@socket.closed?
end
def eof?
@socket.eof?
end
def gets(*args)
@socket.gets(*args)
end
def read(*args)
@socket.read(*args)
end
def readpartial(*args)
# StringIO doesn't support :readpartial
if @socket.respond_to?(:readpartial)
@socket.readpartial(*args)
else
@socket.read(*args)
end
end
def <<(str)
@socket << str
end
def flush
@socket.flush
end
def sync
@socket.sync
end
def sync=(sync)
@socket.sync = sync
end
end
# Module for intercepting Socket methods and dumping in/out data to a given
# debugging device. debug_dev must respond to <<.
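# Example (sketch; any object responding to << works):
#   sock.extend(DebugSocket)
#   sock.debug_dev = $stderr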
module DebugSocket
extend SocketWrap
def debug_dev=(debug_dev)
@debug_dev = debug_dev
end
def close
super
debug("! CONNECTION CLOSED\n")
end
def gets(*args)
str = super
debug(str)
str
end
def read(*args)
str = super
debug(str)
str
end
def readpartial(*args)
str = super
debug(str)
str
end
def <<(str)
super
debug(str)
end
private
def debug(str)
@debug_dev << str if str && @debug_dev
end
end
# Dummy Socket for emulating loopback test.
class LoopBackSocket
include SocketWrap
def initialize(host, port, response)
super(StringIO.new(response))
@host = host
@port = port
end
def <<(str)
# ignored
end
end
# Manages an HTTP session with a Site.
class Session
include HTTPClient::Timeout
# Destination site
attr_reader :dest
# Proxy site
attr_accessor :proxy
# Boolean value for Socket#sync
attr_accessor :socket_sync
# Requested protocol version
attr_accessor :requested_version
# Device for dumping log for debugging
attr_accessor :debug_dev
attr_accessor :connect_timeout
attr_accessor :connect_retry
attr_accessor :send_timeout
attr_accessor :receive_timeout
attr_accessor :read_block_size
attr_accessor :protocol_retry_count
attr_accessor :ssl_config
attr_reader :ssl_peer_cert
attr_accessor :test_loopback_http_response
def initialize(client, dest, agent_name, from)
@client = client
@dest = dest
@proxy = nil
@socket_sync = true
@requested_version = nil
@debug_dev = nil
@connect_timeout = nil
@connect_retry = 1
@send_timeout = nil
@receive_timeout = nil
@read_block_size = nil
@protocol_retry_count = 5
@ssl_config = nil
@ssl_peer_cert = nil
@test_loopback_http_response = nil
@agent_name = agent_name
@from = from
@state = :INIT
@requests = []
@status = nil
@reason = nil
@headers = []
@socket = nil
@readbuf = nil
end
# Send a request to the server
def query(req)
connect if @state == :INIT
req.header.request_via_proxy = [email protected]?
begin
timeout(@send_timeout, SendTimeoutError) do
set_header(req)
req.dump(@socket)
# flush the IO stream as IO::sync mode is false
@socket.flush unless @socket_sync
end
rescue Errno::ECONNABORTED, Errno::ECONNRESET, Errno::EPIPE
close
raise KeepAliveDisconnected.new
rescue HTTPClient::TimeoutError
close
raise
rescue
if SSLEnabled and $!.is_a?(OpenSSL::SSL::SSLError)
raise KeepAliveDisconnected.new
else
raise
end
end
@state = :META if @state == :WAIT
@next_connection = nil
@requests.push(req)
end
def close
if [email protected]? and [email protected]?
# @socket.flush may block when the socket has already been closed by the
# foreign host and the client runs under MT conditions.
@socket.close
end
@state = :INIT
end
def closed?
@state == :INIT
end
def get_header
begin
if @state != :META
raise RuntimeError.new("get_status must be called at the beginning of a session")
end
read_header
rescue
close
raise
end
[@version, @status, @reason, @headers]
end
def eof?
if !@content_length.nil?
@content_length == 0
else
@socket.closed? or @socket.eof?
end
end
def get_body(&block)
begin
read_header if @state == :META
return nil if @state != :DATA
if @chunked
read_body_chunked(&block)
elsif @content_length
read_body_length(&block)
else
read_body_rest(&block)
end
rescue
close
raise
end
if eof?
if @next_connection
@state = :WAIT
else
close
end
end
nil
end
private
def set_header(req)
if @requested_version
if /^(?:HTTP\/|)(\d+\.\d+)$/ =~ @requested_version
req.version = $1.to_f
end
end
if @agent_name
req.header.set('User-Agent', "#{@agent_name} #{LIB_NAME}")
end
if @from
req.header.set('From', @from)
end
req.header.set('Date', Time.now.httpdate)
end
# Connect to the server
def connect
site = @proxy || @dest
retry_number = 0
begin
timeout(@connect_timeout, ConnectTimeoutError) do
@socket = create_socket(site)
if @dest.scheme == 'https'
if @socket.is_a?(LoopBackSocket)
connect_ssl_proxy(@socket, URI.parse(@dest.to_s)) if @proxy
else
@socket = create_ssl_socket(@socket)
connect_ssl_proxy(@socket, URI.parse(@dest.to_s)) if @proxy
@socket.ssl_connect
@socket.post_connection_check(@dest)
@ssl_peer_cert = @socket.peer_cert
end
end
        # Use Ruby's internal buffering instead of passing data immediately
        # to the underlying layer
        # => we need to call flush on the socket explicitly
@socket.sync = @socket_sync
end
rescue RetryableResponse
retry_number += 1
if retry_number < @protocol_retry_count
retry
end
raise BadResponseError.new("connect to the server failed with status #{@status} #{@reason}")
rescue TimeoutError
if @connect_retry == 0
retry
else
retry_number += 1
retry if retry_number < @connect_retry
end
close
raise
end
@state = :WAIT
end
def create_socket(site)
socket = nil
begin
@debug_dev << "! CONNECT TO #{site.host}:#{site.port}\n" if @debug_dev
if str = @test_loopback_http_response.shift
socket = LoopBackSocket.new(site.host, site.port, str)
else
socket = TCPSocket.new(site.host, site.port)
end
if @debug_dev
@debug_dev << "! CONNECTION ESTABLISHED\n"
socket.extend(DebugSocket)
socket.debug_dev = @debug_dev
end
rescue SystemCallError => e
e.message << " (#{site})"
raise
rescue SocketError => e
e.message << " (#{site})"
raise
end
socket
end
# wrap socket with OpenSSL.
def create_ssl_socket(raw_socket)
SSLSocketWrap.new(raw_socket, @ssl_config, @debug_dev)
end
def connect_ssl_proxy(socket, uri)
req = HTTP::Message.new_connect_request(uri)
@client.request_filter.each do |filter|
filter.filter_request(req)
end
set_header(req)
req.dump(@socket)
@socket.flush unless @socket_sync
res = HTTP::Message.new_response('')
parse_header
res.version, res.status, res.reason = @version, @status, @reason
@headers.each do |key, value|
res.header.set(key, value)
end
commands = @client.request_filter.collect { |filter|
filter.filter_response(req, res)
}
if commands.find { |command| command == :retry }
raise RetryableResponse.new
end
unless @status == 200
raise BadResponseError.new("connect to ssl proxy failed with status #{@status} #{@reason}", res)
end
end
# Read status block.
def read_header
@content_length = nil
@chunked = false
@chunk_length = 0
parse_header
# Head of the request has been parsed.
@state = :DATA
req = @requests.shift
if req.header.request_method == 'HEAD'
@content_length = 0
if @next_connection
@state = :WAIT
else
close
end
end
@next_connection = false unless @content_length
end
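  # Matches a status line such as "HTTP/1.1 200 OK\r\n", capturing the
  # version ("1.1"), the status code ("200") and the reason phrase ("OK").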
StatusParseRegexp = %r(\AHTTP/(\d+\.\d+)\s+(\d\d\d)\s*([^\r\n]+)?\r?\n\z)
def parse_header
timeout(@receive_timeout, ReceiveTimeoutError) do
begin
initial_line = @socket.gets("\n")
if initial_line.nil?
raise KeepAliveDisconnected.new
end
if StatusParseRegexp !~ initial_line
@version = '0.9'
@status = nil
@reason = nil
@next_connection = false
@content_length = nil
@readbuf = initial_line
break
end
@version, @status, @reason = $1, $2.to_i, $3
@next_connection = HTTP::Message.keep_alive_enabled?(@version.to_f)
@headers = []
while true
line = @socket.gets("\n")
unless line
raise BadResponseError.new('unexpected EOF')
end
line.chomp!
break if line.empty?
key, value = line.split(/\s*:\s*/, 2)
parse_keepalive_header(key, value)
@headers << [key, value]
end
end while (@version == '1.1' && @status == 100)
end
end
def parse_keepalive_header(key, value)
key = key.downcase
if key == 'content-length'
@content_length = value.to_i
elsif key == 'transfer-encoding' and value.downcase == 'chunked'
@chunked = true
@chunk_length = 0
@content_length = nil
elsif key == 'connection' or key == 'proxy-connection'
if value.downcase == 'keep-alive'
@next_connection = true
else
@next_connection = false
end
end
end
def read_body_length(&block)
return nil if @content_length == 0
buf = ''
while true
maxbytes = @read_block_size
maxbytes = @content_length if maxbytes > @content_length
      timeout(@receive_timeout, ReceiveTimeoutError) do
        begin
          @socket.readpartial(maxbytes, buf)
        rescue EOFError
          # reached EOF; the length check below finishes the read loop
        end
      end
if buf.length > 0
@content_length -= buf.length
yield buf
else
@content_length = 0
end
return if @content_length == 0
end
end
RS = "\r\n"
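  # Each chunk arrives as "<hex-length>\r\n<data>\r\n"; a zero-length chunk
  # ("0\r\n\r\n") terminates the body.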
def read_body_chunked(&block)
buf = ''
while true
len = @socket.gets(RS)
@chunk_length = len.hex
if @chunk_length == 0
@content_length = 0
@socket.gets(RS)
return
end
timeout(@receive_timeout, ReceiveTimeoutError) do
@socket.read(@chunk_length + 2, buf)
end
unless buf.empty?
yield buf.slice(0, @chunk_length)
end
end
end
def read_body_rest
if @readbuf and @readbuf.length > 0
yield @readbuf
@readbuf = nil
end
buf = ''
while true
      timeout(@receive_timeout, ReceiveTimeoutError) do
        begin
          @socket.readpartial(@read_block_size, buf)
        rescue EOFError
          # treat EOF as end of body; the empty-buffer check below returns
        end
      end
if buf && buf.length > 0
yield buf
else
return
end
end
end
end
end
| 23.396028 | 104 | 0.588256 |
5dc071ebbcdfb539c684684a0e2c30cd6a55f749 | 3,028 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Spree::Promotion::Rules::FirstOrder, type: :model do
let(:rule) { Spree::Promotion::Rules::FirstOrder.new }
let(:order) { mock_model(Spree::Order, user: nil, email: nil) }
let(:user) { mock_model(Spree::LegacyUser) }
context "without a user or email" do
it { expect(rule).to be_eligible(order) }
it "does not set an error message" do
rule.eligible?(order)
expect(rule.eligibility_errors.full_messages.first).
to be_nil
end
end
context "first order" do
context "for a signed user" do
context "with no completed orders" do
before(:each) do
allow(user).to receive_message_chain(:orders, complete: [])
end
specify do
allow(order).to receive_messages(user: user)
expect(rule).to be_eligible(order)
end
        it "should be eligible when the user is passed in the payload data" do
expect(rule).to be_eligible(order, user: user)
end
end
context "with completed orders" do
before(:each) do
allow(order).to receive_messages(user: user)
end
it "should be eligible when checked against first completed order" do
allow(user).to receive_message_chain(:orders, complete: [order])
expect(rule).to be_eligible(order)
end
context "with another order" do
before { allow(user).to receive_message_chain(:orders, complete: [mock_model(Spree::Order)]) }
it { expect(rule).not_to be_eligible(order) }
it "sets an error message" do
rule.eligible?(order)
expect(rule.eligibility_errors.full_messages.first).
to eq "This coupon code can only be applied to your first order."
end
it "sets an error code" do
rule.eligible?(order)
expect(rule.eligibility_errors.details[:base].first[:error_code]).
to eq :not_first_order
end
end
end
end
context "for a guest user" do
      let(:email) { '[email protected]' }
      before { allow(order).to receive_messages email: '[email protected]' }
context "with no other orders" do
it { expect(rule).to be_eligible(order) }
end
context "with another order" do
before { allow(rule).to receive_messages(orders_by_email: [mock_model(Spree::Order)]) }
it { expect(rule).not_to be_eligible(order) }
it "sets an error message" do
rule.eligible?(order)
expect(rule.eligibility_errors.full_messages.first).
to eq "This coupon code can only be applied to your first order."
end
it "sets an error code" do
rule.eligible?(order)
expect(rule.eligibility_errors.details[:base].first[:error_code]).
to eq :not_first_order
end
end
end
end
end
| 34.409091 | 105 | 0.606671 |
91e01078a312fd3af8eac711894bbc9ec87e6f83 | 459 | # Should contain an app that responds to call
# and returns a rack-compliant response: an array with the response code, hash of header key-value pairs and a response body that responds to .each
# app = proc do |env|
# [ 200, { 'Content-Type' => 'text/plain' }, ['I wish you nirvana']]
# end
# run app
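# To serve this app (assuming this file is saved as config.ru):
#   rackup config.ru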
class Something
def call(env)
puts env
[ 200, { 'Content-Type' => 'text/plain' }, ['I wish you nirvana, comrade']]
end
end
run Something.new
| 27 | 147 | 0.673203 |
214e0059d018859478ac88eb61ba4ef8d7774608 | 2,630 | # frozen_string_literal: true
require_dependency "renalware/letters"
module Renalware
module HD
module SessionForms
# Given a Batch object representing a request to 'print' (ie compile) a PDF of multiple
# HD Session Forms (aka protocols), where each batch.item points to the patient we want to
# print, we render each PDF in the current folder (we assume the caller has chdir'ed
# us into a tmp location), and then append them all together as <batchid>.pdf in a known
# location. The filepath is assigned to the batch and saved, so it can be served to
      # a user later.
class BatchCompilePdfs
include PdfCompilation
def self.call(batch, user)
new(batch, user).call
end
def initialize(batch, user)
@batch = batch
@user = user
@dir = Pathname(Dir.pwd)
end
def call
batch.update_by(user, status: :processing)
process_batch_items
batch.filepath = append_files
batch.status = :awaiting_printing
batch.save_by!(user)
rescue StandardError => e
batch.update(last_error: e.message, status: :failure)
raise e
end
private
attr_reader :batch, :dir, :user
def process_batch_items
filename = "batch_#{batch.id}.pdf"
File.open(filename, "wb") do |file|
file.write(PdfRenderer.new(patients: patients).call)
end
batch.items.each { |item| item.update(status: :compiled) }
end
def patients
@patients ||= HD::Patient.where(id: batch.items.pluck(:printable_id))
end
def render_session_form_pdf_to_file_for(patient)
filename = "session_form_#{patient.id}.pdf"
File.open(filename, "wb") do |file|
file.write(PdfRenderer.new(patient: patient).call)
end
filename
end
def append_files
glob = Dir.glob(dir.join("*.pdf"))
if glob.any?
combine_multiple_pdfs_into_file(
filepath: compiled_output_pdf_filename,
glob: glob
)
end
Pathname(compiled_output_pdf_filename).to_s # TODO: what happens if no content?
end
def working_folder
Renalware.config.base_working_folder.join("batched_hd_session_forms").tap do |folder|
FileUtils.mkdir_p folder
end
end
def compiled_output_pdf_filename
working_folder.join("#{batch.id}.pdf")
end
end
end
end
end
| 30.581395 | 96 | 0.604563 |
6207f9668ae2df83017cecd5042fc54c4f11ca6f | 1,137 | class PdftkJava < Formula
desc "Port of pdftk in java"
homepage "https://gitlab.com/pdftk-java/pdftk"
url "https://gitlab.com/pdftk-java/pdftk/-/archive/v3.1.3/pdftk-v3.1.3.tar.gz"
sha256 "d9145976adf2dd5f8cd70e1e2345262e46790be6bfb2da1728a2ad4f6e4b2021"
license "GPL-2.0"
revision 1
head "https://gitlab.com/pdftk-java/pdftk.git"
bottle do
cellar :any_skip_relocation
sha256 "3eb4ac53b5e6f4603509e5eb785c3574a050df65252b1f6aeb6f9f7300604fd4" => :catalina
sha256 "a269cdfe44ed002b830a5e574f49b9eec4923b0cc3ad791b3cb46773d2952c40" => :mojave
sha256 "72e5f070fa22a8f394a6fcdfe1b40a65f95ee945f655377677106a7a369b8c08" => :high_sierra
end
depends_on "gradle" => :build
depends_on :java => "1.8"
def install
system "gradle", "shadowJar", "--no-daemon"
libexec.install "build/libs/pdftk-all.jar"
bin.write_jar_script libexec/"pdftk-all.jar", "pdftk", :java_version => "1.8"
end
test do
pdf = test_fixtures("test.pdf")
output_path = testpath/"output.pdf"
system bin/"pdftk", pdf, pdf, "cat", "output", output_path
assert output_path.read.start_with?("%PDF")
end
end
| 34.454545 | 93 | 0.736148 |
62ff5aec217674c820c66ce5888011c9a9baeb76 | 1,038 | Pod::Spec.new do |s|
s.name = "EPSReactiveTableViewController"
s.version = "1.0.0"
s.summary = "A table view controller that automatically populates a table view, and animates the insertion and deletion of rows."
s.description = "EPSReactiveTableViewController is a subclass of `UITableViewController` that automatically populates a table view, and animates the insertion and deletion of rows by observing changes to an array of model objects."
s.homepage = "https://github.com/ElectricPeelSoftware/EPSReactiveTableViewController"
s.license = 'MIT'
s.author = { "Peter Stuart" => "[email protected]" }
s.source = { :git => "https://github.com/ElectricPeelSoftware/EPSReactiveTableViewController.git", :tag => s.version.to_s }
s.platform = :ios, '7.0'
s.ios.deployment_target = '7.0'
s.requires_arc = true
s.source_files = 'Classes'
s.public_header_files = 'Classes/*.h'
s.dependency 'ReactiveCocoa', '~> 2.2.4'
end
| 49.428571 | 238 | 0.684008 |
79360e3708f40be7150c837173e033f55d916d10 | 331 | cask "butter" do
version "0.3.0"
sha256 "4255b581368e88ee3c802811f0824093f64bce73fb323d77b49a25700154ad84"
url "https://github.com/harukasan/butter/releases/download/v#{version}/Butter_#{version}.dmg"
name "Butter"
desc "Melted idobata.io client"
homepage "https://github.com/harukasan/butter"
app "Butter.app"
end
| 27.583333 | 95 | 0.761329 |
18cc757ad49f6b91ccc0d458f12f3f124c1c8ba4 | 445 | class SessionsController < ApplicationController
def create
logger.info('here')
user = User.from_omniauth(env["omniauth.auth"], request.env["omniauth.params"]["runner"])
session[:user_id] = user.id
redirect_to request.env['omniauth.origin'] || '/'
end
def destroy
session[:user_id] = nil
redirect_to root_url
end
def failure
redirect_to root_url, alert: "Authentication failed, please try again."
end
end | 24.722222 | 92 | 0.705618 |
1a93eced4424715367c785aff6395608f7eb7d61 | 408 | require 'rails_helper'
RSpec.describe EntourageScore, type: :model do
it { expect(FactoryBot.build(:entourage_score).save).to be true }
it { should belong_to :entourage }
it { should belong_to :user }
it { should validate_presence_of :entourage_id }
it { should validate_presence_of :user_id }
it { should validate_presence_of :base_score }
it { should validate_presence_of :final_score }
end
| 34 | 67 | 0.757353 |
917174c0810b4ef1442d8abd12d440350be66fce | 3,047 | require 'test_helper'
class Aviator::Test
describe 'aviator/openstack/identity/v3/public/get_projects_by_user_id' do
def create_request(session_data = get_session_data, &block)
block ||= lambda do |params|
params.id = get_session_id
end
klass.new(session_data, &block)
end
def get_session_data
session.send :auth_info
end
def get_session_id
get_session_data[:access][:user][:id]
end
def helper
Aviator::Test::RequestHelper
end
def klass
@klass ||= helper.load_request('openstack', 'identity', 'v3', 'public', 'get_projects_by_user_id.rb')
end
def session
unless @session
@session = Aviator::Session.new(
config_file: Environment.path,
environment: 'openstack_admin'
)
@session.authenticate
end
@session
end
validate_attr :anonymous? do
klass.anonymous?.must_equal false
end
validate_attr :api_version do
klass.api_version.must_equal :v3
end
validate_attr :body do
klass.body?.must_equal false
create_request.body?.must_equal false
end
validate_attr :endpoint_type do
klass.endpoint_type.must_equal :public
end
validate_attr :headers do
session_data = get_session_data
headers = { 'X-Auth-Token' => session_data[:access][:token][:id] }
request = create_request(session_data)
request.headers.must_equal headers
end
validate_attr :http_method do
create_request.http_method.must_equal :get
end
validate_attr :url do
session_data = get_session_data
service_spec = session_data[:access][:serviceCatalog].find{|s| s[:type] == 'identity' }
url = "#{ service_spec[:endpoints][0][:publicURL] }/users/#{get_session_id}/projects"
request = create_request(session_data)
request.url.must_equal url
end
validate_attr :required_params do
klass.required_params.must_equal [:id]
end
validate_response 'session is using a default token and passes correct id' do
s = Aviator::Session.new(
config_file: Environment.path,
environment: 'openstack_admin'
)
s.authenticate do |creds|
creds.username = Environment.openstack_admin[:auth_credentials][:username]
creds.password = Environment.openstack_admin[:auth_credentials][:password]
end
auth_info = session.send :auth_info
id = auth_info[:access][:user][:id]
base_url = URI(Environment.openstack_admin[:auth_service][:host_uri])
base_url.path = "/v3"
# base_url should have the form 'https://<domain>:<port>/<api_version>'
response = s.identity_service.request :get_projects_by_user_id, endpoint_type: :public, base_url: base_url.to_s do |p|
p.id = id
end
response.status.must_equal 200
response.body.wont_be_nil
response.body[:projects].length.wont_equal 0
response.headers.wont_be_nil
end
end
end
| 23.804688 | 124 | 0.663604 |
4a70f35a068d3c55646504ad48bd391c07287d1c | 1,126 | class Pokemon # object class- anything having to do with object
attr_accessor :name, :id, :base_attack, :base_defense, :base_stamina
@@all = []
  def initialize(pokemon) # pokemon: attribute hash from the data source
@name = pokemon[:pokemon_name]
@id = pokemon[:pokemon_id]
@base_attack = pokemon[:base_attack]
@base_defense = pokemon[:base_defense]
@base_stamina = pokemon[:base_stamina]
@@all << self
end
def self.all
@@all
end
def self.display_pokemon_names
array = self.all
    array[0...20].each.with_index(1) do |pokemon, i| # list only the first 20 names, numbered from 1
puts "#{i}. #{pokemon.name}"
end
end
def self.display_pokemon_stats(index)
pokemon = self.all[index]
puts
puts "Check out the stats on #{pokemon.name}!"
puts "Base Attack: #{pokemon.base_attack}"
puts "Base Defense: #{pokemon.base_defense}"
puts "Base Stamina: #{pokemon.base_stamina}"
end
end | 30.432432 | 96 | 0.549734 |
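
# Illustrative usage (attribute values are made up):
#   Pokemon.new(pokemon_name: "Bulbasaur", pokemon_id: 1,
#               base_attack: 118, base_defense: 111, base_stamina: 128)
#   Pokemon.display_pokemon_names
#   Pokemon.display_pokemon_stats(0)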
e2254f53da0e0c39ac6028fd6c1f9e8c88c1127f | 85 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'capybara/guides'
| 28.333333 | 58 | 0.741176 |
ed1a0a6386059693d0a180c6e541dd60300c97b5 | 1,393 | #!/usr/bin/env ruby
$:.unshift File.join(File.dirname(File.dirname($0)), 'lib')
require 'typingpool'
require 'typingpool/utility/test/script'
require 'fileutils'
include Typingpool::Utility::Test::Script
fixture_name = 'tp-collect-2'
transcripts_dir = File.join(fixtures_dir, 'tp_collect_project_temp')
tp_collect_with_fixture(transcripts_dir, fixture_name, true)
fixture_path = File.join(vcr_dir, fixture_name + '.yml')
File.exist? fixture_path or abort "Can't find fixture as expected at #{fixture_path}"
(Time.now - File.ctime(fixture_path)) < 60 or abort "Fixture file does not appear newly created at #{fixture_path}"
project = Typingpool::Project.new(project_default[:title], Typingpool::Config.file(config_path(transcripts_dir)))
transcript_count = project_transcript_count(project, 'sandbox-assignment.csv')
transcript_count == 3 or abort "Unexpected number of transcripts in tp-collect project: #{transcript_count}"
STDERR.puts("Second tp-collect recorded. Please complete TWO more assignments but ONLY APPROVE ONE. Ignore the other. Wait 6 minutes and then run make_tp_collect_fixture_4.rb. Check for assignments at\nhttps://workersandbox.mturk.com/mturk/searchbar?minReward=0.00&searchWords=typingpooltest&selectedSearchType=hitgroups\n...and then approve them at\nhttps://requestersandbox.mturk.com/mturk/manageHITs?hitSortType=CREATION_DESCENDING&%2Fsort.x=11&%2Fsort.y=7")
| 63.318182 | 461 | 0.801866 |
338ab5b7db78248fcb45350486cd868651e041fe | 9,372 | namespace :cartodb do
# This rake retrieves all sync tables that should get synchronized, and puts the synchronization tasks at Resque
# NOTE: This version does not mark the tables as "enqueued", should be done if planning to run multiple instances
desc 'Runs the sync tables process'
task :sync_tables, [:force_all_arg] => [:environment] do |task, args|
puts '> Sync tables started' if ENV['VERBOSE']
require_relative '../../services/synchronizer/lib/synchronizer/collection'
collection = CartoDB::Synchronizer::Collection.new
# This fetches and enqueues
collection.fetch_and_enqueue(args[:force_all_arg].present? ? args[:force_all_arg] : false)
puts '> Sync tables finished' if ENV['VERBOSE']
end
desc 'Adds visualization_id to every Synchronization'
task :populate_synchronization_visualization_ids => [:environment] do |task, args|
require_relative '../../services/synchronizer/lib/synchronizer/collection'
collection = CartoDB::Synchronizer::Collection.new
collection.fetch_all.each { |record|
begin
synchronization = CartoDB::Synchronization::Member.new(id: record[:id]).fetch
rescue KeyError
synchronization = nil
end
if synchronization
begin
table = UserTable.where({
name: synchronization.name,
user_id: synchronization.user_id
}).first
if table.nil?
puts "\nSync id '#{record[:id]}' related table not found"
else
table = table.service
end
rescue StandardError => exception
table = nil
puts "\nSync id '#{record[:id]}' errored: #{exception.inspect}"
end
unless table.nil?
if synchronization.visualization_id.nil?
begin
synchronization.visualization_id = table.table_visualization.id
rescue StandardError => exception
puts "\nSync id '#{record[:id]}' errored, canonical visualization not found"
end
begin
synchronization.store
printf '.'
rescue StandardError => exception
puts "\nSync id '#{record[:id]}' errored: #{exception.inspect}"
end
else
printf 'S'
end
end
else
puts "\nSync id '#{record[:id]}' errored: missing synchronization entry"
end
}
puts "\nFINISHED"
end
def report_incompatible_bq_parameters(parameters)
valid_params = %w(provider connection table sql_query import_as project dataset billing_project)
valid_conn_params = %w(billing_project service_account access_token refresh_token default_project default_dataset)
invalid_params = parameters.keys - valid_params
invalid_conn_params = (parameters['connection'] || {}).keys - valid_conn_params
puts " Invalid parameters: #{invalid_params.inspect}" if invalid_params.present?
puts " Invalid connection parameters: #{invalid_conn_params.inspect}" if invalid_conn_params.present?
end
def replicate_bq_config
puts 'Replicating configuration'
bigquery = Carto::ConnectorProvider.find_by(name: 'bigquery')
bigquery_beta = Carto::ConnectorProvider.find_by(name: 'bigquery-beta')
Carto::ConnectorConfiguration.where(connector_provider_id: bigquery.id).find_each do |bq_config|
unless Carto::ConnectorConfiguration.where(
connector_provider_id: bigquery_beta.id,
user_id: bq_config.user_id,
organization_id: bq_config.organization_id
).exists?
Carto::ConnectorConfiguration.create!(
connector_provider: bigquery_beta,
user_id: bq_config.user_id,
organization_id: bq_config.organization_id,
enabled: bq_config.enabled,
max_rows: bq_config.max_rows
)
puts " configuration for #{bq_config.user&.username || bq_config.configuration.name} replicated"
end
end
end
desc 'Port BQ syncs to beta connector'
task port_bq_syncs_to_beta: [:environment] do
dry_mode = ENV['DRY_RUN'] != 'NO'
if dry_mode
puts 'running in "dry" mode; set DRY_RUN=NO to make actual changes'
else
Rake::Task['cartodb:connectors:create_providers'].invoke
replicate_bq_config
end
number_of_pending_syncs = 0
Carto::Synchronization.where(%{
service_name = 'connector'
AND (state IN (
'#{Carto::Synchronization::STATE_SUCCESS}', '#{Carto::Synchronization::STATE_SYNCING}',
'#{Carto::Synchronization::STATE_QUEUED}', '#{Carto::Synchronization::STATE_CREATED}'
)
OR (state = '#{Carto::Synchronization::STATE_FAILURE}'
AND retried_times < #{CartoDB::Synchronization::Member::MAX_RETRIES}))
AND ((service_item_id::JSON)#>>'{provider}') = 'bigquery'
}).find_each do |synchronization|
next unless synchronization.user.state == 'active'
sleep 0.2
synchronization.transaction do
synchronization.reload
parameters = JSON.parse(synchronization.service_item_id)
if synchronization.state.in? [
Carto::Synchronization::STATE_CREATED,
Carto::Synchronization::STATE_QUEUED,
Carto::Synchronization::STATE_SYNCING
]
puts "Synchronization #{synchronization.id} could not be modifed; state: #{synchronization.state}"
number_of_pending_syncs += 1
elsif dry_mode
puts "Synchronization #{synchronization.id} would be modified to use bigquery-beta"
report_incompatible_bq_parameters(parameters)
else
begin
puts "Modifying #{synchronization.id} to use bigquery-beta"
run_at = synchronization.run_at
synchronization.update! run_at: nil
# Change the provider id
parameters['provider'] = 'bigquery-beta'
synchronization.update! service_item_id: parameters.to_json
report_incompatible_bq_parameters(parameters)
rescue
raise
ensure
synchronization.update! run_at: run_at
end
end
end
end
if number_of_pending_syncs.positive?
      puts "#{number_of_pending_syncs} syncs could not be modified. Please try again later."
end
end
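
  # Example invocation (a sketch; DRY_RUN=NO applies the changes for real):
  #   DRY_RUN=NO bundle exec rake cartodb:port_bq_syncs_to_beta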
desc 'Port BQ beta syncs to new connector'
  task :port_beta_bq_syncs_to_new, [:username] => [:environment] do |_task, args|
dry_mode = ENV['DRY_RUN'] != 'NO'
puts 'running in "dry" mode; set DRY_RUN=NO to make actual changes' if dry_mode
    if args.username != 'all-the-users'
user = Carto::User.find_by(username: args.username)
raise "User not found: #{args.username}" unless user
user_condition = "AND user_id = '#{user.id}'"
end
number_of_pending_syncs = 0
Carto::Synchronization.where(%{
service_name = 'connector'
#{user_condition}
AND (state IN (
'#{Carto::Synchronization::STATE_SUCCESS}', '#{Carto::Synchronization::STATE_SYNCING}',
'#{Carto::Synchronization::STATE_QUEUED}', '#{Carto::Synchronization::STATE_CREATED}')
OR (state = '#{Carto::Synchronization::STATE_FAILURE}'
AND retried_times < #{CartoDB::Synchronization::Member::MAX_RETRIES}))
AND ((service_item_id::JSON)#>>'{provider}') = 'bigquery-beta'
}).find_each do |synchronization|
next unless synchronization.user.state == 'active'
sleep 0.2
synchronization.transaction do
synchronization.reload
parameters = JSON.parse(synchronization.service_item_id)
if synchronization.state.in? [
Carto::Synchronization::STATE_CREATED,
Carto::Synchronization::STATE_QUEUED,
Carto::Synchronization::STATE_SYNCING
]
puts "Synchronization #{synchronization.id} could not be modifed; state: #{synchronization.state}"
number_of_pending_syncs += 1
elsif dry_mode
puts "Synchronization #{synchronization.id} would be modified to use bigquery"
puts ' parameter billing_project would be moved to connection' if parameters['billing_project'].present?
report_incompatible_bq_parameters(parameters)
else
begin
puts "Modifying #{synchronization.id} to use bigquery"
run_at = synchronization.run_at
synchronization.update! run_at: nil
# Change the provider id
            parameters['provider'] = 'bigquery'
# If passing the billing project out of the connection move it inside
if parameters['billing_project'].present?
puts ' Moving billing_project inside the connection parameter'
billing_project = parameters.delete('billing_project')
parameters['connection'] ||= {}
parameters['connection']['billing_project'] = billing_project
end
synchronization.update! service_item_id: parameters.to_json
report_incompatible_bq_parameters(parameters)
rescue
raise
ensure
synchronization.update! run_at: run_at
end
end
end
end
if number_of_pending_syncs.positive?
      puts "#{number_of_pending_syncs} syncs could not be modified. Please try again later."
end
end
end
| 40.396552 | 118 | 0.654823 |
ff99c0a8fbe7a4963aed94aef95c4f57c3afcd53 | 1,774 | require "rack"
require "rack/auth/basic"
require "rack/contrib/not_found"
require "rack/contrib/response_headers"
require "rack/contrib/static_cache"
require "rack/contrib/try_static"
require "rack/protection"
require File.expand_path("../lib/redirect_to_latest", __FILE__)
require File.expand_path("../lib/redirect_v1_docs", __FILE__)
# Protect against various bad things
use Rack::Protection::JsonCsrf
use Rack::Protection::RemoteReferrer
use Rack::Protection::HttpOrigin
use Rack::Protection::EscapedParams
use Rack::Protection::XSSHeader
use Rack::Protection::FrameOptions
use Rack::Protection::PathTraversal
use Rack::Protection::IPSpoofing
# Properly compress the output if the client can handle it.
use Rack::Deflater
# Redirect the homepage to the latest documentation
use HashiCorp::Rack::RedirectToLatest
# Redirect the V1 documentation to the GitHub pages hosted version
use HashiCorp::Rack::RedirectV1Docs
# Set the "forever expire" cache headers for these static assets. Since
# we hash the contents of the assets to determine filenames, this is safe
# to do.
use Rack::StaticCache,
root: "build",
urls: ["/images", "/javascripts", "/stylesheets"],
duration: 2,
versioning: false
# For anything that matches below this point, we set the surrogate key
# for Fastly so that we can quickly purge all the pages without touching
# the static assets.
use Rack::ResponseHeaders do |headers|
headers["Surrogate-Key"] = "page"
end
# Try to find a static file that matches our request, since Middleman
# statically generates everything.
use Rack::TryStatic,
root: "build",
urls: ["/"],
try: [".html", "index.html", "/index.html"]
# 404 if we reached this point. Sad times.
run Rack::NotFound.new(File.expand_path("../build/404.html", __FILE__))
| 31.678571 | 73 | 0.764938 |
4ad21501bd22be51debe82a548c4a44bc56f5f32 | 3,653 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_02_25_050817) do
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.integer "record_id", null: false
t.integer "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
create_table "events", force: :cascade do |t|
t.string "name"
t.datetime "start_date"
t.datetime "end_date"
t.string "location"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "packages", force: :cascade do |t|
t.string "name"
t.integer "price"
t.integer "event_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["event_id"], name: "index_packages_on_event_id"
end
create_table "perks", force: :cascade do |t|
t.string "name"
t.string "description"
t.string "deliverable"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "limited", default: false
t.boolean "paid_addon", default: false
end
create_table "specs", force: :cascade do |t|
t.integer "qty"
t.integer "package_id"
t.integer "perk_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["package_id"], name: "index_specs_on_package_id"
t.index ["perk_id"], name: "index_specs_on_perk_id"
end
create_table "sponsorships", force: :cascade do |t|
t.integer "company_id"
t.integer "package_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["company_id"], name: "index_sponsorships_on_company_id"
t.index ["package_id"], name: "index_sponsorships_on_package_id"
end
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "provider"
t.string "uid"
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
end
| 37.27551 | 126 | 0.710101 |
08eaf4a2d6952921240fc628c05d70f809fbdbaf | 297 | cask :v1 => 'spotifybeta' do
version '1.0.0.588.g5dffdc66-3237'
sha256 '918101254dd8177dc66e2642edf65500ce3f13ef4ce089925fef5f77529979ab'
url "http://download.spotify.com/beta/spotify-app-#{version}.dmg"
homepage 'https://www.spotify.com/'
license :unknown
app 'SpotifyBeta.app'
end
| 27 | 75 | 0.754209 |
ffe60da97ff7b99a4d9f7c39b1abb60ba840f4b6 | 1,470 | # MIT License
#
# Copyright (c) 2019 Nanotify
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require "bundler/setup"
require "nanocurrency"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 40.833333 | 80 | 0.772789 |
38712614916087b2a14d3c0cc7ef9759bebc523e | 8,407 | module ThinkingSphinx
  # This class keeps track of the configuration settings for Sphinx and
  # generates the resulting configuration file for Sphinx to use.
#
# Here are the default settings, relative to RAILS_ROOT where relevant:
#
# config file:: config/#{environment}.sphinx.conf
# searchd log file:: log/searchd.log
# query log file:: log/searchd.query.log
# pid file:: log/searchd.#{environment}.pid
# searchd files:: db/sphinx/#{environment}/
# address:: 0.0.0.0 (all)
# port:: 3312
# allow star:: false
# mem limit:: 64M
# max matches:: 1000
# morphology:: stem_en
# charset type:: utf-8
# charset table:: nil
#
# If you want to change these settings, create a YAML file at
# config/sphinx.yml with settings for each environment, in a similar
# fashion to database.yml - using the following keys: config_file,
# searchd_log_file, query_log_file, pid_file, searchd_file_path, port,
# allow_star, mem_limit, max_matches, morphology, charset_type,
# charset_table. I think you've got the idea.
#
# Each setting in the YAML file is optional - so only put in the ones you
# want to change.
#
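  # An illustrative sphinx.yml (hypothetical values, keyed by environment):
  #
  #   development:
  #     port: 3313
  #     allow_star: true
  #     mem_limit: 128M
  #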
# Keep in mind, if for some particular reason you're using a version of
# Sphinx older than 0.9.8 r871 (that's prior to the proper 0.9.8 release),
# don't set allow_star to true.
#
class Configuration
attr_accessor :config_file, :searchd_log_file, :query_log_file,
:pid_file, :searchd_file_path, :address, :port, :allow_star, :mem_limit,
:max_matches, :morphology, :charset_type, :charset_dictpath, :charset_table, :app_root
attr_reader :environment
# Load in the configuration settings - this will look for config/sphinx.yml
# and parse it according to the current environment.
#
def initialize(app_root = Dir.pwd)
self.app_root = RAILS_ROOT if defined?(RAILS_ROOT)
self.app_root = Merb.root if defined?(Merb)
self.app_root ||= app_root
self.config_file = "#{app_root}/config/#{environment}.sphinx.conf"
self.searchd_log_file = "#{app_root}/log/searchd.log"
self.query_log_file = "#{app_root}/log/searchd.query.log"
self.pid_file = "#{app_root}/log/searchd.#{environment}.pid"
self.searchd_file_path = "#{app_root}/db/sphinx/#{environment}/"
self.port = 3312
self.allow_star = false
self.mem_limit = "64M"
self.max_matches = 1000
self.morphology = "stem_en"
self.charset_type = "utf-8"
self.charset_dictpath = nil
self.charset_table = nil
parse_config
end
def self.environment
@@environment ||= (
defined?(Merb) ? ENV['MERB_ENV'] : ENV['RAILS_ENV']
) || "development"
end
def environment
self.class.environment
end
# Generate the config file for Sphinx by using all the settings defined and
# looping through all the models with indexes to build the relevant
# indexer and searchd configuration, and sources and indexes details.
#
def build(file_path=nil)
load_models
file_path ||= "#{self.config_file}"
database_confs = YAML.load(File.open("#{app_root}/config/database.yml"))
database_confs.symbolize_keys!
database_conf = database_confs[environment.to_sym]
database_conf.symbolize_keys!
open(file_path, "w") do |file|
file.write <<-CONFIG
indexer
{
mem_limit = #{self.mem_limit}
}
searchd
{
port = #{self.port}
log = #{self.searchd_log_file}
query_log = #{self.query_log_file}
read_timeout = 5
max_children = 30
pid_file = #{self.pid_file}
max_matches = #{self.max_matches}
}
CONFIG
ThinkingSphinx.indexed_models.each do |model|
model = model.constantize
sources = []
prefixed_fields = []
infixed_fields = []
model.indexes.each_with_index do |index, i|
# Set up associations and joins
index.link!
attr_sources = index.attributes.collect { |attrib|
attrib.to_sphinx_clause
}.join("\n ")
adapter = case database_conf[:adapter]
when "postgresql"
"pgsql"
when "mysql"
"mysql"
else
            raise "Unsupported Database Adapter: Sphinx only supports MySQL and PostgreSQL"
end
file.write <<-SOURCE
source #{model.name.downcase}_#{i}_core
{
type = #{adapter}
sql_host = #{database_conf[:host] || "localhost"}
sql_user = #{database_conf[:username]}
sql_pass = #{database_conf[:password]}
sql_db = #{database_conf[:database]}
sql_query_pre = #{(["utf-8","zh_cn.utf-8"].include?(charset_type)) && adapter == "mysql" ? "SET NAMES utf8" : ""}
sql_query_pre = #{index.to_sql_query_pre}
sql_query = #{index.to_sql.gsub(/\n/, ' ')}
sql_query_range = #{index.to_sql_query_range}
sql_query_info = #{index.to_sql_query_info}
#{attr_sources}
}
SOURCE
if index.delta?
file.write <<-SOURCE
source #{model.name.downcase}_#{i}_delta : #{model.name.downcase}_#{i}_core
{
sql_query_pre = #{(["utf-8","zh_cn.utf-8"].include?(charset_type)) && adapter == "mysql" ? "SET NAMES utf8" : ""}
sql_query = #{index.to_sql(:delta => true).gsub(/\n/, ' ')}
sql_query_range = #{index.to_sql_query_range :delta => true}
}
SOURCE
end
sources << "#{model.name.downcase}_#{i}_core"
end
source_list = sources.collect { |s| "source = #{s}" }.join("\n")
delta_list = source_list.gsub(/_core$/, "_delta")
file.write <<-INDEX
index #{model.name.downcase}_core
{
#{source_list}
morphology = #{self.morphology}
path = #{self.searchd_file_path}/#{model.name.downcase}_core
charset_type = #{self.charset_type}
INDEX
unless self.charset_dictpath.nil?
file.write <<-INDEX
charset_dictpath = #{self.charset_dictpath}
INDEX
end
unless self.charset_table.nil?
file.write <<-INDEX
charset_table = #{self.charset_table}
INDEX
end
if self.allow_star
file.write <<-INDEX
enable_star = 1
min_prefix_len = 1
INDEX
end
file.write("}\n")
if model.indexes.any? { |index| index.delta? }
file.write <<-INDEX
index #{model.name.downcase}_delta : #{model.name.downcase}_core
{
#{delta_list}
path = #{self.searchd_file_path}/#{model.name.downcase}_delta
}
index #{model.name.downcase}
{
type = distributed
local = #{model.name.downcase}_core
local = #{model.name.downcase}_delta
charset_type = #{self.charset_type}
}
INDEX
else
file.write <<-INDEX
index #{model.name.downcase}
{
type = distributed
local = #{model.name.downcase}_core
}
INDEX
end
end
end
end
# Make sure all models are loaded - without reloading any that
# ActiveRecord::Base is already aware of (otherwise we start to hit some
# messy dependencies issues).
#
def load_models
Dir["#{app_root}/app/models/**/*.rb"].each do |file|
model_name = file.gsub(/^.*\/([\w_]+)\.rb/, '\1')
next if model_name.nil?
next if ::ActiveRecord::Base.send(:subclasses).detect { |model|
model.name == model_name
}
begin
model_name.camelize.constantize
rescue NameError
next
end
end
end
private
# Parse the config/sphinx.yml file - if it exists - then use the attribute
# accessors to set the appropriate values. Nothing too clever.
#
def parse_config
path = "#{app_root}/config/sphinx.yml"
      return unless File.exist?(path)
      conf = YAML.load(ERB.new(File.read(path)).result)[environment]
conf.each do |key,value|
self.send("#{key}=", value) if self.methods.include?("#{key}=")
end unless conf.nil?
end
end
end
| 31.965779 | 118 | 0.5995 |
1d629a68c3961ff5937d930faa0c3b3d7d85c19d | 704 | # frozen_string_literal: true
# Copyright 2018 Bryan Knouse, Magus Pereira, Charlie Evans, Taraqur Rahman,
# Nick Feuer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Message < ApplicationRecord
end
| 35.2 | 76 | 0.771307 |
f86cea8e04b89b6edd0deea49b30d6f63a4609ca | 2,389 | # frozen_string_literal: true
# Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
# == Schema Information
#
# Table name: comments
#
# id :integer not null, primary key
# user_id :integer
# commentable_id :integer
# commentable_type :string(255)
# private :boolean
# title :string(255) default("")
# comment :text
# created_at :datetime
# updated_at :datetime
# state :string(16) default("Expanded"), not null
#
class Comment < ActiveRecord::Base
belongs_to :user
belongs_to :commentable, polymorphic: true
scope :created_by, ->(user) { where(user_id: user.id) }
validates_presence_of :user, :commentable, :comment
has_paper_trail class_name: "Version", meta: {related: :commentable},
ignore: [:state]
before_create :subscribe_mentioned_users
after_create :subscribe_user_to_entity, :notify_subscribers
def expanded?
state == "Expanded"
end
def collapsed?
state == "Collapsed"
end
private
# Add user to subscribed_users field on entity
def subscribe_user_to_entity(u = user)
commentable.subscribed_users << u.id
commentable.save
end
# Notify subscribed users when a comment is added, unless user created this comment
def notify_subscribers
commentable.subscribed_users.reject { |user_id| user_id == user.id }.each do |subscriber_id|
if subscriber = User.find_by_id(subscriber_id)
SubscriptionMailer.comment_notification(subscriber, self).deliver_now
end
end
end
# If a user is mentioned in the comment body, subscribe them to the entity
# before creation, so that they are sent an email notification
def subscribe_mentioned_users
# Scan for usernames mentioned in the comment,
# e.g. "Hi @example_user, take a look at this lead. Please show @another_user"
comment.scan(/@([a-zA-Z0-9_-]+)/).map(&:first).each do |username|
if (mentioned_user = User.find_by_username(username))
subscribe_user_to_entity(mentioned_user)
end
end
end
ActiveSupport.run_load_hooks(:fat_free_crm_comment, self)
end
| 31.434211 | 96 | 0.673085 |
e8bd6ca9756610fd4b844c4ecad3fa284992262f | 1,824 | # encoding: utf-8
require 'logstash/namespace'
require 'logstash/filters/base'
# This filter automatically extracts all numbers found inside a string
#
# This is useful when you have lines that don't match a grok pattern
# or use JSON but you still need to extract numbers.
#
# Each number is returned in a @fields.intX or @fields.floatX field,
# where X indicates the order in which the number was found.
#
# The fields produced by this filter are especially useful in combination
# with Kibana's number plotting features.
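#
# Worked example (derived from the filter code below, not from upstream docs):
# a message of "took 5 ms and 0.25 s" produces int1 => 5 and float1 => 0.25.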
class LogStash::Filters::ExtractNumbers < LogStash::Filters::Base
config_name 'extractnumbers'
milestone 1
# The source field for the data. By default is message.
config :source, :validate => :string, :default => 'message'
public
def register
end
public
def filter(event)
integers = nil
floats = nil
msg = event[@source]
if not msg
return
end
# If for some reason the field is an array of values, take the first only.
msg = msg.first if msg.is_a?(Array)
fields = msg.split
for elem in fields
int = str_as_integer(elem)
if int != nil
if not integers
integers = Array.new
end
integers.push(int)
next
end
f = str_as_float(elem)
if f != nil
if not floats
floats = Array.new
end
floats.push(f)
end
end
if integers
index = 0
for i in integers
index += 1
event["int" + index.to_s] = i
end
end
if floats
index = 0
for f in floats
index += 1
event["float" + index.to_s] = f
end
end
end
def str_as_integer(str)
Integer(str) rescue nil
end
def str_as_float(str)
Float(str) rescue nil
end
end # class LogStash::Filters::ExtractNumbers
| 21.209302 | 78 | 0.633772 |
e29ceb5e8725154556ada39c2a526f42359ea627 | 6,134 | class ReplaceProjectDetailsAndDropOldImgFunction < ActiveRecord::Migration
def up
execute <<-SQL
CREATE OR REPLACE VIEW "1".project_details AS
SELECT p.id AS project_id,
p.id,
p.user_id,
p.name,
p.headline,
p.budget,
p.goal,
p.about_html,
p.permalink,
p.video_embed_url,
p.video_url,
c.name_en AS category_name,
c.id AS category_id,
original_image(p.*) AS original_image,
thumbnail_image(p.*, 'thumb'::text) AS thumb_image,
thumbnail_image(p.*, 'small'::text) AS small_image,
thumbnail_image(p.*, 'large'::text) AS large_image,
thumbnail_image(p.*, 'video_cover'::text) AS video_cover_image,
COALESCE(pt.progress, 0::numeric) AS progress,
COALESCE(pt.pledged, 0::numeric) AS pledged,
COALESCE(pt.total_contributions, 0::bigint) AS total_contributions,
p.state,
p.expires_at,
zone_expires_at(p.*) AS zone_expires_at,
p.online_date,
p.sent_to_analysis_at,
is_published(p.*) AS is_published,
is_expired(p.*) AS is_expired,
open_for_contributions(p.*) AS open_for_contributions,
p.online_days,
remaining_time_json(p.*) AS remaining_time,
( SELECT count(pp_1.*) AS count
FROM project_posts pp_1
WHERE pp_1.project_id = p.id) AS posts_count,
json_build_object('city', COALESCE(ct.name, u.address_city), 'state_acronym', COALESCE(st.acronym, u.address_state::character varying), 'state', COALESCE(st.name, u.address_state::character varying)) AS address,
json_build_object('id', u.id, 'name', u.name) AS "user",
count(DISTINCT pn.*) FILTER (WHERE pn.template_name = 'reminder'::text) AS reminder_count,
is_owner_or_admin(p.user_id) AS is_owner_or_admin,
user_signed_in() AS user_signed_in,
current_user_already_in_reminder(p.*) AS in_reminder,
count(pp.*) AS total_posts,
"current_user"() = 'admin'::name AS is_admin_role
FROM projects p
JOIN categories c ON c.id = p.category_id
JOIN users u ON u.id = p.user_id
LEFT JOIN project_posts pp ON pp.project_id = p.id
LEFT JOIN "1".project_totals pt ON pt.project_id = p.id
LEFT JOIN cities ct ON ct.id = p.city_id
LEFT JOIN states st ON st.id = ct.state_id
LEFT JOIN project_notifications pn ON pn.project_id = p.id
GROUP BY p.id, c.id, u.id, c.name_en, ct.name, u.address_city, st.acronym, u.address_state, st.name, pt.progress, pt.pledged, pt.total_contributions, p.state, p.expires_at, p.sent_to_analysis_at, pt.total_payment_service_fee;
DROP FUNCTION public.img_thumbnail(projects, text);
SQL
end
def down
execute <<-SQL
CREATE OR REPLACE FUNCTION public.img_thumbnail(projects, size text)
RETURNS text
LANGUAGE sql
STABLE
AS $function$
SELECT
'https://' || settings('aws_host') ||
'/' || settings('aws_bucket') ||
'/uploads/project/uploaded_image/' || $1.id::text ||
'/project_thumb_' || size || '_' || $1.uploaded_image
$function$;
CREATE OR REPLACE VIEW "1".project_details AS
SELECT p.id AS project_id,
p.id,
p.user_id,
p.name,
p.headline,
p.budget,
p.goal,
p.about_html,
p.permalink,
p.video_embed_url,
p.video_url,
c.name_en AS category_name,
c.id AS category_id,
original_image(p.*) AS original_image,
img_thumbnail(p.*, 'thumb'::text) AS thumb_image,
img_thumbnail(p.*, 'small'::text) AS small_image,
img_thumbnail(p.*, 'large'::text) AS large_image,
img_thumbnail(p.*, 'video_cover'::text) AS video_cover_image,
COALESCE(pt.progress, 0::numeric) AS progress,
COALESCE(pt.pledged, 0::numeric) AS pledged,
COALESCE(pt.total_contributions, 0::bigint) AS total_contributions,
p.state,
p.expires_at,
zone_expires_at(p.*) AS zone_expires_at,
p.online_date,
p.sent_to_analysis_at,
is_published(p.*) AS is_published,
is_expired(p.*) AS is_expired,
open_for_contributions(p.*) AS open_for_contributions,
p.online_days,
remaining_time_json(p.*) AS remaining_time,
( SELECT count(pp_1.*) AS count
FROM project_posts pp_1
WHERE pp_1.project_id = p.id) AS posts_count,
json_build_object('city', COALESCE(ct.name, u.address_city), 'state_acronym', COALESCE(st.acronym, u.address_state::character varying), 'state', COALESCE(st.name, u.address_state::character varying)) AS address,
json_build_object('id', u.id, 'name', u.name) AS "user",
count(DISTINCT pn.*) FILTER (WHERE pn.template_name = 'reminder'::text) AS reminder_count,
is_owner_or_admin(p.user_id) AS is_owner_or_admin,
user_signed_in() AS user_signed_in,
current_user_already_in_reminder(p.*) AS in_reminder,
count(pp.*) AS total_posts,
"current_user"() = 'admin'::name AS is_admin_role
FROM projects p
JOIN categories c ON c.id = p.category_id
JOIN users u ON u.id = p.user_id
LEFT JOIN project_posts pp ON pp.project_id = p.id
LEFT JOIN "1".project_totals pt ON pt.project_id = p.id
LEFT JOIN cities ct ON ct.id = p.city_id
LEFT JOIN states st ON st.id = ct.state_id
LEFT JOIN project_notifications pn ON pn.project_id = p.id
GROUP BY p.id, c.id, u.id, c.name_en, ct.name, u.address_city, st.acronym, u.address_state, st.name, pt.progress, pt.pledged, pt.total_contributions, p.state, p.expires_at, p.sent_to_analysis_at, pt.total_payment_service_fee;
SQL
end
end
| 46.824427 | 233 | 0.611673 |
e8ef034f8ab55352a95e77dbca38b78c5e1b0eea | 1,232 | module HBW
module Common
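    # Test double for an HTTP API: serves canned responses loaded from YAML.
    # Illustrative file shape (hypothetical; inferred from #fetch_response,
    # which looks up responses[method][url][query_string]):
    #
    #   get:
    #     /api/thing:
    #       'param=value': { 'result' => 'ok' }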
class YMLAPI
attr_accessor :responses
class << self
def build(yml_api_file_path)
new(YAML.load_file(yml_api_file_path))
end
end
def initialize(responses)
@responses = responses
end
%w(get put patch post delete).each do |method|
define_method method do |url, *params|
choose_response(method, url, *params)
end
end
def choose_response(method, url, params = {})
HBW::Common::DummyResponse.new(fetch_response(method, url, params))
end
def load(response_file_path)
new_responses = if File.exist?(response_file_path)
YAML.load_file(response_file_path)
else
{}
end
        @responses = responses.deep_merge(new_responses) # merge newly loaded responses into the existing set
end
private
def fetch_response(method, url, params)
responses.fetch(method).fetch(url).fetch(URI.unescape(params.to_query)[0..1021])
end
end
class DummyResponse
attr_accessor :body, :status
def initialize(data)
@body = data
@status = 200
end
end
end
end
| 23.245283 | 88 | 0.574675 |
1a98156fa19681f35f40c08d54c68fb3be88b9c7 | 1,564 | require 'spec_helper'
describe 'page_object.open_xxx' do
context "browser is closed" do
it "raises PageObjectWrapper::BrowserNotFound" do
gp = PageObjectWrapper.receive_page(:google_pagination)
expect{ gp.pagination_open 1 }.to raise_error(PageObjectWrapper::BrowserNotFound)
end
end
context "browser is opened" do
before(:all){
@b = Watir::Browser.new :chrome
PageObjectWrapper.use_browser @b
}
after(:all){ PageObjectWrapper.browser.quit }
context "invalid locator" do
it "raises PageObjectWrapper::InvalidPagination" do
gip = PageObjectWrapper.open_page :google_invalid_pagination
expect{ gip.invalid_pagination_open(1) }.to raise_error PageObjectWrapper::InvalidPagination
end
end
context "inappropriate number provided" do
it "raises Watir::Wait::TimeoutError" do
gp = PageObjectWrapper.open_page(:google_pagination)
expect{ gp.pagination_open(0)}.to raise_error Watir::Wait::TimeoutError
end
end
context "correct parameters" do
it "opens browser on provided subpage returns corresponding page_object" do
n = 10
yp = PageObjectWrapper.open_page(:yandex_pagination)
yp.pagination_open(n).should be_a PageObjectWrapper::PageObject
yp.validate_current_number?(n).should be_true
gp = PageObjectWrapper.open_page(:google_pagination)
gp.pagination_open(n).should be_a PageObjectWrapper::PageObject
gp.validate_current_number?(n).should be_true
end
end
end
end
| 34.755556 | 100 | 0.717391 |
1aae56e83d919fd2fe5c56cc9ddfdc74bb44d869 | 559 | define Theme do
has_many :theme_files, :_as => :files, :find => stub_theme_file
has_one :comments_counter, stub_counter
instance :theme,
:id => 'theme-1',
:name => 'Theme 1',
:version => '1',
:author => 'author',
:author_link => 'author_link',
:homepage => 'homepage',
:summary => 'summary',
:path => '/path/to/themes/site-1/theme-1/',
:save => true,
:update_attributes => true,
:destroy => true,
:errors => []
end
| 26.619048 | 65 | 0.488372 |
91b24bbd9a523274c54d835d90e089c1bc4de737 | 59 | require 'rspec'
require 'skemata/version'
include Skemata
| 11.8 | 25 | 0.79661 |
61f56f46ee5120aa2e39276065fa13758d2c7003 | 855 | # (C) Copyright 2021 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
require 'spec_helper'
RSpec.describe OneviewSDK::API2600::Synergy::Scope do
include_context 'shared context'
it 'inherits from OneviewSDK::API2400::Synergy::Scope' do
expect(described_class).to be < OneviewSDK::API2400::Synergy::Scope
end
end
| 40.714286 | 84 | 0.774269 |
e2d5d16a274021c35a0151bfd0184221e6eadce4 | 5,964 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'date'
module DocuSign_eSign
# Contains information about custom fields.
class CustomFields
# An array of list custom fields.
attr_accessor :list_custom_fields
# An array of text custom fields.
attr_accessor :text_custom_fields
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'list_custom_fields' => :'listCustomFields',
:'text_custom_fields' => :'textCustomFields'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'list_custom_fields' => :'Array<ListCustomField>',
:'text_custom_fields' => :'Array<TextCustomField>'
}
end
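
    # Illustrative construction (a sketch; the field values are assumptions):
    #   CustomFields.new(listCustomFields: [], textCustomFields: [])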
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'listCustomFields')
if (value = attributes[:'listCustomFields']).is_a?(Array)
self.list_custom_fields = value
end
end
if attributes.has_key?(:'textCustomFields')
if (value = attributes[:'textCustomFields']).is_a?(Array)
self.text_custom_fields = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
list_custom_fields == o.list_custom_fields &&
text_custom_fields == o.text_custom_fields
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[list_custom_fields, text_custom_fields].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
      end # else: data not found in attributes (hash); not an issue, as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = DocuSign_eSign.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
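# --- Usage sketch (illustrative; not part of the generated gem) ---
# A minimal round trip through CustomFields#build_from_hash and #to_hash,
# using the JSON-style symbol keys from attribute_map. Guarded so that
# requiring this file stays side-effect free.
if $0 == __FILE__
  fields = DocuSign_eSign::CustomFields.new
  fields.build_from_hash(:listCustomFields => [], :textCustomFields => [])
  puts fields.to_hash.inspect # => {:listCustomFields=>[], :textCustomFields=>[]}
end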
| 29.37931 | 123 | 0.630449 |
03f84b488b7f0d4b4d134351ea2c6987832e2698 | 19 | move 'climb rubble' | 19 | 19 | 0.789474 |
e2933dbb4c5a8dae0aa1ac0e423a609ca89b68e0 | 9,612 | module Steep
class Source
class LocatedAnnotation
attr_reader :line
attr_reader :annotation
attr_reader :source
def initialize(line:, source:, annotation:)
@line = line
@source = source
@annotation = annotation
end
def ==(other)
other.is_a?(LocatedAnnotation) &&
other.line == line &&
other.annotation == annotation
end
end
attr_reader :path
attr_reader :node
attr_reader :mapping
def initialize(path:, node:, mapping:)
@path = path
@node = node
@mapping = mapping
end
class Builder < ::Parser::Builders::Default
def string_value(token)
value(token)
end
self.emit_lambda = true
self.emit_procarg0 = true
end
def self.parser
::Parser::Ruby25.new(Builder.new).tap do |parser|
parser.diagnostics.all_errors_are_fatal = true
parser.diagnostics.ignore_warnings = true
end
end
def self.parse(source_code, path:, labeling: ASTUtils::Labeling.new)
buffer = ::Parser::Source::Buffer.new(path.to_s, 1)
buffer.source = source_code
node = parser.parse(buffer).yield_self do |n|
if n
labeling.translate(n, {})
else
return
end
end
annotations = []
_, comments, _ = yield_self do
buffer = ::Parser::Source::Buffer.new(path.to_s)
buffer.source = source_code
parser = ::Parser::Ruby25.new
parser.tokenize(buffer)
end
buffer = AST::Buffer.new(name: path, content: source_code)
comments.each do |comment|
src = comment.text.gsub(/\A#/, '')
annotation = Steep::Parser.parse_annotation_opt(src,
buffer: buffer,
offset: comment.location.expression.begin_pos+1)
if annotation
annotations << LocatedAnnotation.new(line: comment.location.line, source: src, annotation: annotation)
end
end
mapping = {}
construct_mapping(node: node, annotations: annotations, mapping: mapping)
annotations.each do |annot|
mapping[node.__id__] = [] unless mapping.key?(node.__id__)
mapping[node.__id__] << annot.annotation
end
new(path: path, node: node, mapping: mapping)
end
def self.construct_mapping(node:, annotations:, mapping:, line_range: nil)
case node.type
when :if
if node.loc.is_a?(::Parser::Source::Map::Ternary)
# Skip ternary operator
each_child_node node do |child|
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
else
if node.loc.expression.begin_pos == node.loc.keyword.begin_pos
construct_mapping(node: node.children[0],
annotations: annotations,
mapping: mapping,
line_range: nil)
if node.children[1]
if node.loc.keyword.source == "if" || node.loc.keyword.source == "elsif"
              then_start = node.loc.begin&.last_line || node.children[0].loc.last_line
then_end = node.children[2] ? node.loc.else.line : node.loc.last_line
else
then_start = node.loc.else.last_line
then_end = node.loc.last_line
end
construct_mapping(node: node.children[1],
annotations: annotations,
mapping: mapping,
line_range: then_start...then_end)
end
if node.children[2]
if node.loc.keyword.source == "if" || node.loc.keyword.source == "elsif"
else_start = node.loc.else.last_line
else_end = node.loc.last_line
else
else_start = node.loc.begin&.last_line || node.children[0].loc.last_line
else_end = node.children[1] ? node.loc.else.line : node.loc.last_line
end
construct_mapping(node: node.children[2],
annotations: annotations,
mapping: mapping,
line_range: else_start...else_end)
end
else
# postfix if/unless
each_child_node(node) do |child|
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
end
end
when :while, :until
if node.loc.expression.begin_pos == node.loc.keyword.begin_pos
construct_mapping(node: node.children[0],
annotations: annotations,
mapping: mapping,
line_range: nil)
if node.children[1]
body_start = node.children[0].loc.last_line
body_end = node.loc.end.line
construct_mapping(node: node.children[1],
annotations: annotations,
mapping: mapping,
line_range: body_start...body_end)
end
else
# postfix while
each_child_node(node) do |child|
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
end
when :while_post, :until_post
construct_mapping(node: node.children[0],
annotations: annotations,
mapping: mapping,
line_range: nil)
if node.children[1]
body_start = node.loc.expression.line
body_end = node.loc.keyword.line
construct_mapping(node: node.children[1],
annotations: annotations,
mapping: mapping,
line_range: body_start...body_end)
end
when :case
if node.children[0]
construct_mapping(node: node.children[0], annotations: annotations, mapping: mapping, line_range: nil)
end
if node.loc.else
else_node = node.children.last
else_start = node.loc.else.last_line
else_end = node.loc.end.line
construct_mapping(node: else_node,
annotations: annotations,
mapping: mapping,
line_range: else_start...else_end)
end
node.children.drop(1).each do |child|
if child&.type == :when
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
end
when :when
last_cond = node.children[-2]
body = node.children.last
      node.children.take(node.children.size - 1).each do |child|
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
if body
cond_end = last_cond.loc.last_line+1
body_end = body.loc.last_line
construct_mapping(node: body,
annotations: annotations,
mapping: mapping,
line_range: cond_end...body_end)
end
when :rescue
if node.children.last
else_node = node.children.last
else_start = node.loc.else.last_line
else_end = node.loc.last_line
construct_mapping(node: else_node,
annotations: annotations,
mapping: mapping,
line_range: else_start...else_end)
end
each_child_node(node) do |child|
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
else
each_child_node(node) do |child|
construct_mapping(node: child, annotations: annotations, mapping: mapping, line_range: nil)
end
end
associated_annotations = annotations.select do |annot|
case node.type
when :def, :module, :class, :block, :ensure, :defs
loc = node.loc
loc.line <= annot.line && annot.line < loc.last_line
when :resbody
if node.loc.keyword.begin_pos == node.loc.expression.begin_pos
# skip postfix rescue
loc = node.loc
loc.line <= annot.line && annot.line < loc.last_line
end
else
if line_range
line_range.begin <= annot.line && annot.line < line_range.end
end
end
end
associated_annotations.each do |annot|
mapping[node.__id__] = [] unless mapping.key?(node.__id__)
mapping[node.__id__] << annot.annotation
annotations.delete annot
end
end
def self.each_child_node(node)
node.children.each do |child|
if child.is_a?(::AST::Node)
yield child
end
end
end
def annotations(block:, builder:, current_module:)
AST::Annotation::Collection.new(annotations: mapping[block.__id__] || [], builder: builder, current_module: current_module)
end
def each_annotation
if block_given?
mapping.each_key do |id|
node = ObjectSpace._id2ref(id)
yield node, mapping[id]
end
else
enum_for :each_annotation
end
end
end
end
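# --- Usage sketch (illustrative; assumes the full Steep runtime, including
# ASTUtils::Labeling, Steep::Parser, and Steep::AST::Buffer, is loaded) ---
#
#   source = Steep::Source.parse(<<~RUBY, path: Pathname("demo.rb"))
#     # @type var x: Integer
#     x = 1
#   RUBY
#   source.each_annotation do |node, annotations|
#     puts "#{node.type}: #{annotations.size} annotation(s)"
#   end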
| 32.805461 | 129 | 0.55077 |
08a9c7d40c2fd954f9747857f3f6b8acf11b5590 | 6,672 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Monitoring
module V3
# The protocol for the `ListUptimeCheckConfigs` request.
# @!attribute [rw] parent
# @return [String]
# Required. The project whose Uptime check configurations are listed. The format is:
#
# projects/[PROJECT_ID_OR_NUMBER]
# @!attribute [rw] page_size
# @return [Integer]
# The maximum number of results to return in a single response. The server
# may further constrain the maximum number of results returned in a single
# page. If the page_size is <=0, the server will decide the number of results
# to be returned.
# @!attribute [rw] page_token
# @return [String]
# If this field is not empty then it must contain the `nextPageToken` value
# returned by a previous call to this method. Using this field causes the
# method to return more results from the previous method call.
class ListUptimeCheckConfigsRequest; end
# The protocol for the `ListUptimeCheckConfigs` response.
# @!attribute [rw] uptime_check_configs
# @return [Array<Google::Monitoring::V3::UptimeCheckConfig>]
# The returned Uptime check configurations.
# @!attribute [rw] next_page_token
# @return [String]
# This field represents the pagination token to retrieve the next page of
# results. If the value is empty, it means no further results for the
# request. To retrieve the next page of results, the value of the
# next_page_token is passed to the subsequent List method call (in the
# request message's page_token field).
# @!attribute [rw] total_size
# @return [Integer]
# The total number of Uptime check configurations for the project,
# irrespective of any pagination.
class ListUptimeCheckConfigsResponse; end
# The protocol for the `GetUptimeCheckConfig` request.
# @!attribute [rw] name
# @return [String]
# Required. The Uptime check configuration to retrieve. The format is:
#
# projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID]
class GetUptimeCheckConfigRequest; end
# The protocol for the `CreateUptimeCheckConfig` request.
# @!attribute [rw] parent
# @return [String]
# Required. The project in which to create the Uptime check. The format is:
#
# projects/[PROJECT_ID_OR_NUMBER]
# @!attribute [rw] uptime_check_config
# @return [Google::Monitoring::V3::UptimeCheckConfig]
# Required. The new Uptime check configuration.
class CreateUptimeCheckConfigRequest; end
# The protocol for the `UpdateUptimeCheckConfig` request.
# @!attribute [rw] update_mask
# @return [Google::Protobuf::FieldMask]
# Optional. If present, only the listed fields in the current Uptime check
# configuration are updated with values from the new configuration. If this
# field is empty, then the current configuration is completely replaced with
# the new configuration.
# @!attribute [rw] uptime_check_config
# @return [Google::Monitoring::V3::UptimeCheckConfig]
# Required. If an `updateMask` has been specified, this field gives
# the values for the set of fields mentioned in the `updateMask`. If an
# `updateMask` has not been given, this Uptime check configuration replaces
# the current configuration. If a field is mentioned in `updateMask` but
      #   the corresponding field is omitted in this partial Uptime check
# configuration, it has the effect of deleting/clearing the field from the
# configuration on the server.
#
# The following fields can be updated: `display_name`,
# `http_check`, `tcp_check`, `timeout`, `content_matchers`, and
# `selected_regions`.
class UpdateUptimeCheckConfigRequest; end
# The protocol for the `DeleteUptimeCheckConfig` request.
# @!attribute [rw] name
# @return [String]
# Required. The Uptime check configuration to delete. The format is:
#
# projects/[PROJECT_ID_OR_NUMBER]/uptimeCheckConfigs/[UPTIME_CHECK_ID]
class DeleteUptimeCheckConfigRequest; end
# The protocol for the `ListUptimeCheckIps` request.
# @!attribute [rw] page_size
# @return [Integer]
# The maximum number of results to return in a single response. The server
# may further constrain the maximum number of results returned in a single
# page. If the page_size is <=0, the server will decide the number of results
# to be returned.
# NOTE: this field is not yet implemented
# @!attribute [rw] page_token
# @return [String]
# If this field is not empty then it must contain the `nextPageToken` value
# returned by a previous call to this method. Using this field causes the
# method to return more results from the previous method call.
# NOTE: this field is not yet implemented
class ListUptimeCheckIpsRequest; end
# The protocol for the `ListUptimeCheckIps` response.
# @!attribute [rw] uptime_check_ips
# @return [Array<Google::Monitoring::V3::UptimeCheckIp>]
# The returned list of IP addresses (including region and location) that the
# checkers run from.
# @!attribute [rw] next_page_token
# @return [String]
# This field represents the pagination token to retrieve the next page of
# results. If the value is empty, it means no further results for the
# request. To retrieve the next page of results, the value of the
# next_page_token is passed to the subsequent List method call (in the
# request message's page_token field).
# NOTE: this field is not yet implemented
class ListUptimeCheckIpsResponse; end
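      # Usage sketch (illustrative, not generated code): the classes above are
      # documentation stubs; the client library accepts plain hashes shaped
      # like the attributes documented here, e.g. for
      # `ListUptimeCheckConfigsRequest`:
      #
      #   request = { parent: "projects/[PROJECT_ID_OR_NUMBER]", page_size: 50 }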
end
end
end | 49.058824 | 94 | 0.667416 |
337f8713ecaa7e50f96fa42481a28f748a12bb53 | 450 | require 'rake'
require 'active_record'
require 'yaml/store'
require 'ostruct'
require 'date'
require 'bundler/setup'
Bundler.require
ActiveRecord::Base.establish_connection(
:adapter => "sqlite3",
:database => "db/artists.sqlite"
)
sql = <<-SQL
CREATE TABLE IF NOT EXISTS artists (
id INTEGER PRIMARY KEY,
name TEXT,
genre TEXT,
age INTEGER,
hometown TEXT
)
SQL
ActiveRecord::Base.connection.execute(sql)
require_relative "../artist.rb"
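# Usage sketch (illustrative): assuming ../artist.rb defines
# `class Artist < ActiveRecord::Base`, the table created above can be used as:
#
#   Artist.create(name: "Hypothetical Act", genre: "jazz", age: 40, hometown: "Chicago")
#   Artist.where(genre: "jazz").count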
| 14.0625 | 42 | 0.742222 |
334f7a5b3744cc85690da362511639b9463710ba | 237 | module ReceiptParser
class ReceiptsService
def initialize(params)
@params = params
end
def parse
@parser = ReceiptParser.default_parser.constantize.new(@params).parse
end
def search
end
end
end
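# Usage sketch (illustrative): assumes `ReceiptParser.default_parser` returns
# the fully qualified name, as a String, of a parser class whose instances
# accept the params hash and respond to #parse.
#
#   ReceiptParser::ReceiptsService.new(image_path: "receipt.png").parse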
| 14.8125 | 75 | 0.675105 |
1a1677544297b54e43d82e6f66609bff5175e3e1 | 2,037 | class IgnitionMsgs9 < Formula
desc "Middleware protobuf messages for robotics"
homepage "https://github.com/ignitionrobotics/ign-msgs"
url "https://github.com/ignitionrobotics/ign-msgs.git", branch: "main"
version "8.999.999~0~20211227"
license "Apache-2.0"
depends_on "protobuf-c" => :build
depends_on "cmake"
depends_on "ignition-cmake2"
depends_on "ignition-math7"
depends_on "ignition-tools"
depends_on macos: :high_sierra # c++17
depends_on "pkg-config"
depends_on "protobuf"
depends_on "tinyxml2"
def install
cmake_args = std_cmake_args
cmake_args << "-DBUILD_TESTING=Off"
cmake_args << "-DCMAKE_INSTALL_RPATH=#{rpath}"
mkdir "build" do
system "cmake", "..", *cmake_args
system "make", "install"
end
end
test do
(testpath/"test.cpp").write <<-EOS
#include <ignition/msgs.hh>
int main() {
      ignition::msgs::UInt32 msg;
return 0;
}
EOS
(testpath/"CMakeLists.txt").write <<-EOS
cmake_minimum_required(VERSION 3.10 FATAL_ERROR)
find_package(ignition-msgs9 QUIET REQUIRED)
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${IGNITION-MSGS_CXX_FLAGS}")
include_directories(${IGNITION-MSGS_INCLUDE_DIRS})
link_directories(${IGNITION-MSGS_LIBRARY_DIRS})
add_executable(test_cmake test.cpp)
target_link_libraries(test_cmake ignition-msgs9::ignition-msgs9)
EOS
# test building with pkg-config
system "pkg-config", "ignition-msgs9"
cflags = `pkg-config --cflags ignition-msgs9`.split
system ENV.cc, "test.cpp",
*cflags,
"-L#{lib}",
"-lignition-msgs9",
"-lc++",
"-o", "test"
system "./test"
# test building with cmake
mkdir "build" do
system "cmake", ".."
system "make"
system "./test_cmake"
end
# check for Xcode frameworks in bottle
cmd_not_grep_xcode = "! grep -rnI 'Applications[/]Xcode' #{prefix}"
system cmd_not_grep_xcode
end
end
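# Typical flow for a formula like the one above, assuming it is published in a
# tap (the tap name below is hypothetical):
#   brew tap osrf/simulation
#   brew install ignition-msgs9
#   brew test ignition-msgs9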
| 30.402985 | 75 | 0.641139 |
e8a269d97d1cddfea5f37dc7efccbd1123479921 | 4,988 | =begin
#TextMagic API
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.8
=end
require 'date'
module TextMagic
class GetMessagingStatResponse
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
}
end
# Attribute type mapping.
def self.swagger_types
{
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
    # Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
        # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
      end # else: data not found in attributes (hash); not an issue, as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = TextMagic.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
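# --- Usage sketch (illustrative; not part of the generated gem) ---
# GetMessagingStatResponse declares no attributes, so the round trip below
# yields an empty hash. Guarded so that requiring this file stays
# side-effect free.
if $0 == __FILE__
  resp = TextMagic::GetMessagingStatResponse.new
  resp.build_from_hash({})
  puts resp.to_hash.inspect # => {}
end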
| 28.502857 | 107 | 0.622294 |
62883b3daa397fda3593cd9f17c57b6a58ca5d77 | 1,978 | #
# Author:: Seth Chisamore (<[email protected]>)
# Cookbook Name:: python
# Attribute:: default
#
# Copyright 2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
default[:python][:basedir] = '/usr/local'
default[:python][:src] = "#{python.basedir}/src"
### uWSGI - Self-contained application container server
# http://projects.unbit.it/uwsgi/
#
# path to uwsgi binary
default[:python][:uwsgi][:bin] = '/usr/local/bin/uwsgi'
#
# log to file/udp
default[:python][:uwsgi][:log] = '/var/log/uwsgi.log'
# path (or name) of UNIX/TCP socket to bind to
default[:python][:uwsgi][:socket] = '/tmp/uwsgi.sock'
#
# write the masterpid to <file>
default[:python][:uwsgi][:pidfile] = '/var/run/uwsgi.pid'
#
# setuid & setgid to <id/username> (only root)
default[:python][:uwsgi][:uid] = 'uwsgi'
default[:python][:uwsgi][:gid] = 'uwsgi'
#
# limit the address space of processes to MB megabytes
default[:python][:uwsgi][:memory] = 256
#
# spawn <n> uwsgi worker processes
default[:python][:uwsgi][:workers] = 2
#
# app specific options, python module to use etc.
default[:python][:uwsgi][:app] = nil
# kill any process which takes longer than this many seconds to execute
default[:python][:uwsgi][:harakiri] = 30
#
# chdir to <dir> before app loading
default[:python][:uwsgi][:chdir] = ''
#
# name of python config module
default[:python][:uwsgi][:module] = ''
#
# environment variables that need to be available to uwsgi init
default[:python][:uwsgi][:envs] = []
| 30.430769 | 74 | 0.705763 |
e9271b61e7af8fb7f569502c1c85edeb35bb1452 | 5,453 | module ManageIQ::Providers::Vmware::InfraManager::EventParser
def self.event_to_hash(event, ems_id = nil)
log_header = "ems_id: [#{ems_id}] " unless ems_id.nil?
_log.debug { "#{log_header}event: [#{event.inspect}]" }
event_type = event['eventType']
if event_type.nil?
_log.error("#{log_header}eventType missing in event: [#{event.inspect}]")
raise MiqException::Error, "event must have an eventType"
end
chain_id = event['chainId']
if chain_id.nil?
_log.error("#{log_header}chainId missing in event: [#{event.inspect}]")
raise MiqException::Error, "event must have a chain_id"
end
is_task = (event_type == 'TaskEvent')
if is_task
changed_event = false
sub_event_type = event.fetch_path('info', 'name')
# Handle special cases
case sub_event_type
when nil
# Handle cases where event name is missing
sub_event_type = 'PowerOnVM_Task' if event['fullFormattedMessage'].to_s.downcase == 'task: power on virtual machine'
sub_event_type = 'DrsMigrateVM_Task' if sub_event_type.nil? && event.fetch_path('info', 'descriptionId') == 'Drm.ExecuteVMotionLRO'
if sub_event_type.nil?
_log.warn("#{log_header}Event Type cannot be determined for TaskEvent. Using generic eventType [TaskEvent] instead. event: [#{event.inspect}]")
sub_event_type = 'TaskEvent'
end
when 'Rename_Task', 'Destroy_Task'
# Handle case where event name is overloaded
sub_event_name = event.fetch_path('info', 'descriptionId').split('.').first
sub_event_name = case sub_event_name
when 'VirtualMachine' then 'VM'
when 'ClusterComputeResource' then 'Cluster'
else sub_event_name
end
sub_event_type.gsub!(/_/, "#{sub_event_name}_")
when 'MarkAsTemplate', 'MarkAsVirtualMachine'
# Handle case where, due to timing issues, the data may not be as expected
path_from, path_to = (sub_event_type == 'MarkAsTemplate' ? ['.vmtx', '.vmx'] : ['.vmx', '.vmtx'])
path = event.fetch_path('vm', 'path')
if !path.nil? && path[-(path_from.length)..-1] == path_from
path[-(path_from.length)..-1] = path_to
changed_event = true
end
end
_log.debug { "#{log_header}changed event: [#{event.inspect}]" } if changed_event
event_type = sub_event_type
elsif event_type == "EventEx"
sub_event_type = event['eventTypeId']
event_type = sub_event_type unless sub_event_type.blank?
end
# Build the event hash
result = {
:event_type => event_type,
:chain_id => chain_id,
:is_task => is_task,
:source => 'VC',
:message => event['fullFormattedMessage'],
:timestamp => event['createdTime'],
:full_data => event
}
result[:ems_id] = ems_id unless ems_id.nil?
result[:username] = event['userName'] unless event['userName'].blank?
# Get the vm information
vm_key = 'vm' if event.key?('vm')
vm_key = 'sourceVm' if event.key?('sourceVm')
vm_key = 'srcTemplate' if event.key?('srcTemplate')
unless vm_key.nil?
vm_data = event[vm_key]
vm_ems_ref = vm_data['vm']
result[:vm_ems_ref] = vm_ems_ref.to_s unless vm_ems_ref.nil?
vm_name = vm_data['name']
result[:vm_name] = URI.decode(vm_name) unless vm_name.nil?
vm_location = vm_data['path']
result[:vm_location] = vm_location unless vm_location.nil?
vm_uid_ems = vm_data['uuid']
result[:vm_uid_ems] = vm_uid_ems unless vm_uid_ems.nil?
end
# Get the dest vm information
has_dest = false
if ['sourceVm', 'srcTemplate'].include?(vm_key)
vm_data = event['vm']
unless vm_data.nil?
vm_ems_ref = vm_data['vm']
result[:dest_vm_ems_ref] = vm_ems_ref.to_s unless vm_ems_ref.nil?
vm_name = vm_data['name']
result[:dest_vm_name] = URI.decode(vm_name) unless vm_name.nil?
vm_location = vm_data['path']
result[:dest_vm_location] = vm_location unless vm_location.nil?
end
has_dest = true
elsif event.key?('destName')
result[:dest_vm_name] = event['destName']
has_dest = true
end
# Get the host information
host_name = event.fetch_path('host', 'name')
result[:host_name] = host_name unless host_name.nil?
host_ems_ref = event.fetch_path('host', 'host')
result[:host_ems_ref] = host_ems_ref.to_s unless host_ems_ref.nil?
# Get the dest host information
if has_dest
host_data = event['destHost'] || event['host']
unless host_data.nil?
        host_ems_ref = host_data['host']
        result[:dest_host_ems_ref] = host_ems_ref.to_s unless host_ems_ref.nil?
        host_name = host_data['name']
        result[:dest_host_name] = host_name unless host_name.nil?
end
end
result
end
def self.parse_new_target(event)
obj_type = event[:objType]
method = "#{obj_type.downcase}_update_to_hash"
public_send(method, event) if respond_to?(method)
end
def self.folder_update_to_hash(event)
mor = event[:mor]
klass = 'EmsFolder'
hash = {
:folders => [
{
:type => klass,
:ems_ref => mor,
:ems_ref_obj => mor,
:uid_ems => mor
}
]
}
return hash, klass, :uid_ems => mor
end
end
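# Usage sketch (illustrative; field values are hypothetical, and the parser
# relies on ManageIQ core extensions such as Hash#fetch_path):
#
#   event = {
#     'eventType'            => 'VmPoweredOnEvent',
#     'chainId'              => 42,
#     'createdTime'          => '2020-01-01T00:00:00Z',
#     'fullFormattedMessage' => 'demo-vm on host1 is powered on',
#     'vm'                   => { 'vm' => 'vm-123', 'name' => 'demo-vm' }
#   }
#   ManageIQ::Providers::Vmware::InfraManager::EventParser.event_to_hash(event, 1)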
| 35.409091 | 153 | 0.626444 |