hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
080a9c97b96def9e42426e38060c43d2a1d554df | 2,666 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'component_helper'
require 'java_buildpack/component/modular_component'
# Unit tests for ModularComponent: a component base class that composes
# multiple sub-components behind a single detect/compile/release lifecycle.
describe JavaBuildpack::Component::ModularComponent do
  include_context 'component_helper'

  let(:component) { StubModularComponent.new context }

  # supports? is abstract on the base class and must raise when not overridden.
  it 'should fail if supports? is unimplemented' do
    expect { component.supports? }.to raise_error
  end

  context do
    before do
      allow_any_instance_of(StubModularComponent).to receive(:supports?).and_return(false)
    end

    it 'should return nil from detect if not supported' do
      expect(component.detect).to be_nil
    end

    # command and sub_components are also abstract hooks of the base class.
    it 'should fail if methods are unimplemented' do
      expect { component.command }.to raise_error
      expect { component.sub_components(context) }.to raise_error
    end
  end

  context do
    let(:sub_component) { double('sub_component') }

    before do
      allow_any_instance_of(StubModularComponent).to receive(:supports?).and_return(true)
      allow_any_instance_of(StubModularComponent).to receive(:sub_components).and_return([sub_component, sub_component])
    end

    # detect should aggregate each sub-component's detect result.
    it 'should return name and version string from detect if supported' do
      allow(sub_component).to receive(:detect).and_return('sub_component=test-version', 'sub_component=test-version-2')
      detected = component.detect
      expect(detected).to include('sub_component=test-version')
      expect(detected).to include('sub_component=test-version-2')
    end

    it 'should call compile on each sub_component' do
      expect(sub_component).to receive(:compile).twice
      component.compile
    end

    it 'should call release on each sub_component and then command' do
      expect(sub_component).to receive(:release).twice
      allow_any_instance_of(StubModularComponent).to receive(:command).and_return('test-command')
      expect(component.release).to eq('test-command')
    end
  end
end
# Test stub that exposes ModularComponent's protected hook methods so the
# examples above can invoke them directly on an instance.
class StubModularComponent < JavaBuildpack::Component::ModularComponent
  public :command
  public :sub_components
  public :supports?
end
| 31.364706 | 120 | 0.746812 |
7a305cab29d65ac8e4961c8e0fa8c3b61495dd88 | 544 | require 'lino'
require_relative 'base'
require_relative 'mixins/profile'
require_relative 'mixins/environment'
module RubyLeiningen
  module Commands
    # Wraps the `lein uberjar` task, optionally passing the main namespace
    # to use as the jar's entry point.
    class Uberjar < Base
      include Mixins::Profile
      include Mixins::Environment

      # Appends the subcommand (and the optional main-namespace argument)
      # to the command line assembled by Base and the mixins.
      def configure_command(builder, opts)
        builder = super
        builder = builder.with_subcommand('uberjar')
        main_ns = opts[:main_namespace]
        main_ns ? builder.with_argument(main_ns) : builder
      end
    end
  end
end
| 21.76 | 73 | 0.705882 |
38153657403410c5f2662f19e734bd0ac06dd90b | 122 | class AddCalumnoToCitars < ActiveRecord::Migration[5.0]
# Adds the integer `calumno` column to the `citars` table (reversible).
def change
  add_column :citars, :calumno, :integer
end
end
| 20.333333 | 55 | 0.745902 |
f7b35e76925469c4243024411d9355abe7a40729 | 4,945 | require 'spec_helper'
# Specs for TodosFinder: filtering, sorting and target lookup of todos.
describe TodosFinder do
  describe '#execute' do
    let(:user) { create(:user) }
    let(:group) { create(:group) }
    let(:project) { create(:project, namespace: group) }
    let(:issue) { create(:issue, project: project) }
    let(:merge_request) { create(:merge_request, source_project: project) }
    let(:finder) { described_class }

    before do
      group.add_developer(user)
    end

    describe '#execute' do
      context 'filtering' do
        let!(:todo1) { create(:todo, user: user, project: project, target: issue) }
        let!(:todo2) { create(:todo, user: user, group: group, target: merge_request) }

        it 'returns correct todos when filtered by a project' do
          todos = finder.new(user, { project_id: project.id }).execute
          expect(todos).to match_array([todo1])
        end

        it 'returns correct todos when filtered by a group' do
          todos = finder.new(user, { group_id: group.id }).execute
          expect(todos).to match_array([todo1, todo2])
        end

        it 'returns correct todos when filtered by a type' do
          todos = finder.new(user, { type: 'Issue' }).execute
          expect(todos).to match_array([todo1])
        end

        # Group filtering must traverse the subgroup hierarchy, not just
        # the group itself.
        context 'with subgroups' do
          let(:subgroup) { create(:group, parent: group) }
          let!(:todo3) { create(:todo, user: user, group: subgroup, target: issue) }

          it 'returns todos from subgroups when filtered by a group' do
            todos = finder.new(user, { group_id: group.id }).execute
            expect(todos).to match_array([todo1, todo2, todo3])
          end
        end
      end

      context 'external authorization' do
        it_behaves_like 'a finder with external authorization service' do
          let!(:subject) { create(:todo, project: project, user: user) }
          let(:project_params) { { project_id: project.id } }
        end
      end
    end

    describe '#sort' do
      context 'by date' do
        let!(:todo1) { create(:todo, user: user, project: project) }
        let!(:todo2) { create(:todo, user: user, project: project) }
        let!(:todo3) { create(:todo, user: user, project: project) }

        it 'sorts with oldest created first' do
          todos = finder.new(user, { sort: 'id_asc' }).execute
          expect(todos.first).to eq(todo1)
          expect(todos.second).to eq(todo2)
          expect(todos.third).to eq(todo3)
        end

        it 'sorts with newest created first' do
          todos = finder.new(user, { sort: 'id_desc' }).execute
          expect(todos.first).to eq(todo3)
          expect(todos.second).to eq(todo2)
          expect(todos.third).to eq(todo1)
        end
      end

      # Priority sorting uses the highest-priority label of each target;
      # ties presumably fall back to creation time (todo_3 and todo_5 have
      # explicit created_at values to exercise that).
      it "sorts by priority" do
        project_2 = create(:project)

        label_1 = create(:label, title: 'label_1', project: project, priority: 1)
        label_2 = create(:label, title: 'label_2', project: project, priority: 2)
        label_3 = create(:label, title: 'label_3', project: project, priority: 3)
        label_1_2 = create(:label, title: 'label_1', project: project_2, priority: 1)

        issue_1 = create(:issue, title: 'issue_1', project: project)
        issue_2 = create(:issue, title: 'issue_2', project: project)
        issue_3 = create(:issue, title: 'issue_3', project: project)
        issue_4 = create(:issue, title: 'issue_4', project: project)
        merge_request_1 = create(:merge_request, source_project: project_2)
        merge_request_1.labels << label_1_2

        # Covers the case where Todo has more than one label
        issue_3.labels << label_1
        issue_3.labels << label_3
        issue_2.labels << label_3
        issue_1.labels << label_2

        todo_1 = create(:todo, user: user, project: project, target: issue_4)
        todo_2 = create(:todo, user: user, project: project, target: issue_2)
        todo_3 = create(:todo, user: user, project: project, target: issue_3, created_at: 2.hours.ago)
        todo_4 = create(:todo, user: user, project: project, target: issue_1)
        todo_5 = create(:todo, user: user, project: project_2, target: merge_request_1, created_at: 1.hour.ago)

        project_2.add_developer(user)

        todos = finder.new(user, { sort: 'priority' }).execute

        expect(todos).to eq([todo_3, todo_5, todo_4, todo_2, todo_1])
      end
    end
  end

  describe '#any_for_target?' do
    it 'returns true if there are any todos for the given target' do
      todo = create(:todo, :pending)
      finder = described_class.new(todo.user)

      expect(finder.any_for_target?(todo.target)).to eq(true)
    end

    it 'returns false if there are no todos for the given target' do
      issue = create(:issue)
      finder = described_class.new(issue.author)

      expect(finder.any_for_target?(issue)).to eq(false)
    end
  end
end
| 36.360294 | 111 | 0.608089 |
7a92ee34809cf40ec79b32a47574d238aa88892b | 7,495 | require "language/go"
# Homebrew formula for InfluxDB 0.10.1, built from source with Go and a
# vendored set of commit-pinned Go dependencies.
class Influxdb < Formula
  desc "Time series, events, and metrics database"
  homepage "https://influxdata.com/time-series-platform/influxdb/"
  url "https://github.com/influxdata/influxdb/archive/v0.10.1.tar.gz"
  sha256 "6a66373006a249cb6ab2a2f33b924694486ee07b1d9096c3f770376d0351b703"

  bottle do
    cellar :any_skip_relocation
    sha256 "4ea28e218a7805755ac516094c691f3799257f182abc06c86f6eb5caf1702d6e" => :el_capitan
    sha256 "0ed3eec3c34d166e063375e4583ca4b9c16fb345d72f60e122749a1c057cb052" => :yosemite
    sha256 "eff6e107c1e1ad6272464771945855d08c9bf700bafafbabd25f9e90a918a380" => :mavericks
  end

  # HEAD builds track master and use the renamed influxdata org for the
  # usage-client dependency.
  head do
    url "https://github.com/influxdata/influxdb.git"
    go_resource "github.com/influxdata/usage-client" do
      url "https://github.com/influxdata/usage-client.git",
        :revision => "475977e68d79883d9c8d67131c84e4241523f452"
    end
  end

  depends_on "go" => :build

  # Vendored Go dependencies, each pinned to an exact commit.
  go_resource "collectd.org" do
    url "https://github.com/collectd/go-collectd.git",
      :revision => "9fc824c70f713ea0f058a07b49a4c563ef2a3b98"
  end

  go_resource "github.com/BurntSushi/toml" do
    url "https://github.com/BurntSushi/toml.git",
      :revision => "312db06c6c6dbfa9899e58564bacfaa584f18ab7"
  end

  go_resource "github.com/armon/go-metrics" do
    url "https://github.com/armon/go-metrics.git",
      :revision => "345426c77237ece5dab0e1605c3e4b35c3f54757"
  end

  go_resource "github.com/bmizerany/pat" do
    url "https://github.com/bmizerany/pat.git",
      :revision => "c068ca2f0aacee5ac3681d68e4d0a003b7d1fd2c"
  end

  go_resource "github.com/boltdb/bolt" do
    url "https://github.com/boltdb/bolt.git",
      :revision => "2f846c3551b76d7710f159be840d66c3d064abbe"
  end

  go_resource "github.com/davecgh/go-spew" do
    url "https://github.com/davecgh/go-spew.git",
      :revision => "5215b55f46b2b919f50a1df0eaa5886afe4e3b3d"
  end

  go_resource "github.com/dgryski/go-bits" do
    url "https://github.com/dgryski/go-bits.git",
      :revision => "86c69b3c986f9d40065df5bd8f765796549eef2e"
  end

  go_resource "github.com/dgryski/go-bitstream" do
    url "https://github.com/dgryski/go-bitstream.git",
      :revision => "27cd5973303fde7d914860be1ea4b927a6be0c92"
  end

  go_resource "github.com/gogo/protobuf" do
    url "https://github.com/gogo/protobuf.git",
      :revision => "82d16f734d6d871204a3feb1a73cb220cc92574c"
  end

  go_resource "github.com/golang/snappy" do
    url "https://github.com/golang/snappy.git",
      :revision => "d1d908a252c22fd7afd36190d5cffb144aa8f777"
  end

  go_resource "github.com/hashicorp/go-msgpack" do
    url "https://github.com/hashicorp/go-msgpack.git",
      :revision => "fa3f63826f7c23912c15263591e65d54d080b458"
  end

  go_resource "github.com/hashicorp/raft" do
    url "https://github.com/hashicorp/raft.git",
      :revision => "057b893fd996696719e98b6c44649ea14968c811"
  end

  go_resource "github.com/hashicorp/raft-boltdb" do
    url "https://github.com/hashicorp/raft-boltdb.git",
      :revision => "d1e82c1ec3f15ee991f7cc7ffd5b67ff6f5bbaee"
  end

  go_resource "github.com/influxdb/usage-client" do
    url "https://github.com/influxdb/usage-client.git",
      :revision => "475977e68d79883d9c8d67131c84e4241523f452"
  end

  go_resource "github.com/jwilder/encoding" do
    url "https://github.com/jwilder/encoding.git",
      :revision => "07d88d4f35eec497617bee0c7bfe651a796dae13"
  end

  go_resource "github.com/kimor79/gollectd" do
    url "https://github.com/kimor79/gollectd.git",
      :revision => "61d0deeb4ffcc167b2a1baa8efd72365692811bc"
  end

  go_resource "github.com/paulbellamy/ratecounter" do
    url "https://github.com/paulbellamy/ratecounter.git",
      :revision => "5a11f585a31379765c190c033b6ad39956584447"
  end

  go_resource "github.com/peterh/liner" do
    url "https://github.com/peterh/liner.git",
      :revision => "ad1edfd30321d8f006ccf05f1e0524adeb943060"
  end

  go_resource "github.com/rakyll/statik" do
    url "https://github.com/rakyll/statik.git",
      :revision => "274df120e9065bdd08eb1120e0375e3dc1ae8465"
  end

  go_resource "golang.org/x/crypto" do
    url "https://go.googlesource.com/crypto.git",
      :revision => "1f22c0103821b9390939b6776727195525381532"
  end

  go_resource "gopkg.in/fatih/pool.v2" do
    url "https://gopkg.in/fatih/pool.v2.git",
      :revision => "cba550ebf9bce999a02e963296d4bc7a486cb715"
  end

  # Stages the source and vendored deps into a GOPATH layout, builds with
  # version metadata baked in via -ldflags, then installs binaries and a
  # sample config rewritten to use Homebrew's var directories.
  def install
    ENV["GOPATH"] = buildpath
    if build.head?
      influxdb_path = buildpath/"src/github.com/influxdata/influxdb"
    else
      influxdb_path = buildpath/"src/github.com/influxdb/influxdb"
    end
    influxdb_path.install Dir["*"]
    Language::Go.stage_deps resources, buildpath/"src"

    cd influxdb_path do
      if build.head?
        system "go", "install", "-ldflags", "-X main.version=0.11.0-HEAD -X main.branch=master -X main.commit=#{`git rev-parse HEAD`.strip}", "./..."
      else
        system "go", "install", "-ldflags", "-X main.version=0.10.1 -X main.branch=0.10.0 -X main.commit=b8bb32ecad9808ef00219e7d2469514890a0987a", "./..."
      end
    end

    inreplace influxdb_path/"etc/config.sample.toml" do |s|
      s.gsub! "/var/lib/influxdb/data", "#{var}/influxdb/data"
      s.gsub! "/var/lib/influxdb/meta", "#{var}/influxdb/meta"
      s.gsub! "/var/lib/influxdb/hh", "#{var}/influxdb/hh"
      s.gsub! "/var/lib/influxdb/wal", "#{var}/influxdb/wal"
    end

    bin.install "bin/influxd"
    bin.install "bin/influx"
    etc.install influxdb_path/"etc/config.sample.toml" => "influxdb.conf"

    (var/"influxdb/data").mkpath
    (var/"influxdb/meta").mkpath
    (var/"influxdb/hh").mkpath
    (var/"influxdb/wal").mkpath
  end

  plist_options :manual => "influxd -config #{HOMEBREW_PREFIX}/etc/influxdb.conf"

  # launchd service definition: keep-alive on failure, log to var/log.
  def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/influxd</string>
<string>-config</string>
<string>#{HOMEBREW_PREFIX}/etc/influxdb.conf</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/influxdb.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/influxdb.log</string>
<key>SoftResourceLimits</key>
<dict>
<key>NumberOfFiles</key>
<integer>10240</integer>
</dict>
</dict>
</plist>
EOS
  end

  # Smoke test: boot influxd against a sandboxed config and check the
  # /ping endpoint reports an InfluxDB version header.
  test do
    (testpath/"config.toml").write shell_output("influxd config")
    inreplace testpath/"config.toml" do |s|
      s.gsub! %r{/.*/.influxdb/data}, "#{testpath}/influxdb/data"
      s.gsub! %r{/.*/.influxdb/meta}, "#{testpath}/influxdb/meta"
      s.gsub! %r{/.*/.influxdb/hh}, "#{testpath}/influxdb/hh"
      s.gsub! %r{/.*/.influxdb/wal}, "#{testpath}/influxdb/wal"
    end

    pid = fork do
      exec "#{bin}/influxd -config #{testpath}/config.toml"
    end
    sleep 5

    begin
      output = shell_output("curl -Is localhost:8086/ping")
      sleep 2
      assert_match /X-Influxdb-Version:/, output
    ensure
      Process.kill("SIGINT", pid)
      Process.wait(pid)
    end
  end
end
| 32.586957 | 155 | 0.684323 |
e9cde285cd1839f634fb894b377e0cd94ec19daf | 1,454 | require 'spec_helper'
# Specs for Idnow::RawResponse: raw payload storage and error extraction.
RSpec.describe Idnow::RawResponse do
  let(:idnow_response) { Idnow::RawResponse.new(response) }
  let(:successful_raw_response) { 'was successful' }
  # JSON error payload shape as returned by the IDnow API on failure.
  let(:failure_raw_response) do
    '{
"errors": [{
"cause": "SOMETHING_WRONG"
}]
}'
  end

  describe '#new' do
    let(:response) { successful_raw_response }
    subject { idnow_response }

    it 'sets raw' do
      expect(subject.raw).to eq 'was successful'
    end
  end

  describe '#errors' do
    subject { idnow_response.errors }

    context 'given raw response without errors' do
      let(:response) { successful_raw_response }

      it 'returns nil' do
        is_expected.to eq nil
      end
    end

    context 'given a raw response with errors ' do
      let(:response) { failure_raw_response }

      # Errors are parsed out of the JSON payload as an array of hashes.
      it 'returns the errors' do
        is_expected.to eq(
          [
            { 'cause' => 'SOMETHING_WRONG' }
          ]
        )
      end
    end
  end

  describe '#errors?' do
    subject { idnow_response.errors? }

    context 'given raw response without errors' do
      let(:response) { successful_raw_response }
      it { is_expected.to be_falsey }
    end

    context 'given a raw response with errors ' do
      let(:response) { failure_raw_response }
      it { is_expected.to be_truthy }
    end
  end
end
| 25.508772 | 68 | 0.570839 |
d5fe28f350b3c8c46ead651b803682034e3cd86d | 444 | module Shared::UnionScope
# Hook: extends the including model's singleton with the class-level helpers.
def self.included(base)
  base.extend(ClassMethods)
end
# Class-level helpers mixed into the including ActiveRecord model.
module ClassMethods
  # Returns records whose id appears in any of the given scopes, using an
  # `id IN ((...) UNION (...))` sub-query; empty scopes are skipped, and
  # `none` is returned when every scope is empty.
  def union_scope(*scopes)
    id_column = "#{table_name}.id"
    populated = scopes.reject { |sc| sc.count == 0 }
    sub_query = populated.map { |s| "(#{s.select(id_column).to_sql})" }.join(" UNION ")
    if sub_query.present?
      where "#{id_column} IN (#{sub_query})"
    else
      none
    end
  end
end
end
| 22.2 | 58 | 0.560811 |
d5aa20c9a87787dbf89a83beaac5e4b2450cd9c3 | 41 | module Talentlms
# Gem version, bumped on each release.
VERSION = "1.0.3"
end
| 10.25 | 19 | 0.682927 |
28dfc575047474b8e35a85766773b089a50329a0 | 428 | class Smartsynchronize < Cask
# Cask body for SmartSynchronize (syntevo file/directory compare tool).
version '3.4'
sha256 'f7d30bae37c835ea208ff60b5b99c2668e3406c06f1321862495a425a9389322'

# The download URL embeds the version with dots replaced by underscores.
url "http://www.syntevo.com/download/smartsynchronize/smartsynchronize-macosx-#{version.gsub('.','_')}.dmg"
homepage 'http://www.syntevo.com'
license :unknown

app "SmartSynchronize.app"
binary "SmartSynchronize.app/Contents/MacOS/SmartSynchronize"

caveats do
  files_in_usr_local
end
| 28.533333 | 109 | 0.778037 |
bfb64be10fc37c6cfb98049af6b7b02a51e0fe0c | 2,147 | # Interpolaciones de strings no solo con #{}
# String interpolation: instance variables interpolate without braces (#@var).
class FastInterpolation
  attr_accessor :var

  def print
    p "Hola #@var"
  end
end

a = FastInterpolation.new
a.var = "Manuel"
a.print
#------------------------------------------------
# rescue does not need an explicit begin inside a method body
class RescueEasy
  def blowup
    1/0
  rescue
    puts "Saved!"
  end
end

RescueEasy.new.blowup
#------------------------------------------------
# Splat operator
class Splat
  def process(*args)
    p args
  end
end

Splat.new.process "hola", 12, [:a, :b]

# A bare splat silently ignores any extra trailing arguments.
def print_pair(a,b,*)
  puts "#{a} and #{b}"
end
print_pair 1,2,3,:cake,7

def add(a,b)
  a + b
end
pair = [3,7]
# Splatting an array spreads it across the positional parameters.
add *pair

first, *list = [1,2,3,4]
# The splat can also capture the middle of a destructuring assignment.
first, *primeros, ultimo = [1,2,3,4,5]
p primeros
#------------------------------------------------
# Ignored arguments
def ignore_me(a, _, b)
  puts "Called with #{a} and #{b}"
end
ignore_me "Save", "The", "World"

{a: 1, b: 2}.each do |_, v|
  p v
end
#------------------------------------------------
# Destructured iteration: block parameters can unpack (key, value) pairs
result = {a: 4, b: 5}.each_with_object({}) do |(k, v), h|
  h[:res] = h[:res].to_i + v
  h
end
p result
#------------------------------------------------
# case statement
# 1) See the direct example at the end of the bin/tkn.rb binary
#
# 2) Usage with ===
a = "XXX String asd"
case a
when 1..20
  puts "Entre 1 y 20"
when 25
  puts "Seguro es 25"
when /paco/
  puts "Es un string que hace matching con 'paco'"
when lambda {|x| x[0..2] == "XXX"}
  puts "Empieza por XXX"
when String
  puts "Es un string"
else
  puts "No es nada de lo anterior"
end
# This form takes a single value and Ruby invokes the === method with
# each candidate, e.g. (1..20) === 14
#
# There is also an argument-less variant where the "when" values are
# true/false, which is the case expression already familiar from other
# programming languages.
#------------------------------------------------
# Any context is an evaluable expression in Ruby
def get_number
  99 / 4.0
end

# Default argument values are arbitrary expressions evaluated at call time.
def test(name, age = (12 * get_number))
  puts "Ejecutando test con age = #{age}"
end
test "Roger"

class Superclass; end
# Even the superclass position accepts an arbitrary expression.
class Subclass < (Object.const_get("Superclass")); end
p Subclass.ancestors
| 15.786765 | 76 | 0.560782 |
6191968f9ff4673b9959f32614bf7facea83ea98 | 73 | require_relative 'tasks/autoclean'
require_relative 'tasks/seedregistry'
| 24.333333 | 37 | 0.863014 |
26193ecbd91f738217b502c3212a363c4fc9f556 | 33,624 | require "kitchen"
require_relative "azure_credentials"
require "securerandom"
require "azure_mgmt_resources"
require "azure_mgmt_network"
require "base64"
require "sshkey"
require "fileutils"
require "erb"
require "ostruct"
require "json"
require "faraday"
module Kitchen
module Driver
#
# Azurerm
#
class Azurerm < Kitchen::Driver::Base
# Azure SDK clients, populated during create/destroy.
attr_accessor :resource_management_client
attr_accessor :network_management_client

# Driver configuration defaults. Only :azure_resource_group_name derives
# its value from the Kitchen instance; everything else is a constant or
# environment-driven default.
default_config(:azure_resource_group_prefix) do |_config|
  "kitchen-"
end

default_config(:azure_resource_group_suffix) do |_config|
  ""
end

default_config(:azure_resource_group_name) do |config|
  config.instance.name.to_s
end

default_config(:explicit_resource_group_name) do |_config|
  nil
end

default_config(:resource_group_tags) do |_config|
  {}
end

default_config(:image_urn) do |_config|
  "Canonical:UbuntuServer:14.04.3-LTS:latest"
end

default_config(:image_url) do |_config|
  ""
end

default_config(:image_id) do |_config|
  ""
end

default_config(:use_ephemeral_osdisk) do |_config|
  false
end

default_config(:os_disk_size_gb) do |_config|
  ""
end

default_config(:os_type) do |_config|
  "linux"
end

default_config(:custom_data) do |_config|
  ""
end

default_config(:username) do |_config|
  "azure"
end

# A random admin password is generated per run unless overridden.
default_config(:password) do |_config|
  SecureRandom.base64(12)
end

default_config(:vm_name) do |_config|
  "vm"
end

default_config(:nic_name) do |_config|
  ""
end

default_config(:vnet_id) do |_config|
  ""
end

default_config(:subnet_id) do |_config|
  ""
end

default_config(:storage_account_type) do |_config|
  "Standard_LRS"
end

default_config(:existing_storage_account_blob_url) do |_config|
  ""
end

default_config(:existing_storage_account_container) do |_config|
  "vhds"
end

default_config(:boot_diagnostics_enabled) do |_config|
  "true"
end

default_config(:winrm_powershell_script) do |_config|
  false
end

default_config(:azure_environment) do |_config|
  "Azure"
end

default_config(:pre_deployment_template) do |_config|
  ""
end

default_config(:pre_deployment_parameters) do |_config|
  {}
end

default_config(:post_deployment_template) do |_config|
  ""
end

default_config(:post_deployment_parameters) do |_config|
  {}
end

default_config(:vm_tags) do |_config|
  {}
end

default_config(:public_ip) do |_config|
  false
end

default_config(:use_managed_disks) do |_config|
  true
end

default_config(:data_disks) do |_config|
  nil
end

default_config(:format_data_disks) do |_config|
  false
end

default_config(:format_data_disks_powershell_script) do |_config|
  false
end

default_config(:system_assigned_identity) do |_config|
  false
end

default_config(:user_assigned_identities) do |_config|
  []
end

default_config(:destroy_explicit_resource_group) do |_config|
  true
end

default_config(:destroy_resource_group_contents) do |_config|
  false
end

# Seconds to wait between deployment status polls.
default_config(:deployment_sleep) do |_config|
  10
end

default_config(:secret_url) do |_config|
  ""
end

default_config(:vault_name) do |_config|
  ""
end

default_config(:vault_resource_group) do |_config|
  ""
end

default_config(:subscription_id) do |_config|
  ENV["AZURE_SUBSCRIPTION_ID"]
end

default_config(:azure_api_retries) do |_config|
  5
end
# Kitchen lifecycle hook: provisions the Azure resource group, runs the
# pre/main/post ARM deployments, and records the reachable host address
# in +state+ for the transport.
def create(state)
  state = validate_state(state)
  deployment_parameters = {
    location: config[:location],
    vmSize: config[:machine_size],
    storageAccountType: config[:storage_account_type],
    bootDiagnosticsEnabled: config[:boot_diagnostics_enabled],
    newStorageAccountName: "storage#{state[:uuid]}",
    adminUsername: state[:username],
    dnsNameForPublicIP: "kitchen-#{state[:uuid]}",
    vmName: state[:vm_name],
    systemAssignedIdentity: config[:system_assigned_identity],
    userAssignedIdentities: config[:user_assigned_identities],
    secretUrl: config[:secret_url],
    vaultName: config[:vault_name],
    vaultResourceGroup: config[:vault_resource_group],
  }

  # Password auth only applies when no SSH key is configured on the transport.
  if instance.transport[:ssh_key].nil?
    deployment_parameters["adminPassword"] = state[:password]
  end

  if config[:subscription_id].to_s == ""
    raise "A subscription_id config value was not detected and kitchen-azurerm cannot continue. Please check your .kitchen.yml configuration. Exiting."
  end

  if config[:nic_name].to_s == ""
    vmnic = "nic-#{config[:vm_name]}"
  else
    vmnic = config[:nic_name]
  end
  deployment_parameters["nicName"] = vmnic.to_s

  if config[:custom_data].to_s != ""
    deployment_parameters["customData"] = prepared_custom_data
  end

  # When deploying in a shared storage account, we need to add
  # a unique suffix to support multiple kitchen instances
  if config[:existing_storage_account_blob_url].to_s != ""
    deployment_parameters["osDiskNameSuffix"] = "-#{state[:azure_resource_group_name]}"
  end
  if config[:existing_storage_account_blob_url].to_s != ""
    deployment_parameters["existingStorageAccountBlobURL"] = config[:existing_storage_account_blob_url]
  end
  if config[:existing_storage_account_container].to_s != ""
    deployment_parameters["existingStorageAccountBlobContainer"] = config[:existing_storage_account_container]
  end
  if config[:os_disk_size_gb].to_s != ""
    deployment_parameters["osDiskSizeGb"] = config[:os_disk_size_gb]
  end

  # The three deployment modes
  # a) Private Image: Managed VM Image (by id)
  # b) Private Image: Using a VHD URL (note: we must use existing_storage_account_blob_url due to azure limitations)
  # c) Public Image: Using a marketplace image (urn)
  if config[:image_id].to_s != ""
    deployment_parameters["imageId"] = config[:image_id]
  elsif config[:image_url].to_s != ""
    deployment_parameters["imageUrl"] = config[:image_url]
    deployment_parameters["osType"] = config[:os_type]
  else
    image_publisher, image_offer, image_sku, image_version = config[:image_urn].split(":", 4)
    deployment_parameters["imagePublisher"] = image_publisher
    deployment_parameters["imageOffer"] = image_offer
    deployment_parameters["imageSku"] = image_sku
    deployment_parameters["imageVersion"] = image_version
  end

  options = Kitchen::Driver::AzureCredentials.new(subscription_id: config[:subscription_id],
    environment: config[:azure_environment]).azure_options
  debug "Azure environment: #{config[:azure_environment]}"
  @resource_management_client = ::Azure::Resources::Profiles::Latest::Mgmt::Client.new(options)

  # Create Resource Group
  resource_group = ::Azure::Resources::Profiles::Latest::Mgmt::Models::ResourceGroup.new
  resource_group.location = config[:location]
  resource_group.tags = config[:resource_group_tags]
  begin
    info "Creating Resource Group: #{state[:azure_resource_group_name]}"
    create_resource_group(state[:azure_resource_group_name], resource_group)
  rescue ::MsRestAzure::AzureOperationError => operation_error
    error operation_error.body
    raise operation_error
  end

  # Execute deployment steps
  begin
    if File.file?(config[:pre_deployment_template])
      pre_deployment_name = "pre-deploy-#{state[:uuid]}"
      info "Creating deployment: #{pre_deployment_name}"
      create_deployment_async(state[:azure_resource_group_name], pre_deployment_name, pre_deployment(config[:pre_deployment_template], config[:pre_deployment_parameters])).value!
      follow_deployment_until_end_state(state[:azure_resource_group_name], pre_deployment_name)
    end
    deployment_name = "deploy-#{state[:uuid]}"
    info "Creating deployment: #{deployment_name}"
    create_deployment_async(state[:azure_resource_group_name], deployment_name, deployment(deployment_parameters)).value!
    follow_deployment_until_end_state(state[:azure_resource_group_name], deployment_name)

    if File.file?(config[:post_deployment_template])
      post_deployment_name = "post-deploy-#{state[:uuid]}"
      info "Creating deployment: #{post_deployment_name}"
      create_deployment_async(state[:azure_resource_group_name], post_deployment_name, post_deployment(config[:post_deployment_template], config[:post_deployment_parameters])).value!
      follow_deployment_until_end_state(state[:azure_resource_group_name], post_deployment_name)
    end
  rescue ::MsRestAzure::AzureOperationError => operation_error
    rest_error = operation_error.body["error"]
    # An already-running deployment is tolerated (idempotent re-create).
    deployment_active = rest_error["code"] == "DeploymentActive"
    if deployment_active
      info "Deployment for resource group #{state[:azure_resource_group_name]} is ongoing."
      info "If you need to change the deployment template you'll need to rerun `kitchen create` for this instance."
    else
      info rest_error
      raise operation_error
    end
  end

  @network_management_client = ::Azure::Network::Profiles::Latest::Mgmt::Client.new(options)
  if config[:vnet_id] == "" || config[:public_ip]
    # Retrieve the public IP from the resource group:
    result = get_public_ip(state[:azure_resource_group_name], "publicip")
    info "IP Address is: #{result.ip_address} [#{result.dns_settings.fqdn}]"
    state[:hostname] = result.ip_address
  else
    # Retrieve the internal IP from the resource group:
    result = get_network_interface(state[:azure_resource_group_name], vmnic.to_s)
    info "IP Address is: #{result.ip_configurations[0].private_ipaddress}"
    state[:hostname] = result.ip_configurations[0].private_ipaddress
  end
end
# True when +state+ already carries a usable (non-nil) value for +property+.
def existing_state_value?(state, property)
  !state.fetch(property, nil).nil?
end
# Ensures the Kitchen state hash carries everything later phases need,
# generating values only when absent so repeated runs are idempotent.
def validate_state(state = {})
  state[:uuid] = SecureRandom.hex(8) unless existing_state_value?(state, :uuid)
  state[:server_id] = "vm#{state[:uuid]}" unless existing_state_value?(state, :server_id)
  state[:azure_resource_group_name] = azure_resource_group_name unless existing_state_value?(state, :azure_resource_group_name)
  %i{subscription_id username password vm_name azure_environment use_managed_disks}.each do |config_element|
    state[config_element] = config[config_element] unless existing_state_value?(state, config_element)
  end
  # When key-based SSH auth is in use, the generated password is not needed.
  state.delete(:password) unless instance.transport[:ssh_key].nil?
  state
end
# Resolves the resource group name: an explicitly configured name wins;
# otherwise one is composed from prefix, instance name, a UTC timestamp
# and suffix.
def azure_resource_group_name
  explicit = config[:explicit_resource_group_name]
  return explicit if explicit

  stamp = Time.now.utc.strftime "%Y%m%dT%H%M%S"
  "#{config[:azure_resource_group_prefix]}#{config[:azure_resource_group_name]}-#{stamp}#{config[:azure_resource_group_suffix]}"
end
# Serializes the configured data disks into the JSON fragment embedded in
# the ARM template; returns nil when no data disks are configured, and an
# empty JSON array when data disks are requested without managed disks.
def data_disks_for_vm_json
  return nil if config[:data_disks].nil?

  disks = []
  if config[:use_managed_disks]
    config[:data_disks].each do |data_disk|
      disks << { name: "datadisk#{data_disk[:lun]}", lun: data_disk[:lun], diskSizeGB: data_disk[:disk_size_gb], createOption: "Empty" }
    end
    debug "Additional disks being added to configuration: #{disks.inspect}"
  else
    # Unmanaged (storage-account backed) disks are not supported here.
    warn 'Data disks are only supported when used with the "use_managed_disks" option. No additional disks were added to the configuration.'
  end
  disks.to_json
end
# Renders the ARM template and injects transport-specific OS profile
# settings: WinRM bootstrap custom data for Windows (non-Nano), and the
# SSH public key for key-based Linux transports. Returns the template as
# a JSON string.
def template_for_transport_name
  template = JSON.parse(virtual_machine_deployment_template)
  if instance.transport.name.casecmp("winrm") == 0
    # Nano Server is skipped — presumably it has WinRM preconfigured;
    # confirm against the provisioning docs.
    if instance.platform.name.index("nano").nil?
      info "Adding WinRM configuration to provisioning profile."
      encoded_command = Base64.strict_encode64(custom_data_script_windows)
      template["resources"].select { |h| h["type"] == "Microsoft.Compute/virtualMachines" }.each do |resource|
        resource["properties"]["osProfile"]["customData"] = encoded_command
        resource["properties"]["osProfile"]["windowsConfiguration"] = windows_unattend_content
      end
    end
  end

  unless instance.transport[:ssh_key].nil?
    info "Adding public key from #{File.expand_path(instance.transport[:ssh_key])}.pub to the deployment."
    public_key = public_key_for_deployment(File.expand_path(instance.transport[:ssh_key]))
    template["resources"].select { |h| h["type"] == "Microsoft.Compute/virtualMachines" }.each do |resource|
      resource["properties"]["osProfile"]["linuxConfiguration"] = JSON.parse(custom_linux_configuration(public_key))
    end
  end
  template.to_json
end
# Returns the stripped SSH public key text to embed in the deployment.
# Generates a fresh keypair at +private_key_filename+ when none exists;
# otherwise reads the matching .pub file (or the transport's configured
# ssh_public_key, when set).
def public_key_for_deployment(private_key_filename)
  if File.file?(private_key_filename) == false
    k = SSHKey.generate

    ::FileUtils.mkdir_p(File.dirname(private_key_filename))
    private_key_file = File.new(private_key_filename, "w")
    private_key_file.syswrite(k.private_key)
    private_key_file.chmod(0600)
    private_key_file.close

    public_key_file = File.new("#{private_key_filename}.pub", "w")
    public_key_file.syswrite(k.ssh_public_key)
    # NOTE(review): 0600 on the public key is stricter than the usual
    # 0644 — presumably intentional; confirm before changing.
    public_key_file.chmod(0600)
    public_key_file.close

    output = k.ssh_public_key
  else
    output = if instance.transport[:ssh_public_key].nil?
      File.read("#{private_key_filename}.pub")
    else
      File.read(instance.transport[:ssh_public_key])
    end
  end
  output.strip
end
# Builds the optional pre-deployment ARM Deployment object from a user
# supplied template file and parameter hash (incremental mode).
def pre_deployment(pre_deployment_template_filename, pre_deployment_parameters)
  pre_deployment_template = ::File.read(pre_deployment_template_filename)
  pre_deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
  pre_deployment.properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
  pre_deployment.properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Incremental
  pre_deployment.properties.template = JSON.parse(pre_deployment_template)
  pre_deployment.properties.parameters = parameters_in_values_format(pre_deployment_parameters)
  debug(pre_deployment.properties.template)
  pre_deployment
end
# Builds the main ARM Deployment object from the transport-aware rendered
# template and the assembled parameter hash (incremental mode).
def deployment(parameters)
  template = template_for_transport_name
  deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
  deployment.properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
  deployment.properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Incremental
  deployment.properties.template = JSON.parse(template)
  deployment.properties.parameters = parameters_in_values_format(parameters)
  debug(JSON.pretty_generate(deployment.properties.template))
  deployment
end
# Builds the ARM Deployment payload for the user-supplied post-deployment
# template, which runs after the main VM deployment.
#
# @param post_deployment_template_filename [String] path to an ARM template JSON file
# @param post_deployment_parameters [Hash] parameter name => value pairs
# @return [Deployment] an incremental-mode deployment ready to submit
def post_deployment(post_deployment_template_filename, post_deployment_parameters)
post_deployment_template = ::File.read(post_deployment_template_filename)
post_deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
post_deployment.properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
# Incremental mode leaves resources not named in the template untouched.
post_deployment.properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Incremental
post_deployment.properties.template = JSON.parse(post_deployment_template)
post_deployment.properties.parameters = parameters_in_values_format(post_deployment_parameters)
debug(post_deployment.properties.template)
post_deployment
end
# Builds a deployment of an empty template in Complete mode: applying it
# removes every resource in the resource group that the (empty) template does
# not mention, i.e. it empties the group without deleting the group itself.
#
# @return [Deployment]
def empty_deployment
template = virtual_machine_deployment_template_file("empty.erb", nil)
empty_deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
empty_deployment.properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
# Complete mode deletes anything not present in the template.
empty_deployment.properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Complete
empty_deployment.properties.template = JSON.parse(template)
debug(JSON.pretty_generate(empty_deployment.properties.template))
empty_deployment
end
# Renders the configured VM tags as a JSON object body fragment, e.g.
#   "team": "kitchen",
#   "stage": "test"
# Returns an empty string when no tags are configured.
#
# @param vm_tags_in [Hash] tag name => tag value pairs
# @return [String] comma/newline separated "key": "value" entries
def vm_tag_string(vm_tags_in)
  return "" if vm_tags_in.empty?

  vm_tags_in
    .map { |name, value| "\"#{name}\": \"#{value}\"" }
    .join(",\n")
end
# Wraps each template parameter value in the { "value" => ... } envelope the
# ARM deployment API expects, keyed by symbolized parameter name.
# Preserves the original contract of returning nil for an empty input.
#
# @param parameters_in [Hash] parameter name => raw value
# @return [Hash, nil] envelope hash, or nil when no parameters were given
def parameters_in_values_format(parameters_in)
  parameters_in.inject(nil) do |accumulated, (name, value)|
    entry = { name.to_sym => { "value" => value } }
    accumulated.nil? ? entry : accumulated.merge!(entry)
  end
end
# Polls the ARM deployment until it reaches a terminal provisioning state,
# logging the still-outstanding per-resource operations between polls.
# Blocks the caller; the poll interval comes from config[:deployment_sleep].
def follow_deployment_until_end_state(resource_group, deployment_name)
end_provisioning_states = "Canceled,Failed,Deleted,Succeeded"
end_provisioning_state_reached = false
until end_provisioning_state_reached
list_outstanding_deployment_operations(resource_group, deployment_name)
sleep config[:deployment_sleep]
deployment_provisioning_state = get_deployment_state(resource_group, deployment_name)
end_provisioning_state_reached = end_provisioning_states.split(",").include?(deployment_provisioning_state)
end
info "Resource Template deployment reached end state of '#{deployment_provisioning_state}'."
# Surface per-resource failure details when the deployment failed outright.
show_failed_operations(resource_group, deployment_name) if deployment_provisioning_state == "Failed"
end
# Walks the deployment's operations and raises with the status message of the
# first operation whose HTTP status code is not "OK".
#
# @param resource_group [String] resource group containing the deployment
# @param deployment_name [String] deployment to inspect
def show_failed_operations(resource_group, deployment_name)
  operations = list_deployment_operations(resource_group, deployment_name)
  operations.each do |operation|
    next if operation.properties.status_code == "OK"

    raise operation.properties.status_message.inspect.to_s
  end
end
# Logs a progress line for every deployment operation that has not yet reached
# a terminal ("Failed"/"Succeeded") state. Used while polling a deployment.
def list_outstanding_deployment_operations(resource_group, deployment_name)
end_operation_states = "Failed,Succeeded"
deployment_operations = list_deployment_operations(resource_group, deployment_name)
deployment_operations.each do |val|
resource_provisioning_state = val.properties.provisioning_state
# target_resource can be nil for some operations; the name/type then
# interpolate as empty strings in the log line below.
unless val.properties.target_resource.nil?
resource_name = val.properties.target_resource.resource_name
resource_type = val.properties.target_resource.resource_type
end
end_operation_state_reached = end_operation_states.split(",").include?(resource_provisioning_state)
unless end_operation_state_reached
info "Resource #{resource_type} '#{resource_name}' provisioning status is #{resource_provisioning_state}"
end
end
end
# Test Kitchen destroy action: tears down what was created for the instance.
#
# Behaviour depends on configuration:
# - destroy_resource_group_contents: true => first empty the resource group by
#   applying an empty Complete-mode deployment.
# - destroy_explicit_resource_group: false with an explicit group name =>
#   leave the group in place (warn only) and return early.
# - otherwise request asynchronous deletion of the whole resource group.
#
# @param state [Hash] mutable Test Kitchen state; server keys are removed on success
def destroy(state)
return if state[:server_id].nil?
options = Kitchen::Driver::AzureCredentials.new(subscription_id: state[:subscription_id],
environment: state[:azure_environment]).azure_options
@resource_management_client = ::Azure::Resources::Profiles::Latest::Mgmt::Client.new(options)
if config[:destroy_resource_group_contents] == true
info "Destroying individual resources within the Resource Group."
empty_deployment_name = "empty-deploy-#{state[:uuid]}"
begin
info "Creating deployment: #{empty_deployment_name}"
# Complete-mode empty template removes everything in the group; wait for it.
create_deployment_async(state[:azure_resource_group_name], empty_deployment_name, empty_deployment).value!
follow_deployment_until_end_state(state[:azure_resource_group_name], empty_deployment_name)
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
end
if config[:destroy_explicit_resource_group] == false && !config[:explicit_resource_group_name].nil?
warn 'The "destroy_explicit_resource_group" setting value is set to "false". The resource group will not be deleted.'
warn 'Remember to manually destroy resources, or set "destroy_resource_group_contents: true" to save costs!' unless config[:destroy_resource_group_contents] == true
return
end
info "Azure environment: #{state[:azure_environment]}"
begin
info "Destroying Resource Group: #{state[:azure_resource_group_name]}"
# Fire-and-forget: deletion continues server-side after kitchen exits.
delete_resource_group_async(state[:azure_resource_group_name])
info "Destroy operation accepted and will continue in the background."
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
# Forget the server so Kitchen treats the instance as destroyed.
state.delete(:server_id)
state.delete(:hostname)
state.delete(:username)
state.delete(:password)
end
# PowerShell bootstrap script that configures WinRM (HTTP + HTTPS listeners,
# Basic/Negotiate/CredSSP auth, firewall rules) so Test Kitchen can connect.
# A custom script can be supplied via config[:winrm_powershell_script].
# NOTE(review): AllowUnencrypted/Basic are convenient for throwaway test VMs
# but are weak settings for anything long-lived.
def enable_winrm_powershell_script
config[:winrm_powershell_script] ||
<<-PS1
$cert = New-SelfSignedCertificate -DnsName $env:COMPUTERNAME -CertStoreLocation Cert:\\LocalMachine\\My
$config = '@{CertificateThumbprint="' + $cert.Thumbprint + '"}'
winrm create winrm/config/listener?Address=*+Transport=HTTPS $config
winrm create winrm/config/Listener?Address=*+Transport=HTTP
winrm set winrm/config/service/auth '@{Basic="true";Kerberos="false";Negotiate="true";Certificate="false";CredSSP="true"}'
New-NetFirewallRule -DisplayName "Windows Remote Management (HTTPS-In)" -Name "Windows Remote Management (HTTPS-In)" -Profile Any -LocalPort 5986 -Protocol TCP
winrm set winrm/config/service '@{AllowUnencrypted="true"}'
New-NetFirewallRule -DisplayName "Windows Remote Management (HTTP-In)" -Name "Windows Remote Management (HTTP-In)" -Profile Any -LocalPort 5985 -Protocol TCP
PS1
end
# PowerShell script that initializes and NTFS-formats all raw data disks,
# assigning the first free drive letters from F..Z. Returns nil unless
# config[:format_data_disks] is set; an override script can be supplied via
# config[:format_data_disks_powershell_script].
def format_data_disks_powershell_script
return unless config[:format_data_disks]
info "Data disks will be initialized and formatted NTFS automatically." unless config[:data_disks].nil?
config[:format_data_disks_powershell_script] ||
<<-PS1
Write-Host "Initializing and formatting raw disks"
$disks = Get-Disk | where partitionstyle -eq 'raw'
$letters = New-Object System.Collections.ArrayList
$letters.AddRange( ('F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z') )
Function AvailableVolumes() {
$currentDrives = get-volume
ForEach ($v in $currentDrives) {
if ($letters -contains $v.DriveLetter.ToString()) {
Write-Host "Drive letter $($v.DriveLetter) is taken, moving to next letter"
$letters.Remove($v.DriveLetter.ToString())
}
}
}
ForEach ($d in $disks) {
AvailableVolumes
$driveLetter = $letters[0]
Write-Host "Creating volume $($driveLetter)"
$d | Initialize-Disk -PartitionStyle GPT -PassThru | New-Partition -DriveLetter $driveLetter -UseMaximumSize
# Prevent error ' Cannot perform the requested operation while the drive is read only'
Start-Sleep 1
Format-Volume -FileSystem NTFS -NewFileSystemLabel "datadisk" -DriveLetter $driveLetter -Confirm:$false
}
PS1
end
# Windows custom-data payload: WinRM setup, optional data-disk formatting,
# then a logoff so the AutoLogon first-logon flow completes.
def custom_data_script_windows
<<-EOH
#{enable_winrm_powershell_script}
#{format_data_disks_powershell_script}
logoff
EOH
end
# JSON fragment for the ARM osProfile.linuxConfiguration section: disables
# password authentication and authorizes +public_key+ for the admin user.
#
# @param public_key [String] SSH public key material to install
# @return [String] JSON string to be parsed into the template
def custom_linux_configuration(public_key)
<<-EOH
{
"disablePasswordAuthentication": "true",
"ssh": {
"publicKeys": [
{
"path": "[concat('/home/',parameters('adminUsername'),'/.ssh/authorized_keys')]",
"keyData": "#{public_key}"
}
]
}
}
EOH
end
# additionalUnattendContent entries for the Windows osProfile:
# - FirstLogonCommands copies the custom data to C:\Config.ps1 and runs it
#   via PowerShell on first logon.
# - AutoLogon logs the admin user in once so those commands actually execute.
def windows_unattend_content
{
additionalUnattendContent: [
{
passName: "oobeSystem",
componentName: "Microsoft-Windows-Shell-Setup",
settingName: "FirstLogonCommands",
content: '<FirstLogonCommands><SynchronousCommand><CommandLine>cmd /c "copy C:\\AzureData\\CustomData.bin C:\\Config.ps1"</CommandLine><Description>copy</Description><Order>1</Order></SynchronousCommand><SynchronousCommand><CommandLine>%windir%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe -NoProfile -ExecutionPolicy Bypass -file C:\\Config.ps1</CommandLine><Description>script</Description><Order>2</Order></SynchronousCommand></FirstLogonCommands>',
},
{
passName: "oobeSystem",
componentName: "Microsoft-Windows-Shell-Setup",
settingName: "AutoLogon",
content: "[concat('<AutoLogon><Password><Value>', parameters('adminPassword'), '</Value></Password><Enabled>true</Enabled><LogonCount>1</LogonCount><Username>', parameters('adminUserName'), '</Username></AutoLogon>')]",
},
],
}
end
# Renders the ARM template used to create the VM. Uses the "public" template
# unless a custom vnet is configured, in which case the "internal" template is
# rendered with the additional network settings.
#
# Fix: the long keyword-argument list was duplicated verbatim in both
# branches; it is now built once and shared.
#
# @return [String] the rendered ARM template JSON
def virtual_machine_deployment_template
  # Options common to both templates.
  common_options = {
    vm_tags: vm_tag_string(config[:vm_tags]),
    use_managed_disks: config[:use_managed_disks],
    image_url: config[:image_url],
    existing_storage_account_blob_url: config[:existing_storage_account_blob_url],
    image_id: config[:image_id],
    existing_storage_account_container: config[:existing_storage_account_container],
    custom_data: config[:custom_data],
    os_disk_size_gb: config[:os_disk_size_gb],
    data_disks_for_vm_json: data_disks_for_vm_json,
    use_ephemeral_osdisk: config[:use_ephemeral_osdisk],
    ssh_key: instance.transport[:ssh_key],
  }
  if config[:vnet_id] == ""
    virtual_machine_deployment_template_file("public.erb", common_options)
  else
    info "Using custom vnet: #{config[:vnet_id]}"
    network_options = {
      vnet_id: config[:vnet_id],
      subnet_id: config[:subnet_id],
      public_ip: config[:public_ip],
    }
    virtual_machine_deployment_template_file("internal.erb", network_options.merge(common_options))
  end
end
# Renders an ERB template from the gem's templates directory.
#
# @param template_file [String] file name under templates/
# @param data [Hash] values exposed to the template (via OpenStruct readers)
# @return [String] the rendered template text
def virtual_machine_deployment_template_file(template_file, data = {})
  template = File.read(File.expand_path(File.join(__dir__, "../../../templates", template_file)))
  render_binding = OpenStruct.new(data)
  # Keyword form: the positional (safe_level, trim_mode) ERB.new signature was
  # deprecated in Ruby 2.6 and removed in Ruby 3.1.
  ERB.new(template, trim_mode: "-").result(render_binding.instance_eval { binding })
end
# Maps a configured azure_environment name (case-insensitive) onto the
# matching Azure Resource Manager endpoint URL.
# Unrecognised environment names yield nil, as before.
#
# @param azure_environment [String] e.g. "Azure", "AzureChina"
# @return [String, nil] the ARM endpoint URL
def resource_manager_endpoint_url(azure_environment)
  environments = {
    "azureusgovernment" => MsRestAzure::AzureEnvironments::AzureUSGovernment,
    "azurechina" => MsRestAzure::AzureEnvironments::AzureChinaCloud,
    "azuregermancloud" => MsRestAzure::AzureEnvironments::AzureGermanCloud,
    "azure" => MsRestAzure::AzureEnvironments::AzureCloud,
  }
  environment = environments[azure_environment.downcase]
  environment&.resource_manager_endpoint_url
end
# Returns the custom data payload Base64-encoded, memoized after first call.
# config[:custom_data] may be either a path to a file (whose contents are
# used) or the literal payload itself. Returns nil when unset.
def prepared_custom_data
  return nil if config[:custom_data].nil?

  @custom_data ||= begin
    raw = config[:custom_data]
    Base64.strict_encode64(File.file?(raw) ? File.read(raw) : raw)
  end
end
private
#
# Wrapper methods for the Azure API calls to retry the calls when getting timeouts.
#
# Runs the given Azure API call, retrying on transient Faraday errors
# (timeouts / connection resets) up to config[:azure_api_retries] times.
# +description+ is the fixed part of the log line emitted for each failure.
#
# Fix: the identical retry/rescue loop was copy-pasted into seven wrapper
# methods; it now lives in one place.
def with_azure_api_retries(description)
  retries = config[:azure_api_retries]
  begin
    yield
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "#{description} #{retries} retries left.")
    raise if retries == 0
    retries -= 1
    retry
  end
end

# Creates (or updates) the resource group, with retries.
def create_resource_group(resource_group_name, resource_group)
  with_azure_api_retries("while creating resource group '#{resource_group_name}'.") do
    resource_management_client.resource_groups.create_or_update(resource_group_name, resource_group)
  end
end

# Submits the deployment without waiting for completion, with retries.
def create_deployment_async(resource_group, deployment_name, deployment)
  with_azure_api_retries("while sending deployment creation request for deployment '#{deployment_name}'.") do
    resource_management_client.deployments.begin_create_or_update_async(resource_group, deployment_name, deployment)
  end
end

# Fetches a public IP resource, with retries.
def get_public_ip(resource_group_name, public_ip_name)
  with_azure_api_retries("while fetching public ip '#{public_ip_name}' for resource group '#{resource_group_name}'.") do
    network_management_client.public_ipaddresses.get(resource_group_name, public_ip_name)
  end
end

# Fetches a network interface resource, with retries.
def get_network_interface(resource_group_name, network_interface_name)
  with_azure_api_retries("while fetching network interface '#{network_interface_name}' for resource group '#{resource_group_name}'.") do
    network_interfaces = ::Azure::Network::Profiles::Latest::Mgmt::NetworkInterfaces.new(network_management_client)
    network_interfaces.get(resource_group_name, network_interface_name)
  end
end

# Lists all operations belonging to a deployment, with retries.
def list_deployment_operations(resource_group, deployment_name)
  with_azure_api_retries("while listing deployment operations for deployment '#{deployment_name}'.") do
    resource_management_client.deployment_operations.list(resource_group, deployment_name)
  end
end

# Returns the deployment's current provisioning state string, with retries.
def get_deployment_state(resource_group, deployment_name)
  with_azure_api_retries("while retrieving state for deployment '#{deployment_name}'.") do
    deployments = resource_management_client.deployments.get(resource_group, deployment_name)
    deployments.properties.provisioning_state
  end
end

# Requests asynchronous deletion of the resource group, with retries.
def delete_resource_group_async(resource_group_name)
  with_azure_api_retries("while sending resource group deletion request for '#{resource_group_name}'.") do
    resource_management_client.resource_groups.begin_delete(resource_group_name)
  end
end
# Logs a short, human-readable line for a retried API failure. Unknown
# exception classes are noted and otherwise ignored.
#
# @param exception [StandardError] the rescued Faraday error
# @param message [String] contextual suffix for the log line
def send_exception_message(exception, message)
  header =
    case exception
    when Faraday::TimeoutError then "Timed out"
    when Faraday::ClientError then "Connection reset by peer"
    else
      # Unhandled exception type; log and bail without a formatted message.
      info "Unrecognized exception type."
      return
    end
  info "#{header} #{message}"
end
end
end
end
| 42.082603 | 682 | 0.676303 |
622eb53fc7145e1cab8f620afce6874afa54991c | 4,435 | # -*- indent-tabs-mode: nil; fill-column: 110 -*-
require 'rspec'
require "spec_helper"
require 'chef_fixie'
require 'chef_fixie/config'
# Specs for ChefFixie::CheckOrgAssociations: verifies that broken org/user
# associations are detected (check_association) and repaired (fix_association).
RSpec.describe ChefFixie::CheckOrgAssociations, "Association checker" do
  let(:test_org_name) { "ponyville" }
  let(:orgs) { ChefFixie::Sql::Orgs.new }
  let(:test_org) { orgs[test_org_name] }
  let(:users) { ChefFixie::Sql::Users.new }
  let(:adminuser) { users['rainbowdash'] }
  let(:notorguser) { users['mary'] }

  # TODO: this should use a freshly created object and purge it afterwards.
  # But we need to write the create object feature still
  context "Individual user check" do
    it "Works on expected sane org/user pair" do
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be true
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org_name, adminuser.name)).to be true
    end
  end

  # Renamed from the copy-pasted duplicate description "Individual user check"
  # so failure output points at the right example group.
  context "Individual user breakage detection" do
    before :each do
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be true
    end

    # Restore the association state mutated by each example.
    after :each do
      usag = test_org.groups[adminuser.id]
      usag.group_add(adminuser)
      test_org.groups['users'].group_add(usag)
      adminuser.ace_add(:read, test_org.global_admins)
    end

    it "Detects user not associated" do
      # break it
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, notorguser)).to be :not_associated
    end

    # TODO: Write missing USAG test, but can't until we can restore the USAG or use disposable org

    it "Detects user missing from usag" do
      # break it
      usag = test_org.groups[adminuser.id]
      usag.group_delete(adminuser)
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be :user_not_in_usag
    end

    it "Detects usag missing from users group" do
      # break it
      usag = test_org.groups[adminuser.id]
      test_org.groups['users'].group_delete(usag)
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be :usag_not_in_users
    end

    it "Detects global admins missing read" do
      # break it
      adminuser.ace_delete(:read, test_org.global_admins)
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be :global_admins_lacks_read
    end

    # TODO test zombie invite; need some way to create it.
  end

  context "Individual user fixup" do
    before :each do
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be true
    end

    # Restore the association state mutated by each example.
    after :each do
      usag = test_org.groups[adminuser.id]
      usag.group_add(adminuser)
      test_org.groups['users'].group_add(usag)
      adminuser.ace_add(:read, test_org.global_admins)
    end

    it "Detects user not associated" do
      # break it
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, notorguser)).to be :not_associated
    end

    # TODO: Write missing USAG test, but can't until we can restore the USAG or use disposable org

    it "Fixes user missing from usag" do
      # break it
      usag = test_org.groups[adminuser.id]
      usag.group_delete(adminuser)
      expect(ChefFixie::CheckOrgAssociations.fix_association(test_org, adminuser)).to be true
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be true
    end

    it "Fixes usag missing from users group" do
      # break it
      usag = test_org.groups[adminuser.id]
      test_org.groups['users'].group_delete(usag)
      expect(ChefFixie::CheckOrgAssociations.fix_association(test_org, adminuser)).to be true
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be true
    end

    it "Fixes global admins missing read" do
      # break it
      adminuser.ace_delete(:read, test_org.global_admins)
      expect(ChefFixie::CheckOrgAssociations.fix_association(test_org, adminuser)).to be true
      expect(ChefFixie::CheckOrgAssociations.check_association(test_org, adminuser)).to be true
    end

    # TODO test zombie invite; need some way to create it.
  end

  # TODO Break the org and check it!
  context "Global org check" do
    it "Works on expected sane org" do
      expect(ChefFixie::CheckOrgAssociations.check_associations("acme")).to be true
      expect(ChefFixie::CheckOrgAssociations.check_associations(orgs["acme"])).to be true
    end
  end
end
| 31.453901 | 116 | 0.717024 |
8720e6adfbab2772928dfcd05e26c71dfd9bbd9e | 255 | class ApplicationController < ActionController::Base
# Render LDAP authentication failures as a plain 500 response instead of the
# default error page.
# NOTE(review): `render :text` was removed in Rails 5.1 — if this app is on a
# newer Rails this needs to become `render plain:`; confirm the Rails version.
rescue_from DeviseLdapAuthenticatable::LdapException do |exception|
render :text => exception, :status => 500
end
# Standard CSRF protection; raises on forged requests.
protect_from_forgery with: :exception
# Devise: require a signed-in user for every action by default.
before_action :authenticate_user!
end
| 31.875 | 69 | 0.796078 |
bb2ccb5bf80359a485f6f7d0c68eeab9b889267d | 7,254 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
# Available prefix information for the Logs endpoints.
class IPPrefixesLogs
# NOTE: generated by openapi-generator — prefer regenerating from the OpenAPI
# spec over editing this class by hand.
# List of IPv4 prefixes.
attr_accessor :prefixes_ipv4
# List of IPv6 prefixes.
attr_accessor :prefixes_ipv6
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'prefixes_ipv4' => :'prefixes_ipv4',
:'prefixes_ipv6' => :'prefixes_ipv6'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'prefixes_ipv4' => :'Array<String>',
:'prefixes_ipv6' => :'Array<String>'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::IPPrefixesLogs` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::IPPrefixesLogs`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
# Only Array values are accepted; non-Array input is silently ignored.
if attributes.key?(:'prefixes_ipv4')
if (value = attributes[:'prefixes_ipv4']).is_a?(Array)
self.prefixes_ipv4 = value
end
end
if attributes.key?(:'prefixes_ipv6')
if (value = attributes[:'prefixes_ipv6']).is_a?(Array)
self.prefixes_ipv6 = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
prefixes_ipv4 == o.prefixes_ipv4 &&
prefixes_ipv6 == o.prefixes_ipv6
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# Intentionally overrides Object#hash so equal models hash identically
# (pairs with #eql? above for Hash/Set membership).
# @return [Integer] Hash code
def hash
[prefixes_ipv4, prefixes_ipv6].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# Mutates self in place from a JSON-keyed hash; returns nil for non-Hash input.
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = DatadogAPIClient::V1.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 30.478992 | 214 | 0.632203 |
bb785fed4f0556cad932c4f6ea004b29977971c1 | 349 | module ActiveModel
# Reopens ActiveModel::Dirty (monkey-patch) to expose changed attributes as
# convenient hashes.
# NOTE(review): patching a framework module is fragile across Rails upgrades;
# consider a helper object or refinement instead.
module Dirty
# Returns { attribute_name => current_value } for every changed attribute.
def changes_attributes
HashWithIndifferentAccess[changed.map { |attr| [attr, __send__(attr)] }]
end
# Returns the changed attributes with values taken from as_json — presumably
# so serialized (type-cast) values win over raw attribute readers; confirm
# intent against callers.
def changes_attributes_as_json
ca, json = changes_attributes, self.as_json
json.each do |key, value|
ca[key.to_s] = value if ca.key?(key)
end
ca
end
end
end
| 20.529412 | 78 | 0.65043 |
5da5565401b5ac10c3f92b836f1835951309256d | 1,042 | # frozen_string_literal: true
module Sequel::Plugins::AttrEncrypted::SimpleCrypt
  extend self
  require "base64"

  # Field separator for the serialized "ciphertext$iv$auth_tag" payload.
  SEPARATOR = "$"
  # AES-256-GCM produces a 16-byte authentication tag (see #encrypt/#dump);
  # anything shorter presented to #decrypt is a truncation/forgery attempt.
  GCM_AUTH_TAG_LENGTH = 16

  # Encrypts +string+ with AES-256-GCM under +key+ (must be 32 bytes).
  # Returns a Base64-armored "ciphertext$iv$auth_tag" string, or nil when
  # +string+ is not a non-empty String.
  def encrypt(string, key)
    return unless string.is_a?(String) && !string.empty?
    encryptor = new_cipher(key, &:encrypt)
    iv = encryptor.random_iv
    encrypted = encryptor.update(string) + encryptor.final
    dump(encrypted, iv, encryptor.auth_tag)
  end

  # Decrypts a payload produced by #encrypt. Returns nil for structurally
  # invalid input; raises OpenSSL::Cipher::CipherError when authentication
  # fails (tampered ciphertext or wrong key).
  def decrypt(string, key)
    encrypted, iv, auth_tag = parse(string) if string.is_a?(String)
    return if [encrypted, iv, auth_tag].any?(&:nil?)
    # Security fix: reject truncated tags. OpenSSL verifies against whatever
    # tag length it is handed, so accepting a short tag would let an attacker
    # brute-force a forgery one byte at a time.
    return if auth_tag.bytesize != GCM_AUTH_TAG_LENGTH
    decryptor = new_cipher(key, &:decrypt)
    decryptor.iv = iv
    decryptor.auth_tag = auth_tag
    decryptor.update(encrypted) + decryptor.final
  end

  private

  # Builds an AES-256-GCM cipher; yields it so the caller can switch it into
  # encrypt or decrypt mode before the key is assigned.
  def new_cipher(key)
    result = OpenSSL::Cipher.new("aes-256-gcm")
    yield(result)
    result.key = key
    result
  end

  # Splits the armored payload and Base64-decodes each field.
  def parse(string)
    string.split(SEPARATOR).map { |x| Base64.strict_decode64(x) }
  end

  # Base64-encodes each field and joins with SEPARATOR.
  def dump(*values)
    Array(values).map { |x| Base64.strict_encode64(x) }.join(SEPARATOR)
  end
end
| 22.170213 | 71 | 0.683301 |
5d2603e3b36cec66cd70f5c6eaaaa56c4f7f6195 | 2,505 | require 'csv'
require 'google/apis/civicinfo_v2'
require 'erb'
require 'pry-byebug'
require 'date'
# Normalizes a zip code to exactly five characters: left-pads short or missing
# values with zeros and truncates anything longer.
def clean_zipcode(zipcode)
  zipcode.to_s.rjust(5, '0').slice(0, 5)
end
# Looks up federal legislators (senators + representatives) for a zip code via
# the Google Civic Information API.
# NOTE(review): the API key is hard-coded in source — move it to an environment
# variable or credential store before publishing this repository.
# On any API failure the bare rescue returns a fallback instruction String
# instead of an Array of officials; the ERB template must handle both shapes.
def legislators_by_zipcode(zip)
civic_info = Google::Apis::CivicinfoV2::CivicInfoService.new
civic_info.key = 'AIzaSyClRzDqDh5MsXwnCWi0kOiiBivP6JsSyBw'
begin
legislators = civic_info.representative_info_by_address(
address: zip,
levels: 'country',
roles: ['legislatorUpperBody', 'legislatorLowerBody']
).officials
rescue
'You can find your representatives by visiting www.commoncause.org/take-action/find-elected-officials'
end
end
# Writes one personalized thank-you letter to output/thanks_<id>.html,
# creating the output directory on first use.
def save_thank_you_letter(id, form_letter)
  Dir.mkdir('output') unless Dir.exist?('output')
  File.open("output/thanks_#{id}.html", 'w') { |file| file.puts(form_letter) }
end
# Normalizes a phone number to ten digits.
#
# Strips every non-digit character, then:
# - 10 digits: returned as-is
# - 11 digits with a leading country code "1": the "1" is dropped
# - anything else (too short, too long, or 11 digits not starting with "1"):
#   "Bad number!"
#
# Fixes the original fall-through that silently returned nil for an 11-digit
# number whose first digit was not "1".
def clean_phone(phone_number)
  digits = phone_number.to_s.tr('^0-9', '')
  case digits.length
  when 10
    digits
  when 11
    digits[0] == '1' ? digits[1..-1] : 'Bad number!'
  else
    'Bad number!'
  end
end
# Parses a registration timestamp like "11/12/2008 10:47" and returns the
# hour of day (0-23).
def extract_hour(reg_date)
  registered_at = DateTime.strptime(reg_date, "%m/%d/%Y %H:%M")
  registered_at.hour
end
# Parses a registration timestamp like "11/12/2008 10:47" and returns the
# day of week as an integer (0 = Sunday .. 6 = Saturday).
def extract_weekday(reg_date)
  registered_at = DateTime.strptime(reg_date, "%m/%d/%Y %H:%M")
  registered_at.wday
end
puts 'Event Manager Initialized!'
# Attendee CSV with symbolized headers (:first_name, :zipcode, :regdate, ...).
contents = CSV.open(
'event_attendees.csv',
headers: true,
header_converters: :symbol
)
template_letter = File.read('form_letter.erb')
erb_template = ERB.new template_letter
# Registration counts keyed by hour-of-day / weekday number; default 0.
peak_hours = Hash.new(0)
peak_weekdays = Hash.new(0)
contents.each do |row|
id = row[0]
name = row[:first_name]
zipcode = clean_zipcode(row[:zipcode])
phone = clean_phone(row[:homephone])
peak_hours[extract_hour(row[:regdate])] += 1
peak_weekdays[extract_weekday(row[:regdate])] += 1
legislators = legislators_by_zipcode(zipcode)
# binding exposes the locals above (name, legislators, ...) to the template.
form_letter = erb_template.result(binding)
save_thank_you_letter(id, form_letter)
end
# Re-key weekday counts from wday numbers to names. Hash#delete returns nil
# for days with no registrations; the loop below turns those into "0".
peak_weekdays["Sunday"] = peak_weekdays.delete 0
peak_weekdays["Monday"] = peak_weekdays.delete 1
peak_weekdays["Tuesday"] = peak_weekdays.delete 2
peak_weekdays["Wednesday"] = peak_weekdays.delete 3
peak_weekdays["Thursday"] = peak_weekdays.delete 4
peak_weekdays["Friday"] = peak_weekdays.delete 5
peak_weekdays["Saturday"] = peak_weekdays.delete 6
peak_weekdays.each do |key, value|
peak_weekdays[key] = "0" if value.nil?
end
puts peak_hours
puts peak_weekdays
# next: Assignment: Clean Phone Numbers
| 24.80198 | 106 | 0.735329 |
01b8973b1feb8432f45375b8ac7a12f70a3132a4 | 3,148 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
# Auto-generated schema snapshot; change the database via migrations, not here.
ActiveRecord::Schema.define(version: 2021_11_12_074006) do
# ActiveStorage: polymorphic join between records and blobs.
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.integer "record_id", null: false
t.integer "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
# ActiveStorage: uploaded file metadata.
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
# User posts; composite index supports the per-user feed ordered by time.
create_table "microposts", force: :cascade do |t|
t.text "content"
t.integer "user_id", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["user_id", "created_at"], name: "index_microposts_on_user_id_and_created_at"
t.index ["user_id"], name: "index_microposts_on_user_id"
end
# Follower/followed pairs; the unique composite index prevents duplicate follows.
create_table "relationships", force: :cascade do |t|
t.integer "follower_id"
t.integer "followed_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["followed_id"], name: "index_relationships_on_followed_id"
t.index ["follower_id", "followed_id"], name: "index_relationships_on_follower_id_and_followed_id", unique: true
t.index ["follower_id"], name: "index_relationships_on_follower_id"
end
# Accounts with has_secure_password digest plus remember/activation/reset tokens.
create_table "users", force: :cascade do |t|
t.string "name"
t.string "email"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "password_digest"
t.string "remember_digest"
t.boolean "admin", default: false
t.string "activation_digest"
t.boolean "activated", default: false
t.datetime "activated_at"
t.string "reset_digest"
t.datetime "reset_sent_at"
t.index ["email"], name: "index_users_on_email", unique: true
end
add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
add_foreign_key "microposts", "users"
end
| 42.540541 | 126 | 0.728399 |
7957a7bfbc70f183858ccbb3a7367b51034842c7 | 752 | # frozen_string_literal: true
require_relative '../app/credentials'
require_relative '../app/s3'
require 'aws-sdk-s3'
# Specs for Credentials AWS profile-name resolution.
describe Credentials do
  it 'can get the AWS profile name' do
    allow(System).to receive(:log)
    profile_name = Credentials.profile_name
    # Fix: `expect(profile_name).is_a? String` only called is_a? on the
    # ExpectationTarget wrapper and never asserted anything; use the matcher.
    expect(profile_name).to be_a(String)
  end

  context 'missing profile' do
    before(:each) do
      allow(System).to receive(:log)
      # Each example below is expected to bail out via System.exit.
      expect(System).to receive(:exit)
    end

    it 'elegantly handles missing profile' do
      allow(System).to receive(:environment_var).and_return(nil)
      Credentials.profile_name
    end

    it 'elegantly handles empty profile' do
      allow(System).to receive(:environment_var).and_return('')
      Credentials.profile_name
    end
  end
end
| 24.258065 | 64 | 0.706117 |
7ab805a30180b3e0ce0877b1ffa446a6af70a207 | 3,830 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# AutoRest-generated client; regenerate from the Swagger spec rather than
# hand-editing.
module Azure::Storage::Mgmt::V2018_02_01
  #
  # The Azure Storage Management API.
  #
  class Operations
    include MsRestAzure

    #
    # Creates and initializes a new instance of the Operations class.
    # @param client service class for accessing basic functionality.
    #
    def initialize(client)
      @client = client
    end

    # @return [StorageManagementClient] reference to the StorageManagementClient
    attr_reader :client

    #
    # Lists all of the available Storage Rest API operations.
    #
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [OperationListResult] operation results.
    #
    def list(custom_headers:nil)
      response = list_async(custom_headers:custom_headers).value!
      response.body unless response.nil?
    end

    #
    # Lists all of the available Storage Rest API operations.
    #
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
    #
    # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
    #
    def list_with_http_info(custom_headers:nil)
      list_async(custom_headers:custom_headers).value!
    end

    #
    # Lists all of the available Storage Rest API operations.
    #
    # @param [Hash{String => String}] A hash of custom headers that will be added
    # to the HTTP request.
    #
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def list_async(custom_headers:nil)
      fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
      fail ArgumentError, "'@client.api_version' should satisfy the constraint - 'MinLength': '1'" if !@client.api_version.nil? && @client.api_version.length < 1

      request_headers = {}
      request_headers['Content-Type'] = 'application/json; charset=utf-8'

      # Set Headers
      request_headers['x-ms-client-request-id'] = SecureRandom.uuid
      request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
      path_template = 'providers/Microsoft.Storage/operations'

      request_url = @base_url || @client.base_url

      options = {
          middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
          query_params: {'api-version' => @client.api_version},
          headers: request_headers.merge(custom_headers || {}),
          base_url: request_url
      }
      promise = @client.make_request_async(:get, path_template, options)

      promise = promise.then do |result|
        http_response = result.response
        status_code = http_response.status
        response_content = http_response.body
        # Any non-200 body is parsed as an Azure error payload.
        unless status_code == 200
          error_model = JSON.load(response_content)
          fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
        end

        result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
        # Deserialize Response
        if status_code == 200
          begin
            parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
            result_mapper = Azure::Storage::Mgmt::V2018_02_01::Models::OperationListResult.mapper()
            result.body = @client.deserialize(result_mapper, parsed_response)
          # NOTE(review): rescuing Exception (not StandardError) is normally an
          # anti-pattern, but this is generated code -- regenerate, don't patch.
          rescue Exception => e
            fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
          end
        end

        result
      end

      promise.execute
    end

  end
end
| 35.137615 | 161 | 0.679373 |
6a9e84227bde528b994ebeda4c23e26291a6d092 | 414 | module Abilities
class AuthorizedVendorAbility
include CanCan::Ability
def initialize(user, organization_ids=[])
#-------------------------------------------------------------------------
# Vendors
#-------------------------------------------------------------------------
can :manage, Vendor do |v|
v.organization_id == user.organization_id
end
end
end
end | 25.875 | 80 | 0.405797 |
b9743bf23c0004885dcd0c294ad04d23a947b2be | 13,204 | #
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path('amqp_clients_wrapper', File.dirname(__FILE__))
module XYZ
class WorkerTask
attr_reader :input_msg, :status, :return_code, :errors, :log_entries
attr_accessor :parent_task, :results
private
def initialize(input_msg, opts)
@input_msg = input_msg
@caller_channel = nil
@msg_bus_client = nil
@status = :initiated
@parent_task = nil
@return_code = nil
@results = opts[:results] #results can be set if caller fn did the computation
@errors = []
@log_entries = []
# TBD: what about events and logged msgs
end
public
def add_reply_to_info(caller_channel, msg_bus_client)
@caller_channel = caller_channel
@msg_bus_client = msg_bus_client
extend InstanceMixinReplyToCaller
end
def add_log_entry(type, params = {})
@log_entries << WorkerTaskLogEntry.create(type, params)
end
def self.create(type, input_msg, opts = {})
case type
when :basic
WorkerTaskBasic.create(input_msg, opts)
when :local
WorkerTaskLocal.new(input_msg, opts)
when :remote
WorkerTaskRemote.new(input_msg, opts)
when :task_set
WorkerTaskSet.create(input_msg, opts)
else
fail Error.new("#{type} is not a legal worker task type")
end
end
def process_task_finished
@status = :complete #TBD: where to we distinguish whether error or not
if @parent_task
# if within parent task, signal this (sub) task finished
# if last task finished and there is a caller channel tehn this
# below wil reply to caller; so not needed explicitly here
@parent_task.process_child_task_finished()
else
reply_to_caller() if @caller_channel
end
end
def add_routing_info(_opts)
# no op (unless overwritten)
end
def set_so_can_run_concurrently
# no op (unless overwritten)
end
private
def reply_to_caller
# no op (unless overwritten)
end
end
module InstanceMixinReplyToCaller
private
def reply_to_caller
fail Error.new('cannot call reply_to_caller() if caller_channel not set') unless @caller_channel
fail Error.new('cannot call reply_to_caller() if msg_bus_client not set') unless @msg_bus_client
reply_queue = @msg_bus_client.publish_queue(@caller_channel, passive: true)
input_msg_reply = ProcessorMsg.create({ msg_type: :task })
# TBD: stub; want to strip out a number of these fields
# only send a subset of task info
task = WorkerTaskWireSubset.new(self)
reply_queue.publish(input_msg_reply.marshal_to_message_bus_msg(),
{ message_id: @caller_channel, task: task })
end
end
class WorkerTaskSet < WorkerTask
attr_reader :subtasks
def self.create(input_msg, opts = {})
temporal_sequencing = opts[:temporal_sequencing] || :concurrent
case temporal_sequencing
when :concurrent
WorkerTaskSetConcurrent.new(input_msg, opts)
when :sequential
WorkerTaskSetSequential.new(input_msg, opts)
else
fail Error.new("#{temporal_sequencing} is an illegal temporal sequencing type")
end
end
private
def initialize(input_msg, opts = {})
super(input_msg, opts)
@subtasks = []
@num_tasks_not_complete = 0
end
public
def add_task(task)
task.set_so_can_run_concurrently if self.is_a?(WorkerTaskSetConcurrent)
@subtasks << task
task.parent_task = self
@num_tasks_not_complete += 1
end
def add_routing_info(opts)
@subtasks.each { |t| t.add_routing_info(opts) }
end
def determine_local_and_remote_tasks!(worker)
@subtasks.each { |t| t.determine_local_and_remote_tasks!(worker) }
end
end
class WorkerTaskSetConcurrent < WorkerTaskSet
def execute
if @subtasks.size > 0
# process_task_finished() triggered by last complete subtasks
@subtasks.each(&:execute)
else
process_task_finished()
end
end
def process_child_task_finished
@num_tasks_not_complete -= 1
if @num_tasks_not_complete < 1
# Log.debug_pp [:finished,WorkerTaskWireSubset.new(self)]
Log.debug_pp [:finished, WorkerTaskWireSubset.new(self).flatten()]
process_task_finished()
end
end
end
class WorkerTaskSetSequential < WorkerTaskSet
def execute
if @subtasks.size > 0
# start first sub task and this will set chain that callss subsequent ones
@subtasks.first.execute()
else
process_task_finished()
end
end
def process_child_task_finished
@num_tasks_not_complete -= 1
if @num_tasks_not_complete < 1
# Log.debug_pp [:finished,WorkerTaskWireSubset.new(self)]
Log.debug_pp [:finished, WorkerTaskWireSubset.new(self).flatten()]
process_task_finished()
else
i = @subtasks.size - @num_tasks_not_complete
@subtasks[i].execute()
end
end
end
class WorkerTaskBasic < WorkerTask
attr_reader :task_type
private
def initialize(input_msg, opts = {})
super(input_msg, opts)
@task_type = nil
end
public
def self.create(input_msg, opts = {})
WorkerTaskBasic.new(input_msg, opts)
end
def determine_local_and_remote_tasks!(_worker)
# no op if basic types extended already
return nil unless self.class == WorkerTaskBasic
# TBD: stub where can optimize by appropriately making thsi local call
extend_as_remote()
end
private
def extend_as_remote
@task_type = :remote
extend MixinWorkerTaskRemote if self.class == WorkerTaskBasic
initialize_remote()
end
def extend_as_local
@task_type = :local
extend MixinWorkerTaskLocal if self.class == WorkerTaskBasic
initialize_local()
end
end
module MixinWorkerTaskRemote
attr_reader :delegated_task
def initialize_remote
@queue_name = nil
@exchange_name = nil
# opts related to creating queue or exchange
# legal values: [:type, :passive, :durable, :exclusive, :auto_delete, :nowait, :internal]
@create_opts = {}
# legal values: [:key, :reply_timeout]
@publish_opts = {}
# gets task status results back from delagated worker
@delegated_task = nil
end
def add_routing_info(opts)
@msg_bus_client = opts[:msg_bus_client]
# TDB: stub to find queue or exchange to publish on
if @input_msg.msg_type == :execute_on_node
@queue_name = @input_msg.key()
@create_opts[:passive] = true
else
@exchange_name = @input_msg.topic()
@create_opts[:type] = :topic
@publish_opts[:key] = @input_msg.key()
end
end
def execute
queue_or_exchange = ret_queue_or_exchange()
@status = :started
msg_bus_msg_out = @input_msg.marshal_to_message_bus_msg()
Log.debug_pp [:sending_to,
@queue_name ? "queue #{@queue_name}" : "exchange #{@exchange_name}",
msg_bus_msg_out]
queue_or_exchange.publish_with_callback(msg_bus_msg_out, @publish_opts) do |trans_info, msg_bus_msg_in|
Log.debug_pp [:received_from, trans_info, msg_bus_msg_in]
@delegated_task = trans_info[:task]
process_task_finished()
end
rescue Error::AMQP::QueueDoesNotExist => e
if @input_msg.is_a?(ProcessorMsg) and @input_msg.msg_type == :execute_on_node
@errors << WorkerTaskErrorNodeNotConnected.new(e.queue_name.to_i())
else
@errors << WorkerTaskError.new(e)
end
process_task_finished()
rescue Exception => e
@errors << WorkerTaskError.new(e)
# Log.debug_pp [:error,e,e.backtrace]
process_task_finished()
end
private
# can throw an error (e.g., if passive and queue does not exist)
def ret_queue_or_exchange
if @queue_name
@msg_bus_client.publish_queue(@queue_name, @create_opts || {})
else # #@exchange_name
@msg_bus_client.exchange(@exchange_name, @create_opts || {})
end
end
end
class WorkerTaskRemote < WorkerTaskBasic
include MixinWorkerTaskRemote
def initialize(input_msg, opts = {})
super(input_msg, opts)
extend_as_remote()
end
end
module MixinWorkerTaskLocal
def initialize_local
@work = proc {}
@run_in_new_thread_or_fork = nil
end
def add_work_info(opts)
@work = proc do
begin
opts[:work].call()
rescue Exception => e
Log.debug_pp [e, e.backtrace]
# TBD: since this can be arbitrary error as stop gap measure converting to_s; ?: should I do same for errors with different tasks or only needed here because can get arbitrary for example chef errors
@errors << WorkerTaskError.new(e.to_s)
:failed
end
end
@run_in_new_thread_or_fork = opts[:run_in_new_thread_or_fork]
end
def set_so_can_run_concurrently
@run_in_new_thread_or_fork = true
end
def execute
# modified from design pattern from right_link
callback = proc do |results|
@results = results
@return_code = @errors.empty?() ? :succeeded : :failed
process_task_finished()
end
if @run_in_new_thread_or_fork
EM.defer(@work, callback)
else
# TBD: using next_tick is from rightlink design pattern; need to work through all shuffles to
# see if this is what we want
EM.next_tick { callback.call(@work.call()) }
end
end
end
class WorkerTaskLocal < WorkerTaskBasic
include MixinWorkerTaskLocal
def initialize(input_msg, opts = {})
super(input_msg, opts)
extend_as_local()
add_work_info(opts)
end
end
class WorkerTaskError
def initialize(err_obj)
@error = err_obj
end
end
class WorkerTaskErrorNodeNotConnected < WorkerTaskError
def initialize(node_guid)
@node_guid = node_guid
end
end
class WorkerTaskLogEntry < HashObject
def self.create(type, params = {})
WorkerTaskLogEntry.new(type, params)
end
private
def initialize(type, params = {})
super(params.merge(type: type))
end
end
end
# TBD: reconcile this, what is above in task in db model; this might be same as what is in db; or this may be object in
# TBD: may rename WorkTaskStatus, although may be better word because not exactly "snapshot"
module XYZ
class WorkerTaskWireSubset < HashObject
def initialize(tsk)
super({})
self[:input_msg] = tsk.input_msg
self[:status] = tsk.status if tsk.status
self[:return_code] = tsk.return_code if tsk.return_code
self[:errors] = tsk.errors if tsk.errors and !tsk.errors.empty?()
self[:log_entries] = tsk.log_entries if tsk.log_entries and !tsk.log_entries.empty?()
if tsk.is_a?(WorkerTaskBasic) and tsk.task_type == :remote
self[:delegated_task] = tsk.delegated_task
elsif tsk.is_a?(WorkerTaskSet)
self[:subtasks] = tsk.subtasks.map { |t| WorkerTaskWireSubset.new(t) }
end
# TBD: whether results should come back in task info or in other spot, such as
# place where it could be called in a block after request
self[:results] = tsk.results if tsk.results
end
# TBD: rather than having wire object with delegation links, may flatten while producing
# TBD: may write so that if subtasks or delegated parts falttened already its a no op
# flatten removes the links to delegated
def flatten
ret =
if self[:delegated_task]
self[:delegated_task].flatten()
else
{}
end
input_msg = nil
if self[:input_msg].is_a?(ProcessorMsg)
input_msg = { msg_type: self[:input_msg].msg_type }
input_msg[:msg_content] = self[:input_msg].msg_content unless self[:input_msg].msg_content.empty?
else
input_msg = self[:input_msg]
end
ret[:input_msg] ||= input_msg
ret[:status] ||= self[:status] if self[:status]
ret[:return_code] ||= self[:return_code] if self[:return_code]
ret[:errors] ||= self[:errors] if self[:errors]
ret[:log_entries] ||= self[:log_entries] if self[:log_entries]
ret[:results] ||= self[:results] if self[:results]
ret[:subtasks] ||= self[:subtasks].map(&:flatten) if self[:subtasks] and !self[:subtasks].empty?
ret
end
end
end | 30.494226 | 209 | 0.664647 |
f71e4a553db7a0f7dd89df925b260be201b6a19d | 300 | class CreateApplicationInterviewers < ActiveRecord::Migration
def self.up
create_table :application_interviewers do |t|
t.integer :application_for_offering_id
t.integer :person_id
t.timestamps
end
end
def self.down
drop_table :application_interviewers
end
end
| 20 | 61 | 0.743333 |
61ed8b4af43218da5debcafd3b093fa9ae8ebfc1 | 2,417 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataFactory::Mgmt::V2018_06_01
module Models
#
# A copy activity source for web page table.
#
class WebSource < CopySource
include MsRestAzure
def initialize
@type = "WebSource"
end
attr_accessor :type
#
# Mapper for WebSource class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'WebSource',
type: {
name: 'Composite',
class_name: 'WebSource',
model_properties: {
additional_properties: {
client_side_validation: true,
required: false,
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'ObjectElementType',
type: {
name: 'Object'
}
}
}
},
source_retry_count: {
client_side_validation: true,
required: false,
serialized_name: 'sourceRetryCount',
type: {
name: 'Object'
}
},
source_retry_wait: {
client_side_validation: true,
required: false,
serialized_name: 'sourceRetryWait',
type: {
name: 'Object'
}
},
max_concurrent_connections: {
client_side_validation: true,
required: false,
serialized_name: 'maxConcurrentConnections',
type: {
name: 'Object'
}
},
type: {
client_side_validation: true,
required: true,
serialized_name: 'type',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 26.855556 | 70 | 0.445594 |
3847aec3cda752a99fce5693bbfc6e551d08e7d7 | 378 | class HomeController < ApplicationController
def index
end
def contact_us
end
def menu
@sections = Section.all
@food_items = FoodItem.get_menu_by_params(params[:section], params[:orderby])
# if params[:section]
# @food_items = Section.where(section_name: params[:section]).take.food_items
# else
# @food_items = FoodItem.all
#end
end
end
| 18.9 | 81 | 0.698413 |
79fdf14dc8f1fc4c3753922c889387ef1d382855 | 797 | class ErrorsController < ApplicationController
def not_found
respond_to do |format|
format.json { render json: { status: 404, error: 'Not Found' } }
format.html { render(status: 404) }
end
end
def unprocessable
respond_to do |format|
format.json { render json: { status: 422, error: 'Unprocessable Entity' } }
format.html { render(status: 422) }
end
end
def internal_server_error
respond_to do |format|
format.json { render json: { status: 500, error: 'Internal Server Error' } }
format.html { render(status: 500) }
end
end
def service_unavailable
respond_to do |format|
format.json { render json: { status: 503, error: 'Service Unavailable' } }
format.html { render(status: 503) }
end
end
end
| 23.441176 | 82 | 0.646173 |
e841dd80b675889630085b8632f6eb3dc16179c3 | 2,410 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::Crm::OrganizationsController do
  let_it_be(:user) { create(:user) }

  shared_examples 'response with 404 status' do
    it 'returns 404' do
      subject

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  shared_examples 'ok response with index template' do
    it 'renders the index template' do
      subject

      expect(response).to have_gitlab_http_status(:ok)
      expect(response).to render_template(:index)
    end
  end

  # Full authorization matrix: reporter on a CRM-enabled private root group
  # gets the page; everyone/everything else 404s (or redirects to sign-in).
  shared_examples 'ok response with index template if authorized' do
    context 'private group' do
      let(:group) { create(:group, :private, :crm_enabled) }

      context 'with authorized user' do
        before do
          group.add_reporter(user)
          sign_in(user)
        end

        context 'when crm_enabled is true' do
          it_behaves_like 'ok response with index template'
        end

        context 'when crm_enabled is false' do
          let(:group) { create(:group, :private) }

          it_behaves_like 'response with 404 status'
        end

        context 'when subgroup' do
          let(:group) { create(:group, :private, :crm_enabled, parent: create(:group)) }

          it_behaves_like 'response with 404 status'
        end
      end

      context 'with unauthorized user' do
        before do
          sign_in(user)
        end

        it_behaves_like 'response with 404 status'
      end

      context 'with anonymous user' do
        # Fixed: renamed from the meaningless `it 'blah'`.
        it 'redirects to the sign in page' do
          subject

          expect(response).to have_gitlab_http_status(:found)
          expect(response).to redirect_to(new_user_session_path)
        end
      end
    end

    context 'public group' do
      let(:group) { create(:group, :public, :crm_enabled) }

      context 'with anonymous user' do
        it_behaves_like 'response with 404 status'
      end
    end
  end

  describe 'GET #index' do
    subject { get group_crm_organizations_path(group) }

    it_behaves_like 'ok response with index template if authorized'
  end

  describe 'GET #new' do
    subject { get new_group_crm_organization_path(group) }

    it_behaves_like 'ok response with index template if authorized'
  end

  describe 'GET #edit' do
    subject { get edit_group_crm_organization_path(group, id: 1) }

    it_behaves_like 'ok response with index template if authorized'
  end
end
| 24.845361 | 88 | 0.656432 |
386efe23bd3bee40f61544e5b96537f2f8c82b6e | 2,691 | module Rack::Test::Assertions
RESPONSE_CODES = {
:ok => 200,
:not_authorized => 401,
:not_found => 404,
:redirect => 302
}
CONTENT_TYPES = {
:json => "application/json;charset=utf-8",
:html => "text/html;charset=utf-8"
}
def assert_body_contains(expected, message=nil)
msg = build_message(message, "expected body to contain <?>\n#{last_response.body}", expected)
assert_block(msg) do
last_response.body.include?(expected)
end
end
def assert_content_type(content_type)
unless CONTENT_TYPES.keys.include?(content_type)
raise ArgumentError, "unrecognized content_type (#{content_type})"
end
assert_equal CONTENT_TYPES[content_type], last_response.content_type
end
def assert_flash(type=:notice, message=nil)
msg = build_message(message, "expected <?> flash to exist, but was nil", type.to_s)
assert_block(msg) do
last_request.env['rack.session']['flash']
end
end
def assert_flash_message(expected, type=:notice, message=nil)
assert_flash(type, message)
flash = last_request.env['rack.session']['flash'][type.to_s]
msg = build_message(message, "expected flash to be <?> but was <?>", expected, flash)
assert_block(msg) do
expected == flash
end
end
def assert_has_session(message=nil)
msg = build_message(message, "expected a valid session")
assert_block(msg) do
last_request.env["rack.session"]
end
end
def assert_session_has_key(key, message=nil)
assert_has_session
msg = build_message(message, "expected session to have key named <?>", key)
assert_block(msg) do
last_request.env["rack.session"].keys.include?(key.to_s)
end
end
def assert_session(key, expected, message=nil)
assert_session_has_key(key)
actual = last_request.env["rack.session"][key.to_s]
msg = build_message(message, "expected session key <?> to be <?>, but was <?>", key, expected, actual)
assert_block(msg) do
expected == actual
end
end
def assert_response(expected, message=nil)
status = last_response.status
msg = build_message(
message,
"expected last response to be <?> but was <?>",
"#{RESPONSE_CODES[expected]}:#{expected}",
"#{status}:#{RESPONSE_CODES.key(status)}"
)
assert_block(msg) do
status == RESPONSE_CODES[expected]
end
end
def assert_redirected_to(expected, msg=nil)
assert_response(:redirect)
actual = URI(last_response.location).path
msg = build_message(message, "expected to be redirected to <?> but was <?>", expected, actual)
assert_block(msg) do
expected == actual
end
end
end
| 28.935484 | 106 | 0.672241 |
799c33212077582eecb69af1c007001d1437eca9 | 3,111 | module Matching
class Engine
attr :orderbook, :mode, :queue
delegate :ask_orders, :bid_orders, to: :orderbook
def initialize(market, options={})
@market = market
@orderbook = OrderBookManager.new(market.id)
# Engine is able to run in different mode:
# dryrun: do the match, do not publish the trades
# run: do the match, publish the trades (default)
shift_gears(options[:mode] || :run)
end
def submit(order)
book, counter_book = orderbook.get_books order.type
match order, counter_book
add_or_cancel order, book
rescue => e
Rails.logger.error "Failed to submit order #{order.label}."
report_exception(e)
end
def cancel(order)
book, counter_book = orderbook.get_books order.type
if removed_order = book.remove(order)
publish_cancel removed_order, "cancelled by user"
else
Rails.logger.warn "Cannot find order##{order.id} to cancel, skip."
end
rescue => e
Rails.logger.error "Failed to cancel order #{order.label}."
report_exception(e)
end
def limit_orders
{ ask: ask_orders.limit_orders,
bid: bid_orders.limit_orders }
end
def market_orders
{ ask: ask_orders.market_orders,
bid: bid_orders.market_orders }
end
def shift_gears(mode)
case mode
when :dryrun
@queue = []
class <<@queue
def enqueue(*args)
push args
end
end
when :run
@queue = AMQPQueue
else
raise "Unrecognized mode: #{mode}"
end
@mode = mode
end
private
def match(order, counter_book)
return if order.filled?
counter_order = counter_book.top
return unless counter_order
if trade = order.trade_with(counter_order, counter_book)
counter_book.fill_top *trade
order.fill *trade
publish order, counter_order, trade
match order, counter_book
end
end
def add_or_cancel(order, book)
return if order.filled?
order.is_a?(LimitOrder) ?
book.add(order) : publish_cancel(order, "fill or kill market order")
end
def publish(order, counter_order, trade)
ask, bid = order.type == :ask ? [order, counter_order] : [counter_order, order]
price = @market.fix_number_precision :bid, trade[0]
volume = @market.fix_number_precision :ask, trade[1]
funds = trade[2]
Rails.logger.info "[#{@market.id}] new trade - ask: #{ask.label} bid: #{bid.label} price: #{price} volume: #{volume} funds: #{funds}"
@queue.enqueue(
:trade_executor,
{market_id: @market.id, ask_id: ask.id, bid_id: bid.id, strike_price: price, volume: volume, funds: funds},
{persistent: false}
)
end
def publish_cancel(order, reason)
Rails.logger.info "[#{@market.id}] cancel order ##{order.id} - reason: #{reason}"
@queue.enqueue(
:order_processor,
{action: 'cancel', order: order.attributes},
{persistent: false}
)
end
end
end
| 26.589744 | 139 | 0.617486 |
014fc24cd8e14a15806e0a04a9758f7abb7a2f44 | 672 | require 'fileutils'
module MiqLdapToSssd
  class AuthTemplateFilesError < StandardError; end

  # Locates the appliance TEMPLATE directory and names the configuration
  # directories that the LDAP-to-SSSD conversion writes into.
  class AuthTemplateFiles
    TEMPLATE_DIR = "/var/www/miq/system/TEMPLATE".freeze
    # Fallback template location (used when TEMPLATE_DIR does not exist).
    ALT_TEMPLATE_DIR = "/opt/rh/cfme-appliance/TEMPLATE".freeze
    HTTPD_CONF_DIR = "/etc/httpd/conf.d".freeze
    PAM_CONF_DIR = "/etc/pam.d".freeze
    SSSD_CONF_DIR = "/etc/sssd".freeze

    attr_reader :initial_settings, :template_dir

    def initialize(initial_settings)
      LOGGER.debug("Invoked #{self.class}\##{__method__}")

      @initial_settings = initial_settings
      # Prefer the primary template dir; fall back to the alternate path.
      @template_dir = Dir.exist?(TEMPLATE_DIR) ? TEMPLATE_DIR : ALT_TEMPLATE_DIR
    end
  end
end
| 28 | 80 | 0.712798 |
e2f11e5093e8f0b5e0e9a19709b59d36e45c7fca | 122 | json.array!(@images) do |image|
json.extract! image, :id, :url, :user_id
json.url image_url(image, format: :json)
end
| 24.4 | 42 | 0.688525 |
38323cbccbe7519e7b2c7497ea0544bf0dcbbe79 | 333 | #
# Cookbook Name:: application_rails
# Recipe:: rails
#
# Orchestrates a Rails app deploy by chaining the cookbook's sub-recipes.

# Fetch the application source only when git integration is enabled.
include_recipe 'application_rails::clone' if node['application_rails']['git_integration']

# Install required gems
include_recipe "application_rails::gems"

# Configure and install app
include_recipe "application_rails::install"

# Start App Server (currently disabled)
#include_recipe "::start"
| 20.8125 | 89 | 0.783784 |
ac2293ede57a78dd81258f00bc74df2cde141eac | 5,226 | require 'aws-sdk-core'
require 'keystore'

# Integration step definitions: these exercise a real DynamoDB table and KMS
# key, configured entirely through environment variables.

# Timestamped key/value so repeated test runs never collide in the table.
timestamp = Time.now.strftime '%Y%m%d%H%M%S'
ts_key = "testkey#{timestamp}"
ts_val = "testvalue#{timestamp}"

Given(/^test data to use$/) do
  @key = ts_key
  @value = ts_val
end

# The remaining Givens read required settings from ENV and fail fast
# (bare raise) when one is missing.
Given(/^a region to operate in$/) do
  @region = ENV['region']
  raise if @region.nil?
end

Given(/^a KMS key id or KMS key alias to use$/) do
  @key_id = ENV['key_id']
  @key_alias = ENV['key_alias']
  raise if @key_id.nil? && @key_alias.nil?
end

Given(/^a DynamoDB table to use$/) do
  @table_name = ENV['table_name']
  raise if @table_name.nil?
end
When(/^I store a value in the keystore$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms, key_id: @key_id, key_alias: @key_alias
  keystore.store key: @key, value: @value
end

Then(/^I should see that encrypted data in the raw data store$/) do
  # Read the item straight from DynamoDB: it must exist but must NOT contain
  # the plaintext value (i.e. it was encrypted at rest).
  name = { 'ParameterName' => @key }
  @result = @dynamo.get_item(table_name: @table_name, key: name).item
  expect(@result).to be
  expect(@result['Value']).not_to eq @value
end

When(/^I retrieve a value from the keystore$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms, key_id: @key_id, key_alias: @key_alias
  keystore.store key: @key, value: @value
  @result = keystore.retrieve key: @key
  expect(@result).to be
end

Then(/^I should get that data back in plaintext$/) do
  # Retrieval needs no key id/alias; decryption uses the stored metadata.
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms
  @result = keystore.retrieve key: @key
  expect(@result).to eq @value
end

When(/^I retrieve a value using the command line interface$/) do
  # Seed the data to look up, then shell out to the CLI under test.
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms, key_id: @key_id, key_alias: @key_alias
  keystore.store key: "#{@key}-cli", value: @value
  command = "bin/keystore.rb retrieve --table #{@table_name} --keyname #{@key}-cli"
  `#{command}`
end

Then(/^I should get that CLI entered data back in plaintext$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms
  @result = keystore.retrieve key: "#{@key}-cli"
  expect(@result).to eq @value
end
When(/^I store a value using the command line interface$/) do
  # The CLI accepts either a key id or a key alias; prefer the id when set.
  if @key_id
    kmsoption = "--kmsid #{@key_id}"
  else
    kmsoption = "--kmsalias #{@key_alias}"
  end
  command = "bin/keystore.rb store --table #{@table_name} --keyname #{@key}-cli --value #{@value}-cli #{kmsoption}"
  `#{command}`
end

Then(/^I should see that encrypted data from the CLI in the raw data store$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  name = { 'ParameterName' => "#{@key}-cli" }
  @result = @dynamo.get_item(table_name: @table_name, key: name).item
  expect(@result.nil?).to be false
  expect(@result['Value']).not_to eq @value
end

# Empty-string values are a supported edge case: they must round-trip as ''.
When(/^I store an empty value in the keystore$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms, key_id: @key_id, key_alias: @key_alias
  keystore.store key: @key, value: ''
end

When(/^I retrieve an empty value from the keystore$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms, key_id: @key_id, key_alias: @key_alias
  keystore.store key: @key, value: ''
  @result = keystore.retrieve key: @key
  expect(@result).to be
  expect(@result.empty?).to be true
end

Then(/^I should get an empty string back$/) do
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms
  @result = keystore.retrieve key: @key
  expect(@result).to eq ''
end

When(/^I store a blank value using the command line interface$/) do
  if @key_id
    kmsoption = "--kmsid #{@key_id}"
  else
    kmsoption = "--kmsalias #{@key_alias}"
  end
  command = "bin/keystore.rb store --table #{@table_name} --keyname #{@key}-cli #{kmsoption} --value ''"
  `#{command}`
end

When(/^I retrieve a blank value using the command line interface$/) do
  # Seed the empty value, then shell out to the CLI under test.
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms, key_id: @key_id, key_alias: @key_alias
  keystore.store key: "#{@key}-cli", value: ''
  command = "bin/keystore.rb retrieve --table #{@table_name} --keyname #{@key}-cli"
  `#{command}`
end

Then(/^I should get an empty string back in plaintext$/) do
  @dynamo = Aws::DynamoDB::Client.new region: @region
  @kms = Aws::KMS::Client.new region: @region
  keystore = Keystore.new dynamo: @dynamo, table_name: @table_name, kms: @kms
  @result = keystore.retrieve key: "#{@key}-cli"
  expect(@result.empty?).to be true
end
| 35.55102 | 117 | 0.683506 |
4a7db09a7e7356052e865757d91e2eda2a76ce8b | 1,134 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe User do
  describe 'guests_without_bookmarks scope' do
    let!(:guest_user_with_bookmarks) { FactoryBot.create(:user, guest: true) }
    let!(:non_guest_user) { FactoryBot.create(:user, guest: false) }

    before do
      Bookmark.create(document: SolrDocument.new('stanford-abc123'), user: guest_user_with_bookmarks)
      # Ten guest users with no bookmarks -- the population the scope targets.
      10.times do
        FactoryBot.create(:user, guest: true)
      end
    end

    it 'does not include non-guest users' do
      expect(User.guests_without_bookmarks.pluck(:id)).not_to include(non_guest_user.id)
    end

    it 'does not include guest users who have bookmarks' do
      expect(User.guests_without_bookmarks.pluck(:id)).not_to include(guest_user_with_bookmarks.id)
    end

    it 'includes guest users who do not have bookmarks' do
      # 12 total = 10 plain guests + bookmark-holding guest + non-guest;
      # `be 12` is an identity check, which is fine for small Integers.
      expect(User.count).to be 12
      expect(User.guests_without_bookmarks.count).to be 10
    end
  end

  describe '#sunet' do
    it 'returns just the SUNet part of the email address' do
      expect(User.new(email: '[email protected]').sunet).to eq 'jstanford'
    end
  end
end
| 30.648649 | 101 | 0.713404 |
7a8c7b06cc14693322fdd433fcabef8ba1fc8009 | 5,865 | require 'benchmark'
module RedisPrometheus
  # Rack middleware that samples per-request duration/outcome counters into
  # Redis and serves them in Prometheus text exposition format from a
  # token-protected /metrics/<REDIS_PROMETHEUS_TOKEN> endpoint.
  class Middleware
    # app: next Rack application in the chain. options is accepted but unused.
    def initialize(app, options = {})
      @app = app
    end

    # Rack entry point: render the metrics document when the secret metrics
    # path is requested; otherwise time the downstream call and record it.
    def call(env)
      if env["PATH_INFO"] == "/metrics/#{ENV["REDIS_PROMETHEUS_TOKEN"]}"
        results(env)
      else
        trace(env) do
          @app.call(env)
        end
      end
    end

    protected

    # Times the yielded downstream call, records the sample (status code and
    # wall-clock seconds), and returns the Rack response triple unchanged.
    def trace(env)
      response = nil
      duration = Benchmark.realtime { response = yield }
      # response.first is the HTTP status of the Rack triple.
      record(env, response.first.to_s, duration)
      response
    end

    # Builds the full metrics document (histogram buckets, request/error
    # counters and — when Resque is loaded — queue gauges), gzips it and
    # returns a complete Rack response.
    def results(env)
      headers = {}
      response = ""
      # One Redis key per (url, le) histogram bucket for this service; key
      # layout is "<metric>/<service>|label1=v1|label2=v2|...".
      keys = Redis.current.keys("http_request_duration_seconds_bucket/#{ENV["REDIS_PROMETHEUS_SERVICE"]}|*")
      values = []
      if keys.size > 0
        values = Redis.current.mget(keys)
      end
      response << "# TYPE http_request_duration_seconds_bucket histogram\n"
      response << "# HELP http_request_duration_seconds_bucket The HTTP response duration of the Rack application.\n"
      keys.each_with_index do |key, i|
        # Drop the "<metric>/<service>" prefix, then parse label pairs.
        key_parts = key.split("|")
        key_parts.shift
        data = {}
        key_parts.each do |p|
          k,v = p.split("=")
          data[k.to_sym] = v
        end
        data[:service] = ENV["REDIS_PROMETHEUS_SERVICE"]
        # Skip URLs the host app has configured as ignored.
        next if defined?(Rails) && Rails.application.config.redis_prometheus.ignored_urls.include?(data[:url])
        response << "http_request_duration_seconds_bucket{"
        response << data.map {|k,v| "#{k}=\"#{v}\""}.join(",")
        response << "} #{values[i].to_f}\n"
      end
      # Additional stats rendered by the collector (contents not visible here).
      response << RedisPrometheus::Collector.current.stats
      response << "# TYPE http_request_duration_seconds_count counter\n"
      response << "# HELP http_request_duration_seconds_count The total number of HTTP requests handled by the Rack application.\n"
      requests = Redis.current.get("http_request_duration_seconds_count/#{ENV["REDIS_PROMETHEUS_SERVICE"]}") || 0
      response << "http_request_duration_seconds_count{service=\"#{ENV["REDIS_PROMETHEUS_SERVICE"]}\"} #{requests.to_f}\n"
      response << "# TYPE http_request_duration_seconds_sum counter\n"
      response << "# HELP http_request_duration_seconds_sum The total number of seconds spent processing HTTP requests by the Rack application.\n"
      requests = Redis.current.get("http_request_duration_seconds_sum/#{ENV["REDIS_PROMETHEUS_SERVICE"]}") || 0
      response << "http_request_duration_seconds_sum{service=\"#{ENV["REDIS_PROMETHEUS_SERVICE"]}\"} #{requests.to_f}\n"
      response << "# TYPE http_request_client_errors_counter counter\n"
      response << "# HELP http_request_client_errors_counter The total number of HTTP errors return by the Rack application.\n"
      requests = Redis.current.get("http_request_client_errors_counter/#{ENV["REDIS_PROMETHEUS_SERVICE"]}") || 0
      response << "http_request_client_errors_counter{service=\"#{ENV["REDIS_PROMETHEUS_SERVICE"]}\"} #{requests.to_f}\n"
      response << "# TYPE http_request_server_errors_counter counter\n"
      response << "# HELP http_request_server_errors_counter The total number of HTTP errors return by the Rack application.\n"
      requests = Redis.current.get("http_request_server_errors_counter/#{ENV["REDIS_PROMETHEUS_SERVICE"]}") || 0
      response << "http_request_server_errors_counter{service=\"#{ENV["REDIS_PROMETHEUS_SERVICE"]}\"} #{requests.to_f}\n"
      # Background-queue gauges, only when Resque is present in the process.
      if defined?(Resque)
        stats = Resque.info
        response << "# TYPE http_request_queue_length gauge\n"
        response << "# HELP http_request_queue_length The length of Resque's pending queues.\n"
        response << "http_request_queue_length{service=\"#{ENV["REDIS_PROMETHEUS_SERVICE"]}\"} #{stats[:pending]}\n"
        response << "# TYPE http_request_queue_failed gauge\n"
        response << "# HELP http_request_queue_failed The length of Resque's failed queue.\n"
        response << "http_request_queue_failed{service=\"#{ENV["REDIS_PROMETHEUS_SERVICE"]}\"} #{stats[:failed]}\n"
      end
      headers['Content-Encoding'] = "gzip"
      headers['Content-Type'] = "text/plain"
      # GzipFile#close finishes the stream and returns the underlying
      # StringIO, so .string yields the compressed payload.
      gzip = Zlib::GzipWriter.new(StringIO.new)
      gzip << response
      compressed_response = gzip.close.string
      [200, headers, [compressed_response]]
    end

    # Normalizes the request URL (UUIDs and numeric ids become placeholders)
    # and increments the histogram bucket plus the aggregate counters.
    def record(env, code, duration)
      url = "#{env["SCRIPT_NAME"]}#{env["PATH_INFO"]}"
      return if defined?(Rails) && Rails.application.config.redis_prometheus.ignored_urls.include?(url)
      # Collapse per-record path segments so metrics stay low-cardinality.
      url.gsub!(%r{[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}}i, "{uuid}")
      url.gsub!(%r{/\b\d+\b}i, "{id}")
      url.gsub!(":id", "{id}")
      bucket = duration_to_bucket(duration)
      Redis.current.incr("http_request_duration_seconds_bucket/#{ENV["REDIS_PROMETHEUS_SERVICE"]}|url=#{url}|le=#{bucket}")
      Redis.current.incr("http_request_duration_seconds_count/#{ENV["REDIS_PROMETHEUS_SERVICE"]}")
      Redis.current.incrbyfloat("http_request_duration_seconds_sum/#{ENV["REDIS_PROMETHEUS_SERVICE"]}", duration)
      if code.to_i >= 400 && code.to_i <= 499
        Redis.current.incr("http_request_client_errors_counter/#{ENV["REDIS_PROMETHEUS_SERVICE"]}")
      end
      if code.to_i >= 500 && code.to_i <= 599
        Redis.current.incr("http_request_server_errors_counter/#{ENV["REDIS_PROMETHEUS_SERVICE"]}")
      end
    end

    # Maps a duration in seconds to a Prometheus "le" histogram bucket label
    # (standard default bucket boundaries, "+Inf" as the overflow bucket).
    def duration_to_bucket(duration)
      return case
      when duration <= 0.005
        "0.005"
      when duration <= 0.01
        "0.01"
      when duration <= 0.025
        "0.025"
      when duration <= 0.05
        "0.05"
      when duration <= 0.1
        "0.1"
      when duration <= 0.25
        "0.25"
      when duration <= 0.5
        "0.5"
      when duration <= 1
        "1"
      when duration <= 2.5
        "2.5"
      when duration <= 5
        "5"
      when duration <= 10
        "10"
      else
        "+Inf"
      end
    end
  end
end
| 38.585526 | 146 | 0.649446 |
e206f8438963d6900e43beb5088ec5a314ee77cc | 556 | class StateBuilder
# Factory entry point for the builder DSL. A configuration block is
# mandatory; without one there is nothing to build from.
def self.build(state, &block)
  raise "You need a block to build!" if block.nil?
  StateBuilder.new(state, &block)
end
# Captures the state, then runs the DSL block with `self` rebound to this
# builder instance so the block can call #company / #employee directly.
def initialize(state, &block)
  @state = state
  instance_eval(&block)
end
# DSL hook: opens a company-scoped sub-builder over the captured state.
def company(&block)
  StateBuilder::CompanyScope.new(@state, &block)
end
# DSL hook: opens an employee-scoped sub-builder over the captured state.
def employee(&block)
  StateBuilder::EmployeeScope.new(@state, &block)
end
end | 24.173913 | 102 | 0.708633 |
1ce049746221f6c9829c44e2e7d5234291ec242c | 1,277 | require 'spec_helper'
# Feature spec for the CMS "copy nodes" admin screen: submitting the form
# should create a Cms::CopyNodesTask and enqueue the copy job; invalid input
# should surface validation errors. (UI strings are Japanese and must match
# the application output exactly.)
describe "cms_copy_nodes", type: :feature, dbscope: :example do
  let(:site) { cms_site }
  let(:node) { create :cms_node }
  let(:index_path) { node_copy_path site.id, node.id }

  context 'run copy nodes' do
    let(:target_node_name) { unique_id }

    before do
      login_cms_user
    end

    it 'redirect index_path and notice text' do
      visit index_path
      fill_in 'item[target_node_name]', with: target_node_name
      click_on '実行'
      expect(current_path).to eq index_path
      # Long wait: the notice appears only after the request completes.
      expect(page).to have_css('#notice .wrap', text: '処理を開始します。ジョブ実行履歴で結果をご確認下さい', wait: 60)
      expect(Cms::CopyNodesTask.first.target_node_name).to eq target_node_name
      expect(enqueued_jobs.first[:job]).to eq Cms::Node::CopyNodesJob
    end

    it 'invalid without target_node_name' do
      visit index_path
      fill_in 'item[target_node_name]', with: ''
      click_on '実行'
      expect(page).to have_css('.errorExplanation', text: '複製先フォルダー名を入力してください。')
    end

    it 'invalid without existing parent node' do
      visit index_path
      fill_in 'item[target_node_name]', with: "parent/#{target_node_name}"
      click_on '実行'
      # NOTE(review): expected text begins "製先..." (not "複製先...") — confirm
      # it mirrors the application's actual error message.
      expect(page).to have_css('.errorExplanation', text: "製先フォルダー名に親フォルダー「parent」がありません。")
    end
  end
end
| 29.022727 | 93 | 0.686766 |
28326872fff59a2718ee2aefa27b5e10b644db29 | 751 | module RestforceMock
# Holds user-tunable settings for RestforceMock.
#
# The boolean flags use hand-written readers so that "never set" (nil) can
# fall back to a default while an explicit true/false is respected.
class Configuration
  # Path to the Salesforce schema definition file.
  attr_accessor :schema_file
  attr_writer :error_on_required
  attr_writer :required_exclusions
  attr_writer :raise_on_schema_missing
  # Objects to include when generating the schema. attr_accessor already
  # defines both reader and writer; the original redundant
  # `attr_writer :objects_for_schema` line has been removed.
  attr_accessor :objects_for_schema

  # Whether missing required fields raise an error. Defaults to true.
  def error_on_required
    @error_on_required.nil? ? true : @error_on_required
  end

  # Whether a missing schema raises an error. Defaults to false.
  def raise_on_schema_missing
    @raise_on_schema_missing.nil? ? false : @raise_on_schema_missing
  end

  # Fields exempt from required-field checks; defaults to Salesforce
  # system-managed fields that callers never supply themselves.
  def required_exclusions
    @required_exclusions || default_exclusions
  end

  private

  # Standard Salesforce system fields.
  def default_exclusions
    [
      :Id, :IsDeleted, :Name, :CreatedDate, :CreatedById,
      :LastModifiedDate, :LastModifiedById, :SystemModstamp
    ]
  end
end
end
| 22.757576 | 70 | 0.721704 |
1128b0557bc6d8b99d88e2c9236d01148c7bfdec | 2,054 | require "rails_helper"
# Our helper slug class so we can use the helper methods in our spec
# (extending the helper module gives us module-level access to its
# instance methods, e.g. current_premium, without a view context).
module SpecHelperClassesForViews
  class InsuredFamiliesHelperSlugForGroupSelectionTermination
    extend Insured::FamiliesHelper
  end
end
# View spec: the termination-confirmation page must display the enrollment's
# DCHL ID (hbx_id) and its current premium, formatted as currency.
RSpec.describe "app/views/insured/group_selection/terminate_confirm.html.erb" do
  context "DCHL ID and Premium" do
    let(:family) { FactoryBot.create(:family, :with_primary_family_member)}
    let(:hbx_enrollment) { FactoryBot.create(:hbx_enrollment, :with_enrollment_members, :individual_assisted, { household: family.households.first, family: family })}
    let(:benefit_sponsorship) { FactoryBot.create :benefit_sponsors_benefit_sponsorship, :with_benefit_market, :with_organization_cca_profile, :with_initial_benefit_application}
    let(:benefit_application) { benefit_sponsorship.benefit_applications.first }
    let(:employer_profile) { benefit_sponsorship.organization.employer_profile }
    let(:product) { FactoryBot.create(:benefit_markets_products_health_products_health_product, :with_issuer_profile) }
    let(:employee_names) { ["fname1 sname1", "fname2 sname2"] }
    let(:current_user) {FactoryBot.create(:user)}

    before(:each) do
      # Stub the enrollment's associations/cost so the view renders without
      # a full benefits setup.
      allow(hbx_enrollment).to receive(:product).and_return(product)
      allow(hbx_enrollment).to receive(:covered_members_first_names).and_return(employee_names)
      allow(hbx_enrollment).to receive(:total_employee_cost).and_return(100.00)
      @hbx_enrollment = hbx_enrollment
      render :template =>"insured/group_selection/terminate_confirm.html.erb"
    end

    it "should show the DCHL ID as hbx_enrollment.hbx_id" do
      expect(rendered).to match /DCHL ID/
      expect(rendered).to match /#{hbx_enrollment.hbx_id}/
    end

    it "should show the correct Premium" do
      # Compute the expected premium via the same helper the view uses.
      dollar_amount = number_to_currency(SpecHelperClassesForViews::InsuredFamiliesHelperSlugForGroupSelectionTermination.current_premium(hbx_enrollment), precision: 2)
      expect(rendered).to match /Premium/
      expect(rendered).to include dollar_amount
    end
  end
end
| 47.767442 | 177 | 0.779942 |
18308f979bd49cd985fe00dba56c2a3dff3a57af | 19 | module Likeable
end | 9.5 | 15 | 0.894737 |
267f6e3e9a71934f57cf4d63980486c835982d36 | 13,578 | # results.rb
require_relative '../config/environment'
require 'rails_helper'
require 'test_utils'
# Grading suite for a Mongoid race-results model: verifies the Entrant
# document (results collection), its custom Placing fields, the embedded
# LegResult hierarchy (Swim/Bike/Run) and their derived-average callbacks.
describe "Module #3 Summative: Implement Results Collection" do
  include Test_utils

  # $continue implements fail-fast sequencing: once one example fails, every
  # subsequent example is skipped.
  before :all do
    $continue = true
  end

  around :each do |example|
    if $continue
      $continue = false
      example.run
      $continue = true unless example.exception
    else
      example.skip
    end
  end

  # Every example starts from an empty results collection.
  before :each do
    Entrant.collection.delete_many
  end

  # Shared fixture values; the *_placing hashes are the raw Mongo shapes
  # consumed by Placing.demongoize.
  let(:overall_place) { 10 }
  let(:gender_place) { 8 }
  let(:group_place) { 5 }
  let(:bib) { 0 }
  let(:secs) { 100.123 }
  let(:gender) { "M" }
  let(:group) { "masters" }
  let(:overall_placing) {{ :place=>overall_place }}
  let(:gender_placing) {{ :name=>gender, :place=>gender_place }}
  let(:group_placing) {{ :name=>group, :place=>group_place }}

  # rq01: Entrant document structure and field storage.
  context "rq01" do
    it "Entrant class created" do
      # NOTE(review): bare expect() with no matcher never asserts anything;
      # likely intended `expect(class_exists?("Entrant")).to be true`. The
      # same pattern recurs in the other "class created" examples below.
      expect(class_exists?("Entrant"))
    end
    it "Entrant is stored in results collection" do
      expect(Entrant.collection.name).to eql "results"
    end
    it "Entrant class has fields for name, date, location, and timestamps" do
      expect(Entrant).to have_field(:bib).of_type(Integer)
      expect(Entrant.new).to respond_to(:bib)
      expect(Entrant).to have_field(:secs).of_type(Float)
      expect(Entrant.new).to respond_to(:secs)
      # :o is stored short-form, aliased to :overall.
      expect(Entrant).to have_field(:o).with_alias(:overall).of_type(Placing)
      expect(Entrant.new).to respond_to(:overall)
      expect(Entrant.new).to respond_to(:o)
      expect(Entrant).to have_field(:gender).of_type(Placing)
      expect(Entrant).to have_field(:group).of_type(Placing)
      expect(Entrant).to have_field(:created_at)
      expect(Entrant.new).to respond_to(:created_at)
      expect(Entrant).to have_field(:updated_at)
      expect(Entrant.new).to respond_to(:updated_at)
    end
    it "Entrant class creates documents with all specified fields" do
      expect(e0 = Entrant.new(:bib=>bib, :secs=>secs)).to_not be_nil
      expect(e0.overall = Placing.demongoize(overall_placing)).to_not be_nil
      expect(e0.gender = Placing.demongoize(gender_placing)).to_not be_nil
      expect(e0.group = Placing.demongoize(group_placing)).to_not be_nil
      expect(e0.save).to be true
      # Verify the raw stored document shape, then round-trip through finders.
      expect(result = Entrant.find(e0.id).attributes).to_not be_nil
      expect(result["_id"]).to eql e0.id
      expect(result["bib"]).to eql bib
      expect(result["secs"]).to eql secs
      expect(result["created_at"]).to_not be_nil
      expect(result["updated_at"]).to_not be_nil
      expect(result["o"]["place"]).to eql overall_place
      expect(result["gender"]["place"]).to eql gender_place
      expect(result["gender"]["name"]).to eql gender
      expect(result["group"]["place"]).to eql group_place
      expect(result["group"]["name"]).to eql group
      expect(Entrant.find(e0.id).group.name).to eql group
      expect(Entrant.find(e0.id).group.place).to eql group_place
      expect(Entrant.find(e0.id).gender.name).to eql gender
      expect(Entrant.find(e0.id).gender.place).to eql gender_place
    end
  end

  # rq02: LegResult base class with secs field and calc_ave hook.
  context "rq02" do
    it "LegResult class created" do
      expect(class_exists?("LegResult"))
    end
    it "LegResult class has a secs field" do
      expect(LegResult).to have_field(:secs).of_type(Float)
      expect(LegResult.new).to respond_to(:secs)
    end
    it "LegResult has an empty calc_ave callback method to be used by subclasses" do
      expect(LegResult.new).to respond_to(:calc_ave)
      # calc_ave must take no required/optional positional parameters.
      expect((LegResult.new.method(:calc_ave).parameters.flatten - [:req, :opt]).count).to eq 0
    end
    it "LegResult overrides secs= intance method" do
      expect(LegResult.new).to respond_to(:secs=)
      expect((LegResult.new.method(:secs=).parameters.flatten - [:req, :opt]).count).to eq 1
      expect(LegResult.new.method(:secs=).parameters.flatten).to include(:req)
    end
    it "LegResult returns a document with an id and secs" do
      expect(leg = LegResult.new).to_not be_nil
      expect(leg.id).to_not be_nil
      expect(leg.id).to be_a BSON::ObjectId
      expect(leg = LegResult.new(:secs=>60)).to_not be_nil
      expect(leg.id).to_not be_nil
      expect(leg.secs).to_not be_nil
      expect(leg.secs.round(1)).to eql 60.0
      leg.secs=120.0
      expect(leg.secs).to_not be_nil
      expect(leg.secs.round(1)).to eql 120.0
    end
  end

  # rq03: results embedded many-to-one inside Entrant.
  context "rq03" do
    it "LegResult has an M:1 embedded relationship with Entrant" do
      expect(Entrant).to embed_many(:results).of_type(LegResult)
      expect(LegResult).to be_embedded_in(:entrant)
    end
    it "LegResults that are created with Entrant are embedded within it" do
      expect(entrant = Entrant.new).to_not be_nil
      expect(result0 = entrant.results.build(:secs=>secs)).to_not be_nil
      expect(result1 = entrant.results.build(:secs=>1600)).to_not be_nil
      # .count hits the DB (0: unsaved); .to_a.count sees in-memory builds.
      expect(entrant.results.count).to eql 0
      expect(entrant.results.to_a.count).to eql 2
      expect(entrant.results[0].secs).to eql result0.secs
      expect(entrant.results[1].secs).to eql result1.secs
    end
  end

  # rq04: each LegResult embeds one (required, polymorphic) Event.
  context "rq04" do
    it "LegResult has a polymorphic 1:1 embedded relationship with Event" do
      expect(Entrant).to embed_many(:results).of_type(LegResult)
      expect(LegResult).to embed_one(:event).of_type(Event)
      expect(LegResult).to validate_presence_of(:event)
    end
    it "Entrant cannot be saved without presence of an event" do
      setup_data_for_testing
      expect(entrant = Entrant.new).to_not be_nil
      expect(result = entrant.results.build(:secs=>60.13)).to_not be_nil
      expect(entrant.save).to be false
      expect(entrant.errors).to_not be_nil
      expect(entrant.errors.messages).to_not be_nil
      expect(entrant.errors.messages[:results].count).to be > 0
      expect(result.errors.messages[:event].count).to be > 0
    end
    it "With events, Entrant embeds results and associated events" do
      setup_data_for_testing
      expect(entrant = Entrant.new).to_not be_nil
      expect(result = entrant.results.build(:secs=>60.13)).to_not be_nil
      expect(race = Race.find_by(:name=>Test_utils::RACE_FIELDS[:name])).to_not be_nil
      expect(event = race.events.where(:name=>"t1").first).to_not be_nil
      expect(result.build_event(event.attributes)).to_not be_nil
      expect(entrant.validate).to be true
      expect(entrant.save).to be true
      expect(event = race.events.where(:name=>"t2").first).to_not be_nil
      expect(entrant.results.create(:event=>event, :secs=>45)).to_not be_nil
      # Test to ensure that resulting Entant document has embedded results
      # where results each have an embedded event
      expect(entrant_doc = Entrant.find(entrant.id).attributes).to_not be_nil
      expect((result_doc = entrant_doc["results"]).count).to eql 2
      order_val = -100
      event_name_array = Array.new
      result_doc.each { |r|
        expect(r["event"]).to_not be_nil
        # Events must come back in non-decreasing order ("o" field).
        expect(r["event"]["o"]).to be >= order_val
        order_val = r["event"]["o"]
        event_name_array.push(r["event"]["n"])
      }
      expect(event_name_array).to include("t1", "t2")
    end
  end

  # rq05: leg-specific subclasses with their derived-average fields.
  context "rq05" do
    it "SwimResult class created" do
      expect(class_exists?("SwimResult"))
    end
    it "SwimResult class has a pace_100 field" do
      expect(SwimResult).to have_field(:pace_100).of_type(Float)
      expect(SwimResult.new).to respond_to(:pace_100)
    end
    it "SwimResult has instance method calc_ave that takes no parameters and returns a float" do
      expect(SwimResult.new).to respond_to(:calc_ave)
      expect(SwimResult.new.method(:calc_ave).parameters.count).to eq 0
      expect(sr = SwimResult.new(event:Event.new(d:100, u:"meters"), secs:10.0)).to_not be_nil
      expect(sr.calc_ave).to_not be_nil
      expect(sr.calc_ave).to be_a Float
    end
    it "BikeResult class created" do
      expect(class_exists?("BikeResult"))
    end
    it "BikeResult class has a mph field" do
      expect(BikeResult).to have_field(:mph).of_type(Float)
      expect(BikeResult.new).to respond_to(:mph)
    end
    it "BikeResult has instance method calc_ave that takes no parameters and returns a float" do
      expect(BikeResult.new).to respond_to(:calc_ave)
      expect(BikeResult.new.method(:calc_ave).parameters.count).to eq 0
      expect(br = BikeResult.new(event:Event.new(d:10, u:"miles"), secs:3600.0)).to_not be_nil
      expect(br.calc_ave).to_not be_nil
      expect(br.calc_ave).to be_a Float
    end
    it "RunResult class created" do
      expect(class_exists?("RunResult"))
    end
    it "RunResult class has a minute_mile field" do
      expect(RunResult).to have_field(:mmile).with_alias(:minute_mile).of_type(Float)
      expect(RunResult.new).to respond_to(:mmile)
      expect(RunResult.new).to respond_to(:minute_mile)
    end
    it "RunResult has instance method calc_ave that takes no parameters and returns a float" do
      expect(RunResult.new).to respond_to(:calc_ave)
      expect(RunResult.new.method(:calc_ave).parameters.count).to eq 0
      expect(br = RunResult.new(event:Event.new(d:10, u:"miles"), secs:3600.0)).to_not be_nil
      expect(br.calc_ave).to_not be_nil
      expect(br.calc_ave).to be_a Float
    end
  end

  # rq06: numeric correctness of each subclass's average calculation.
  context "rq06" do
    it "SwimResult calc_ave method calculates the secs to travel 100 m based on event" do
      expect(s = SwimResult.new(:event=>Event.new(distance:100, units:"meters"), :secs=>10)).to_not be_nil
      expect(s.calc_ave).to_not be_nil
      expect(s.calc_ave.round(1)).to eql 10.0
      # 1 mile = 1609.34 m, so 160.934 s works out to 10 s per 100 m.
      expect(s = SwimResult.new(:event=>Event.new(distance:1, units:"miles"), :secs=>160.934)).to_not be_nil
      expect(s.calc_ave.round(1)).to eql 10.0
      expect(s = SwimResult.new(:event=>Event.new(distance:1, units:"miles"), :secs=>160.934)).to_not be_nil
      expect(s.pace_100.round(1)).to eql 10.0
    end
    it "BikeResult calc_ave method calculates the average mph based on event" do
      expect(b = BikeResult.new(:event=>Event.new(distance:10, units:"miles"), :secs=>3600)).to_not be_nil
      expect(b.calc_ave).to_not be_nil
      expect(b.calc_ave.round(1)).to eql 10.0
      # 100 km in one hour ≈ 62.14 mph.
      expect(b = BikeResult.new(:event=>Event.new(distance:100, units:"kilometers"), :secs=>3600)).to_not be_nil
      expect(b.calc_ave.round(2)).to eql 62.14
      expect(b = BikeResult.new(:event=>Event.new(distance:100, units:"kilometers"), :secs=>3600)).to_not be_nil
      expect(b.mph.round(2)).to eql 62.14
    end
    it "RunResult calc_ave method calculates average time to run a mile based on event" do
      expect(r = RunResult.new(:event=>Event.new(distance:1, units:"miles"), :secs=>240)).to_not be_nil
      expect(r.calc_ave).to_not be_nil
      expect(r.calc_ave.round(1)).to eql 4.0
      # 10 km in 100 min ≈ 16.09 min/mile.
      expect(r = RunResult.new(:event=>Event.new(distance:10, units:"kilometers"), :secs=>6000)).to_not be_nil
      expect(r.calc_ave.round(2)).to eql 16.09
      expect(r = RunResult.new(:event=>Event.new(distance:10, units:"kilometers"), :secs=>6000)).to_not be_nil
      expect(r.mmile.round(2)).to eql 16.09
    end
    it "secs= method updates field values of the result classes" do
      expect(s = SwimResult.new(event:Event.new(d:100, u:"meters"))).to_not be_nil
      s.secs=1000.0
      expect(s.pace_100).to_not be_nil
      expect(s.pace_100.round(1)).to eq(1000.0)
      expect(b = BikeResult.new(event:Event.new(d:10, u:"miles"))).to_not be_nil
      b.secs=3600.0
      expect(b.mph).to_not be_nil
      expect(b.mph.round(1)).to eq(10.0)
      expect(r = RunResult.new(event:Event.new(d:5, u:"miles"))).to_not be_nil
      r.secs=1500.0
      expect(r.minute_mile).to_not be_nil
      expect(r.minute_mile.round(1)).to eq(5.0)
    end
  end

  # rq07: Entrant keeps a running total of secs via an embedded-child callback.
  context "rq07" do
    it "Entrant has update_total instance method for callback that takes one parameter" do
      expect(Entrant.new).to respond_to(:update_total)
      expect((Entrant.new.method(:update_total).parameters.flatten - [:opt, :req]).count).to eq 1
      expect(Entrant.new.method(:update_total).parameters.flatten).to include(:req)
    end
    it "update_total callback updates the Entrants secs field and updated_at with event additions and deletions" do
      expect(entrant = Entrant.create).to_not be_nil
      expect(entrant.secs).to be_nil
      update_time = entrant.updated_at
      total_secs = 0.0
      sec_val = 3600.0
      # Each appended leg must bump the running total and touch updated_at.
      entrant.results << SwimResult.new(:event=>Event.new(o:0,n:"swim",distance:1,units:"miles"), :secs=>sec_val)
      expect(total_secs = total_secs + sec_val).to eql entrant.secs
      expect(entrant.updated_at).to be >= update_time
      update_time = entrant.updated_at
      entrant.results << BikeResult.new(:event=>Event.new(o:2,n:"bike",distance:100,units:"kilometers"), :secs=>sec_val)
      expect(total_secs = total_secs + sec_val).to eql entrant.secs
      expect(entrant.updated_at).to be >= update_time
      update_time = entrant.updated_at
      sec_val = 6000.0
      entrant.results << RunResult.new(:event=>Event.new(o:4,n:"run",distance:10,units:"kilometers"), :secs=>sec_val)
      expect(total_secs = total_secs + sec_val).to eql entrant.secs
      expect(entrant.updated_at).to be >= update_time
    end
  end
end
| 42.564263 | 121 | 0.664089 |
1c1b85823d1cd4e7cbb52d75ae8bc87679b7a1c3 | 1,339 | class Libgig < Formula
# Formula metadata: upstream source, checksums for prebuilt bottles, and
# build/runtime dependencies.
desc "Library for Gigasampler and DLS (Downloadable Sounds) Level 1/2 files"
homepage "https://www.linuxsampler.org/libgig/"
url "https://download.linuxsampler.org/packages/libgig-4.2.0.tar.bz2"
sha256 "16229a46138b101eb9eda042c66d2cd652b1b3c9925a7d9577d52f2282f745ff"

bottle do
  cellar :any
  sha256 "538a70194a691a8a8bd09095736f6aba4c6de6ed4f03bed512726372e41bd7a4" => :catalina
  sha256 "5b4c6358356d805ce317ed31014a8235fc79bad43a80b6c03deb63abe8bc1aac" => :mojave
  sha256 "050bb14b4914d0c08e2a8c192b5254ecb77f9239b8f516022260f5356a8ab947" => :high_sierra
  sha256 "6e7d4ee68ce41305b89c91b2c7e34eeb57f45c6ea5d991beb0e66aac76a5d458" => :sierra
end

# pkg-config is only needed at build time; libsndfile is a runtime dep.
depends_on "pkg-config" => :build
depends_on "libsndfile"
# Standard autotools build installed into the formula prefix.
def install
  system "./configure", "--disable-dependency-tracking",
                        "--disable-silent-rules",
                        "--prefix=#{prefix}"
  system "make", "install"
end
# Smoke test: compile a tiny program against the installed library and
# check it reports the expected library name.
test do
  (testpath/"test.cpp").write <<~EOS
    #include <libgig/gig.h>
    #include <iostream>
    using namespace std;
    int main()
    {
      cout << gig::libraryName() << endl;
      return 0;
    }
  EOS
  system ENV.cxx, "test.cpp", "-L#{lib}/libgig", "-lgig", "-o", "test"
  assert_match "libgig", shell_output("./test")
end
end
| 32.658537 | 93 | 0.679612 |
bf88586e95928916338b3425cadca6d9bd1443f6 | 2,227 | # frozen_string_literal: true
#
# @author Kivanio Barbosa
require 'date'
module Brcobranca
  # Date helpers used when generating Brazilian bank billets (boletos).
  module CalculoData
    # Due-date factor ("fator de vencimento"): the number of days elapsed
    # since the fixed base date of 1997-10-07, expressed as a 4-digit string.
    # Whenever the factor would exceed 9999, the base date rolls forward by
    # 10,000 days and counting restarts at 1000 (first rollover: 2025-02-22),
    # per the FEBRABAN rule. Only 3,000 past and 5,500 future factors
    # relative to "today" are considered valid by the banking network.
    #
    # @return [String] the 4-digit factor
    # @example
    #   Date.parse('2000-07-04').fator_vencimento #=> "1001"
    def fator_vencimento
      base = Date.parse '1997-10-07'
      fator = Integer(self - base)
      until fator <= 9999
        base += 10_000
        fator = Integer(self - base) + 1000
      end
      fator.to_s
    end

    # Formats the date in the Brazilian dd/mm/yyyy convention.
    #
    # @return [String]
    def to_s_br
      strftime('%d/%m/%Y')
    end

    # Julian-style day number: days elapsed since the last day of the
    # previous year, with the current year's final digit appended, left
    # zero-padded to 4 characters.
    #
    # @return [String] 4-character day number
    # @example
    #   Date.parse('2009-02-11').to_juliano #=> "0429"
    def to_juliano
      previous_year_end = Date.new(year - 1, 12, 31)
      # Date#to_s is "YYYY-MM-DD", so index 3 is the year's last digit.
      year_last_digit = to_s[3..3]
      elapsed_days = Integer(self - previous_year_end)
      "#{elapsed_days}#{year_last_digit}".rjust(4, '0')
    end
  end
end
# Mix the billet date helpers into Ruby's core Date class.
Date.include(Brcobranca::CalculoData)
| 34.796875 | 129 | 0.670858 |
79fb6716ad60aa39b001fc179ff0ba24beac3df7 | 772 | name 'firefox'
maintainer 'Nghiem Ba Hieu'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Installs/Configures firefox'
long_description 'Installs/Configures firefox'
version '0.1.1'
chef_version '>= 12.1' if respond_to?(:chef_version)
supports 'windows'
# The `issues_url` points to the location where issues for this cookbook are
# tracked. A `View Issues` link will be displayed on this cookbook's page when
# uploaded to a Supermarket.
#
# issues_url 'https://github.com/<insert_org_here>/firefox/issues'
# The `source_url` points to the development repository for this cookbook. A
# `View Source` link will be displayed on this cookbook's page when uploaded to
# a Supermarket.
#
# source_url 'https://github.com/<insert_org_here>/firefox'
| 33.565217 | 79 | 0.770725 |
e9de936bf095e4db4ff93f102dd6fce20edf0720 | 695 | SpinningWheel.define do
# Test-data fabric for SixSaferpay::Transaction: every field yields a fixed
# sample value matching the Saferpay API response shape; nested objects are
# built via their own fabrics.
fabric name: 'transaction', class_name: 'SixSaferpay::Transaction' do
  type { 'PAYMENT' }
  status { 'AUTHORIZED' }
  id { 'K5OYS9Ad6Ex4rASU1IM1b3CEU8bb' }
  capture_id { 'ECthWpbv1SI6SAIdU2p6AIC1bppA' }
  date { '2011-09-23T14:57:23.023+02.00' }
  amount { SpinningWheel.create('amount') }
  order_id { 'c52ad18472354511ab2c33b59e796901' }
  acquirer_name { 'AcquirerName' }
  acquirer_reference { 'AcquirerReference' }
  six_transaction_reference { '0:0:3:K5OYS9Ad6Ex4rASU1IM1b3CEU8bb' }
  approval_code { 'AcquirerReference' }
  direct_debit { SpinningWheel.create('direct_debit') }
  invoice { SpinningWheel.create('invoice') }
end
end
| 38.611111 | 71 | 0.713669 |
4af5de051c12f9b3f32a88934b67d47dc654fc36 | 822 | require "./src/common/validators/validator_factory"
require "./src/user_config/validators/aws_validator"
require "./src/user_config/validators/azure_validator"
require "./src/user_config/validators/newrelic_validator"
require "./src/user_config/validators/git_validator"
module UserConfig
module Validators
  # Factory that maps a parsed user-config "provider" value to the validator
  # class responsible for that provider's section.
  class ProviderValidatorFactory < Common::Validators::ValidatorFactory
    def initialize()
      # Registry of provider name -> validator class handled by this factory.
      validators_by_provider = {
        "aws"      => AwsValidator,
        "azure"    => AzureValidator,
        "newrelic" => NewRelicValidator,
        "git"      => GitValidator
      }
      super(
        validators_by_provider,
        # Key extractor: which registry entry applies to this parsed config.
        ->(parsed) { parsed["provider"] },
        # Instantiator for the selected validator class.
        ->(validator_type) { validator_type.new },
        nil,
        # No fallback validator when the provider is unknown.
        -> { nil }
      )
    end
  end
end
end | 29.357143 | 74 | 0.627737 |
38057703ef590ddf5105a6b8102eee21974a2d6d | 397 | module SfdcUpdatable
extend ActiveSupport::Concern
module ClassMethods
  # Resolves the per-model updater class, e.g. `User::Updater` for `User`.
  def updater_klass
    "#{self.name}::Updater".constantize
  end

  # Reads or writes the list of attributes excluded from SFDC queries.
  #
  # BUG FIX: the original defined this method twice — once taking `exc`
  # (a setter) and once with no arguments (a getter) — so Ruby silently
  # replaced the setter, making it unreachable. Merged into one method:
  # with a (non-nil) argument it stores the list; without an argument it
  # returns the stored list, defaulting to [].
  def set_sfdc_updatable_attributes(exc = nil)
    if exc.nil?
      @none_queriable_attributes || []
    else
      @none_queriable_attributes = exc
    end
  end

  # Builds an updater for the payload and runs it.
  def update_sfdc(payload)
    updater_klass.new(payload).run
  end
end
end
| 17.26087 | 41 | 0.730479 |
1c8d230977cf8d6aa4e60038800095c3796f756b | 2,449 | # frozen_string_literal: true
require "active_record_doctor/detectors/base"
module ActiveRecordDoctor
module Detectors
# Detector: flags ActiveRecord uniqueness validators whose table lacks a
# matching unique index, which permits duplicates under concurrency.
# (config/models/problem!/indexes are helpers presumably provided by the
# Base detector class — not visible in this file.)
class MissingUniqueIndexes < Base # :nodoc:
  @description = "detect uniqueness validators not backed by a database constraint"
  @config = {
    ignore_models: {
      description: "models whose uniqueness validators should not be checked",
      global: true
    },
    ignore_columns: {
      description: "specific validators, written as Model(column1, column2, ...), that should not be checked"
    }
  }

  private

  # Human-readable problem report for one (table, columns) finding.
  def message(table:, columns:)
    # rubocop:disable Layout/LineLength
    "add a unique index on #{table}(#{columns.join(', ')}) - validating uniqueness in the model without an index can lead to duplicates"
    # rubocop:enable Layout/LineLength
  end

  # Walks every non-ignored model's uniqueness validators and reports those
  # without a covering unique index.
  def detect
    # Normalize "Model(col1, col2)" ignore entries by stripping spaces.
    ignore_columns = config(:ignore_columns).map do |column|
      column.gsub(" ", "")
    end

    models(except: config(:ignore_models)).each do |model|
      next if model.table_name.nil?

      model.validators.each do |validator|
        # scope may be a single symbol or an array; normalize to an array.
        scope = Array(validator.options.fetch(:scope, []))

        next unless validator.is_a?(ActiveRecord::Validations::UniquenessValidator)
        next unless supported_validator?(validator)
        next if unique_index?(model.table_name, validator.attributes, scope)

        columns = (scope + validator.attributes).map(&:to_s)
        next if ignore_columns.include?("#{model.name}(#{columns.join(',')})")

        problem!(table: model.table_name, columns: columns)
      end
    end
  end

  # Conditional or case-insensitive validators cannot be mirrored by a
  # plain unique index, so they are skipped.
  def supported_validator?(validator)
    validator.options[:if].nil? &&
      validator.options[:unless].nil? &&
      validator.options[:conditions].nil? &&

      # In Rails 6, default option values are no longer explicitly set on
      # options so if the key is absent we must fetch the default value
      # ourselves. case_sensitive is the default in 4.2+ so it's safe to
      # put true literally.
      validator.options.fetch(:case_sensitive, true)
  end

  # True when some unique index covers exactly scope + columns
  # (order-insensitive set comparison).
  def unique_index?(table_name, columns, scope)
    columns = (Array(scope) + columns).map(&:to_s)

    indexes(table_name).any? do |index|
      index.columns.to_set == columns.to_set && index.unique
    end
  end
end
end
end
| 34.013889 | 140 | 0.628011 |
2802c0409002717eecf28eafbf53f998b1826d1f | 424 | cask 'simpholders' do
# Cask body: version is "<marketing version>,<build number>"; the build
# number forms part of the download URL.
version '3.0.10,2277'
sha256 'edcfb7b1c78873755487640615960980f2c13ae56a565f67c55d30ad1657f134'

url "https://simpholders.com/site/assets/files/#{version.after_comma}/simpholders_#{version.before_comma.dots_to_underscores}.dmg"
appcast 'https://simpholders.com/releases/'
name 'SimPholders'
homepage 'https://simpholders.com/'

depends_on macos: '>= :mojave'

app 'SimPholders.app'
end
| 30.285714 | 132 | 0.771226 |
f898b4087d38804d09c7a0e44b4f48c53614850b | 1,195 | class Backend::UsersController < Backend::ApplicationController
before_action :set_user, only: [:edit, :update, :destroy]
# GET /backend/users
# Paginated user listing; when a search term is supplied, narrows by name.
# Fixes: use `present?` instead of `!blank?`, and use the local `q`
# consistently instead of re-reading params[:q].
def index
  q = params[:q]
  p = params[:page]
  if q.present?
    @users = User.page(p).name_like(q)
  else
    @users = User.page(p)
  end
end
# GET /backend/users/new — fresh form object for the new-user form.
def new
  @user = User.new
end
# GET /backend/users/:id/edit — @user is loaded by the set_user filter.
def edit
end
# POST /backend/users
# Builds a user from the permitted params and saves it.
# NOTE(review): `User.set_attribute` appears to be a custom factory method
# on User — confirm it returns an unsaved User instance.
def create
  @user = User.set_attribute user_params
  respond_to do |format|
    if @user.save
      # notice text: "creation succeeded"
      format.html { redirect_to backend_users_url, notice: '新增成功!' }
    else
      format.html { render :new }
    end
  end
end
# PATCH/PUT /backend/users/:id
# Applies the permitted params; re-renders the form on validation failure.
def update
  respond_to do |format|
    if @user.update(user_params)
      # notice text: "update succeeded"
      format.html { redirect_to backend_users_url, notice: '修改成功!' }
    else
      format.html { render :edit }
    end
  end
end
# DELETE /backend/users/:id
# NOTE(review): `User.delete(@user)` bypasses model callbacks/validations,
# and the flash text ("禁用成功!" ≈ "disabled successfully") hints a
# soft-disable may have been intended — confirm delete vs. destroy vs. a
# disabled flag.
def destroy
  User.delete(@user)
  respond_to do |format|
    format.html { redirect_to backend_users_url, notice: '禁用成功!' }
  end
end
private

# Loads @user for member actions from params[:id].
def set_user
  @user = User.find(params[:id])
end
def user_params
params.require(:user).permit(:email, :display_name, :password)
end
end
| 20.964912 | 71 | 0.586611 |
1d27b3933eae016b7b01a488efa0c4b1a4151da9 | 1,519 | # coding: utf-8
require File.expand_path('../lib/bosh_agent/version', __FILE__)
version = Bosh::Agent::VERSION
Gem::Specification.new do |s|
  s.name         = 'bosh_agent'
  s.version      = version
  s.summary      = 'Agent for Cloud Foundry BOSH release engineering tool.'
  # NOTE(review): shells out to `git rev-parse` at gemspec-load time, so the
  # description changes per commit and building outside a git checkout will
  # embed an empty/garbage suffix — confirm this is intentional.
  s.description  = "This agent listens for instructions from the bosh director on each server that bosh manages.\n#{`git rev-parse HEAD`[0, 6]}"
  s.author       = 'VMware'
  s.homepage     = 'https://github.com/cloudfoundry/bosh'
  # NOTE(review): 'Apache 2.0' is not a valid SPDX identifier ('Apache-2.0');
  # newer RubyGems versions warn about this at build time.
  s.license      = 'Apache 2.0'
  s.email        = '[email protected]'
  s.required_ruby_version = Gem::Requirement.new('>= 1.9.3')
  # Third party dependencies
  s.add_dependency 'netaddr', '~>1.5.0'
  s.add_dependency 'thin', '~>1.5.0'
  s.add_dependency 'yajl-ruby', '~>1.1.0'
  s.add_dependency 'sinatra', '~>1.4.2'
  s.add_dependency 'nats', '=0.5.0.beta.12'
  s.add_dependency 'sigar', '~>0.7.2'
  s.add_dependency 'httpclient', '=2.2.4'
  s.add_dependency 'syslog_protocol', '~>0.9.2'
  s.add_dependency 'crack', '~>0.3.2'
  # Bosh Dependencies
  s.add_dependency 'bosh-core', "~>#{version}"
  s.add_dependency 'bosh_common', "~>#{version}"
  s.add_dependency 'blobstore_client', "~>#{version}"
  # File list comes from git, so uncommitted files are not packaged.
  s.files         = `git ls-files -- lib/*`.split("\n") + %w(CHANGELOG)
  s.require_paths = %w(lib)
  s.test_files    = s.files.grep(%r{^(test|spec|features)/})
  s.bindir        = 'bin'
  s.executables << 'bosh_agent'
end
| 39.973684 | 143 | 0.613562 |
0856e2602fa70c2bab1ea739d0f3d2b2dd2085e3 | 617 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/apis/groupsmigration_v1"
| 38.5625 | 74 | 0.766613 |
ac3b613ccd66c813e1c542971457e32441df6a2e | 3,079 | # Copyright © 2011-2019 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
desc "Deactivate services designated in CSV"
task deactivate_services: :environment do
  # Prints +args+ as a prompt and returns the operator's stripped STDIN line.
  def prompt(*args)
    print(*args)
    STDIN.gets.strip
  end

  # Asks (repeatedly) for a CSV file name until it names an existing file
  # under db/imports. Pass error=true to print the "not found" notice first.
  def get_file(error = false)
    puts "No import file specified or the file specified does not exist in db/imports" if error
    file = prompt "Please specify the file name to import from db/imports (must be a CSV, see db/imports/example.csv for formatting): "
    # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
    while file.blank? || !File.exist?(Rails.root.join("db", "imports", file))
      file = get_file(true)
    end
    file
  end

  # Audit log of rows that could not be processed.
  skipped_services = CSV.open("tmp/skipped_active_services_#{Time.now.strftime('%m%d%Y%T')}.csv", "wb")
  begin
    skipped_services << ['EAP ID', 'CPT Code', 'Revenue Code', 'Skipped Because']
    input_file = Rails.root.join("db", "imports", get_file)
    continue = prompt('Preparing to modify the services. Are you sure you want to continue? (y/n): ')
    if continue.downcase == 'y'
      # All-or-nothing: roll every deactivation back if anything raises.
      ActiveRecord::Base.transaction do
        CSV.foreach(input_file, headers: true) do |row|
          service = Service.find_by(eap_id: row["EAP ID"], cpt_code: row["CPT Code"], revenue_code: row["Revenue Code"])
          if service
            service.assign_attributes({ is_available: false, audit_comment: 'by script' })
            service.save
            puts "Deactivated #{service.name}"
          else
            skipped_services << [row["EAP ID"], row["CPT Code"], row["Revenue Code"], "service not found"]
          end
        end
      end
    else
      puts "Exiting rake task..."
    end
  ensure
    # Previously the skipped-rows CSV handle was leaked; always flush/close it.
    skipped_services.close
  end
end
| 47.369231 | 146 | 0.728158 |
8777444a1179d4f3533e881435b017861915a10c | 93 | require "health_check_rails/version"
# Top-level namespace for the health_check_rails gem; the gem version constant
# lives in health_check_rails/version (required above).
module HealthCheckRails
  # Your code goes here...
end
| 15.5 | 36 | 0.784946 |
ed54882e7aa9e29614b0a84f90d6e29b7fe86697 | 5,246 | # frozen_string_literal: true
require 'set'
module Sprockets
# Internal: Utils, we didn't know where else to put it! Functions may
# eventually be shuffled into more specific drawers.
module Utils
extend self
# Internal: Check if object can safely be .dup'd.
#
# Similar to ActiveSupport #duplicable? check.
#
# obj - Any Object
#
# Returns false if .dup would raise a TypeError, otherwise true.
def duplicable?(obj)
case obj
when NilClass, FalseClass, TrueClass, Symbol, Numeric
false
else
true
end
end
# Internal: Duplicate and store key/value on new frozen hash.
#
# Seperated for recursive calls, always use hash_reassoc(hash, *keys).
#
# hash - Hash
# key - Object key
#
# Returns Hash.
def hash_reassoc1(hash, key)
hash = hash.dup if hash.frozen?
old_value = hash[key]
old_value = old_value.dup if duplicable?(old_value)
new_value = yield old_value
new_value.freeze if duplicable?(new_value)
hash.store(key, new_value)
hash.freeze
end
# Internal: Duplicate and store key/value on new frozen hash.
#
# Similar to Hash#store for nested frozen hashes.
#
# hash - Hash
# key_a - Object key. Use multiple keys for nested hashes.
# key_b - Object key. Use multiple keys for nested hashes.
# block - Receives current value at key.
#
# Examples
#
# config = {paths: ["/bin", "/sbin"]}.freeze
# new_config = hash_reassoc(config, :paths) do |paths|
# paths << "/usr/local/bin"
# end
#
# Returns duplicated frozen Hash.
def hash_reassoc(hash, key_a, key_b = nil, &block)
if key_b
hash_reassoc1(hash, key_a) do |value|
hash_reassoc(value, key_b, &block)
end
else
hash_reassoc1(hash, key_a, &block)
end
end
# Internal: Check if string has a trailing semicolon.
#
# str - String
#
# Returns true or false.
def string_end_with_semicolon?(str)
i = str.size - 1
while i >= 0
c = str[i].ord
i -= 1
# Need to compare against the ordinals because the string can be UTF_8 or UTF_32LE encoded
# 0x0A == "\n"
# 0x20 == " "
# 0x09 == "\t"
# 0x3B == ";"
unless c == 0x0A || c == 0x20 || c == 0x09
return c === 0x3B
end
end
true
end
# Internal: Accumulate asset source to buffer and append a trailing
# semicolon if necessary.
#
# buf - String buffer to append to
# source - String source to append
#
# Returns buf String.
def concat_javascript_sources(buf, source)
if source.bytesize > 0
buf << source
# If the source contains non-ASCII characters, indexing on it becomes O(N).
# This will lead to O(N^2) performance in string_end_with_semicolon?, so we should use 32 bit encoding to make sure indexing stays O(1)
source = source.encode(Encoding::UTF_32LE) unless source.ascii_only?
if !string_end_with_semicolon?(source)
buf << ";\n"
elsif source[source.size - 1].ord != 0x0A
buf << "\n"
end
end
buf
end
# Internal: Inject into target module for the duration of the block.
#
# mod - Module
#
# Returns result of block.
def module_include(base, mod)
old_methods = {}
mod.instance_methods.each do |sym|
old_methods[sym] = base.instance_method(sym) if base.method_defined?(sym)
end
mod.instance_methods.each do |sym|
method = mod.instance_method(sym)
base.send(:define_method, sym, method)
end
yield
ensure
mod.instance_methods.each do |sym|
base.send(:undef_method, sym) if base.method_defined?(sym)
end
old_methods.each do |sym, method|
base.send(:define_method, sym, method)
end
end
# Internal: Post-order Depth-First search algorithm.
#
# Used for resolving asset dependencies.
#
# initial - Initial Array of nodes to traverse.
# block -
# node - Current node to get children of
#
# Returns a Set of nodes.
def dfs(initial)
nodes, seen = Set.new, Set.new
stack = Array(initial).reverse
while node = stack.pop
if seen.include?(node)
nodes.add(node)
else
seen.add(node)
stack.push(node)
stack.concat(Array(yield node).reverse)
end
end
nodes
end
# Internal: Post-order Depth-First search algorithm that gathers all paths
# along the way.
#
# TODO: Rename function.
#
# path - Initial Array node path
# block -
# node - Current node to get children of
#
# Returns an Array of node Arrays.
def dfs_paths(path)
paths = []
stack = [path]
seen = Set.new
while path = stack.pop
seen.add(path.last)
paths << path
children = yield path.last
children.reverse_each do |node|
stack.push(path + [node]) unless seen.include?(node)
end
end
paths
end
end
end
| 25.970297 | 143 | 0.592451 |
034288af1db7c63cfa1fa9b91b28c4a80255dfda | 1,534 | module Crawler
  module Rails
    module Sites
      # CRUD plus ad-hoc scraping test runs for entries nested under a Site.
      class EntriesController < ApplicationController
        # GET /sites/:site_id/entries/new
        def new
          @site = Site.find(params[:site_id])
          @entry = @site.entries.build
        end
        # POST /sites/:site_id/entries
        def create
          @site = Site.find(params[:site_id])
          @entry = @site.entries.build(entry_params)
          if @entry.save
            redirect_to @site, notice: '作成しました'
          else
            render :new
          end
        end
        # GET /sites/:site_id/entries/:id/edit
        def edit
          @site = Site.find(params[:site_id])
          @entry = @site.entries.find(params[:id])
        end
        # PATCH/PUT /sites/:site_id/entries/:id
        def update
          @site = Site.find(params[:site_id])
          @entry = @site.entries.find(params[:id])
          if @entry.update(entry_params)
            # NOTE(review): flash says 作成しました ("created") on update —
            # 更新しました was probably intended; confirm before changing UI copy.
            redirect_to @site, notice: '作成しました'
          else
            render :edit
          end
        end
        # Runs the submitted scraping_code against a stored page source and
        # renders the scraper's message (with a timestamp appended) via JS.
        def test_run
          @site = Site.find(params[:site_id])
          page_source = @site.page_sources.find(params[:page_source_id])
          entry = @site.entries.build(page_source_id: page_source.id,
                                      scraping_code: params[:scraping_code])
          @out = entry.scrape[:message]
          @out = [@out, Time.now.to_s].join("\n") # add timestamp
          respond_to do |format|
            format.js { render :test_run }
            format.any { head :not_found }
          end
        end
        private
        # NOTE(review): `params.required` is a deprecated alias of `require`,
        # and `permit!` whitelists every attribute (mass-assignment risk) —
        # consider an explicit permit list.
        def entry_params
          params.required(:entry).permit!
        end
      end
    end
  end
end
03004580e7f7a3d63204ee716d052ebbec9fb76b | 2,601 | class Gearman < Formula
desc "Application framework to farm out work to other machines or processes"
homepage "http://gearman.org/"
url "https://github.com/gearman/gearmand/releases/download/1.1.19.1/gearmand-1.1.19.1.tar.gz"
sha256 "8ea6e0d16a0c924e6a65caea8a7cd49d3840b9256d440d991de4266447166bfb"
license "BSD-3-Clause"
bottle do
sha256 cellar: :any, catalina: "3a1a4bc57288dea7905134d9290c88a04273f7cc6361646694324e3bc9eb42d3"
sha256 cellar: :any, mojave: "582d1de464569352536501e2aa832a9bc540220eae335b682411ecadffbfe198"
sha256 cellar: :any, high_sierra: "8664f5b9c91ef99190cb70000758aa3d50f68afcad01d2e8cac234adf6a5424c"
end
depends_on "pkg-config" => :build
depends_on "sphinx-doc" => :build
depends_on "boost"
depends_on "libevent"
depends_on "libmemcached"
def install
# Work around "error: no member named 'signbit' in the global namespace"
# encountered when trying to detect boost regex in configure
if MacOS.version == :high_sierra
ENV.delete("HOMEBREW_SDKROOT")
ENV.delete("SDKROOT")
end
# https://bugs.launchpad.net/gearmand/+bug/1368926
Dir["tests/**/*.cc", "libtest/main.cc"].each do |test_file|
next unless /std::unique_ptr/.match?(File.read(test_file))
inreplace test_file, "std::unique_ptr", "std::auto_ptr"
end
args = %W[
--prefix=#{prefix}
--localstatedir=#{var}
--disable-silent-rules
--disable-dependency-tracking
--disable-cyassl
--disable-hiredis
--disable-libdrizzle
--disable-libpq
--disable-libtokyocabinet
--disable-ssl
--enable-libmemcached
--with-boost=#{Formula["boost"].opt_prefix}
--with-memcached=#{Formula["memcached"].opt_bin}/memcached
--with-sqlite3
--without-mysql
--without-postgresql
]
ENV.append_to_cflags "-DHAVE_HTONLL"
(var/"log").mkpath
system "./configure", *args
system "make", "install"
end
plist_options manual: "gearmand -d"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>Program</key>
<string>#{opt_sbin}/gearmand</string>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
assert_match(/gearman\s*Error in usage/, shell_output("#{bin}/gearman --version 2>&1", 1))
end
end
| 30.6 | 104 | 0.658593 |
ac3cb4b20763d2956bcb8b01963e681459dd44ce | 207 | class CreateCities < ActiveRecord::Migration
def change
create_table :cities do |t|
t.string :name, presence: true
t.belongs_to :state, presence: true
t.timestamps
end
end
end
| 18.818182 | 44 | 0.671498 |
0159d84563e2fe70acab4ec57cb8af8b6a03ee24 | 54,162 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20141209093141) do
create_table "about_pages", :force => true do |t|
t.integer "topical_event_id"
t.string "name"
t.text "summary"
t.text "body"
t.string "read_more_link_text"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "access_and_opening_times", :force => true do |t|
t.text "body"
t.string "accessible_type"
t.integer "accessible_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "access_and_opening_times", ["accessible_id", "accessible_type"], :name => "accessible_index"
create_table "attachment_data", :force => true do |t|
t.string "carrierwave_file"
t.string "content_type"
t.integer "file_size"
t.integer "number_of_pages"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "replaced_by_id"
end
add_index "attachment_data", ["replaced_by_id"], :name => "index_attachment_data_on_replaced_by_id"
create_table "attachment_sources", :force => true do |t|
t.integer "attachment_id"
t.string "url"
end
add_index "attachment_sources", ["attachment_id"], :name => "index_attachment_sources_on_attachment_id"
create_table "attachments", :force => true do |t|
t.datetime "created_at"
t.datetime "updated_at"
t.string "title"
t.boolean "accessible"
t.string "isbn"
t.string "unique_reference"
t.string "command_paper_number"
t.string "order_url"
t.integer "price_in_pence"
t.integer "attachment_data_id"
t.integer "ordering", :null => false
t.string "hoc_paper_number"
t.string "parliamentary_session"
t.boolean "unnumbered_command_paper"
t.boolean "unnumbered_hoc_paper"
t.integer "attachable_id"
t.string "attachable_type"
t.string "type"
t.string "slug"
t.string "locale"
t.string "external_url"
end
add_index "attachments", ["attachable_id", "attachable_type"], :name => "index_attachments_on_attachable_id_and_attachable_type"
add_index "attachments", ["attachable_type", "attachable_id", "ordering"], :name => "no_duplicate_attachment_orderings", :unique => true
add_index "attachments", ["attachment_data_id"], :name => "index_attachments_on_attachment_data_id"
add_index "attachments", ["ordering"], :name => "index_attachments_on_ordering"
create_table "classification_featuring_image_data", :force => true do |t|
t.string "carrierwave_image"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "classification_featurings", :force => true do |t|
t.integer "edition_id"
t.integer "classification_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "ordering"
t.integer "classification_featuring_image_data_id"
t.string "alt_text"
t.integer "offsite_link_id"
end
add_index "classification_featurings", ["classification_featuring_image_data_id"], :name => "index_cl_feat_on_edition_org_image_data_id"
add_index "classification_featurings", ["classification_id"], :name => "index_cl_feat_on_classification_id"
add_index "classification_featurings", ["edition_id", "classification_id"], :name => "index_cl_feat_on_edition_id_and_classification_id", :unique => true
add_index "classification_featurings", ["offsite_link_id"], :name => "index_classification_featurings_on_offsite_link_id"
create_table "classification_memberships", :force => true do |t|
t.integer "classification_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "edition_id"
t.integer "ordering"
end
add_index "classification_memberships", ["classification_id"], :name => "index_classification_memberships_on_classification_id"
add_index "classification_memberships", ["edition_id"], :name => "index_classification_memberships_on_edition_id"
create_table "classification_relations", :force => true do |t|
t.integer "classification_id", :null => false
t.integer "related_classification_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "classification_relations", ["classification_id"], :name => "index_classification_relations_on_classification_id"
add_index "classification_relations", ["related_classification_id"], :name => "index_classification_relations_on_related_classification_id"
create_table "classifications", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.text "description"
t.string "slug"
t.string "state"
t.string "type"
t.string "carrierwave_image"
t.string "logo_alt_text"
t.date "start_date"
t.date "end_date"
end
add_index "classifications", ["slug"], :name => "index_classifications_on_slug"
create_table "consultation_participations", :force => true do |t|
t.integer "edition_id"
t.string "link_url"
t.datetime "created_at"
t.datetime "updated_at"
t.string "email"
t.integer "consultation_response_form_id"
t.text "postal_address"
end
add_index "consultation_participations", ["consultation_response_form_id"], :name => "index_cons_participations_on_cons_response_form_id"
add_index "consultation_participations", ["edition_id"], :name => "index_consultation_participations_on_edition_id"
create_table "consultation_response_form_data", :force => true do |t|
t.string "carrierwave_file"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "consultation_response_forms", :force => true do |t|
t.string "title"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "consultation_response_form_data_id"
end
create_table "contact_number_translations", :force => true do |t|
t.integer "contact_number_id"
t.string "locale"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.string "label"
t.string "number"
end
add_index "contact_number_translations", ["contact_number_id"], :name => "index_contact_number_translations_on_contact_number_id"
add_index "contact_number_translations", ["locale"], :name => "index_contact_number_translations_on_locale"
create_table "contact_numbers", :force => true do |t|
t.integer "contact_id"
t.string "label"
t.string "number"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "contact_numbers", ["contact_id"], :name => "index_contact_numbers_on_contact_id"
create_table "contact_translations", :force => true do |t|
t.integer "contact_id"
t.string "locale"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.string "title"
t.text "comments"
t.string "recipient"
t.text "street_address"
t.string "locality"
t.string "region"
t.string "email"
t.string "contact_form_url"
end
add_index "contact_translations", ["contact_id"], :name => "index_contact_translations_on_contact_id"
add_index "contact_translations", ["locale"], :name => "index_contact_translations_on_locale"
create_table "contacts", :force => true do |t|
t.decimal "latitude", :precision => 15, :scale => 10
t.decimal "longitude", :precision => 15, :scale => 10
t.integer "contactable_id"
t.string "contactable_type"
t.string "postal_code"
t.integer "country_id"
t.integer "contact_type_id", :null => false
end
add_index "contacts", ["contact_type_id"], :name => "index_contacts_on_contact_type_id"
add_index "contacts", ["contactable_id", "contactable_type"], :name => "index_contacts_on_contactable_id_and_contactable_type"
create_table "data_migration_records", :force => true do |t|
t.string "version"
end
add_index "data_migration_records", ["version"], :name => "index_data_migration_records_on_version", :unique => true
create_table "default_news_organisation_image_data", :force => true do |t|
t.string "carrierwave_image"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "document_collection_group_memberships", :force => true do |t|
t.integer "document_id"
t.integer "document_collection_group_id"
t.integer "ordering"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "document_collection_group_memberships", ["document_collection_group_id", "ordering"], :name => "index_dc_group_memberships_on_dc_group_id_and_ordering"
add_index "document_collection_group_memberships", ["document_id"], :name => "index_document_collection_group_memberships_on_document_id"
create_table "document_collection_groups", :force => true do |t|
t.integer "document_collection_id"
t.string "heading"
t.text "body"
t.integer "ordering"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "document_collection_groups", ["document_collection_id", "ordering"], :name => "index_dc_groups_on_dc_id_and_ordering"
create_table "document_sources", :force => true do |t|
t.integer "document_id"
t.string "url", :null => false
t.integer "import_id"
t.integer "row_number"
t.string "locale", :default => "en"
end
add_index "document_sources", ["document_id"], :name => "index_document_sources_on_document_id"
add_index "document_sources", ["url"], :name => "index_document_sources_on_url", :unique => true
create_table "documents", :force => true do |t|
t.datetime "created_at"
t.datetime "updated_at"
t.string "slug"
t.string "document_type"
t.string "content_id"
end
add_index "documents", ["document_type"], :name => "index_documents_on_document_type"
add_index "documents", ["slug", "document_type"], :name => "index_documents_on_slug_and_document_type", :unique => true
create_table "edition_authors", :force => true do |t|
t.integer "edition_id"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "edition_authors", ["edition_id"], :name => "index_edition_authors_on_edition_id"
add_index "edition_authors", ["user_id"], :name => "index_edition_authors_on_user_id"
create_table "edition_mainstream_categories", :force => true do |t|
t.integer "edition_id"
t.integer "mainstream_category_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "edition_mainstream_categories", ["edition_id"], :name => "index_edition_mainstream_categories_on_edition_id"
add_index "edition_mainstream_categories", ["mainstream_category_id"], :name => "index_edition_mainstream_categories_on_mainstream_category_id"
create_table "edition_ministerial_roles", :force => true do |t|
t.integer "edition_id"
t.integer "ministerial_role_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "edition_ministerial_roles", ["edition_id"], :name => "index_edition_ministerial_roles_on_edition_id"
add_index "edition_ministerial_roles", ["ministerial_role_id"], :name => "index_edition_ministerial_roles_on_ministerial_role_id"
create_table "edition_organisations", :force => true do |t|
t.integer "edition_id"
t.integer "organisation_id"
t.datetime "created_at"
t.datetime "updated_at"
t.boolean "lead", :default => false, :null => false
t.integer "lead_ordering"
end
add_index "edition_organisations", ["edition_id", "organisation_id"], :name => "index_edition_organisations_on_edition_id_and_organisation_id", :unique => true
add_index "edition_organisations", ["organisation_id"], :name => "index_edition_organisations_on_organisation_id"
create_table "edition_policy_groups", :force => true do |t|
t.integer "edition_id"
t.integer "policy_group_id"
end
create_table "edition_relations", :force => true do |t|
t.integer "edition_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "document_id"
end
add_index "edition_relations", ["document_id"], :name => "index_edition_relations_on_document_id"
add_index "edition_relations", ["edition_id"], :name => "index_edition_relations_on_edition_id"
create_table "edition_role_appointments", :force => true do |t|
t.integer "edition_id"
t.integer "role_appointment_id"
end
add_index "edition_role_appointments", ["edition_id"], :name => "index_edition_role_appointments_on_edition_id"
add_index "edition_role_appointments", ["role_appointment_id"], :name => "index_edition_role_appointments_on_role_appointment_id"
create_table "edition_statistical_data_sets", :force => true do |t|
t.integer "edition_id"
t.integer "document_id"
end
add_index "edition_statistical_data_sets", ["document_id"], :name => "index_edition_statistical_data_sets_on_document_id"
add_index "edition_statistical_data_sets", ["edition_id"], :name => "index_edition_statistical_data_sets_on_edition_id"
create_table "edition_translations", :force => true do |t|
t.integer "edition_id"
t.string "locale"
t.string "title"
t.text "summary"
t.text "body", :limit => 16777215
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "edition_translations", ["edition_id"], :name => "index_edition_translations_on_edition_id"
add_index "edition_translations", ["locale"], :name => "index_edition_translations_on_locale"
create_table "edition_world_locations", :force => true do |t|
t.integer "edition_id"
t.integer "world_location_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "edition_world_locations", ["edition_id", "world_location_id"], :name => "idx_edition_world_locations_on_edition_and_world_location_ids", :unique => true
add_index "edition_world_locations", ["edition_id"], :name => "index_edition_world_locations_on_edition_id"
add_index "edition_world_locations", ["world_location_id"], :name => "index_edition_world_locations_on_world_location_id"
create_table "edition_worldwide_organisations", :force => true do |t|
t.integer "edition_id"
t.integer "worldwide_organisation_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "edition_worldwide_organisations", ["edition_id"], :name => "index_edition_worldwide_orgs_on_edition_id"
add_index "edition_worldwide_organisations", ["worldwide_organisation_id"], :name => "index_edition_worldwide_orgs_on_worldwide_organisation_id"
create_table "editioned_supporting_page_mappings", :force => true do |t|
t.integer "old_supporting_page_id"
t.integer "new_supporting_page_id"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "editioned_supporting_page_mappings", ["old_supporting_page_id"], :name => "index_editioned_supporting_page_mappings", :unique => true
create_table "editions", :force => true do |t|
t.datetime "created_at"
t.datetime "updated_at"
t.integer "lock_version", :default => 0
t.integer "document_id"
t.string "state", :default => "draft", :null => false
t.string "type"
t.integer "role_appointment_id"
t.string "location"
t.datetime "delivered_on"
t.datetime "major_change_published_at"
t.datetime "first_published_at"
t.integer "speech_type_id"
t.boolean "stub", :default => false
t.text "change_note"
t.boolean "force_published"
t.boolean "minor_change", :default => false
t.integer "publication_type_id"
t.string "related_mainstream_content_url"
t.string "related_mainstream_content_title"
t.string "additional_related_mainstream_content_url"
t.string "additional_related_mainstream_content_title"
t.integer "alternative_format_provider_id"
t.integer "published_related_publication_count", :default => 0, :null => false
t.datetime "public_timestamp"
t.integer "primary_mainstream_category_id"
t.datetime "scheduled_publication"
t.boolean "replaces_businesslink", :default => false
t.boolean "access_limited", :null => false
t.integer "published_major_version"
t.integer "published_minor_version"
t.integer "operational_field_id"
t.text "roll_call_introduction"
t.integer "news_article_type_id"
t.boolean "relevant_to_local_government", :default => false
t.string "person_override"
t.string "locale", :default => "en", :null => false
t.boolean "external", :default => false
t.string "external_url"
t.datetime "opening_at"
t.datetime "closing_at"
t.integer "corporate_information_page_type_id"
t.string "need_ids"
end
add_index "editions", ["alternative_format_provider_id"], :name => "index_editions_on_alternative_format_provider_id"
add_index "editions", ["closing_at"], :name => "index_editions_on_closing_at"
add_index "editions", ["document_id"], :name => "index_editions_on_document_id"
add_index "editions", ["first_published_at"], :name => "index_editions_on_first_published_at"
add_index "editions", ["locale"], :name => "index_editions_on_locale"
add_index "editions", ["opening_at"], :name => "index_editions_on_opening_at"
add_index "editions", ["operational_field_id"], :name => "index_editions_on_operational_field_id"
add_index "editions", ["primary_mainstream_category_id"], :name => "index_editions_on_primary_mainstream_category_id"
add_index "editions", ["public_timestamp", "document_id"], :name => "index_editions_on_public_timestamp_and_document_id"
add_index "editions", ["public_timestamp"], :name => "index_editions_on_public_timestamp"
add_index "editions", ["publication_type_id"], :name => "index_editions_on_publication_type_id"
add_index "editions", ["role_appointment_id"], :name => "index_editions_on_role_appointment_id"
add_index "editions", ["speech_type_id"], :name => "index_editions_on_speech_type_id"
add_index "editions", ["state", "type"], :name => "index_editions_on_state_and_type"
add_index "editions", ["state"], :name => "index_editions_on_state"
add_index "editions", ["type"], :name => "index_editions_on_type"
create_table "editorial_remarks", :force => true do |t|
t.text "body"
t.integer "edition_id"
t.integer "author_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "editorial_remarks", ["author_id"], :name => "index_editorial_remarks_on_author_id"
add_index "editorial_remarks", ["edition_id"], :name => "index_editorial_remarks_on_edition_id"
create_table "fact_check_requests", :force => true do |t|
t.integer "edition_id"
t.string "key"
t.datetime "created_at"
t.datetime "updated_at"
t.string "email_address"
t.text "comments"
t.text "instructions"
t.integer "requestor_id"
end
add_index "fact_check_requests", ["edition_id"], :name => "index_fact_check_requests_on_edition_id"
add_index "fact_check_requests", ["key"], :name => "index_fact_check_requests_on_key", :unique => true
add_index "fact_check_requests", ["requestor_id"], :name => "index_fact_check_requests_on_requestor_id"
create_table "fatality_notice_casualties", :force => true do |t|
t.integer "fatality_notice_id"
t.text "personal_details"
end
create_table "feature_lists", :force => true do |t|
t.integer "featurable_id"
t.string "featurable_type"
t.string "locale"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "feature_lists", ["featurable_id", "featurable_type", "locale"], :name => "featurable_lists_unique_locale_per_featurable", :unique => true
create_table "featured_items", :force => true do |t|
t.integer "item_id", :null => false
t.string "item_type", :null => false
t.integer "featured_topics_and_policies_list_id"
t.integer "ordering"
t.datetime "started_at"
t.datetime "ended_at"
end
add_index "featured_items", ["featured_topics_and_policies_list_id", "ordering"], :name => "idx_featured_items_on_featured_ts_and_ps_list_id_and_ordering"
add_index "featured_items", ["featured_topics_and_policies_list_id"], :name => "index_featured_items_on_featured_topics_and_policies_list_id"
add_index "featured_items", ["item_id", "item_type"], :name => "index_featured_items_on_item_id_and_item_type"
create_table "featured_links", :force => true do |t|
t.string "url"
t.string "title"
t.integer "linkable_id"
t.string "linkable_type"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "featured_services_and_guidance", :force => true do |t|
t.string "url"
t.string "title"
t.integer "linkable_id"
t.string "linkable_type"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "featured_topics_and_policies_lists", :force => true do |t|
t.integer "organisation_id", :null => false
t.text "summary"
t.boolean "link_to_filtered_policies", :default => true, :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "featured_topics_and_policies_lists", ["organisation_id"], :name => "index_featured_topics_and_policies_lists_on_organisation_id"
create_table "features", :force => true do |t|
t.integer "document_id"
t.integer "feature_list_id"
t.string "carrierwave_image"
t.string "alt_text"
t.integer "ordering"
t.datetime "started_at"
t.datetime "ended_at"
t.integer "topical_event_id"
t.integer "offsite_link_id"
end
add_index "features", ["document_id"], :name => "index_features_on_document_id"
add_index "features", ["feature_list_id", "ordering"], :name => "index_features_on_feature_list_id_and_ordering", :unique => true
add_index "features", ["feature_list_id"], :name => "index_features_on_feature_list_id"
add_index "features", ["offsite_link_id"], :name => "index_features_on_offsite_link_id"
add_index "features", ["ordering"], :name => "index_features_on_ordering"
create_table "financial_reports", :force => true do |t|
t.integer "organisation_id"
t.integer "funding", :limit => 8
t.integer "spending", :limit => 8
t.integer "year"
end
add_index "financial_reports", ["organisation_id", "year"], :name => "index_financial_reports_on_organisation_id_and_year", :unique => true
add_index "financial_reports", ["organisation_id"], :name => "index_financial_reports_on_organisation_id"
add_index "financial_reports", ["year"], :name => "index_financial_reports_on_year"
create_table "force_publication_attempts", :force => true do |t|
t.integer "import_id"
t.integer "total_documents"
t.integer "successful_documents"
t.datetime "enqueued_at"
t.datetime "started_at"
t.datetime "finished_at"
t.text "log", :limit => 2147483647
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "force_publication_attempts", ["import_id"], :name => "index_force_publication_attempts_on_import_id"
create_table "govspeak_contents", :force => true do |t|
t.integer "html_attachment_id"
t.text "body", :limit => 16777215
t.boolean "manually_numbered_headings"
t.text "computed_body_html", :limit => 16777215
t.text "computed_headers_html"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "govspeak_contents", ["html_attachment_id"], :name => "index_govspeak_contents_on_html_attachment_id"
create_table "group_memberships", :force => true do |t|
t.integer "group_id"
t.integer "person_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "group_memberships", ["group_id"], :name => "index_group_memberships_on_group_id"
add_index "group_memberships", ["person_id"], :name => "index_group_memberships_on_person_id"
create_table "groups", :force => true do |t|
t.integer "organisation_id"
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.string "slug"
t.text "description"
end
add_index "groups", ["organisation_id"], :name => "index_groups_on_organisation_id"
add_index "groups", ["slug"], :name => "index_groups_on_slug"
create_table "historical_account_roles", :force => true do |t|
t.integer "role_id"
t.integer "historical_account_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "historical_account_roles", ["historical_account_id"], :name => "index_historical_account_roles_on_historical_account_id"
add_index "historical_account_roles", ["role_id"], :name => "index_historical_account_roles_on_role_id"
create_table "historical_accounts", :force => true do |t|
t.integer "person_id"
t.text "summary"
t.text "body"
t.string "born"
t.string "died"
t.text "major_acts"
t.text "interesting_facts"
t.datetime "created_at"
t.datetime "updated_at"
t.string "political_party_ids"
end
add_index "historical_accounts", ["person_id"], :name => "index_historical_accounts_on_person_id"
create_table "home_page_list_items", :force => true do |t|
t.integer "home_page_list_id", :null => false
t.integer "item_id", :null => false
t.string "item_type", :null => false
t.integer "ordering"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "home_page_list_items", ["home_page_list_id", "ordering"], :name => "index_home_page_list_items_on_home_page_list_id_and_ordering"
add_index "home_page_list_items", ["home_page_list_id"], :name => "index_home_page_list_items_on_home_page_list_id"
add_index "home_page_list_items", ["item_id", "item_type"], :name => "index_home_page_list_items_on_item_id_and_item_type"
create_table "home_page_lists", :force => true do |t|
t.integer "owner_id", :null => false
t.string "owner_type", :null => false
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "home_page_lists", ["owner_id", "owner_type", "name"], :name => "index_home_page_lists_on_owner_id_and_owner_type_and_name", :unique => true
create_table "image_data", :force => true do |t|
t.string "carrierwave_image"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "images", :force => true do |t|
t.integer "image_data_id"
t.integer "edition_id"
t.string "alt_text"
t.text "caption"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "images", ["edition_id"], :name => "index_images_on_edition_id"
add_index "images", ["image_data_id"], :name => "index_images_on_image_data_id"
create_table "import_errors", :force => true do |t|
t.integer "import_id"
t.integer "row_number"
t.text "message"
t.datetime "created_at"
end
add_index "import_errors", ["import_id"], :name => "index_import_errors_on_import_id"
create_table "import_logs", :force => true do |t|
t.integer "import_id"
t.integer "row_number"
t.string "level"
t.text "message"
t.datetime "created_at"
end
create_table "imports", :force => true do |t|
t.string "original_filename"
t.string "data_type"
t.text "csv_data", :limit => 2147483647
t.text "successful_rows"
t.integer "creator_id"
t.datetime "import_started_at"
t.datetime "import_finished_at"
t.integer "total_rows"
t.integer "current_row"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "import_enqueued_at"
t.integer "organisation_id"
end
create_table "links_reports", :force => true do |t|
t.text "links", :limit => 16777215
t.text "broken_links"
t.string "status"
t.string "link_reportable_type"
t.integer "link_reportable_id"
t.datetime "completed_at"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "links_reports", ["link_reportable_id", "link_reportable_type"], :name => "link_reportable_index"
create_table "mainstream_categories", :force => true do |t|
t.string "slug"
t.string "title"
t.string "parent_title"
t.datetime "created_at"
t.datetime "updated_at"
t.string "parent_tag"
t.text "description"
end
add_index "mainstream_categories", ["slug"], :name => "index_mainstream_categories_on_slug", :unique => true
create_table "nation_inapplicabilities", :force => true do |t|
t.integer "nation_id"
t.integer "edition_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "alternative_url"
end
add_index "nation_inapplicabilities", ["edition_id"], :name => "index_nation_inapplicabilities_on_edition_id"
add_index "nation_inapplicabilities", ["nation_id"], :name => "index_nation_inapplicabilities_on_nation_id"
create_table "offsite_links", :force => true do |t|
t.string "title"
t.string "summary"
t.string "url"
t.string "link_type"
t.integer "parent_id"
t.string "parent_type"
t.datetime "date"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "operational_fields", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.text "description"
t.string "slug"
end
add_index "operational_fields", ["slug"], :name => "index_operational_fields_on_slug"
create_table "organisation_classifications", :force => true do |t|
t.integer "organisation_id", :null => false
t.integer "classification_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "ordering"
t.boolean "lead", :default => false, :null => false
t.integer "lead_ordering"
end
add_index "organisation_classifications", ["classification_id"], :name => "index_org_classifications_on_classification_id"
add_index "organisation_classifications", ["organisation_id", "ordering"], :name => "index_org_classifications_on_organisation_id_and_ordering", :unique => true
add_index "organisation_classifications", ["organisation_id"], :name => "index_org_classifications_on_organisation_id"
create_table "organisation_mainstream_categories", :force => true do |t|
t.integer "organisation_id", :null => false
t.integer "mainstream_category_id", :null => false
t.integer "ordering", :default => 99, :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "organisation_mainstream_categories", ["mainstream_category_id"], :name => "index_org_mainstream_cats_on_mainstream_cat_id"
add_index "organisation_mainstream_categories", ["organisation_id", "mainstream_category_id"], :name => "index_org_mainstream_cats_on_org_id_and_mainstream_cat_id", :unique => true
add_index "organisation_mainstream_categories", ["organisation_id"], :name => "index_org_mainstream_cats_on_org_id"
create_table "organisation_roles", :force => true do |t|
t.integer "organisation_id"
t.integer "role_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "ordering"
end
add_index "organisation_roles", ["organisation_id"], :name => "index_organisation_roles_on_organisation_id"
add_index "organisation_roles", ["role_id"], :name => "index_organisation_roles_on_role_id"
create_table "organisation_supersedings", :force => true do |t|
t.integer "superseded_organisation_id"
t.integer "superseding_organisation_id"
end
add_index "organisation_supersedings", ["superseded_organisation_id"], :name => "index_organisation_supersedings_on_superseded_organisation_id"
create_table "organisation_translations", :force => true do |t|
t.integer "organisation_id"
t.string "locale"
t.string "name"
t.text "logo_formatted_name"
t.string "acronym"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "organisation_translations", ["locale"], :name => "index_organisation_translations_on_locale"
add_index "organisation_translations", ["name"], :name => "index_organisation_translations_on_name"
add_index "organisation_translations", ["organisation_id"], :name => "index_organisation_translations_on_organisation_id"
create_table "organisational_relationships", :force => true do |t|
t.integer "parent_organisation_id"
t.integer "child_organisation_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "organisational_relationships", ["child_organisation_id"], :name => "index_organisational_relationships_on_child_organisation_id"
add_index "organisational_relationships", ["parent_organisation_id"], :name => "index_organisational_relationships_on_parent_organisation_id"
create_table "organisations", :force => true do |t|
t.datetime "created_at"
t.datetime "updated_at"
t.string "slug", :null => false
t.string "url"
t.string "alternative_format_contact_email"
t.string "govuk_status", :default => "live", :null => false
t.integer "organisation_logo_type_id", :default => 2
t.string "analytics_identifier"
t.boolean "handles_fatalities", :default => false
t.integer "important_board_members", :default => 1
t.integer "default_news_organisation_image_data_id"
t.datetime "closed_at"
t.integer "organisation_brand_colour_id"
t.boolean "ocpa_regulated"
t.boolean "public_meetings"
t.boolean "public_minutes"
t.boolean "register_of_interests"
t.boolean "regulatory_function"
t.string "logo"
t.string "organisation_type_key"
t.boolean "foi_exempt", :default => false, :null => false
t.string "organisation_chart_url"
t.string "govuk_closed_status"
t.string "custom_jobs_url"
t.string "content_id"
t.string "homepage_type", :default => "news"
end
add_index "organisations", ["content_id"], :name => "index_organisations_on_content_id", :unique => true
add_index "organisations", ["default_news_organisation_image_data_id"], :name => "index_organisations_on_default_news_organisation_image_data_id"
add_index "organisations", ["organisation_logo_type_id"], :name => "index_organisations_on_organisation_logo_type_id"
add_index "organisations", ["organisation_type_key"], :name => "index_organisations_on_organisation_type_key"
add_index "organisations", ["slug"], :name => "index_organisations_on_slug", :unique => true
create_table "people", :force => true do |t|
t.string "title"
t.string "forename"
t.string "surname"
t.string "letters"
t.datetime "created_at"
t.datetime "updated_at"
t.string "carrierwave_image"
t.string "slug"
t.boolean "privy_counsellor", :default => false
end
add_index "people", ["slug"], :name => "index_people_on_slug", :unique => true
create_table "person_translations", :force => true do |t|
t.integer "person_id"
t.string "locale"
t.text "biography"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "person_translations", ["locale"], :name => "index_person_translations_on_locale"
add_index "person_translations", ["person_id"], :name => "index_person_translations_on_person_id"
create_table "policy_groups", :force => true do |t|
t.string "email"
t.datetime "created_at"
t.datetime "updated_at"
t.string "name"
t.text "description"
t.text "summary"
t.string "slug"
end
add_index "policy_groups", ["slug"], :name => "index_policy_groups_on_slug"
create_table "promotional_feature_items", :force => true do |t|
t.integer "promotional_feature_id"
t.text "summary"
t.string "image"
t.string "image_alt_text"
t.string "title"
t.string "title_url"
t.boolean "double_width", :default => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "promotional_feature_items", ["promotional_feature_id"], :name => "index_promotional_feature_items_on_promotional_feature_id"
create_table "promotional_feature_links", :force => true do |t|
t.integer "promotional_feature_item_id"
t.string "url"
t.string "text"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "promotional_feature_links", ["promotional_feature_item_id"], :name => "index_promotional_feature_links_on_promotional_feature_item_id"
create_table "promotional_features", :force => true do |t|
t.integer "organisation_id"
t.string "title"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "promotional_features", ["organisation_id"], :name => "index_promotional_features_on_organisation_id"
create_table "recent_edition_openings", :force => true do |t|
t.integer "edition_id", :null => false
t.integer "editor_id", :null => false
t.datetime "created_at", :null => false
end
add_index "recent_edition_openings", ["edition_id", "editor_id"], :name => "index_recent_edition_openings_on_edition_id_and_editor_id", :unique => true
create_table "responses", :force => true do |t|
t.integer "edition_id"
t.text "summary"
t.datetime "created_at"
t.datetime "updated_at"
t.date "published_on"
t.string "type"
end
add_index "responses", ["edition_id", "type"], :name => "index_responses_on_edition_id_and_type"
add_index "responses", ["edition_id"], :name => "index_responses_on_edition_id"
create_table "role_appointments", :force => true do |t|
t.integer "role_id"
t.integer "person_id"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "started_at"
t.datetime "ended_at"
end
add_index "role_appointments", ["ended_at"], :name => "index_role_appointments_on_ended_at"
add_index "role_appointments", ["person_id"], :name => "index_role_appointments_on_person_id"
add_index "role_appointments", ["role_id"], :name => "index_role_appointments_on_role_id"
create_table "role_translations", :force => true do |t|
t.integer "role_id"
t.string "locale"
t.string "name"
t.text "responsibilities"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "role_translations", ["locale"], :name => "index_role_translations_on_locale"
add_index "role_translations", ["name"], :name => "index_role_translations_on_name"
add_index "role_translations", ["role_id"], :name => "index_role_translations_on_role_id"
create_table "roles", :force => true do |t|
t.datetime "created_at"
t.datetime "updated_at"
t.string "type", :null => false
t.boolean "permanent_secretary", :default => false
t.boolean "cabinet_member", :default => false, :null => false
t.string "slug"
t.boolean "chief_of_the_defence_staff", :default => false, :null => false
t.integer "whip_organisation_id"
t.integer "seniority", :default => 100
t.integer "attends_cabinet_type_id"
t.integer "role_payment_type_id"
t.boolean "supports_historical_accounts", :default => false, :null => false
t.integer "whip_ordering", :default => 100
end
add_index "roles", ["attends_cabinet_type_id"], :name => "index_roles_on_attends_cabinet_type_id"
add_index "roles", ["slug"], :name => "index_roles_on_slug"
add_index "roles", ["supports_historical_accounts"], :name => "index_roles_on_supports_historical_accounts"
create_table "sitewide_settings", :force => true do |t|
t.string "key"
t.text "description"
t.boolean "on"
t.text "govspeak"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "social_media_accounts", :force => true do |t|
t.integer "socialable_id"
t.integer "social_media_service_id"
t.string "url"
t.datetime "created_at"
t.datetime "updated_at"
t.string "socialable_type"
t.string "title"
end
add_index "social_media_accounts", ["social_media_service_id"], :name => "index_social_media_accounts_on_social_media_service_id"
add_index "social_media_accounts", ["socialable_id"], :name => "index_social_media_accounts_on_organisation_id"
create_table "social_media_services", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "specialist_sectors", :force => true do |t|
t.integer "edition_id"
t.string "tag"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.boolean "primary", :default => false
end
add_index "specialist_sectors", ["edition_id", "tag"], :name => "index_specialist_sectors_on_edition_id_and_tag", :unique => true
create_table "sponsorships", :force => true do |t|
t.integer "organisation_id"
t.integer "worldwide_organisation_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "sponsorships", ["organisation_id", "worldwide_organisation_id"], :name => "unique_sponsorships", :unique => true
add_index "sponsorships", ["worldwide_organisation_id"], :name => "index_sponsorships_on_worldwide_organisation_id"
create_table "statistics_announcement_dates", :force => true do |t|
t.integer "statistics_announcement_id"
t.datetime "release_date"
t.integer "precision"
t.boolean "confirmed"
t.string "change_note"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "creator_id"
end
add_index "statistics_announcement_dates", ["creator_id"], :name => "index_statistics_announcement_dates_on_creator_id"
add_index "statistics_announcement_dates", ["statistics_announcement_id", "created_at"], :name => "statistics_announcement_release_date"
create_table "statistics_announcement_organisations", :id => false, :force => true do |t|
t.integer "statistics_announcement_id"
t.integer "organisation_id"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "statistics_announcement_organisations", ["organisation_id"], :name => "index_statistics_announcement_organisations_on_organisation_id"
add_index "statistics_announcement_organisations", ["statistics_announcement_id", "organisation_id"], :name => "index_on_statistics_announcement_id_and_organisation_id"
create_table "statistics_announcement_topics", :id => false, :force => true do |t|
t.integer "statistics_announcement_id"
t.integer "topic_id"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "statistics_announcement_topics", ["statistics_announcement_id"], :name => "index_statistics_announcement_topics_on_statistics_announcement"
add_index "statistics_announcement_topics", ["topic_id"], :name => "index_statistics_announcement_topics_on_topic_id"
create_table "statistics_announcements", :force => true do |t|
t.string "title"
t.string "slug"
t.text "summary"
t.integer "publication_type_id"
t.integer "topic_id"
t.integer "creator_id"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "publication_id"
t.text "cancellation_reason"
t.datetime "cancelled_at"
t.integer "cancelled_by_id"
end
add_index "statistics_announcements", ["cancelled_by_id"], :name => "index_statistics_announcements_on_cancelled_by_id"
add_index "statistics_announcements", ["creator_id"], :name => "index_statistics_announcements_on_creator_id"
add_index "statistics_announcements", ["publication_id"], :name => "index_statistics_announcements_on_publication_id"
add_index "statistics_announcements", ["slug"], :name => "index_statistics_announcements_on_slug"
add_index "statistics_announcements", ["title"], :name => "index_statistics_announcements_on_title"
add_index "statistics_announcements", ["topic_id"], :name => "index_statistics_announcements_on_topic_id"
create_table "supporting_page_redirects", :force => true do |t|
t.integer "policy_document_id"
t.integer "supporting_page_document_id"
t.string "original_slug"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "supporting_page_redirects", ["policy_document_id", "original_slug"], :name => "index_supporting_page_redirects_on_policy_and_slug", :unique => true
create_table "take_part_pages", :force => true do |t|
t.string "title", :null => false
t.string "slug", :null => false
t.string "summary", :null => false
t.text "body", :limit => 16777215, :null => false
t.string "carrierwave_image"
t.string "image_alt_text"
t.integer "ordering", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "take_part_pages", ["ordering"], :name => "index_take_part_pages_on_ordering"
add_index "take_part_pages", ["slug"], :name => "index_take_part_pages_on_slug", :unique => true
create_table "top_tasks", :force => true do |t|
t.string "url"
t.string "title"
t.datetime "created_at"
t.datetime "updated_at"
t.string "linkable_type"
t.integer "linkable_id"
end
add_index "top_tasks", ["linkable_id", "linkable_type"], :name => "index_top_tasks_on_linkable_id_and_linkable_type"
add_index "top_tasks", ["linkable_type"], :name => "index_top_tasks_on_linkable_type"
create_table "unpublishings", :force => true do |t|
t.integer "edition_id"
t.integer "unpublishing_reason_id"
t.text "explanation"
t.text "alternative_url"
t.datetime "created_at"
t.datetime "updated_at"
t.string "document_type"
t.string "slug"
t.boolean "redirect", :default => false
end
add_index "unpublishings", ["edition_id"], :name => "index_unpublishings_on_edition_id"
add_index "unpublishings", ["unpublishing_reason_id"], :name => "index_unpublishings_on_unpublishing_reason_id"
create_table "user_world_locations", :force => true do |t|
t.integer "user_id"
t.integer "world_location_id"
end
add_index "user_world_locations", ["user_id", "world_location_id"], :name => "index_user_world_locations_on_user_id_and_world_location_id", :unique => true
create_table "users", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.string "email"
t.string "uid"
t.integer "version"
t.text "permissions"
t.boolean "remotely_signed_out", :default => false
t.string "organisation_slug"
t.boolean "disabled", :default => false
end
add_index "users", ["disabled"], :name => "index_users_on_disabled"
add_index "users", ["organisation_slug"], :name => "index_users_on_organisation_slug"
create_table "versions", :force => true do |t|
t.string "item_type", :null => false
t.integer "item_id", :null => false
t.string "event", :null => false
t.string "whodunnit"
t.text "object"
t.datetime "created_at"
t.text "state"
end
add_index "versions", ["item_type", "item_id"], :name => "index_versions_on_item_type_and_item_id"
create_table "world_location_translations", :force => true do |t|
t.integer "world_location_id"
t.string "locale"
t.string "name"
t.text "mission_statement"
t.datetime "created_at"
t.datetime "updated_at"
t.string "title"
end
add_index "world_location_translations", ["locale"], :name => "index_world_location_translations_on_locale"
add_index "world_location_translations", ["world_location_id"], :name => "index_world_location_translations_on_world_location_id"
create_table "world_locations", :force => true do |t|
t.datetime "created_at"
t.datetime "updated_at"
t.string "slug"
t.boolean "active", :default => false, :null => false
t.integer "world_location_type_id", :null => false
t.string "iso2", :limit => 2
t.string "analytics_identifier"
t.string "content_id"
end
add_index "world_locations", ["iso2"], :name => "index_world_locations_on_iso2", :unique => true
add_index "world_locations", ["slug"], :name => "index_world_locations_on_slug"
add_index "world_locations", ["world_location_type_id"], :name => "index_world_locations_on_world_location_type_id"
create_table "worldwide_office_worldwide_services", :force => true do |t|
t.integer "worldwide_office_id", :null => false
t.integer "worldwide_service_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "worldwide_offices", :force => true do |t|
t.integer "worldwide_organisation_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "worldwide_office_type_id", :null => false
t.string "slug"
end
add_index "worldwide_offices", ["slug"], :name => "index_worldwide_offices_on_slug"
add_index "worldwide_offices", ["worldwide_organisation_id"], :name => "index_worldwide_offices_on_worldwide_organisation_id"
create_table "worldwide_organisation_roles", :force => true do |t|
t.integer "worldwide_organisation_id"
t.integer "role_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "worldwide_organisation_roles", ["role_id"], :name => "index_worldwide_org_roles_on_role_id"
add_index "worldwide_organisation_roles", ["worldwide_organisation_id"], :name => "index_worldwide_org_roles_on_worldwide_organisation_id"
create_table "worldwide_organisation_translations", :force => true do |t|
t.integer "worldwide_organisation_id"
t.string "locale"
t.string "name"
t.text "services"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "worldwide_organisation_translations", ["locale"], :name => "index_worldwide_org_translations_on_locale"
add_index "worldwide_organisation_translations", ["worldwide_organisation_id"], :name => "index_worldwide_org_translations_on_worldwide_organisation_id"
create_table "worldwide_organisation_world_locations", :force => true do |t|
t.integer "worldwide_organisation_id"
t.integer "world_location_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "worldwide_organisation_world_locations", ["world_location_id"], :name => "index_worldwide_org_world_locations_on_world_location_id"
add_index "worldwide_organisation_world_locations", ["worldwide_organisation_id"], :name => "index_worldwide_org_world_locations_on_worldwide_organisation_id"
create_table "worldwide_organisations", :force => true do |t|
t.string "url"
t.string "slug"
t.string "logo_formatted_name"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "main_office_id"
t.integer "default_news_organisation_image_data_id"
t.string "analytics_identifier"
t.string "content_id"
end
add_index "worldwide_organisations", ["default_news_organisation_image_data_id"], :name => "index_worldwide_organisations_on_image_data_id"
add_index "worldwide_organisations", ["slug"], :name => "index_worldwide_organisations_on_slug", :unique => true
create_table "worldwide_services", :force => true do |t|
t.string "name", :null => false
t.integer "service_type_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
end
| 41.567153 | 182 | 0.694768 |
33de41d5bb3f10238d88179996826f3690de1637 | 8,937 | require 'test_helper'
class AppiumLibCoreTest
class DriverTest < Minitest::Test
include AppiumLibCoreTest::Mock
def setup
@core ||= ::Appium::Core.for(Caps.android)
end
class ExampleDriver
def initialize(opts)
::Appium::Core.for(opts)
end
end
def test_no_caps
opts = { no: { caps: {} }, appium_lib: {} }
assert_raises ::Appium::Core::Error::NoCapabilityError do
ExampleDriver.new(opts)
end
end
def test_with_caps
opts = { caps: {} }
refute_nil ExampleDriver.new(opts)
end
def test_with_caps_and_appium_lib
opts = { caps: {}, appium_lib: {} }
refute_nil ExampleDriver.new(opts)
end
def test_with_caps_and_wrong_appium_lib
opts = { caps: { appium_lib: {} } }
assert_raises ::Appium::Core::Error::CapabilityStructureError do
ExampleDriver.new(opts)
end
end
def test_verify_session_id_in_the_export_session_path
@core.wait { assert File.size?(@core.export_session_path) }
end
def test_verify_appium_core_base_capabilities_create_capabilities
caps = ::Appium::Core::Base::Capabilities.create_capabilities(platformName: 'ios',
platformVersion: '11.4',
automationName: 'XCUITest',
deviceName: 'iPhone Simulator',
app: 'test/functional/app/UICatalog.app.zip',
some_capability1: 'some_capability1',
someCapability2: 'someCapability2')
caps_with_json = JSON.parse(caps.to_json)
assert_equal 'ios', caps_with_json['platformName']
assert_equal '11.4', caps_with_json['platformVersion']
assert_equal 'test/functional/app/UICatalog.app.zip', caps_with_json['app']
assert_equal 'XCUITest', caps_with_json['automationName']
assert_equal 'iPhone Simulator', caps_with_json['deviceName']
assert_equal 'some_capability1', caps_with_json['someCapability1']
assert_equal 'someCapability2', caps_with_json['someCapability2']
assert_equal 'ios', caps[:platformName]
assert_equal '11.4', caps[:platformVersion]
assert_equal 'test/functional/app/UICatalog.app.zip', caps[:app]
assert_equal 'XCUITest', caps[:automationName]
assert_equal 'iPhone Simulator', caps[:deviceName]
assert_equal 'some_capability1', caps[:some_capability1]
assert_equal 'someCapability2', caps[:someCapability2]
end
def test_default_wait
assert_equal 0, @core.default_wait
end
def test_default_timeout_for_http_client
@driver ||= android_mock_create_session
assert_equal 999_999, @core.http_client.open_timeout
assert_equal 999_999, @core.http_client.read_timeout
uri = @driver.send(:bridge).http.send(:server_url)
assert [email protected]_connect
assert_equal 'http', uri.scheme
assert_equal '127.0.0.1', uri.host
assert_equal 4723, uri.port
assert_equal '/wd/hub/', uri.path
end
# When the create-session response advertises the full directConnect*
# capability set, every follow-up request must be routed to that
# host/port/path (localhost:8888) instead of the original server.
def test_default_timeout_for_http_client_with_direct
  # Local helper: stub a W3C session whose capabilities include all
  # direct-connect keys, then assert the implicit-wait call follows them.
  def android_mock_create_session_w3c_direct(core)
    response = {
      value: {
        sessionId: '1234567890',
        capabilities: {
          platformName: :android,
          automationName: ENV['AUTOMATION_NAME_DROID'] || 'uiautomator2',
          app: 'test/functional/app/api.apk.zip',
          platformVersion: '7.1.1',
          deviceName: 'Android Emulator',
          appPackage: 'io.appium.android.apis',
          appActivity: 'io.appium.android.apis.ApiDemos',
          someCapability: 'some_capability',
          unicodeKeyboard: true,
          resetKeyboard: true,
          directConnectProtocol: 'http',
          directConnectHost: 'localhost',
          directConnectPort: '8888',
          directConnectPath: '/wd/hub'
        }
      }
    }.to_json

    # Session creation still hits the configured server...
    stub_request(:post, 'http://127.0.0.1:4723/wd/hub/session')
      .to_return(headers: HEADER, status: 200, body: response)

    # ...but the timeouts call must go to the direct-connect URL.
    stub_request(:post, 'http://localhost:8888/wd/hub/session/1234567890/timeouts')
      .with(body: { implicit: 30_000 }.to_json)
      .to_return(headers: HEADER, status: 200, body: { value: nil }.to_json)

    driver = core.start_driver

    assert_requested(:post, 'http://127.0.0.1:4723/wd/hub/session', times: 1)
    assert_requested(:post, 'http://localhost:8888/wd/hub/session/1234567890/timeouts',
                     body: { implicit: 30_000 }.to_json, times: 1)
    driver
  end

  core = ::Appium::Core.for(Caps.android_direct)
  driver = android_mock_create_session_w3c_direct(core)
  assert_equal 999_999, driver.send(:bridge).http.open_timeout
  assert_equal 999_999, driver.send(:bridge).http.read_timeout
  # The client's (private) server_url must now be the direct endpoint.
  uri = driver.send(:bridge).http.send(:server_url)
  assert core.direct_connect
  assert_equal 'http', uri.scheme
  assert_equal 'localhost', uri.host
  assert_equal 8888, uri.port
  assert_equal '/wd/hub/', uri.path
end
# When directConnectPath is missing, the direct-connect information is
# incomplete: requests keep going to the originally configured server
# even though core.direct_connect is enabled.
def test_default_timeout_for_http_client_with_direct_no_path
  # Local helper: same as the direct-connect stub above, but without
  # directConnectPath; the timeouts call must stay on 127.0.0.1:4723.
  def android_mock_create_session_w3c_direct_no_path(core)
    response = {
      value: {
        sessionId: '1234567890',
        capabilities: {
          platformName: :android,
          automationName: ENV['AUTOMATION_NAME_DROID'] || 'uiautomator2',
          app: 'test/functional/app/api.apk.zip',
          platformVersion: '7.1.1',
          deviceName: 'Android Emulator',
          appPackage: 'io.appium.android.apis',
          appActivity: 'io.appium.android.apis.ApiDemos',
          someCapability: 'some_capability',
          unicodeKeyboard: true,
          resetKeyboard: true,
          directConnectProtocol: 'http',
          directConnectHost: 'localhost',
          directConnectPort: '8888'
        }
      }
    }.to_json

    stub_request(:post, 'http://127.0.0.1:4723/wd/hub/session')
      .to_return(headers: HEADER, status: 200, body: response)

    stub_request(:post, 'http://127.0.0.1:4723/wd/hub/session/1234567890/timeouts')
      .with(body: { implicit: 30_000 }.to_json)
      .to_return(headers: HEADER, status: 200, body: { value: nil }.to_json)

    driver = core.start_driver

    assert_requested(:post, 'http://127.0.0.1:4723/wd/hub/session', times: 1)
    assert_requested(:post, 'http://127.0.0.1:4723/wd/hub/session/1234567890/timeouts',
                     body: { implicit: 30_000 }.to_json, times: 1)
    driver
  end

  core = ::Appium::Core.for(Caps.android_direct)
  driver = android_mock_create_session_w3c_direct_no_path(core)
  assert_equal 999_999, driver.send(:bridge).http.open_timeout
  assert_equal 999_999, driver.send(:bridge).http.read_timeout
  uri = driver.send(:bridge).http.send(:server_url)
  # The flag is on, but the URL falls back to the default server.
  assert core.direct_connect
  assert_equal 'http', uri.scheme
  assert_equal '127.0.0.1', uri.host
  assert_equal 4723, uri.port
  assert_equal '/wd/hub/', uri.path
end
# https://www.w3.org/TR/webdriver1/
# Pins the complete locator-strategy table (21 entries) patched into
# Selenium's Element::FINDERS: the W3C-defined strategies plus the
# Appium- and driver-specific extensions (prefixed with '-').
def test_search_context_in_element_class
  assert_equal 21, ::Selenium::WebDriver::Element::FINDERS.length
  assert_equal({ class: 'class name',
                 class_name: 'class name',
                 css: 'css selector', # Defined in W3C spec
                 id: 'id',
                 link: 'link text', # Defined in W3C spec
                 link_text: 'link text', # Defined in W3C spec
                 name: 'name',
                 partial_link_text: 'partial link text', # Defined in W3C spec
                 tag_name: 'tag name', # Defined in W3C spec
                 xpath: 'xpath', # Defined in W3C spec
                 accessibility_id: 'accessibility id',
                 image: '-image',
                 custom: '-custom',
                 uiautomator: '-android uiautomator',
                 viewtag: '-android viewtag',
                 data_matcher: '-android datamatcher',
                 uiautomation: '-ios uiautomation',
                 predicate: '-ios predicate string',
                 class_chain: '-ios class chain',
                 windows_uiautomation: '-windows uiautomation',
                 tizen_uiautomation: '-tizen uiautomation' }, ::Selenium::WebDriver::Element::FINDERS)
end
end
end
| 40.622727 | 113 | 0.588117 |
bf44dd2cfe877c85e932a4c484dd7cd3b37d00fb | 916 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::Rekognition
module Errors
extend Aws::Errors::DynamicErrors
# Error raised by the Rekognition service; accessors expose fields of
# the generated error structure. (Generated code - do not hand-edit.)
class HumanLoopQuotaExceededException < ServiceError

  # @param [Seahorse::Client::RequestContext] context
  # @param [String] message
  # @param [Aws::Rekognition::Types::HumanLoopQuotaExceededException] data
  def initialize(context, message, data = Aws::EmptyStructure.new)
    super(context, message, data)
  end

  # Value of the :resource_type field on the error data.
  # @return [String]
  def resource_type
    @data[:resource_type]
  end

  # Value of the :quota_code field on the error data.
  # @return [String]
  def quota_code
    @data[:quota_code]
  end

  # Value of the :service_code field on the error data.
  # @return [String]
  def service_code
    @data[:service_code]
  end
end
end
end
| 22.341463 | 78 | 0.663755 |
acf8e116fc9c98d837356bf5a3ecae4ae1bddf89 | 76 | require 'rubygems'
require 'bundler/setup'
require 'rspec'
require 'glitter' | 19 | 23 | 0.789474 |
4a1b4d9dfdd220cd59077be270cb777523377f98 | 750 | require 'web_blocks/framework'
require 'web_blocks/structure/tree/leaf_node'
require 'web_blocks/structure/attribute/dependency'
require 'web_blocks/structure/attribute/loose_dependency'
module WebBlocks
module Structure
# A leaf node representing a single raw file within the block tree.
class RawFile < ::WebBlocks::Structure::Tree::LeafNode
  include WebBlocks::Framework
  include WebBlocks::Structure::Attribute::Dependency
  include WebBlocks::Structure::Attribute::LooseDependency

  # Raw files are required by default.
  set :required, true

  # Absolute location of this file: the parent's resolved path joined
  # with the :path attribute when one is set, otherwise the node name.
  def resolved_path
    segment = attributes.has_key?(:path) ? attributes[:path] : name
    parent.resolved_path + segment
  end

  # Debug-friendly summary of the node.
  def inspect
    { :name => name, :route => route, :resolved_path => resolved_path }
  end
end
end
end | 25 | 86 | 0.678667 |
e28b63b652becffc8363d2f6f0784bf812600e6b | 62 | module FFI
module RadixTree
  # Current release version of this library.
  VERSION = "0.4.0"
end
end
| 10.333333 | 21 | 0.645161 |
ffa3f7ca534a02f08adf9ffc0246f65e8da90512 | 6,156 | require 'models/runtime/space'
require 'models/runtime/app_model'
require 'models/runtime/build_model'
require 'models/runtime/route_mapping_model'
require 'models/runtime/package_model'
require 'models/runtime/droplet_model'
require 'models/runtime/buildpack_lifecycle_data_model'
require 'models/runtime/buildpack_lifecycle_buildpack_model'
require 'models/runtime/docker_lifecycle_data_model'
require 'models/runtime/task_model'
require 'models/runtime/isolation_segment_model'
require 'models/runtime/pollable_job_model'
require 'models/runtime/job_warning_model'
require 'models/runtime/security_group'
require 'models/runtime/security_groups_space'
require 'models/runtime/staging_security_groups_space'
require 'models/runtime/app_usage_event'
require 'models/runtime/auto_detection_buildpack'
require 'models/runtime/app_event'
require 'models/runtime/helpers/sidecar_mixin'
require 'models/runtime/sidecar_model'
require 'models/runtime/sidecar_process_type_model'
require 'models/runtime/process_model'
require 'models/runtime/buildpack'
require 'models/runtime/buildpack_bits_delete'
require 'models/runtime/domain'
require 'models/runtime/shared_domain'
require 'models/runtime/space_reserved_route_ports'
require 'models/runtime/private_domain'
require 'models/runtime/event'
require 'models/runtime/feature_flag'
require 'models/runtime/environment_variable_group'
require 'models/runtime/custom_buildpack'
require 'models/runtime/organization'
require 'models/runtime/organization_routes'
require 'models/runtime/organization_reserved_route_ports'
require 'models/runtime/quota_definition'
require 'models/runtime/quota_constraints/max_private_domains_policy'
require 'models/runtime/quota_constraints/max_routes_policy'
require 'models/runtime/quota_constraints/max_reserved_route_ports_policy'
require 'models/runtime/quota_constraints/max_service_instance_policy'
require 'models/runtime/quota_constraints/paid_service_instance_policy'
require 'models/runtime/quota_constraints/max_service_keys_policy'
require 'models/runtime/constraints/max_disk_quota_policy'
require 'models/runtime/constraints/min_disk_quota_policy'
require 'models/runtime/constraints/max_memory_policy'
require 'models/runtime/constraints/max_instance_memory_policy'
require 'models/runtime/constraints/min_memory_policy'
require 'models/runtime/constraints/ports_policy'
require 'models/runtime/constraints/instances_policy'
require 'models/runtime/constraints/max_app_instances_policy'
require 'models/runtime/constraints/max_app_tasks_policy'
require 'models/runtime/constraints/health_check_policy'
require 'models/runtime/constraints/docker_policy'
require 'models/runtime/constraints/sidecar_memory_less_than_process_memory_policy'
require 'models/runtime/revision_model'
require 'models/runtime/revision_process_command_model'
require 'models/runtime/revision_sidecar_model'
require 'models/runtime/revision_sidecar_process_type_model'
require 'models/runtime/route'
require 'models/runtime/space_routes'
require 'models/runtime/space_quota_definition'
require 'models/runtime/stack'
require 'models/runtime/user'
require 'models/runtime/locking'
require 'models/runtime/clock_job'
require 'models/runtime/system_audit_user'
require 'models/runtime/deployment_model'
require 'models/runtime/deployment_process_model'
require 'models/runtime/encryption_key_sentinel_model'
require 'models/runtime/app_label_model'
require 'models/runtime/build_label_model'
require 'models/runtime/buildpack_label_model'
require 'models/runtime/deployment_label_model'
require 'models/runtime/domain_label_model'
require 'models/runtime/droplet_label_model'
require 'models/runtime/isolation_segment_label_model'
require 'models/runtime/organization_label_model'
require 'models/runtime/package_label_model'
require 'models/runtime/process_label_model'
require 'models/runtime/revision_label_model'
require 'models/runtime/route_label_model'
require 'models/runtime/service_instance_label_model'
require 'models/runtime/space_label_model'
require 'models/runtime/stack_label_model'
require 'models/runtime/task_label_model'
require 'models/runtime/user_label_model'
require 'models/runtime/app_annotation_model'
require 'models/runtime/build_annotation_model'
require 'models/runtime/buildpack_annotation_model'
require 'models/runtime/deployment_annotation_model'
require 'models/runtime/domain_annotation_model'
require 'models/runtime/droplet_annotation_model'
require 'models/runtime/isolation_segment_annotation_model'
require 'models/runtime/organization_annotation_model'
require 'models/runtime/package_annotation_model'
require 'models/runtime/process_annotation_model'
require 'models/runtime/revision_annotation_model'
require 'models/runtime/route_annotation_model'
require 'models/runtime/service_instance_annotation_model'
require 'models/runtime/space_annotation_model'
require 'models/runtime/stack_annotation_model'
require 'models/runtime/task_annotation_model'
require 'models/runtime/user_annotation_model'
require 'models/services/service'
require 'models/services/service_binding'
require 'models/services/route_binding'
require 'models/services/service_dashboard_client'
require 'models/services/service_instance'
require 'models/services/managed_service_instance'
require 'models/services/service_instance_operation'
require 'models/services/service_binding_operation'
require 'models/services/user_provided_service_instance'
require 'models/services/service_broker'
require 'models/services/service_broker_state'
require 'models/services/service_broker_state_enum'
require 'models/services/service_plan'
require 'models/services/service_plan_visibility'
require 'models/services/service_usage_event'
require 'models/services/service_key'
require 'models/services/route_binding'
require 'models/request_count'
require 'models/orphaned_blob'
require 'models/runtime/space_auditor'
require 'models/runtime/space_manager'
require 'models/runtime/space_developer'
require 'models/runtime/organization_user'
require 'models/runtime/organization_auditor'
require 'models/runtime/organization_manager'
require 'models/runtime/organization_billing_manager'
| 45.940299 | 83 | 0.870045 |
1113fa94bd8b54e50c7f13166ffe559c9dacd35f | 132 | class Result < ActiveRecord::Base
belongs_to :exam_sessions, :class_name => "ExamSessions", :foreign_key => "exam_session_id"
end
| 33 | 93 | 0.765152 |
4aaa0ee685d069529b311b05d1e44c980f6e1cc5 | 119 | class ChangeEndAtInErrands < ActiveRecord::Migration
def change
change_column :errands, :end_at, :date
end
end
| 19.833333 | 52 | 0.764706 |
38bb0ae6cd8d21bd2f41fb031d26171d47dc3537 | 563 | class RemoveEditedAtEditedByFromDocument < ActiveRecord::Migration[5.2]
def up
remove_column :versioned_documents, :last_edited_at
remove_reference :versioned_documents, :last_edited_by
end
def down
add_column :versioned_documents,
:last_edited_at,
:datetime,
null: false # rubocop:disable Rails/NotNullColumn
add_reference :versioned_documents,
:last_edited_by,
foreign_key: { to_table: :users, on_delete: :nullify },
index: true
end
end
| 31.277778 | 73 | 0.648313 |
d578e39f231f4fffb3d6ad3bf12dc782fdb72f5c | 77 | class Favorite < ApplicationRecord
belongs_to :user
has_many :houses
end
| 15.4 | 34 | 0.792208 |
038a6429a6c7d0d1b91e800843fa35ee571a876d | 1,073 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'weather_judge/version'
Gem::Specification.new do |spec|
  spec.name          = "weather_judge"
  spec.version       = WeatherJudge::VERSION
  spec.authors       = ["Makoto Scott-Hinkle"]
  spec.email         = ["[email protected]"]

  spec.homepage      = "https://github.com/makotogitdev/weather-judge"
  spec.summary       = %q{Scores weather forecast on given coordinates.}
  spec.description   = %q{This gem gives a score to given coordinate's weather forecast. It uses Forecast.IO as data source.}
  spec.license       = "MIT"

  # Package every git-tracked file except tests/specs/features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.12"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.4"
end
| 41.269231 | 125 | 0.662628 |
01510e256a936e8b18cde90a563f7d7da4deeaec | 185 | class BallotResponse < ActiveRecord::Base
belongs_to :source
has_and_belongs_to_many :referendums
has_and_belongs_to_many :custom_ballots
has_many :custom_notes, :as => :object
end
| 26.428571 | 41 | 0.827027 |
61ca72bc4151779dd347a5af527aec6859ec5d5b | 843 |
## SportDb::load_plugins
# Scans the installed gems for sportdb plugin scripts, keeps the ones
# living in gems named sportdb-<name>-<major.minor.patch>, and requires
# each matching script. Results are memoized, so repeated calls reuse
# the first scan. Returns the array of accepted plugin script paths.
def self.load_plugins
  @found  ||= []
  @loaded ||= {}
  @files  ||= Gem.find_files('sportdb_plugin.rb')

  puts "#{@files.size} plugin files found:"
  @files.each do |file|
    puts " >#{file}<"
  end

  ## todo: extract version and name of gem?
  puts "normalized/match pattern:"
  @files.each do |file|
    # Fixed: the version dots are now escaped (the old pattern's bare `.`
    # matched ANY character) and multi-digit components (e.g. 1.10.2)
    # are accepted.
    if file =~ /sportdb-([a-z]+)-(\d+\.\d+\.\d+)/
      puts " >#{$1}< | >#{$2}<"
      @found << file
    else
      puts "*** error: ignoring plugin script >#{file}< not matching gem naming pattern"
    end
  end

  # Load each accepted plugin; a broken plugin must not abort the rest.
  @found.each do |file|
    begin
      puts "loading plugin script #{file}"
      require file
    rescue LoadError => e
      puts "*** error loading plugin script #{file.inspect}: #{e.message}. skipping..."
    end
  end
end
| 23.416667 | 90 | 0.533808 |
f8ccd450e05d6dcb27dc519b84957500f8456c1f | 1,602 | SuitRiot::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Print deprecation notices to the stderr
config.active_support.deprecation = :stderr
# Config webrat for integration testing
config.gem 'webrat', :version => ">=0.5.0"
end
| 41.076923 | 85 | 0.769663 |
d5ea920a4a40dfd9dd1aa6ceeb34adcd387092b1 | 1,258 | # BioCatalogue: app/controllers/contact_controller.rb
#
# Copyright (c) 2008-2010, University of Manchester, The European Bioinformatics
# Institute (EMBL-EBI) and the University of Southampton.
# See license.txt for details.
class ContactController < ApplicationController
  # The contact form is HTML-only; keep it out of the API surface.
  before_filter :disable_action_for_api

  # GET /contact
  def index
    respond_to do |format|
      format.html # index.html.erb
    end
  end

  # POST /contact
  # Emails the submitted feedback after a lightweight anti-spam check.
  def create
    # Prefer the explicit form fields; fall back to the signed-in user.
    from_user = params[:from] || current_user.try(:display_name) || "no name specified"
    from_user += ' (' + (params[:email] || current_user.try(:email) || 'no email specified') + ')'
    # Anti-spam challenge: splitting the content on the characters
    # b, i, o, c, a, t (limit -1 so trailing empty pieces count) yields
    # (occurrences + 1) pieces; that count must equal the "security
    # number" the user typed into :length_check (see the error message
    # below). A mismatch or empty message rejects the submission.
    if !params[:content].blank? and (params[:content].split(/[biocat]/, -1).length == params[:length_check].to_i)
      ContactMailer.feedback(from_user, params[:subject], params[:content]).deliver
      respond_to do |format|
        flash[:notice] = 'Your message has been submitted. Thank you very much.'
        format.html { redirect_to contact_url }
      end
    else
      respond_to do |format|
        flash.now[:error] = 'Failed to submit your message. Either there was an empty message or the security number you typed in is incorrect.'
        format.html { render :action => :index }
      end
    end
  end
end
| 33.105263 | 144 | 0.683625 |
01cec29480a21ffee06188ae8a4881acc0aa324f | 550 | # frozen_string_literal: true
module DocumentExporter
  # Renders a JPEG thumbnail from the first page of a PDF blob.
  class Thumbnail
    # Factor by which the source page dimensions are divided.
    THUMBNAIL_RATIO = 1.25

    def initialize(content)
      @content = content
    end

    # Converts the first PDF page to a JPEG blob scaled down by
    # THUMBNAIL_RATIO, memoizing the page dimensions for #orientation.
    def export
      pdf = ::MiniMagick::Image.read(@content)
      first_page = pdf.pages[0]
      @width = first_page[:width] / THUMBNAIL_RATIO
      @height = first_page[:height] / THUMBNAIL_RATIO
      render_options = { density: 300, background: '#fff', alpha: 'remove', resize: "#{@width}x#{@height}" }
      pdf.format('jpg', 0, render_options).to_blob
    end

    # NOTE: only meaningful after #export has populated @width/@height.
    def orientation
      if @width < @height
        'portrait'
      else
        'landscape'
      end
    end
  end
end
| 23.913043 | 117 | 0.636364 |
612cba0a93f46179bed1b82bf987badf0a467ef4 | 288 | Rails.application.routes.draw do
root 'missed_calls#index'
post 'call/enqueue', to: 'call#enqueue'
post 'call/incoming', to: 'call#incoming'
post 'assignment', to: 'callback#assignment'
post 'events', to: 'callback#events'
post 'message/incoming', to: 'message#incoming'
end
| 28.8 | 49 | 0.715278 |
7ab111e45ed682637f66d966f6832d97735d3bdd | 443 | # frozen-string-literal: true
module Navigable
  # Lookup table mapping each command outcome identifier to the observer
  # callback method invoked for that outcome. The generic :successfully /
  # :failed entries are the catch-alls; the others are outcome-specific.
  class ObserverMap
    METHODS = {
      successfully: :on_success,
      successfully_created: :on_creation,
      failed_to_validate: :on_failure_to_validate,
      failed_to_find: :on_failure_to_find,
      failed_to_create: :on_failure_to_create,
      failed_to_update: :on_failure_to_update,
      failed_to_delete: :on_failure_to_delete,
      failed: :on_failure
    }.freeze
  end
end
| 26.058824 | 50 | 0.726862 |
5dcc73f2460249b214b90c72ac42a7dca7658b27 | 1,971 | # encoding: UTF-8
# (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Students process
class MockProcess
  # Process-layer handler that creates a :student object.
  # Delegates creation to the controller layer, fails hard when the
  # controller returns nothing, and logs the new object's id.
  def create_student(sObjectType, hParams)
    PrcLib.state(format("Running creation process for object '%s' = '%s'",
                        sObjectType, hParams[:student_name]))

    # byebug if ENV['BYEBUG'] # rubocop: disable Debugger
    object = controller_create(sObjectType)
    PrcLib.runtime_fail "Student '%s' not created.",
                        hParams[:student_name] if object.nil?
    PrcLib.info("'%s': '%s' created with id %s",
                sObjectType, hParams[:student_name], object[:id])
    object
  end
end
# Declaring your data model and handlers.
class Lorj::BaseDefinition # rubocop: disable Style/ClassAndModuleChildren
  # We need to define the student object and the handler to use while we need to
  # create it.
  define_obj(:student,
             # The function to call in the class Students
             :create_e => :create_student,
             # We use predefined call to the controller query
             :query_e => :controller_query,
             # We use predefined call to the controller get
             :get_e => :controller_get,
             # We use predefined call to the controller delete
             :delete_e => :controller_delete
            )

  # :student_name is required by the :create_e handler and is handed to
  # the controller under the :name key.
  obj_needs :data, :student_name, :for => [:create_e], :mapping => :name
end
| 40.22449 | 80 | 0.664637 |
b9f575e0a2a087c9fa5236e28ca4e5c57718d4d4 | 319 | module ActiveRecord
# Returns the version of the currently loaded Active Record as a <tt>Gem::Version</tt>,
# built from VERSION::STRING (e.g. Gem::Version.new("5.0.7.2")).
def self.gem_version
  Gem::Version.new VERSION::STRING
end
# Individual version components.
module VERSION
  MAJOR = 5
  MINOR = 0
  TINY  = 7
  PRE   = "2"

  # Dotted version string, e.g. "5.0.7.2"; +compact+ drops PRE when nil.
  STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
end
end
| 19.9375 | 88 | 0.642633 |
ab85ab9e3e442d5d8b30a74fbb98267e22f9ba27 | 5,657 | module Rails
module Paths
# This object is an extended hash that behaves as root of the <tt>Rails::Paths</tt> system.
# It allows you to collect information about how you want to structure your application
# paths by a Hash like API. It requires you to give a physical path on initialization.
#
# root = Root.new "/rails"
# root.add "app/controllers", eager_load: true
#
# The command above creates a new root object and add "app/controllers" as a path.
# This means we can get a <tt>Rails::Paths::Path</tt> object back like below:
#
# path = root["app/controllers"]
# path.eager_load? # => true
# path.is_a?(Rails::Paths::Path) # => true
#
# The +Path+ object is simply an enumerable and allows you to easily add extra paths:
#
# path.is_a?(Enumerable) # => true
# path.to_ary.inspect # => ["app/controllers"]
#
# path << "lib/controllers"
# path.to_ary.inspect # => ["app/controllers", "lib/controllers"]
#
# Notice that when you add a path using +add+, the path object created already
# contains the path with the same path value given to +add+. In some situations,
# you may not want this behavior, so you can give +:with+ as option.
#
# root.add "config/routes", with: "config/routes.rb"
# root["config/routes"].inspect # => ["config/routes.rb"]
#
# The +add+ method accepts the following options as arguments:
# eager_load, autoload, autoload_once and glob.
#
# Finally, the +Path+ object also provides a few helpers:
#
# root = Root.new "/rails"
# root.add "app/controllers"
#
# root["app/controllers"].expanded # => ["/rails/app/controllers"]
# root["app/controllers"].existent # => ["/rails/app/controllers"]
#
# Check the <tt>Rails::Paths::Path</tt> documentation for more information.
class Root
  # Physical filesystem root that registered paths are expanded against
  # (e.g. the application root).
  attr_accessor :path

  def initialize(path)
    @current = nil
    @path = path
    @root = {}
  end

  # Replaces the path registered under +path+, preserving any glob that
  # was previously configured for it.
  def []=(path, value)
    glob = self[path] ? self[path].glob : nil
    add(path, with: value, glob: glob)
  end

  # Registers +path+ as a Path object. The physical location(s) default
  # to the key itself unless +:with+ is given.
  def add(path, options = {})
    with = Array(options.fetch(:with, path))
    @root[path] = Path.new(self, path, with, options)
  end

  def [](path)
    @root[path]
  end

  def values
    @root.values
  end

  def keys
    @root.keys
  end

  def values_at(*list)
    @root.values_at(*list)
  end

  # All registered Path objects, de-duplicated in place.
  def all_paths
    values.tap { |v| v.uniq! }
  end

  # Existent locations of paths flagged with each respective option.
  def autoload_once
    filter_by { |p| p.autoload_once? }
  end

  def eager_load
    filter_by { |p| p.eager_load? }
  end

  def autoload_paths
    filter_by { |p| p.autoload? }
  end

  def load_paths
    filter_by { |p| p.load_path? }
  end

  private

  # Existent expansions of every path matching the block, minus the
  # expansions of any child path that does NOT match - so a child can
  # opt out of a flag set on its parent.
  def filter_by(&block)
    all_paths.find_all(&block).flat_map { |path|
      paths = path.existent
      paths - path.children.map { |p| yield(p) ? [] : p.existent }.flatten
    }.uniq
  end
end
class Path
  include Enumerable

  # Optional glob (e.g. "**/*.rb") applied when expanding this path.
  attr_accessor :glob

  def initialize(root, current, paths, options = {})
    @paths = paths
    @current = current
    @root = root
    @glob = options[:glob]

    # Each flag defaults to off unless explicitly requested.
    options[:autoload_once] ? autoload_once! : skip_autoload_once!
    options[:eager_load] ? eager_load! : skip_eager_load!
    options[:autoload] ? autoload! : skip_autoload!
    options[:load_path] ? load_path! : skip_load_path!
  end

  # Paths registered under this one: every key in the root that extends
  # this path's key.
  def children
    keys = @root.keys.find_all { |k|
      k.start_with?(@current) && k != @current
    }
    @root.values_at(*keys.sort)
  end

  def first
    expanded.first
  end

  def last
    expanded.last
  end

  # Defines eager_load!/skip_eager_load!/eager_load? style flag
  # accessors for each of the four options.
  %w(autoload_once eager_load autoload load_path).each do |m|
    class_eval <<-RUBY, __FILE__, __LINE__ + 1
      def #{m}!        # def eager_load!
        @#{m} = true   # @eager_load = true
      end              # end

      def skip_#{m}!   # def skip_eager_load!
        @#{m} = false  # @eager_load = false
      end              # end

      def #{m}?        # def eager_load?
        @#{m}          # @eager_load
      end              # end
    RUBY
  end

  def each(&block)
    @paths.each(&block)
  end

  def <<(path)
    @paths << path
  end
  alias :push :<<

  def concat(paths)
    @paths.concat paths
  end

  def unshift(path)
    @paths.unshift path
  end

  def to_ary
    @paths
  end

  # Expands all paths against the root and return all unique values.
  def expanded
    raise "You need to set a path root" unless @root.path
    result = []

    each do |p|
      path = File.expand_path(p, @root.path)

      if @glob && File.directory?(path)
        # Glob relative to the directory so each match can be re-joined
        # onto the absolute path; matches are sorted for determinism.
        Dir.chdir(path) do
          result.concat(Dir.glob(@glob).map { |file| File.join path, file }.sort)
        end
      else
        result << path
      end
    end

    result.uniq!
    result
  end

  # Returns all expanded paths but only if they exist in the filesystem.
  def existent
    expanded.select { |f| File.exist?(f) }
  end

  def existent_directories
    expanded.select { |d| File.directory?(d) }
  end

  alias to_a expanded
end
end
end
| 26.683962 | 95 | 0.540746 |
acfc358fbee1164c94c3ea3e847bbedb667076f1 | 52,805 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::CloudTrail
# @api private
module ClientApi
include Seahorse::Model
AddTagsRequest = Shapes::StructureShape.new(name: 'AddTagsRequest')
AddTagsResponse = Shapes::StructureShape.new(name: 'AddTagsResponse')
Boolean = Shapes::BooleanShape.new(name: 'Boolean')
ByteBuffer = Shapes::BlobShape.new(name: 'ByteBuffer')
CloudTrailARNInvalidException = Shapes::StructureShape.new(name: 'CloudTrailARNInvalidException')
CloudTrailAccessNotEnabledException = Shapes::StructureShape.new(name: 'CloudTrailAccessNotEnabledException')
CloudWatchLogsDeliveryUnavailableException = Shapes::StructureShape.new(name: 'CloudWatchLogsDeliveryUnavailableException')
CreateTrailRequest = Shapes::StructureShape.new(name: 'CreateTrailRequest')
CreateTrailResponse = Shapes::StructureShape.new(name: 'CreateTrailResponse')
DataResource = Shapes::StructureShape.new(name: 'DataResource')
DataResourceValues = Shapes::ListShape.new(name: 'DataResourceValues')
DataResources = Shapes::ListShape.new(name: 'DataResources')
Date = Shapes::TimestampShape.new(name: 'Date')
DeleteTrailRequest = Shapes::StructureShape.new(name: 'DeleteTrailRequest')
DeleteTrailResponse = Shapes::StructureShape.new(name: 'DeleteTrailResponse')
DescribeTrailsRequest = Shapes::StructureShape.new(name: 'DescribeTrailsRequest')
DescribeTrailsResponse = Shapes::StructureShape.new(name: 'DescribeTrailsResponse')
Event = Shapes::StructureShape.new(name: 'Event')
EventCategory = Shapes::StringShape.new(name: 'EventCategory')
EventSelector = Shapes::StructureShape.new(name: 'EventSelector')
EventSelectors = Shapes::ListShape.new(name: 'EventSelectors')
EventsList = Shapes::ListShape.new(name: 'EventsList')
ExcludeManagementEventSources = Shapes::ListShape.new(name: 'ExcludeManagementEventSources')
GetEventSelectorsRequest = Shapes::StructureShape.new(name: 'GetEventSelectorsRequest')
GetEventSelectorsResponse = Shapes::StructureShape.new(name: 'GetEventSelectorsResponse')
GetInsightSelectorsRequest = Shapes::StructureShape.new(name: 'GetInsightSelectorsRequest')
GetInsightSelectorsResponse = Shapes::StructureShape.new(name: 'GetInsightSelectorsResponse')
GetTrailRequest = Shapes::StructureShape.new(name: 'GetTrailRequest')
GetTrailResponse = Shapes::StructureShape.new(name: 'GetTrailResponse')
GetTrailStatusRequest = Shapes::StructureShape.new(name: 'GetTrailStatusRequest')
GetTrailStatusResponse = Shapes::StructureShape.new(name: 'GetTrailStatusResponse')
InsightNotEnabledException = Shapes::StructureShape.new(name: 'InsightNotEnabledException')
InsightSelector = Shapes::StructureShape.new(name: 'InsightSelector')
InsightSelectors = Shapes::ListShape.new(name: 'InsightSelectors')
InsightType = Shapes::StringShape.new(name: 'InsightType')
InsufficientDependencyServiceAccessPermissionException = Shapes::StructureShape.new(name: 'InsufficientDependencyServiceAccessPermissionException')
InsufficientEncryptionPolicyException = Shapes::StructureShape.new(name: 'InsufficientEncryptionPolicyException')
InsufficientS3BucketPolicyException = Shapes::StructureShape.new(name: 'InsufficientS3BucketPolicyException')
InsufficientSnsTopicPolicyException = Shapes::StructureShape.new(name: 'InsufficientSnsTopicPolicyException')
InvalidCloudWatchLogsLogGroupArnException = Shapes::StructureShape.new(name: 'InvalidCloudWatchLogsLogGroupArnException')
InvalidCloudWatchLogsRoleArnException = Shapes::StructureShape.new(name: 'InvalidCloudWatchLogsRoleArnException')
InvalidEventCategoryException = Shapes::StructureShape.new(name: 'InvalidEventCategoryException')
InvalidEventSelectorsException = Shapes::StructureShape.new(name: 'InvalidEventSelectorsException')
InvalidHomeRegionException = Shapes::StructureShape.new(name: 'InvalidHomeRegionException')
InvalidInsightSelectorsException = Shapes::StructureShape.new(name: 'InvalidInsightSelectorsException')
InvalidKmsKeyIdException = Shapes::StructureShape.new(name: 'InvalidKmsKeyIdException')
InvalidLookupAttributesException = Shapes::StructureShape.new(name: 'InvalidLookupAttributesException')
InvalidMaxResultsException = Shapes::StructureShape.new(name: 'InvalidMaxResultsException')
InvalidNextTokenException = Shapes::StructureShape.new(name: 'InvalidNextTokenException')
InvalidParameterCombinationException = Shapes::StructureShape.new(name: 'InvalidParameterCombinationException')
InvalidS3BucketNameException = Shapes::StructureShape.new(name: 'InvalidS3BucketNameException')
InvalidS3PrefixException = Shapes::StructureShape.new(name: 'InvalidS3PrefixException')
InvalidSnsTopicNameException = Shapes::StructureShape.new(name: 'InvalidSnsTopicNameException')
InvalidTagParameterException = Shapes::StructureShape.new(name: 'InvalidTagParameterException')
InvalidTimeRangeException = Shapes::StructureShape.new(name: 'InvalidTimeRangeException')
InvalidTokenException = Shapes::StructureShape.new(name: 'InvalidTokenException')
InvalidTrailNameException = Shapes::StructureShape.new(name: 'InvalidTrailNameException')
KmsException = Shapes::StructureShape.new(name: 'KmsException')
KmsKeyDisabledException = Shapes::StructureShape.new(name: 'KmsKeyDisabledException')
KmsKeyNotFoundException = Shapes::StructureShape.new(name: 'KmsKeyNotFoundException')
ListPublicKeysRequest = Shapes::StructureShape.new(name: 'ListPublicKeysRequest')
ListPublicKeysResponse = Shapes::StructureShape.new(name: 'ListPublicKeysResponse')
ListTagsRequest = Shapes::StructureShape.new(name: 'ListTagsRequest')
ListTagsResponse = Shapes::StructureShape.new(name: 'ListTagsResponse')
ListTrailsRequest = Shapes::StructureShape.new(name: 'ListTrailsRequest')
ListTrailsResponse = Shapes::StructureShape.new(name: 'ListTrailsResponse')
LookupAttribute = Shapes::StructureShape.new(name: 'LookupAttribute')
LookupAttributeKey = Shapes::StringShape.new(name: 'LookupAttributeKey')
LookupAttributesList = Shapes::ListShape.new(name: 'LookupAttributesList')
LookupEventsRequest = Shapes::StructureShape.new(name: 'LookupEventsRequest')
LookupEventsResponse = Shapes::StructureShape.new(name: 'LookupEventsResponse')
MaxResults = Shapes::IntegerShape.new(name: 'MaxResults')
MaximumNumberOfTrailsExceededException = Shapes::StructureShape.new(name: 'MaximumNumberOfTrailsExceededException')
NextToken = Shapes::StringShape.new(name: 'NextToken')
NotOrganizationMasterAccountException = Shapes::StructureShape.new(name: 'NotOrganizationMasterAccountException')
OperationNotPermittedException = Shapes::StructureShape.new(name: 'OperationNotPermittedException')
OrganizationNotInAllFeaturesModeException = Shapes::StructureShape.new(name: 'OrganizationNotInAllFeaturesModeException')
OrganizationsNotInUseException = Shapes::StructureShape.new(name: 'OrganizationsNotInUseException')
PublicKey = Shapes::StructureShape.new(name: 'PublicKey')
PublicKeyList = Shapes::ListShape.new(name: 'PublicKeyList')
PutEventSelectorsRequest = Shapes::StructureShape.new(name: 'PutEventSelectorsRequest')
PutEventSelectorsResponse = Shapes::StructureShape.new(name: 'PutEventSelectorsResponse')
PutInsightSelectorsRequest = Shapes::StructureShape.new(name: 'PutInsightSelectorsRequest')
PutInsightSelectorsResponse = Shapes::StructureShape.new(name: 'PutInsightSelectorsResponse')
ReadWriteType = Shapes::StringShape.new(name: 'ReadWriteType')
RemoveTagsRequest = Shapes::StructureShape.new(name: 'RemoveTagsRequest')
RemoveTagsResponse = Shapes::StructureShape.new(name: 'RemoveTagsResponse')
Resource = Shapes::StructureShape.new(name: 'Resource')
ResourceIdList = Shapes::ListShape.new(name: 'ResourceIdList')
ResourceList = Shapes::ListShape.new(name: 'ResourceList')
ResourceNotFoundException = Shapes::StructureShape.new(name: 'ResourceNotFoundException')
ResourceTag = Shapes::StructureShape.new(name: 'ResourceTag')
ResourceTagList = Shapes::ListShape.new(name: 'ResourceTagList')
ResourceTypeNotSupportedException = Shapes::StructureShape.new(name: 'ResourceTypeNotSupportedException')
S3BucketDoesNotExistException = Shapes::StructureShape.new(name: 'S3BucketDoesNotExistException')
StartLoggingRequest = Shapes::StructureShape.new(name: 'StartLoggingRequest')
StartLoggingResponse = Shapes::StructureShape.new(name: 'StartLoggingResponse')
StopLoggingRequest = Shapes::StructureShape.new(name: 'StopLoggingRequest')
StopLoggingResponse = Shapes::StructureShape.new(name: 'StopLoggingResponse')
String = Shapes::StringShape.new(name: 'String')
Tag = Shapes::StructureShape.new(name: 'Tag')
TagsLimitExceededException = Shapes::StructureShape.new(name: 'TagsLimitExceededException')
TagsList = Shapes::ListShape.new(name: 'TagsList')
Trail = Shapes::StructureShape.new(name: 'Trail')
TrailAlreadyExistsException = Shapes::StructureShape.new(name: 'TrailAlreadyExistsException')
TrailInfo = Shapes::StructureShape.new(name: 'TrailInfo')
TrailList = Shapes::ListShape.new(name: 'TrailList')
TrailNameList = Shapes::ListShape.new(name: 'TrailNameList')
TrailNotFoundException = Shapes::StructureShape.new(name: 'TrailNotFoundException')
TrailNotProvidedException = Shapes::StructureShape.new(name: 'TrailNotProvidedException')
Trails = Shapes::ListShape.new(name: 'Trails')
UnsupportedOperationException = Shapes::StructureShape.new(name: 'UnsupportedOperationException')
UpdateTrailRequest = Shapes::StructureShape.new(name: 'UpdateTrailRequest')
UpdateTrailResponse = Shapes::StructureShape.new(name: 'UpdateTrailResponse')
AddTagsRequest.add_member(:resource_id, Shapes::ShapeRef.new(shape: String, required: true, location_name: "ResourceId"))
AddTagsRequest.add_member(:tags_list, Shapes::ShapeRef.new(shape: TagsList, location_name: "TagsList"))
AddTagsRequest.struct_class = Types::AddTagsRequest
AddTagsResponse.struct_class = Types::AddTagsResponse
CreateTrailRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
CreateTrailRequest.add_member(:s3_bucket_name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "S3BucketName"))
CreateTrailRequest.add_member(:s3_key_prefix, Shapes::ShapeRef.new(shape: String, location_name: "S3KeyPrefix"))
CreateTrailRequest.add_member(:sns_topic_name, Shapes::ShapeRef.new(shape: String, location_name: "SnsTopicName"))
CreateTrailRequest.add_member(:include_global_service_events, Shapes::ShapeRef.new(shape: Boolean, location_name: "IncludeGlobalServiceEvents"))
CreateTrailRequest.add_member(:is_multi_region_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsMultiRegionTrail"))
CreateTrailRequest.add_member(:enable_log_file_validation, Shapes::ShapeRef.new(shape: Boolean, location_name: "EnableLogFileValidation"))
CreateTrailRequest.add_member(:cloud_watch_logs_log_group_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsLogGroupArn"))
CreateTrailRequest.add_member(:cloud_watch_logs_role_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsRoleArn"))
CreateTrailRequest.add_member(:kms_key_id, Shapes::ShapeRef.new(shape: String, location_name: "KmsKeyId"))
CreateTrailRequest.add_member(:is_organization_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsOrganizationTrail"))
CreateTrailRequest.add_member(:tags_list, Shapes::ShapeRef.new(shape: TagsList, location_name: "TagsList"))
CreateTrailRequest.struct_class = Types::CreateTrailRequest
CreateTrailResponse.add_member(:name, Shapes::ShapeRef.new(shape: String, location_name: "Name"))
CreateTrailResponse.add_member(:s3_bucket_name, Shapes::ShapeRef.new(shape: String, location_name: "S3BucketName"))
CreateTrailResponse.add_member(:s3_key_prefix, Shapes::ShapeRef.new(shape: String, location_name: "S3KeyPrefix"))
CreateTrailResponse.add_member(:sns_topic_name, Shapes::ShapeRef.new(shape: String, deprecated: true, location_name: "SnsTopicName"))
CreateTrailResponse.add_member(:sns_topic_arn, Shapes::ShapeRef.new(shape: String, location_name: "SnsTopicARN"))
CreateTrailResponse.add_member(:include_global_service_events, Shapes::ShapeRef.new(shape: Boolean, location_name: "IncludeGlobalServiceEvents"))
CreateTrailResponse.add_member(:is_multi_region_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsMultiRegionTrail"))
CreateTrailResponse.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
CreateTrailResponse.add_member(:log_file_validation_enabled, Shapes::ShapeRef.new(shape: Boolean, location_name: "LogFileValidationEnabled"))
CreateTrailResponse.add_member(:cloud_watch_logs_log_group_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsLogGroupArn"))
CreateTrailResponse.add_member(:cloud_watch_logs_role_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsRoleArn"))
CreateTrailResponse.add_member(:kms_key_id, Shapes::ShapeRef.new(shape: String, location_name: "KmsKeyId"))
CreateTrailResponse.add_member(:is_organization_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsOrganizationTrail"))
CreateTrailResponse.struct_class = Types::CreateTrailResponse
DataResource.add_member(:type, Shapes::ShapeRef.new(shape: String, location_name: "Type"))
DataResource.add_member(:values, Shapes::ShapeRef.new(shape: DataResourceValues, location_name: "Values"))
DataResource.struct_class = Types::DataResource
DataResourceValues.member = Shapes::ShapeRef.new(shape: String)
DataResources.member = Shapes::ShapeRef.new(shape: DataResource)
DeleteTrailRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
DeleteTrailRequest.struct_class = Types::DeleteTrailRequest
DeleteTrailResponse.struct_class = Types::DeleteTrailResponse
DescribeTrailsRequest.add_member(:trail_name_list, Shapes::ShapeRef.new(shape: TrailNameList, location_name: "trailNameList"))
DescribeTrailsRequest.add_member(:include_shadow_trails, Shapes::ShapeRef.new(shape: Boolean, location_name: "includeShadowTrails"))
DescribeTrailsRequest.struct_class = Types::DescribeTrailsRequest
DescribeTrailsResponse.add_member(:trail_list, Shapes::ShapeRef.new(shape: TrailList, location_name: "trailList"))
DescribeTrailsResponse.struct_class = Types::DescribeTrailsResponse
Event.add_member(:event_id, Shapes::ShapeRef.new(shape: String, location_name: "EventId"))
Event.add_member(:event_name, Shapes::ShapeRef.new(shape: String, location_name: "EventName"))
Event.add_member(:read_only, Shapes::ShapeRef.new(shape: String, location_name: "ReadOnly"))
Event.add_member(:access_key_id, Shapes::ShapeRef.new(shape: String, location_name: "AccessKeyId"))
Event.add_member(:event_time, Shapes::ShapeRef.new(shape: Date, location_name: "EventTime"))
Event.add_member(:event_source, Shapes::ShapeRef.new(shape: String, location_name: "EventSource"))
Event.add_member(:username, Shapes::ShapeRef.new(shape: String, location_name: "Username"))
Event.add_member(:resources, Shapes::ShapeRef.new(shape: ResourceList, location_name: "Resources"))
Event.add_member(:cloud_trail_event, Shapes::ShapeRef.new(shape: String, location_name: "CloudTrailEvent"))
Event.struct_class = Types::Event
EventSelector.add_member(:read_write_type, Shapes::ShapeRef.new(shape: ReadWriteType, location_name: "ReadWriteType"))
EventSelector.add_member(:include_management_events, Shapes::ShapeRef.new(shape: Boolean, location_name: "IncludeManagementEvents"))
EventSelector.add_member(:data_resources, Shapes::ShapeRef.new(shape: DataResources, location_name: "DataResources"))
EventSelector.add_member(:exclude_management_event_sources, Shapes::ShapeRef.new(shape: ExcludeManagementEventSources, location_name: "ExcludeManagementEventSources"))
EventSelector.struct_class = Types::EventSelector
EventSelectors.member = Shapes::ShapeRef.new(shape: EventSelector)
EventsList.member = Shapes::ShapeRef.new(shape: Event)
ExcludeManagementEventSources.member = Shapes::ShapeRef.new(shape: String)
GetEventSelectorsRequest.add_member(:trail_name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "TrailName"))
GetEventSelectorsRequest.struct_class = Types::GetEventSelectorsRequest
GetEventSelectorsResponse.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
GetEventSelectorsResponse.add_member(:event_selectors, Shapes::ShapeRef.new(shape: EventSelectors, location_name: "EventSelectors"))
GetEventSelectorsResponse.struct_class = Types::GetEventSelectorsResponse
GetInsightSelectorsRequest.add_member(:trail_name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "TrailName"))
GetInsightSelectorsRequest.struct_class = Types::GetInsightSelectorsRequest
GetInsightSelectorsResponse.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
GetInsightSelectorsResponse.add_member(:insight_selectors, Shapes::ShapeRef.new(shape: InsightSelectors, location_name: "InsightSelectors"))
GetInsightSelectorsResponse.struct_class = Types::GetInsightSelectorsResponse
GetTrailRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
GetTrailRequest.struct_class = Types::GetTrailRequest
GetTrailResponse.add_member(:trail, Shapes::ShapeRef.new(shape: Trail, location_name: "Trail"))
GetTrailResponse.struct_class = Types::GetTrailResponse
GetTrailStatusRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
GetTrailStatusRequest.struct_class = Types::GetTrailStatusRequest
GetTrailStatusResponse.add_member(:is_logging, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsLogging"))
GetTrailStatusResponse.add_member(:latest_delivery_error, Shapes::ShapeRef.new(shape: String, location_name: "LatestDeliveryError"))
GetTrailStatusResponse.add_member(:latest_notification_error, Shapes::ShapeRef.new(shape: String, location_name: "LatestNotificationError"))
GetTrailStatusResponse.add_member(:latest_delivery_time, Shapes::ShapeRef.new(shape: Date, location_name: "LatestDeliveryTime"))
GetTrailStatusResponse.add_member(:latest_notification_time, Shapes::ShapeRef.new(shape: Date, location_name: "LatestNotificationTime"))
GetTrailStatusResponse.add_member(:start_logging_time, Shapes::ShapeRef.new(shape: Date, location_name: "StartLoggingTime"))
GetTrailStatusResponse.add_member(:stop_logging_time, Shapes::ShapeRef.new(shape: Date, location_name: "StopLoggingTime"))
GetTrailStatusResponse.add_member(:latest_cloud_watch_logs_delivery_error, Shapes::ShapeRef.new(shape: String, location_name: "LatestCloudWatchLogsDeliveryError"))
GetTrailStatusResponse.add_member(:latest_cloud_watch_logs_delivery_time, Shapes::ShapeRef.new(shape: Date, location_name: "LatestCloudWatchLogsDeliveryTime"))
GetTrailStatusResponse.add_member(:latest_digest_delivery_time, Shapes::ShapeRef.new(shape: Date, location_name: "LatestDigestDeliveryTime"))
GetTrailStatusResponse.add_member(:latest_digest_delivery_error, Shapes::ShapeRef.new(shape: String, location_name: "LatestDigestDeliveryError"))
GetTrailStatusResponse.add_member(:latest_delivery_attempt_time, Shapes::ShapeRef.new(shape: String, location_name: "LatestDeliveryAttemptTime"))
GetTrailStatusResponse.add_member(:latest_notification_attempt_time, Shapes::ShapeRef.new(shape: String, location_name: "LatestNotificationAttemptTime"))
GetTrailStatusResponse.add_member(:latest_notification_attempt_succeeded, Shapes::ShapeRef.new(shape: String, location_name: "LatestNotificationAttemptSucceeded"))
GetTrailStatusResponse.add_member(:latest_delivery_attempt_succeeded, Shapes::ShapeRef.new(shape: String, location_name: "LatestDeliveryAttemptSucceeded"))
GetTrailStatusResponse.add_member(:time_logging_started, Shapes::ShapeRef.new(shape: String, location_name: "TimeLoggingStarted"))
GetTrailStatusResponse.add_member(:time_logging_stopped, Shapes::ShapeRef.new(shape: String, location_name: "TimeLoggingStopped"))
GetTrailStatusResponse.struct_class = Types::GetTrailStatusResponse
InsightSelector.add_member(:insight_type, Shapes::ShapeRef.new(shape: InsightType, location_name: "InsightType"))
InsightSelector.struct_class = Types::InsightSelector
InsightSelectors.member = Shapes::ShapeRef.new(shape: InsightSelector)
ListPublicKeysRequest.add_member(:start_time, Shapes::ShapeRef.new(shape: Date, location_name: "StartTime"))
ListPublicKeysRequest.add_member(:end_time, Shapes::ShapeRef.new(shape: Date, location_name: "EndTime"))
ListPublicKeysRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListPublicKeysRequest.struct_class = Types::ListPublicKeysRequest
ListPublicKeysResponse.add_member(:public_key_list, Shapes::ShapeRef.new(shape: PublicKeyList, location_name: "PublicKeyList"))
ListPublicKeysResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListPublicKeysResponse.struct_class = Types::ListPublicKeysResponse
ListTagsRequest.add_member(:resource_id_list, Shapes::ShapeRef.new(shape: ResourceIdList, required: true, location_name: "ResourceIdList"))
ListTagsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListTagsRequest.struct_class = Types::ListTagsRequest
ListTagsResponse.add_member(:resource_tag_list, Shapes::ShapeRef.new(shape: ResourceTagList, location_name: "ResourceTagList"))
ListTagsResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListTagsResponse.struct_class = Types::ListTagsResponse
ListTrailsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListTrailsRequest.struct_class = Types::ListTrailsRequest
ListTrailsResponse.add_member(:trails, Shapes::ShapeRef.new(shape: Trails, location_name: "Trails"))
ListTrailsResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListTrailsResponse.struct_class = Types::ListTrailsResponse
LookupAttribute.add_member(:attribute_key, Shapes::ShapeRef.new(shape: LookupAttributeKey, required: true, location_name: "AttributeKey"))
LookupAttribute.add_member(:attribute_value, Shapes::ShapeRef.new(shape: String, required: true, location_name: "AttributeValue"))
LookupAttribute.struct_class = Types::LookupAttribute
LookupAttributesList.member = Shapes::ShapeRef.new(shape: LookupAttribute)
LookupEventsRequest.add_member(:lookup_attributes, Shapes::ShapeRef.new(shape: LookupAttributesList, location_name: "LookupAttributes"))
LookupEventsRequest.add_member(:start_time, Shapes::ShapeRef.new(shape: Date, location_name: "StartTime"))
LookupEventsRequest.add_member(:end_time, Shapes::ShapeRef.new(shape: Date, location_name: "EndTime"))
LookupEventsRequest.add_member(:event_category, Shapes::ShapeRef.new(shape: EventCategory, location_name: "EventCategory"))
LookupEventsRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "MaxResults"))
LookupEventsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
LookupEventsRequest.struct_class = Types::LookupEventsRequest
LookupEventsResponse.add_member(:events, Shapes::ShapeRef.new(shape: EventsList, location_name: "Events"))
LookupEventsResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "NextToken"))
LookupEventsResponse.struct_class = Types::LookupEventsResponse
PublicKey.add_member(:value, Shapes::ShapeRef.new(shape: ByteBuffer, location_name: "Value"))
PublicKey.add_member(:validity_start_time, Shapes::ShapeRef.new(shape: Date, location_name: "ValidityStartTime"))
PublicKey.add_member(:validity_end_time, Shapes::ShapeRef.new(shape: Date, location_name: "ValidityEndTime"))
PublicKey.add_member(:fingerprint, Shapes::ShapeRef.new(shape: String, location_name: "Fingerprint"))
PublicKey.struct_class = Types::PublicKey
PublicKeyList.member = Shapes::ShapeRef.new(shape: PublicKey)
PutEventSelectorsRequest.add_member(:trail_name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "TrailName"))
PutEventSelectorsRequest.add_member(:event_selectors, Shapes::ShapeRef.new(shape: EventSelectors, required: true, location_name: "EventSelectors"))
PutEventSelectorsRequest.struct_class = Types::PutEventSelectorsRequest
PutEventSelectorsResponse.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
PutEventSelectorsResponse.add_member(:event_selectors, Shapes::ShapeRef.new(shape: EventSelectors, location_name: "EventSelectors"))
PutEventSelectorsResponse.struct_class = Types::PutEventSelectorsResponse
PutInsightSelectorsRequest.add_member(:trail_name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "TrailName"))
PutInsightSelectorsRequest.add_member(:insight_selectors, Shapes::ShapeRef.new(shape: InsightSelectors, required: true, location_name: "InsightSelectors"))
PutInsightSelectorsRequest.struct_class = Types::PutInsightSelectorsRequest
PutInsightSelectorsResponse.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
PutInsightSelectorsResponse.add_member(:insight_selectors, Shapes::ShapeRef.new(shape: InsightSelectors, location_name: "InsightSelectors"))
PutInsightSelectorsResponse.struct_class = Types::PutInsightSelectorsResponse
RemoveTagsRequest.add_member(:resource_id, Shapes::ShapeRef.new(shape: String, required: true, location_name: "ResourceId"))
RemoveTagsRequest.add_member(:tags_list, Shapes::ShapeRef.new(shape: TagsList, location_name: "TagsList"))
RemoveTagsRequest.struct_class = Types::RemoveTagsRequest
RemoveTagsResponse.struct_class = Types::RemoveTagsResponse
Resource.add_member(:resource_type, Shapes::ShapeRef.new(shape: String, location_name: "ResourceType"))
Resource.add_member(:resource_name, Shapes::ShapeRef.new(shape: String, location_name: "ResourceName"))
Resource.struct_class = Types::Resource
ResourceIdList.member = Shapes::ShapeRef.new(shape: String)
ResourceList.member = Shapes::ShapeRef.new(shape: Resource)
ResourceTag.add_member(:resource_id, Shapes::ShapeRef.new(shape: String, location_name: "ResourceId"))
ResourceTag.add_member(:tags_list, Shapes::ShapeRef.new(shape: TagsList, location_name: "TagsList"))
ResourceTag.struct_class = Types::ResourceTag
ResourceTagList.member = Shapes::ShapeRef.new(shape: ResourceTag)
StartLoggingRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
StartLoggingRequest.struct_class = Types::StartLoggingRequest
StartLoggingResponse.struct_class = Types::StartLoggingResponse
StopLoggingRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
StopLoggingRequest.struct_class = Types::StopLoggingRequest
StopLoggingResponse.struct_class = Types::StopLoggingResponse
Tag.add_member(:key, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Key"))
Tag.add_member(:value, Shapes::ShapeRef.new(shape: String, location_name: "Value"))
Tag.struct_class = Types::Tag
TagsList.member = Shapes::ShapeRef.new(shape: Tag)
Trail.add_member(:name, Shapes::ShapeRef.new(shape: String, location_name: "Name"))
Trail.add_member(:s3_bucket_name, Shapes::ShapeRef.new(shape: String, location_name: "S3BucketName"))
Trail.add_member(:s3_key_prefix, Shapes::ShapeRef.new(shape: String, location_name: "S3KeyPrefix"))
Trail.add_member(:sns_topic_name, Shapes::ShapeRef.new(shape: String, deprecated: true, location_name: "SnsTopicName"))
Trail.add_member(:sns_topic_arn, Shapes::ShapeRef.new(shape: String, location_name: "SnsTopicARN"))
Trail.add_member(:include_global_service_events, Shapes::ShapeRef.new(shape: Boolean, location_name: "IncludeGlobalServiceEvents"))
Trail.add_member(:is_multi_region_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsMultiRegionTrail"))
Trail.add_member(:home_region, Shapes::ShapeRef.new(shape: String, location_name: "HomeRegion"))
Trail.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
Trail.add_member(:log_file_validation_enabled, Shapes::ShapeRef.new(shape: Boolean, location_name: "LogFileValidationEnabled"))
Trail.add_member(:cloud_watch_logs_log_group_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsLogGroupArn"))
Trail.add_member(:cloud_watch_logs_role_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsRoleArn"))
Trail.add_member(:kms_key_id, Shapes::ShapeRef.new(shape: String, location_name: "KmsKeyId"))
Trail.add_member(:has_custom_event_selectors, Shapes::ShapeRef.new(shape: Boolean, location_name: "HasCustomEventSelectors"))
Trail.add_member(:has_insight_selectors, Shapes::ShapeRef.new(shape: Boolean, location_name: "HasInsightSelectors"))
Trail.add_member(:is_organization_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsOrganizationTrail"))
Trail.struct_class = Types::Trail
TrailInfo.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
TrailInfo.add_member(:name, Shapes::ShapeRef.new(shape: String, location_name: "Name"))
TrailInfo.add_member(:home_region, Shapes::ShapeRef.new(shape: String, location_name: "HomeRegion"))
TrailInfo.struct_class = Types::TrailInfo
TrailList.member = Shapes::ShapeRef.new(shape: Trail)
TrailNameList.member = Shapes::ShapeRef.new(shape: String)
Trails.member = Shapes::ShapeRef.new(shape: TrailInfo)
UpdateTrailRequest.add_member(:name, Shapes::ShapeRef.new(shape: String, required: true, location_name: "Name"))
UpdateTrailRequest.add_member(:s3_bucket_name, Shapes::ShapeRef.new(shape: String, location_name: "S3BucketName"))
UpdateTrailRequest.add_member(:s3_key_prefix, Shapes::ShapeRef.new(shape: String, location_name: "S3KeyPrefix"))
UpdateTrailRequest.add_member(:sns_topic_name, Shapes::ShapeRef.new(shape: String, location_name: "SnsTopicName"))
UpdateTrailRequest.add_member(:include_global_service_events, Shapes::ShapeRef.new(shape: Boolean, location_name: "IncludeGlobalServiceEvents"))
UpdateTrailRequest.add_member(:is_multi_region_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsMultiRegionTrail"))
UpdateTrailRequest.add_member(:enable_log_file_validation, Shapes::ShapeRef.new(shape: Boolean, location_name: "EnableLogFileValidation"))
UpdateTrailRequest.add_member(:cloud_watch_logs_log_group_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsLogGroupArn"))
UpdateTrailRequest.add_member(:cloud_watch_logs_role_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsRoleArn"))
UpdateTrailRequest.add_member(:kms_key_id, Shapes::ShapeRef.new(shape: String, location_name: "KmsKeyId"))
UpdateTrailRequest.add_member(:is_organization_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsOrganizationTrail"))
UpdateTrailRequest.struct_class = Types::UpdateTrailRequest
UpdateTrailResponse.add_member(:name, Shapes::ShapeRef.new(shape: String, location_name: "Name"))
UpdateTrailResponse.add_member(:s3_bucket_name, Shapes::ShapeRef.new(shape: String, location_name: "S3BucketName"))
UpdateTrailResponse.add_member(:s3_key_prefix, Shapes::ShapeRef.new(shape: String, location_name: "S3KeyPrefix"))
UpdateTrailResponse.add_member(:sns_topic_name, Shapes::ShapeRef.new(shape: String, deprecated: true, location_name: "SnsTopicName"))
UpdateTrailResponse.add_member(:sns_topic_arn, Shapes::ShapeRef.new(shape: String, location_name: "SnsTopicARN"))
UpdateTrailResponse.add_member(:include_global_service_events, Shapes::ShapeRef.new(shape: Boolean, location_name: "IncludeGlobalServiceEvents"))
UpdateTrailResponse.add_member(:is_multi_region_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsMultiRegionTrail"))
UpdateTrailResponse.add_member(:trail_arn, Shapes::ShapeRef.new(shape: String, location_name: "TrailARN"))
UpdateTrailResponse.add_member(:log_file_validation_enabled, Shapes::ShapeRef.new(shape: Boolean, location_name: "LogFileValidationEnabled"))
UpdateTrailResponse.add_member(:cloud_watch_logs_log_group_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsLogGroupArn"))
UpdateTrailResponse.add_member(:cloud_watch_logs_role_arn, Shapes::ShapeRef.new(shape: String, location_name: "CloudWatchLogsRoleArn"))
UpdateTrailResponse.add_member(:kms_key_id, Shapes::ShapeRef.new(shape: String, location_name: "KmsKeyId"))
UpdateTrailResponse.add_member(:is_organization_trail, Shapes::ShapeRef.new(shape: Boolean, location_name: "IsOrganizationTrail"))
UpdateTrailResponse.struct_class = Types::UpdateTrailResponse
# @api private
API = Seahorse::Model::Api.new.tap do |api|
api.version = "2013-11-01"
api.metadata = {
"apiVersion" => "2013-11-01",
"endpointPrefix" => "cloudtrail",
"jsonVersion" => "1.1",
"protocol" => "json",
"serviceAbbreviation" => "CloudTrail",
"serviceFullName" => "AWS CloudTrail",
"serviceId" => "CloudTrail",
"signatureVersion" => "v4",
"targetPrefix" => "com.amazonaws.cloudtrail.v20131101.CloudTrail_20131101",
"uid" => "cloudtrail-2013-11-01",
}
api.add_operation(:add_tags, Seahorse::Model::Operation.new.tap do |o|
o.name = "AddTags"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: AddTagsRequest)
o.output = Shapes::ShapeRef.new(shape: AddTagsResponse)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: CloudTrailARNInvalidException)
o.errors << Shapes::ShapeRef.new(shape: ResourceTypeNotSupportedException)
o.errors << Shapes::ShapeRef.new(shape: TagsLimitExceededException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTagParameterException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
end)
api.add_operation(:create_trail, Seahorse::Model::Operation.new.tap do |o|
o.name = "CreateTrail"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: CreateTrailRequest)
o.output = Shapes::ShapeRef.new(shape: CreateTrailResponse)
o.errors << Shapes::ShapeRef.new(shape: MaximumNumberOfTrailsExceededException)
o.errors << Shapes::ShapeRef.new(shape: TrailAlreadyExistsException)
o.errors << Shapes::ShapeRef.new(shape: S3BucketDoesNotExistException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientS3BucketPolicyException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientSnsTopicPolicyException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientEncryptionPolicyException)
o.errors << Shapes::ShapeRef.new(shape: InvalidS3BucketNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidS3PrefixException)
o.errors << Shapes::ShapeRef.new(shape: InvalidSnsTopicNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidKmsKeyIdException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: TrailNotProvidedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidParameterCombinationException)
o.errors << Shapes::ShapeRef.new(shape: KmsKeyNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: KmsKeyDisabledException)
o.errors << Shapes::ShapeRef.new(shape: KmsException)
o.errors << Shapes::ShapeRef.new(shape: InvalidCloudWatchLogsLogGroupArnException)
o.errors << Shapes::ShapeRef.new(shape: InvalidCloudWatchLogsRoleArnException)
o.errors << Shapes::ShapeRef.new(shape: CloudWatchLogsDeliveryUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTagParameterException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: CloudTrailAccessNotEnabledException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientDependencyServiceAccessPermissionException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
o.errors << Shapes::ShapeRef.new(shape: OrganizationsNotInUseException)
o.errors << Shapes::ShapeRef.new(shape: OrganizationNotInAllFeaturesModeException)
end)
api.add_operation(:delete_trail, Seahorse::Model::Operation.new.tap do |o|
o.name = "DeleteTrail"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: DeleteTrailRequest)
o.output = Shapes::ShapeRef.new(shape: DeleteTrailResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidHomeRegionException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientDependencyServiceAccessPermissionException)
end)
api.add_operation(:describe_trails, Seahorse::Model::Operation.new.tap do |o|
o.name = "DescribeTrails"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: DescribeTrailsRequest)
o.output = Shapes::ShapeRef.new(shape: DescribeTrailsResponse)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
end)
api.add_operation(:get_event_selectors, Seahorse::Model::Operation.new.tap do |o|
o.name = "GetEventSelectors"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: GetEventSelectorsRequest)
o.output = Shapes::ShapeRef.new(shape: GetEventSelectorsResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
end)
api.add_operation(:get_insight_selectors, Seahorse::Model::Operation.new.tap do |o|
o.name = "GetInsightSelectors"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: GetInsightSelectorsRequest)
o.output = Shapes::ShapeRef.new(shape: GetInsightSelectorsResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: InsightNotEnabledException)
end)
api.add_operation(:get_trail, Seahorse::Model::Operation.new.tap do |o|
o.name = "GetTrail"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: GetTrailRequest)
o.output = Shapes::ShapeRef.new(shape: GetTrailResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
end)
api.add_operation(:get_trail_status, Seahorse::Model::Operation.new.tap do |o|
o.name = "GetTrailStatus"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: GetTrailStatusRequest)
o.output = Shapes::ShapeRef.new(shape: GetTrailStatusResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
end)
api.add_operation(:list_public_keys, Seahorse::Model::Operation.new.tap do |o|
o.name = "ListPublicKeys"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: ListPublicKeysRequest)
o.output = Shapes::ShapeRef.new(shape: ListPublicKeysResponse)
o.errors << Shapes::ShapeRef.new(shape: InvalidTimeRangeException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTokenException)
o[:pager] = Aws::Pager.new(
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:list_tags, Seahorse::Model::Operation.new.tap do |o|
o.name = "ListTags"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: ListTagsRequest)
o.output = Shapes::ShapeRef.new(shape: ListTagsResponse)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: CloudTrailARNInvalidException)
o.errors << Shapes::ShapeRef.new(shape: ResourceTypeNotSupportedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTokenException)
o[:pager] = Aws::Pager.new(
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:list_trails, Seahorse::Model::Operation.new.tap do |o|
o.name = "ListTrails"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: ListTrailsRequest)
o.output = Shapes::ShapeRef.new(shape: ListTrailsResponse)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o[:pager] = Aws::Pager.new(
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:lookup_events, Seahorse::Model::Operation.new.tap do |o|
o.name = "LookupEvents"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: LookupEventsRequest)
o.output = Shapes::ShapeRef.new(shape: LookupEventsResponse)
o.errors << Shapes::ShapeRef.new(shape: InvalidLookupAttributesException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTimeRangeException)
o.errors << Shapes::ShapeRef.new(shape: InvalidMaxResultsException)
o.errors << Shapes::ShapeRef.new(shape: InvalidNextTokenException)
o.errors << Shapes::ShapeRef.new(shape: InvalidEventCategoryException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o[:pager] = Aws::Pager.new(
limit_key: "max_results",
tokens: {
"next_token" => "next_token"
}
)
end)
api.add_operation(:put_event_selectors, Seahorse::Model::Operation.new.tap do |o|
o.name = "PutEventSelectors"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: PutEventSelectorsRequest)
o.output = Shapes::ShapeRef.new(shape: PutEventSelectorsResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidHomeRegionException)
o.errors << Shapes::ShapeRef.new(shape: InvalidEventSelectorsException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientDependencyServiceAccessPermissionException)
end)
api.add_operation(:put_insight_selectors, Seahorse::Model::Operation.new.tap do |o|
o.name = "PutInsightSelectors"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: PutInsightSelectorsRequest)
o.output = Shapes::ShapeRef.new(shape: PutInsightSelectorsResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidHomeRegionException)
o.errors << Shapes::ShapeRef.new(shape: InvalidInsightSelectorsException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientS3BucketPolicyException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientEncryptionPolicyException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
end)
api.add_operation(:remove_tags, Seahorse::Model::Operation.new.tap do |o|
o.name = "RemoveTags"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: RemoveTagsRequest)
o.output = Shapes::ShapeRef.new(shape: RemoveTagsResponse)
o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: CloudTrailARNInvalidException)
o.errors << Shapes::ShapeRef.new(shape: ResourceTypeNotSupportedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTagParameterException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
end)
api.add_operation(:start_logging, Seahorse::Model::Operation.new.tap do |o|
o.name = "StartLogging"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: StartLoggingRequest)
o.output = Shapes::ShapeRef.new(shape: StartLoggingResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidHomeRegionException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientDependencyServiceAccessPermissionException)
end)
api.add_operation(:stop_logging, Seahorse::Model::Operation.new.tap do |o|
o.name = "StopLogging"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: StopLoggingRequest)
o.output = Shapes::ShapeRef.new(shape: StopLoggingResponse)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidHomeRegionException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientDependencyServiceAccessPermissionException)
end)
api.add_operation(:update_trail, Seahorse::Model::Operation.new.tap do |o|
o.name = "UpdateTrail"
o.http_method = "POST"
o.http_request_uri = "/"
o.input = Shapes::ShapeRef.new(shape: UpdateTrailRequest)
o.output = Shapes::ShapeRef.new(shape: UpdateTrailResponse)
o.errors << Shapes::ShapeRef.new(shape: S3BucketDoesNotExistException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientS3BucketPolicyException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientSnsTopicPolicyException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientEncryptionPolicyException)
o.errors << Shapes::ShapeRef.new(shape: TrailNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: InvalidS3BucketNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidS3PrefixException)
o.errors << Shapes::ShapeRef.new(shape: InvalidSnsTopicNameException)
o.errors << Shapes::ShapeRef.new(shape: InvalidKmsKeyIdException)
o.errors << Shapes::ShapeRef.new(shape: InvalidTrailNameException)
o.errors << Shapes::ShapeRef.new(shape: TrailNotProvidedException)
o.errors << Shapes::ShapeRef.new(shape: InvalidEventSelectorsException)
o.errors << Shapes::ShapeRef.new(shape: InvalidParameterCombinationException)
o.errors << Shapes::ShapeRef.new(shape: InvalidHomeRegionException)
o.errors << Shapes::ShapeRef.new(shape: KmsKeyNotFoundException)
o.errors << Shapes::ShapeRef.new(shape: KmsKeyDisabledException)
o.errors << Shapes::ShapeRef.new(shape: KmsException)
o.errors << Shapes::ShapeRef.new(shape: InvalidCloudWatchLogsLogGroupArnException)
o.errors << Shapes::ShapeRef.new(shape: InvalidCloudWatchLogsRoleArnException)
o.errors << Shapes::ShapeRef.new(shape: CloudWatchLogsDeliveryUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: UnsupportedOperationException)
o.errors << Shapes::ShapeRef.new(shape: OperationNotPermittedException)
o.errors << Shapes::ShapeRef.new(shape: CloudTrailAccessNotEnabledException)
o.errors << Shapes::ShapeRef.new(shape: InsufficientDependencyServiceAccessPermissionException)
o.errors << Shapes::ShapeRef.new(shape: OrganizationsNotInUseException)
o.errors << Shapes::ShapeRef.new(shape: NotOrganizationMasterAccountException)
o.errors << Shapes::ShapeRef.new(shape: OrganizationNotInAllFeaturesModeException)
end)
end
end
end
| 70.500668 | 171 | 0.761708 |
d59b69c33f777f83902c431abfa3fb472aee43ec | 2,407 | class Pybind11 < Formula
desc "Seamless operability between C++11 and Python"
homepage "https://github.com/pybind/pybind11"
url "https://github.com/pybind/pybind11/archive/v2.8.0.tar.gz"
sha256 "9ca7770fc5453b10b00a4a2f99754d7a29af8952330be5f5602e7c2635fa3e79"
license "BSD-3-Clause"
bottle do
sha256 cellar: :any_skip_relocation, all: "9b7b5ea9f5a76240bf431bac3ad103c15d5b6fd2f8b133adeca4c469ff3276bf"
end
depends_on "cmake" => :build
depends_on "[email protected]"
def install
# Install /include and /share/cmake to the global location
system "cmake", "-S", ".", "-B", "build",
"-DPYBIND11_TEST=OFF",
"-DPYBIND11_NOPYTHON=ON",
*std_cmake_args
system "cmake", "--install", "build"
# Install Python package too
system Formula["[email protected]"].opt_bin/"python3", *Language::Python.setup_install_args(libexec)
version = Language::Python.major_minor_version Formula["[email protected]"].opt_bin/"python3"
site_packages = "lib/python#{version}/site-packages"
pth_contents = "import site; site.addsitedir('#{libexec/site_packages}')\n"
(prefix/site_packages/"homebrew-pybind11.pth").write pth_contents
# Also pybind11-config
bin.install Dir[libexec/"bin/*"]
end
test do
(testpath/"example.cpp").write <<~EOS
#include <pybind11/pybind11.h>
int add(int i, int j) {
return i + j;
}
namespace py = pybind11;
PYBIND11_MODULE(example, m) {
m.doc() = "pybind11 example plugin";
m.def("add", &add, "A function which adds two numbers");
}
EOS
(testpath/"example.py").write <<~EOS
import example
example.add(1,2)
EOS
version = Language::Python.major_minor_version Formula["[email protected]"].opt_bin/"python3"
site_packages = "lib/python#{version}/site-packages"
python_flags = `#{Formula["[email protected]"].opt_bin}/python3-config --cflags --ldflags --embed`.split
system ENV.cxx, "-shared", "-fPIC", "-O3", "-std=c++11", "example.cpp", "-o", "example.so", *python_flags
system Formula["[email protected]"].opt_bin/"python3", "example.py"
test_module = shell_output("#{Formula["[email protected]"].opt_bin/"python3"} -m pybind11 --includes")
assert_match (libexec/site_packages).to_s, test_module
test_script = shell_output("#{opt_bin/"pybind11-config"} --includes")
assert_match test_module, test_script
end
end
| 35.397059 | 112 | 0.674283 |
8716f6639dc3e7159cafac27187ccd7c73c93cb9 | 160 | require 'test_helper'
# Integration tests for Api::V1::FitnessClassDetailsController.
# Rails scaffold only -- the generated example test is still commented out,
# so this suite currently asserts nothing.
class Api::V1::FitnessClassDetailsControllerTest < ActionDispatch::IntegrationTest
  # test "the truth" do
  #   assert true
  # end
end
| 20 | 82 | 0.75 |
1ad43d39d67c23798f211b719551f8d3dc200737 | 10,100 | require 'reentrant_flock'
require 'rubygems/util'
module Geminabox
class Server < Sinatra::Base
enable :static, :methodoverride
set :public_folder, Geminabox.public_folder
set :views, Geminabox.views
if Geminabox.rubygems_proxy
use Proxy::Hostess
else
use Hostess
end
class << self
def disallow_replace?
! Geminabox.allow_replace
end
def allow_delete?
Geminabox.allow_delete
end
def allow_upload?
Geminabox.allow_upload
end
def fixup_bundler_rubygems!
return if @post_reset_hook_applied
Gem.post_reset{ Gem::Specification.all = nil } if defined? Bundler and Gem.respond_to? :post_reset
@post_reset_hook_applied = true
end
def reindex(force_rebuild = false)
fixup_bundler_rubygems!
force_rebuild = true unless Geminabox.incremental_updates
if force_rebuild
indexer.generate_index
dependency_cache.flush
else
begin
require 'geminabox/indexer'
updated_gemspecs = Geminabox::Indexer.updated_gemspecs(indexer)
return if updated_gemspecs.empty?
Geminabox::Indexer.patch_rubygems_update_index_pre_1_8_25(indexer)
indexer.update_index
updated_gemspecs.each { |gem| dependency_cache.flush_key(gem.name) }
rescue Errno::ENOENT
with_rlock { reindex(:force_rebuild) }
rescue => e
puts "#{e.class}:#{e.message}"
puts e.backtrace.join("\n")
with_rlock { reindex(:force_rebuild) }
end
end
rescue Gem::SystemExitException
end
def indexer
Gem::Indexer.new(Geminabox.data, :build_legacy => Geminabox.build_legacy)
end
def dependency_cache
@dependency_cache ||= Geminabox::DiskCache.new(File.join(Geminabox.data, "_cache"))
end
def with_rlock(&block)
file_class.open(Geminabox.lockfile, File::RDWR | File::CREAT) do |f|
ReentrantFlock.synchronize(f, File::LOCK_EX | File::LOCK_NB, &block)
end
end
# This method provides a test hook, as stubbing File is painful...
def file_class
@file_class ||= File
end
def file_class=(klass)
@file_class = klass
end
end
before do
headers 'X-Powered-By' => "geminabox #{Geminabox::VERSION}"
end
get '/' do
@gems = load_gems
@index_gems = index_gems(@gems)
@allow_upload = self.class.allow_upload?
@allow_delete = self.class.allow_delete?
erb :index
end
get '/atom.xml' do
@gems = load_gems
erb :atom, :layout => false
end
get '/api/v1/dependencies' do
query_gems.any? ? Marshal.dump(gem_list) : 200
end
get '/api/v1/dependencies.json' do
query_gems.any? ? gem_list.to_json : {}
end
get '/upload' do
unless self.class.allow_upload?
error_response(403, 'Gem uploading is disabled')
end
erb :upload
end
get '/reindex' do
serialize_update do
params[:force_rebuild] ||= 'true'
unless %w(true false).include? params[:force_rebuild]
error_response(400, "force_rebuild parameter must be either of true or false")
end
force_rebuild = params[:force_rebuild] == 'true'
self.class.reindex(force_rebuild)
redirect url("/")
end
end
get '/gems/:gemname' do
gems = Hash[load_gems.by_name]
@gem = gems[params[:gemname]]
@allow_delete = self.class.allow_delete?
halt 404 unless @gem
erb :gem
end
delete '/gems/*.gem' do
unless self.class.allow_delete?
error_response(403, 'Gem deletion is disabled - see https://github.com/geminabox/geminabox/issues/115')
end
serialize_update do
File.delete file_path if File.exist? file_path
self.class.reindex(:force_rebuild)
redirect url("/")
end
end
delete '/api/v1/gems/yank' do
unless self.class.allow_delete?
error_response(403, 'Gem deletion is disabled')
end
halt 400 unless request.form_data?
serialize_update do
gems = load_gems.select { |gem| request['gem_name'] == gem.name and
request['version'] == gem.number.version }
halt 404, 'Gem not found' if gems.size == 0
gems.each do |gem|
gem_path = File.expand_path(File.join(Geminabox.data, 'gems',
"#{gem.gemfile_name}.gem"))
File.delete gem_path if File.exists? gem_path
end
self.class.reindex(:force_rebuild)
return 200, 'Yanked gem and reindexed'
end
end
post '/upload' do
unless self.class.allow_upload?
error_response(403, 'Gem uploading is disabled')
end
if params[:file] && params[:file][:filename] && (tmpfile = params[:file][:tempfile])
serialize_update do
handle_incoming_gem(Geminabox::IncomingGem.new(tmpfile))
end
else
@error = "No file selected"
halt [400, erb(:upload)]
end
end
post '/api/v1/gems' do
unless self.class.allow_upload?
error_response(403, 'Gem uploading is disabled')
end
begin
serialize_update do
handle_incoming_gem(Geminabox::IncomingGem.new(request.body))
end
rescue Object => o
File.open "/tmp/debug.txt", "a" do |io|
io.puts o, o.backtrace
end
end
end
private
def serialize_update(&block)
with_rlock(&block)
rescue ReentrantFlock::AlreadyLocked
halt 503, { 'Retry-After' => Geminabox.retry_interval }, 'Repository lock is held by another process'
end
def with_rlock(&block)
self.class.with_rlock(&block)
end
def handle_incoming_gem(gem)
begin
GemStore.create(gem, params[:overwrite])
rescue GemStoreError => error
error_response error.code, error.reason
end
begin
Geminabox.on_gem_received.call(gem) if Geminabox.on_gem_received
rescue
# ignore errors which occur within the hook
end
if api_request?
"Gem #{gem.name} received and indexed."
else
redirect url("/")
end
end
def api_request?
request.accept.first.to_s != "text/html"
end
def error_response(code, message)
halt [code, message] if api_request?
html = <<HTML
<html>
<head><title>Error - #{code}</title></head>
<body>
<h1>Error - #{code}</h1>
<p>#{message}</p>
</body>
</html>
HTML
halt [code, html]
end
def file_path
File.expand_path(File.join(Geminabox.data, *request.path_info))
end
def dependency_cache
self.class.dependency_cache
end
def all_gems
all_gems_with_duplicates.inject(:|)
end
def all_gems_with_duplicates
specs_files_paths.map do |specs_file_path|
if File.exist?(specs_file_path)
Marshal.load(Gem::Util.gunzip(Gem.read_binary(specs_file_path)))
else
[]
end
end
end
def specs_file_types
[:specs, :prerelease_specs]
end
def specs_files_paths
specs_file_types.map do |specs_file_type|
File.join(Geminabox.data, spec_file_name(specs_file_type))
end
end
def spec_file_name(specs_file_type)
[specs_file_type, Gem.marshal_version, 'gz'].join('.')
end
def load_gems
@loaded_gems ||= Geminabox::GemVersionCollection.new(all_gems)
end
def index_gems(gems)
Set.new(gems.map{|gem| gem.name[0..0].downcase})
end
def gem_list
Geminabox.rubygems_proxy ? combined_gem_list : local_gem_list
end
def query_gems
params[:gems].to_s.split(',')
end
def local_gem_list
query_gems.map{|query_gem| gem_dependencies(query_gem) }.flatten(1)
end
def remote_gem_list
RubygemsDependency.for(*query_gems)
end
def combined_gem_list
GemListMerge.merge(local_gem_list, remote_gem_list, strategy: Geminabox.rubygems_proxy_merge_strategy)
end
helpers do
def href(text)
if text && (text.start_with?('http://') || text.start_with?('https://'))
Rack::Utils.escape_html(text)
else
'#'
end
end
def h(text)
Rack::Utils.escape_html(text)
end
def spec_for(gem_name, version, platform = default_platform)
filename = [gem_name, version]
filename.push(platform) if platform != default_platform
spec_file = File.join(Geminabox.data, "quick", "Marshal.#{Gem.marshal_version}", "#{filename.join("-")}.gemspec.rz")
File::open(spec_file, 'r') do |unzipped_spec_file|
unzipped_spec_file.binmode
Marshal.load(Gem::Util.inflate(unzipped_spec_file.read))
end if File.exist? spec_file
end
def default_platform
'ruby'
end
# Return a list of versions of gem 'gem_name' with the dependencies of each version.
def gem_dependencies(gem_name)
dependency_cache.marshal_cache(gem_name) do
load_gems.
select { |gem| gem_name == gem.name }.
map { |gem| [gem, spec_for(gem.name, gem.number, gem.platform)] }.
reject { |(_, spec)| spec.nil? }.
map do |(gem, spec)|
{
:name => gem.name,
:number => gem.number.version,
:platform => gem.platform,
:dependencies => runtime_dependencies(spec)
}
end
end
end
def runtime_dependencies(spec)
spec.
dependencies.
select { |dep| dep.type == :runtime }.
map { |dep| name_and_requirements_for(dep) }
end
def name_and_requirements_for(dep)
name = dep.name.kind_of?(Array) ? dep.name.first : dep.name
[name, dep.requirement.to_s]
end
end
end
end
| 26.578947 | 124 | 0.608515 |
4aee1bb4120fb4bdb5eb88abf10315a3f8d8ee32 | 169 | Rails.application.config.middleware.insert_before 0, Rack::Cors do
allow do
origins '*'
resource '*', headers: :any, methods: %i[get post put delete]
end
end | 28.166667 | 66 | 0.698225 |
38503df19f87d0c21cab09773fad8ac8c21d77f1 | 196 | require 'test_helper'
class TitsTest < Minitest::Test
  # Sanity check: the gem exposes a VERSION constant.
  def test_that_it_has_a_version_number
    refute_nil ::Tits::VERSION
  end

  # Bundler "gem scaffold" placeholder -- intentionally failing until a real
  # test is written. NOTE(review): replace `assert false` with a meaningful
  # assertion (or delete) before relying on this suite in CI.
  def test_it_does_something_useful
    assert false
  end
end
| 16.333333 | 39 | 0.77551 |
ab7b41483e13a4a7aa8be90fb926258a12ca5c9a | 656 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application namespace for the Clean Rails app.
module Clean
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.
  end
end
| 32.8 | 82 | 0.76372 |
624b73b9046772f83f887356aa3ed76164486fe7 | 621 | # Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
module OCI
  module DataCatalog::Models
    # Harvest states reported for a Data Catalog entity; values mirror the
    # service API's HarvestStatus enum. Generated from the OCI API spec --
    # avoid hand edits, as regeneration will discard them.
    HARVEST_STATUS_ENUM = [
      HARVEST_STATUS_COMPLETE = 'COMPLETE'.freeze,
      HARVEST_STATUS_ERROR = 'ERROR'.freeze,
      HARVEST_STATUS_IN_PROGRESS = 'IN_PROGRESS'.freeze,
      HARVEST_STATUS_DEFERRED = 'DEFERRED'.freeze
    ].freeze
  end
end
| 44.357143 | 245 | 0.742351 |
616cba85208403843b2107e8b168642ecfd0e148 | 155 | # frozen_string_literal: true
#\ --quiet
require_relative 'app.rb'

# Middleware listed first wraps outermost, so the response flows
# app -> Deflater -> ETag -> ConditionalGet:
#   * Rack::ETag computes an ETag header for bodies that lack one,
#   * Rack::ConditionalGet (outermost) then matches If-None-Match /
#     If-Modified-Since and short-circuits to 304 Not Modified.
# The previous order (ETag listed before ConditionalGet) ran ConditionalGet
# before the ETag header existed, so ETag-based 304s never fired; this is
# the ordering Sinatra's HTTP-caching documentation prescribes.
use Rack::ConditionalGet
use Rack::ETag
use Rack::Deflater
run Sinatra::Application
| 12.916667 | 29 | 0.767742 |
3886665874e08997fa6790dede60cffe5f1f7898 | 8,607 | require 'spec_helper'
describe Lobot::Clippy do
let(:working_path) { Dir.mktmpdir }
let(:cli) { double(:cli).as_null_object }
let(:clippy) { Lobot::Clippy.new }
before { clippy.stub(:cli => cli) }
around do |example|
Dir.chdir(working_path) { example.run }
end
describe "#config" do
it "creates the config directory if it does not exist" do
expect {
clippy.config
}.to change {
File.directory?(File.join(working_path, "config"))
}.from(false).to(true)
end
it "uses the values in your existing lobot.yml" do
FileUtils.mkdir_p "config"
config = Lobot::Config.new(:path => "config/lobot.yml")
config.ssh_port = 2222
config.save
clippy.config.ssh_port.should == 2222
end
it "saves off the config to config/lobot.yml" do
expect {
clippy.config.save
}.to change {
File.exists?(File.join(working_path, "config", "lobot.yml"))
}.from(false).to(true)
end
end
describe "#ask_with_default" do
it "makes you feel like you need a shower" do
clippy.should_receive(:ask).with("Your ID [1]:")
clippy.ask_with_default("Your ID", "1")
end
it "defaults to the default value" do
clippy.should_receive(:ask).and_return("")
clippy.ask_with_default("Who is buried in Grant's Tomb", "Grant").should == "Grant"
end
it "uses the provided answer" do
clippy.should_receive(:ask).and_return("robert e lee's left nipple")
clippy.ask_with_default("Who is buried in Grant's Tomb", "Grant").should_not == "Grant"
end
it "does not display a nil default" do
clippy.should_receive(:ask).with("Monkey mustache:")
clippy.ask_with_default("Monkey mustache", nil)
end
end
describe "#clippy" do
before do
clippy.stub(:ask => "totally-valid-value", :yes? => true, :say => nil)
clippy.config.stub(:save)
end
it "Says that you're trying to set up a ci box" do
question = "It looks like you're trying to set up a CI Box. Can I help?"
clippy.should_receive(:yes?).with(question)
clippy.clippy
end
it "prompts for aws credentials" do
clippy.should_receive(:prompt_for_aws)
clippy.clippy
end
it "prompts for nginx basic auth credentials" do
clippy.should_receive(:prompt_for_basic_auth)
clippy.clippy
end
it "prompts for an ssh key" do
clippy.should_receive(:prompt_for_ssh_key)
clippy.clippy
end
it "prompts for a github key" do
clippy.should_receive(:prompt_for_github_key)
clippy.clippy
end
it "prompts for a build" do
clippy.should_receive(:prompt_for_build)
clippy.clippy
end
it "saves the config" do
clippy.config.should_receive(:save)
clippy.clippy
end
it "prompts to start an instance on amazon" do
clippy.should_receive(:prompt_for_amazon_create)
clippy.clippy
end
it "provisions the server" do
clippy.should_receive(:provision_server)
clippy.clippy
end
end
describe "#prompt_for_aws" do
before { clippy.stub(:say) }
it "reads in the key and secret" do
clippy.should_receive(:ask).and_return("aws-key")
clippy.should_receive(:ask).and_return("aws-secret-key")
clippy.prompt_for_aws
clippy.config.aws_key.should == "aws-key"
clippy.config.aws_secret.should == "aws-secret-key"
end
end
describe "#prompt_for_basic_auth" do
it "prompts for the username and password" do
clippy.should_receive(:ask).and_return("admin")
clippy.should_receive(:ask).and_return("password")
clippy.prompt_for_basic_auth
clippy.config.node_attributes.nginx.basic_auth_user.should == "admin"
clippy.config.node_attributes.nginx.basic_auth_password.should == "password"
end
end
describe "#prompt_for_server_ssh_key" do
it "prompts for the path" do
clippy.should_receive(:ask).and_return("~/.ssh/top_secret_rsa")
clippy.prompt_for_ssh_key
clippy.config.server_ssh_key.should == File.expand_path("~/.ssh/top_secret_rsa")
end
end
describe "#prompt_for_github_key" do
it "prompts for the path" do
clippy.should_receive(:ask).and_return("~/.ssh/the_matthew_kocher_memorial_key")
clippy.prompt_for_github_key
clippy.config.github_ssh_key.should == File.expand_path("~/.ssh/the_matthew_kocher_memorial_key")
end
end
describe "#prompt_for_build" do
before { clippy.stub(:ask) }
context "when there are no builds" do
it "asks you for the build name" do
clippy.should_receive(:ask).and_return("fancy-build")
clippy.prompt_for_build
clippy.config.node_attributes.jenkins.builds.first["name"].should == "fancy-build"
end
it "asks you for the git repository" do
clippy.should_receive(:ask)
clippy.should_receive(:ask).and_return("earwax-under-my-pillow")
clippy.prompt_for_build
clippy.config.node_attributes.jenkins.builds.first["repository"].should == "earwax-under-my-pillow"
end
it "asks you for the build command" do
clippy.should_receive(:ask).twice
clippy.should_receive(:ask).and_return("unit-tested-bash")
clippy.prompt_for_build
clippy.config.node_attributes.jenkins.builds.first["command"].should == "unit-tested-bash"
end
it "always builds the master branch" do
clippy.prompt_for_build
clippy.config.node_attributes.jenkins.builds.first["branch"].should == "master"
end
end
context "when there are builds" do
before do
clippy.stub(:ask_with_default)
clippy.config.node_attributes.jenkins.builds << {
"name" => "first-post",
"repository" => "what",
"command" => "hot-grits",
"branch" => "oak"
}
clippy.config.node_attributes.jenkins.builds << {
"name" => "grails",
"repository" => "huh",
"command" => "colored-greens",
"branch" => "larch"
}
end
it "prompts for the name using the first build as a default" do
clippy.should_receive(:ask_with_default).with(anything, "first-post")
clippy.prompt_for_build
end
it "prompts for the repository using the first build as a default" do
clippy.should_receive(:ask_with_default)
clippy.should_receive(:ask_with_default).with(anything, "what")
clippy.prompt_for_build
end
it "prompts for the repository using the first build as a default" do
clippy.should_receive(:ask_with_default).twice
clippy.should_receive(:ask_with_default).with(anything, "hot-grits")
clippy.prompt_for_build
end
end
end
describe "#prompt_for_amazon_create" do
before { clippy.stub(:yes? => true, :say => nil) }
context "when there is not an instance in the config" do
it "asks to start an amazon instance" do
clippy.should_receive(:yes?).and_return(false)
clippy.prompt_for_amazon_create
end
it "calls create on CLI" do
cli.should_receive(:create)
clippy.prompt_for_amazon_create
end
it "waits for the amazon instance to be alive" do
Godot.any_instance.should_receive(:wait!)
clippy.prompt_for_amazon_create
end
end
context "when there is an instance in the config" do
before { clippy.config.master = "1.123.123.1" }
it "does not ask to start an instance" do
clippy.should_not_receive(:yes?)
clippy.prompt_for_amazon_create
end
it "does not create an instance" do
cli.should_not_receive(:create)
clippy.prompt_for_amazon_create
end
end
end
describe "#provision_server" do
before { clippy.stub(:say) }
context "when there is no instance in the config" do
it "does not bootstrap the instance" do
cli.should_not_receive(:bootstrap)
clippy.provision_server
end
it "does not run chef" do
cli.should_not_receive(:chef)
clippy.provision_server
end
end
context "when an instance exists" do
before do
clippy.config
clippy.config.master = "1.2.3.4"
clippy.config.save
clippy.config.master = nil
end
it "bootstraps the instance" do
cli.should_receive(:bootstrap)
clippy.provision_server
end
it "runs chef" do
cli.should_receive(:chef)
clippy.provision_server
end
end
end
end
| 28.69 | 107 | 0.65377 |
0864d2e5dc8acd5797b59d5ae38e9914cb7f213a | 297 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::CodeDeploy
module Errors
extend Aws::Errors::DynamicErrors
end
end
| 19.8 | 74 | 0.760943 |
1df7e18289edc3501575ce046fd252cb1c385bd5 | 4,096 | class Elasticsearch < Formula
desc "Distributed search & analytics engine"
homepage "https://www.elastic.co/products/elasticsearch"
url "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.3.0.tar.gz"
sha256 "effd922973e9f4fe25565e0a194a4b534c08b22849f03cb9fea13c311401e21b"
head do
url "https://github.com/elasticsearch/elasticsearch.git"
depends_on "gradle" => :build
end
bottle :unneeded
depends_on :java => "1.8+"
def cluster_name
"elasticsearch_#{ENV["USER"]}"
end
def install
if build.head?
# Build the package from source
system "gradle", "clean", ":distribution:tar:assemble"
# Extract the package to the tar directory
mkdir "tar"
cd "tar"
system "tar", "--strip-components=1", "-xf", Dir["../distribution/tar/build/distributions/elasticsearch-*.tar.gz"].first
end
# Remove Windows files
rm_f Dir["bin/*.bat"]
rm_f Dir["bin/*.exe"]
# Install everything else into package directory
libexec.install "bin", "config", "lib", "modules"
# Set up Elasticsearch for local development:
inreplace "#{libexec}/config/elasticsearch.yml" do |s|
# 1. Give the cluster a unique name
s.gsub!(/#\s*cluster\.name\: .*/, "cluster.name: #{cluster_name}")
# 2. Configure paths
s.sub!(%r{#\s*path\.data: /path/to.+$}, "path.data: #{var}/elasticsearch/")
s.sub!(%r{#\s*path\.logs: /path/to.+$}, "path.logs: #{var}/log/elasticsearch/")
end
inreplace "#{libexec}/bin/elasticsearch.in.sh" do |s|
# Configure ES_HOME
s.sub!(%r{#\!/bin/bash\n}, "#!/bin/bash\n\nES_HOME=#{libexec}")
end
inreplace "#{libexec}/bin/elasticsearch-plugin" do |s|
# Add the proper ES_CLASSPATH configuration
s.sub!(/SCRIPT="\$0"/, %Q(SCRIPT="$0"\nES_CLASSPATH=#{libexec}/lib))
# Replace paths to use libexec instead of lib
s.gsub!(%r{\$ES_HOME/lib/}, "$ES_CLASSPATH/")
end
# Move config files into etc
(etc/"elasticsearch").install Dir[libexec/"config/*"]
(etc/"elasticsearch/scripts").mkdir unless File.exist?(etc/"elasticsearch/scripts")
(libexec/"config").rmtree
bin.write_exec_script Dir[libexec/"bin/elasticsearch"]
bin.write_exec_script Dir[libexec/"bin/elasticsearch-plugin"]
end
def post_install
# Make sure runtime directories exist
(var/"elasticsearch/#{cluster_name}").mkpath
(var/"log/elasticsearch").mkpath
ln_s etc/"elasticsearch", libexec/"config"
(libexec/"plugins").mkdir
end
def caveats
s = <<-EOS.undent
Data: #{var}/elasticsearch/#{cluster_name}/
Logs: #{var}/log/elasticsearch/#{cluster_name}.log
Plugins: #{libexec}/plugins/
Config: #{etc}/elasticsearch/
plugin script: #{libexec}/bin/elasticsearch-plugin
EOS
s
end
plist_options :manual => "elasticsearch"
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<false/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/elasticsearch</string>
</array>
<key>EnvironmentVariables</key>
<dict>
</dict>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/elasticsearch.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/elasticsearch.log</string>
</dict>
</plist>
EOS
end
test do
system "#{libexec}/bin/elasticsearch-plugin", "list"
pid = "#{testpath}/pid"
begin
system "#{bin}/elasticsearch", "-d", "-p", pid, "-Epath.data=#{testpath}/data"
sleep 10
system "curl", "-XGET", "localhost:9200/"
ensure
Process.kill(9, File.read(pid).to_i)
end
end
end
| 31.267176 | 126 | 0.619385 |
4a9059c6853d4381cfa686a5d238d9b687273e13 | 2,071 | require 'rails_helper'
RSpec.describe Indicator, type: :model do
context 'validations' do
it 'should be invalid when subcategory not present' do
expect(
build(:indicator, subcategory: nil)
).to have(1).errors_on(:subcategory)
end
end
context 'linked scenarios and models' do
let(:indicator) { create(:indicator) }
let(:scenario1) { create(:scenario) }
let!(:scenario2) { create(:scenario) }
let!(:time_series_value) {
create(
:time_series_value, indicator: indicator, scenario: scenario1
)
}
it 'should return scenarios linked to this indicator' do
expect(indicator.scenarios).to include(scenario1)
expect(indicator.scenarios.length).to eq(1)
end
it 'should return modelslinked to this indicator' do
expect(indicator.models).to include(scenario1.model)
expect(indicator.models.length).to eq(1)
end
end
describe :destroy do
let(:indicator) { create(:indicator) }
let!(:time_series_value) {
create(:time_series_value, indicator: indicator)
}
it 'should destroy all time series values' do
expect { indicator.destroy }.to change(TimeSeriesValue, :count).by(-1)
end
end
describe :time_series_data? do
let!(:indicator) { create(:indicator) }
it 'returns false when no time series data present' do
expect(indicator.time_series_data?).to be(false)
end
it 'returns true when time series data present' do
create(:time_series_value, indicator: indicator)
expect(indicator.time_series_data?).to be(true)
end
end
describe :scenarios do
let(:indicator) { create(:indicator) }
let(:scenario) { create(:scenario) }
let!(:time_series_value1) {
create(
:time_series_value, indicator: indicator, scenario: scenario
)
}
let!(:time_series_value2) {
create(
:time_series_value, indicator: indicator, scenario: scenario
)
}
it 'should not double count' do
expect(indicator.scenarios.count).to eq(1)
end
end
end
| 28.763889 | 76 | 0.667793 |
91f91a6efd915841a68bef22ddd135cabcb0862e | 2,665 | class Nybooks::CommandLineInterface
def run
main_menu
goodbye
end
def main_menu
puts "Welcome to New York Times Best Seller Books:"
puts "Please select item number below:"
puts "1. Hardcover Fiction"
puts "2. Hardcover NonFiction"
puts "3. Print and E-Book Fiction"
puts "4. Print and E-Book NonFiction"
puts "5. Exit"
input = gets.strip
if input.upcase == "EXIT"
elsif input.to_i > 0 and input.to_i < 5
puts "---Please wait for the list to load..---"
case input.to_i
when 1
puts "NY Times Bestseller List: Hardcover Fiction"
when 2
puts "NY Times Bestseller List: Hardcover NonFiction"
when 3
puts "NY Times Bestseller List: Print and E-Book Fiction"
when 4
puts "NY Times Bestseller List: Print and E-Book NonFiction"
end
puts "------------------------------------------------------"
generate_book_list(input)
display_list(input)
second_menu(input)
elsif input.to_i == 5
else
main_menu
end
end
def second_menu(book_category)
puts "Select book no. or Type 'Menu' to return to main menu or Type 'Exit'"
input = gets.strip
if input.upcase == "MENU"
main_menu
elsif input.to_i > 0 && input.to_i <= Nybooks::Books.all.size
select_a_book(book_category,input.to_i - 1)
elsif input.upcase != "EXIT"
second_menu(book_category)
end
end
def generate_book_list(category_selection)
if Nybooks::Books.find_by_books_category(category_selection) == []
booklist = Nybooks::Scraper.scrape_page(category_selection)
Nybooks::Books.create_from_collection(booklist)
end
end
def display_list(book_category)
Nybooks::Books.find_by_books_category(book_category).each_with_index do |book, index|
puts "#{index + 1}. #{book.title}"
end
end
def select_a_book(book_category, book_number)
Nybooks::Books.find_by_books_category(book_category).each_with_index do |book, index|
if book.index == book_number
puts "---------------------------------------------"
puts "Rank: #{index + 1}"
puts "Freshness: #{book.freshness}"
puts "Title: #{book.title}"
puts "Author: #{book.author}"
puts "Description: #{book.description.strip}"
puts "--------------------------------------------"
end
end
second_menu(book_category)
end
def goodbye
puts "Thanks for checking the list(s). Goodbye!"
end
end
| 30.632184 | 91 | 0.582739 |
d506b34811a726e2acd47634a1578fca34bc165c | 5,221 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Web::Mgmt::V2016_08_01
#
# A service client - single point of access to the REST API.
#
class WebSiteManagementClient < MsRestAzure::AzureServiceClient
include MsRestAzure
include MsRestAzure::Serialization
# @return [String] the base URI of the service.
attr_accessor :base_url
# @return Credentials needed for the client to connect to Azure.
attr_reader :credentials
# @return [String] Your Azure subscription ID. This is a GUID-formatted
# string (e.g. 00000000-0000-0000-0000-000000000000).
attr_accessor :subscription_id
# @return [String] API Version
attr_reader :api_version
# @return [String] The preferred language for the response.
attr_accessor :accept_language
# @return [Integer] The retry timeout in seconds for Long Running
# Operations. Default value is 30.
attr_accessor :long_running_operation_retry_timeout
# @return [Boolean] Whether a unique x-ms-client-request-id should be
# generated. When set to true a unique x-ms-client-request-id value is
# generated and included in each request. Default is true.
attr_accessor :generate_client_request_id
# @return [WebApps] web_apps
attr_reader :web_apps
#
# Creates initializes a new instance of the WebSiteManagementClient class.
# @param credentials [MsRest::ServiceClientCredentials] credentials to authorize HTTP requests made by the service client.
# @param base_url [String] the base URI of the service.
# @param options [Array] filters to be applied to the HTTP requests.
#
def initialize(credentials = nil, base_url = nil, options = nil)
super(credentials, options)
@base_url = base_url || 'https://management.azure.com'
fail ArgumentError, 'invalid type of credentials input parameter' unless credentials.is_a?(MsRest::ServiceClientCredentials) unless credentials.nil?
@credentials = credentials
@web_apps = WebApps.new(self)
@api_version = '2016-08-01'
@accept_language = 'en-US'
@long_running_operation_retry_timeout = 30
@generate_client_request_id = true
add_telemetry
end
#
# Makes a request and returns the body of the response.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [Hash{String=>String}] containing the body of the response.
# Example:
#
# request_content = "{'location':'westus','tags':{'tag1':'val1','tag2':'val2'}}"
# path = "/path"
# options = {
# body: request_content,
# query_params: {'api-version' => '2016-02-01'}
# }
# result = @client.make_request(:put, path, options)
#
def make_request(method, path, options = {})
result = make_request_with_http_info(method, path, options)
result.body unless result.nil?
end
#
# Makes a request and returns the operation response.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [MsRestAzure::AzureOperationResponse] Operation response containing the request, response and status.
#
def make_request_with_http_info(method, path, options = {})
result = make_request_async(method, path, options).value!
result.body = result.response.body.to_s.empty? ? nil : JSON.load(result.response.body)
result
end
#
# Makes a request asynchronously.
# @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
# @param path [String] the path, relative to {base_url}.
# @param options [Hash{String=>String}] specifying any request options like :body.
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def make_request_async(method, path, options = {})
fail ArgumentError, 'method is nil' if method.nil?
fail ArgumentError, 'path is nil' if path.nil?
request_url = options[:base_url] || @base_url
if(!options[:headers].nil? && !options[:headers]['Content-Type'].nil?)
@request_headers['Content-Type'] = options[:headers]['Content-Type']
end
request_headers = @request_headers
request_headers.merge!({'accept-language' => @accept_language}) unless @accept_language.nil?
options.merge!({headers: request_headers.merge(options[:headers] || {})})
options.merge!({credentials: @credentials}) unless @credentials.nil?
super(request_url, method, path, options)
end
private
#
# Adds telemetry information.
#
def add_telemetry
sdk_information = 'azure_mgmt_web'
sdk_information = "#{sdk_information}/0.17.6"
add_user_agent_information(sdk_information)
end
end
end
| 39.255639 | 154 | 0.690289 |
014582f86bc2a6232e2ed879e5e6da37b7841a24 | 392 | class ApplicationController < ActionController::Base
before_action :authenticate_user!
before_action :configure_permitted_parameters, if: :devise_controller?
protected
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_up, keys: [:first_name, :last_name])
devise_parameter_sanitizer.permit(:account_update, keys: [:first_name, :last_name])
end
end
| 32.666667 | 87 | 0.808673 |
f793a06845bd625611f864b664d7d361efb9dea6 | 12,599 | # Copyright (c) 2015 Sqreen. All Rights Reserved.
# Please refer to our terms for more information: https://www.sqreen.io/terms.html
require 'ipaddr'
require 'sqreen/events/remote_exception'
require 'sqreen/callbacks'
require 'sqreen/exception'
require 'sqreen/log'
require 'sqreen/frameworks/request_recorder'
module Sqreen
module Frameworks
# This is the base class for framework specific code
class GenericFramework
include RequestRecorder
attr_accessor :sqreen_configuration
def initialize
if defined?(Rack::Builder)
hook_rack_builder
else
to_app_done(Process.pid)
end
clean_request_record
end
# What kind of database is this
def db_settings(_options = {})
raise Sqreen::NotImplementedYet
end
# More information about the current framework
def framework_infos
raise Sqreen::NotImplementedYet unless ensure_rack_loaded
{
:framework_type => 'Rack',
:framework_version => Rack.version,
:environment => ENV['RACK_ENV'],
}
end
def development?
ENV['RACK_ENV'] == 'development'
end
PREFFERED_IP_HEADERS = %w(HTTP_X_FORWARDED_FOR HTTP_X_REAL_IP
HTTP_CLIENT_IP HTTP_X_FORWARDED
HTTP_X_CLUSTER_CLIENT_IP HTTP_FORWARDED_FOR
HTTP_FORWARDED HTTP_VIA).freeze
def ip_headers
req = request
return [] unless req
ips = []
(PREFFERED_IP_HEADERS + ['REMOTE_ADDR']).each do |header|
v = req.env[header]
ips << [header, v] unless v.nil?
end
ips << ['rack.ip', req.ip] if req.respond_to?(:ip)
ips
end
# What is the current client IP as seen by rack
def rack_client_ip
req = request
return nil unless req
return req.ip if req.respond_to?(:ip)
req.env['REMOTE_ADDR']
end
# Sourced from rack:Request#trusted_proxy?
TRUSTED_PROXIES = /\A127\.0\.0\.1\Z|\A(10|172\.(1[6-9]|2[0-9]|30|31)|192\.168)\.|\A::1\Z|\Afd[0-9a-f]{2}:.+|\Alocalhost\Z|\Aunix\Z|\Aunix:/i
LOCALHOST = /\A127\.0\.0\.1\Z|\A::1\Z|\Alocalhost\Z|\Aunix\Z|\Aunix:/i
# What is the current client IP
def client_ip
req = request
return nil unless req
# Look for an external address being forwarded
split_ips = []
PREFFERED_IP_HEADERS.each do |header_name|
forwarded = req.env[header_name]
ips = split_ip_addresses(forwarded)
lip = ips.find { |ip| (ip !~ TRUSTED_PROXIES) && valid_ip?(ip) }
split_ips << ips unless ips.empty?
return lip unless lip.nil?
end
# Else fall back to declared remote addr
r = req.env['REMOTE_ADDR']
# If this is localhost get the last hop before
if r.nil? || r =~ LOCALHOST
split_ips.each do |ips|
lip = ips.find { |ip| (ip !~ LOCALHOST) && valid_ip?(ip) }
return lip unless lip.nil?
end
end
r
end
# Get a header by name
def header(name)
req = request
return nil unless req
req.env[name]
end
def http_headers
req = request
return nil unless req
req.env.select { |k, _| k.to_s.start_with?('HTTP_') }
end
def hostname
req = request
return nil unless req
http_host = req.env['HTTP_HOST']
return http_host if http_host && !http_host.empty?
req.env['SERVER_NAME']
end
def request_id
req = request
return nil unless req
req.env['HTTP_X_REQUEST_ID']
end
# Summary of known request infos
def request_infos
req = request
return {} unless req
# FIXME: Use frozen string keys
{
:rid => request_id,
:user_agent => client_user_agent,
:scheme => req.scheme,
:verb => req.env['REQUEST_METHOD'],
:host => hostname,
:port => req.env['SERVER_PORT'],
:referer => req.env['HTTP_REFERER'],
:path => request_path,
:remote_port => req.env['REMOTE_PORT'],
:remote_ip => remote_addr,
:client_ip => client_ip,
}
end
# Request URL path
def request_path
req = request
return nil unless req
req.script_name + req.path_info
end
# request user agent
def client_user_agent
req = request
return nil unless req
req.env['HTTP_USER_AGENT']
end
# Application root
def root
nil
end
# Main entry point for sqreen.
# launch whenever we are ready
def on_start
yield self
end
# Should the agent not be starting up?
def prevent_startup
return :irb if $0 == 'irb'
return if sqreen_configuration.nil?
disable = sqreen_configuration.get(:disable)
return :config_disable if disable == true || disable.to_s.to_i == 1
end
# Instrument with our rules when the framework as finished loading
def instrument_when_ready!(instrumentor, rules)
wait_for_to_app do
instrumentor.instrument!(rules, self)
end
end
def to_app_done(val)
return if @to_app_done
@to_app_done = val
return unless @wait
@wait.each(&:call)
@wait.clear
end
def wait_for_to_app(&block)
yield && return if @to_app_done
@wait ||= []
@wait << block
end
# Does the parameters value include this value
def params_include?(value)
params = request_params
return false if params.nil?
each_value_for_hash(params) do |param|
return true if param == value
end
false
end
# Does the parameters key/value include this value
def full_params_include?(value)
params = request_params
return false if params.nil?
each_key_value_for_hash(params) do |param|
return true if param == value
end
false
end
# Fetch and store the current request object
# Nota: cleanup should be performed at end of request (see clean_request)
def store_request(object)
return unless ensure_rack_loaded
SharedStorage.set(:request, Rack::Request.new(object))
SharedStorage.inc(:stored_requests)
end
# Get the currently stored request
def request
SharedStorage.get(:request)
end
# Cleanup request context
def clean_request
return unless SharedStorage.dec(:stored_requests) <= 0
payload_creator = Sqreen::PayloadCreator.new(self)
close_request_record(Sqreen.queue, Sqreen.observations_queue, payload_creator)
SharedStorage.set(:request, nil)
end
def request_params
self.class.parameters_from_request(request)
end
def filtered_request_params
params = request_params
params.delete('cookies')
params
end
%w(form query cookies).each do |section|
define_method("#{section}_params") do
self.class.send("#{section}_params", request)
end
end
P_FORM = 'form'.freeze
P_QUERY = 'query'.freeze
P_COOKIE = 'cookies'.freeze
P_GRAPE = 'grape_params'.freeze
P_RACK_ROUTING = 'rack_routing'.freeze
def self.form_params(request)
return nil unless request
begin
request.POST
rescue => e
Sqreen.log.debug("POST Parameters are invalid #{e.inspect}")
nil
end
end
def self.cookies_params(request)
return nil unless request
begin
request.cookies
rescue => e
Sqreen.log.debug("cookies are invalid #{e.inspect}")
nil
end
end
def self.query_params(request)
return nil unless request
begin
request.GET
rescue => e
Sqreen.log.debug("GET Parameters are invalid #{e.inspect}")
nil
end
end
def self.parameters_from_request(request)
return {} unless request
r = {
P_FORM => form_params(request),
P_QUERY => query_params(request),
P_COOKIE => cookies_params(request),
}
# Add grape parameters if seen
p = request.env['grape.request.params']
r[P_GRAPE] = p if p
p = request.env['rack.routing_args']
if p
r[P_RACK_ROUTING] = p.dup
r[P_RACK_ROUTING].delete :route_info
r[P_RACK_ROUTING].delete :version
end
r
end
# Expose current working directory
def cwd
Dir.getwd
end
WHITELIST_KEY = 'sqreen.whitelisted_request'.freeze
# Return the current item that whitelist this request
# returns nil if request is not whitelisted
def whitelisted_match
return nil unless request
return request.env[WHITELIST_KEY] if request.env.key?(WHITELIST_KEY)
request.env[WHITELIST_KEY] = whitelisted_ip || whitelisted_path
end
# Returns the current path that whitelist the request
def whitelisted_path
path = request_path
return nil unless path
find_whitelisted_path(path)
end
# Returns the current path that whitelist the request
def whitelisted_ip
ip = client_ip
return nil unless ip
find_whitelisted_ip(ip)
rescue
nil
end
def remote_addr
return nil unless request
request.env['REMOTE_ADDR']
end
protected
# Is this a whitelisted path?
# return the path witelisted prefix that match path
def find_whitelisted_path(rpath)
(Sqreen.whitelisted_paths || []).find do |path|
rpath.start_with?(path)
end
end
# Is this a whitelisted ip?
# return the ip witelisted range that match ip
def find_whitelisted_ip(rip)
ret = (Sqreen.whitelisted_ips || {}).find do |_, ip|
ip.include?(rip)
end
return nil unless ret
ret.first
end
def hook_rack_request(klass)
@calling_pid = Process.pid
klass.class_eval do
define_method(:call_with_sqreen) do |*args, &block|
rv = call_without_sqreen(*args, &block)
if Sqreen.framework.instance_variable_get('@calling_pid') != Process.pid
Sqreen.framework.instance_variable_set('@calling_pid', Process.pid)
yield Sqreen.framework
end
rv
end
alias_method :call_without_sqreen, :call
alias_method :call, :call_with_sqreen
end
end
def hook_rack_builder
Rack::Builder.class_eval do
define_method(:to_app_with_sqreen) do |*args, &block|
Sqreen.framework.to_app_done(Process.pid)
to_app_without_sqreen(*args, &block)
end
alias_method :to_app_without_sqreen, :to_app
alias_method :to_app, :to_app_with_sqreen
end
end
# FIXME: Extract to another object (utils?)
# FIXME: protect against cycles ?
def each_value_for_hash(params, &block)
case params
when Hash then params.each { |_k, v| each_value_for_hash(v, &block) }
when Array then params.each { |v| each_value_for_hash(v, &block) }
else
yield params
end
end
def each_key_value_for_hash(params, &block)
case params
when Hash then params.each do |k, v|
yield k
each_key_value_for_hash(v, &block)
end
when Array then params.each { |v| each_key_value_for_hash(v, &block) }
else
yield params
end
end
def ensure_rack_loaded
@cannot_load_rack ||= false
return false if @cannot_load_rack
require 'rack' unless defined?(Rack)
true
rescue LoadError => e
# FIXME: find a nice way to test this branch
Sqreen::RemoteException.record(e)
@cannot_load_rack = true
false
end
private
def split_ip_addresses(ip_addresses)
ip_addresses ? ip_addresses.strip.split(/[,\s]+/) : []
end
def valid_ip?(ip)
IPAddr.new(ip)
true
rescue
false
end
end
end
end
| 28.185682 | 146 | 0.590682 |
1d4009c21e6c41f629af5953a27c76a154b0f770 | 7,393 | require 'spec_helper'
describe "Koala::Facebook::RealtimeUpdates" do
# One-time fixture setup: pulls live OAuth app credentials and
# subscription endpoint data from $testing_data, failing fast with a
# clear message when anything needed for the live tests is missing.
#
# Fix: the original raised bare Exception, which escapes a standard
# `rescue` (StandardError); a plain `raise "msg"` produces RuntimeError.
before :all do
  # get oauth data
  @oauth_data = $testing_data["oauth_test_data"]
  @app_id = @oauth_data["app_id"]
  @secret = @oauth_data["secret"]
  @callback_url = @oauth_data["callback_url"]
  @app_access_token = @oauth_data["app_access_token"]

  # check OAuth data
  unless @app_id && @secret && @callback_url && @app_access_token
    raise "Must supply OAuth app id, secret, app_access_token, and callback to run live subscription tests!"
  end

  # get subscription data
  @subscription_data = $testing_data["subscription_test_data"]
  @verify_token = @subscription_data["verify_token"]
  @challenge_data = @subscription_data["challenge_data"]
  @subscription_path = @subscription_data["subscription_path"]

  # check subscription data
  unless @verify_token && @challenge_data && @subscription_path
    raise "Must supply verify_token and equivalent challenge_data to run live subscription tests!"
  end
end
# Construction-time behavior: attribute exposure and app-token fetching.
describe "when initializing" do
# basic initialization
it "should initialize properly with an app_id and an app_access_token" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :app_access_token => @app_access_token)
updates.should be_a(Koala::Facebook::RealtimeUpdates)
end
# attributes
it "should allow read access to app_id, app_access_token, and secret" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :app_access_token => @app_access_token)
# this should not throw errors
updates.app_id && updates.app_access_token && updates.secret
end
it "should not allow write access to app_id" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :app_access_token => @app_access_token)
# no writer is defined, so assignment should raise NoMethodError
lambda { updates.app_id = 2 }.should raise_error(NoMethodError)
end
it "should not allow write access to app_access_token" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :app_access_token => @app_access_token)
# no writer is defined, so assignment should raise NoMethodError
lambda { updates.app_access_token = 2 }.should raise_error(NoMethodError)
end
it "should not allow write access to secret" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :app_access_token => @app_access_token)
# no writer is defined, so assignment should raise NoMethodError
lambda { updates.secret = 2 }.should raise_error(NoMethodError)
end
# init with secret / fetching the token
it "should initialize properly with an app_id and a secret" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :secret => @secret)
updates.should be_a(Koala::Facebook::RealtimeUpdates)
end
it "should fetch an app_token from Facebook when provided an app_id and a secret" do
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :secret => @secret)
updates.app_access_token.should_not be_nil
end
it "should use the OAuth class to fetch a token when provided an app_id and a secret" do
# stub the OAuth collaborator so we assert the delegation, not the network call
oauth = Koala::Facebook::OAuth.new(@app_id, @secret)
token = oauth.get_app_access_token
oauth.should_receive(:get_app_access_token).and_return(token)
Koala::Facebook::OAuth.should_receive(:new).with(@app_id, @secret).and_return(oauth)
updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :secret => @secret)
end
end
# Live integration tests for subscription management; relies on the
# OAuth and subscription fixtures loaded in before(:all).
#
# Fixes: removed an exact duplicate of the first example, and corrected
# the typo'd description "should is subscriptions properly".
describe "when used" do
  before :each do
    @updates = Koala::Facebook::RealtimeUpdates.new(:app_id => @app_id, :secret => @secret)
  end

  it "should send a subscription request to a valid server" do
    result = @updates.subscribe("user", "name", @subscription_path, @verify_token)
    result.should be_true
  end

  it "should send a subscription request to an invalid path on a valid server" do
    lambda { result = @updates.subscribe("user", "name", @subscription_path + "foo", @verify_token) }.should raise_exception(Koala::Facebook::APIError)
  end

  it "should fail to send a subscription request to an invalid server" do
    lambda { @updates.subscribe("user", "name", "foo", @verify_token) }.should raise_exception(Koala::Facebook::APIError)
  end

  it "should unsubscribe a valid individual object successfully" do
    @updates.unsubscribe("user").should be_true
  end

  it "should unsubscribe all subscriptions successfully" do
    @updates.unsubscribe.should be_true
  end

  it "should fail when an invalid object is provided to unsubscribe" do
    lambda { @updates.unsubscribe("kittens") }.should raise_error(Koala::Facebook::APIError)
  end

  it "should list subscriptions properly" do
    @updates.list_subscriptions.should be_a(Array)
  end
end # describe "when used"
# Behavior of the class-level meet_challenge hook that answers
# Facebook's hub.challenge verification handshake.
describe "when meeting challenge" do
it "should return false if hub.mode isn't subscribe" do
params = {'hub.mode' => 'not subscribe'}
Koala::Facebook::RealtimeUpdates.meet_challenge(params).should be_false
end
it "should return false if not given a verify_token or block" do
params = {'hub.mode' => 'subscribe'}
Koala::Facebook::RealtimeUpdates.meet_challenge(params).should be_false
end
describe "and mode is 'subscribe'" do
before(:each) do
@params = {'hub.mode' => 'subscribe'}
end
describe "and a token is given" do
before(:each) do
@token = 'token'
@params['hub.verify_token'] = @token
end
it "should return false if the given verify token doesn't match" do
Koala::Facebook::RealtimeUpdates.meet_challenge(@params, @token + '1').should be_false
end
it "should return the challenge if the given verify token matches" do
@params['hub.challenge'] = 'challenge val'
Koala::Facebook::RealtimeUpdates.meet_challenge(@params, @token).should == @params['hub.challenge']
end
end
describe "and a block is given" do
it "should give the block the token as a parameter" do
# NOTE(review): @token is never assigned in this describe block (only in
# the sibling "and a token is given" block), so this compares against
# nil and likely does not assert what was intended — verify.
Koala::Facebook::RealtimeUpdates.meet_challenge(@params)do |token|
token.should == @token
end
end
# NOTE(review): description typo "block return false" left as-is; the
# string is the example's runtime identifier.
it "should return false if the given block return false" do
Koala::Facebook::RealtimeUpdates.meet_challenge(@params)do |token|
false
end.should be_false
end
it "should return false if the given block returns nil" do
Koala::Facebook::RealtimeUpdates.meet_challenge(@params)do |token|
nil
end.should be_false
end
it "should return the challenge if the given block returns true" do
@params['hub.challenge'] = 'challenge val'
Koala::Facebook::RealtimeUpdates.meet_challenge(@params) do |token|
true
end.should be_true
end
end
end # describe "and mode is subscribe"
end # describe "when meeting challenge"
end # describe
| 39.962162 | 153 | 0.671446 |
3326ff9ef64e9fab7c3afb8d82dbd76c0c950644 | 2,405 | class Scope::Domain < Scope::Parameterized
# Route pattern this scope is parsed from: "domain/<id>/<scope level>".
matches 'domain/:id/:domain_scope'
description "Grant access to perform API actions against a single domain and the contained applications."
# Human-readable description per supported scope level; the keys also
# serve as the canonical list of valid values for #domain_scope=.
DOMAIN_SCOPES = {
:view => 'Grant read-only access to a single domain.',
:edit => 'Grant edit access to a single domain and all its applications.',
:admin => 'Grant full administrative access to a single domain and all its applications.',
}.freeze
# Verb-level gate: a :view scope only permits read-only (GET) requests;
# :edit and :admin impose no HTTP-method restriction at this layer.
def allows_action?(controller)
  return true unless domain_scope == :view
  controller.request.method == "GET"
end
# Whether this domain scope authorizes +permission+ on +resource+.
# Only Domain and Application resources within this scope's domain can
# be authorized; anything else — including the :view scope level, which
# has no branch here — falls through and returns nil (falsy).
def authorize_action?(permission, resource, other_resources, user)
case domain_scope
when :admin
case resource
when Domain
resource._id === id
when Application
# The application must live in this domain; beyond that, delegate to
# the application scope's :admin rules.
return false unless resource.domain_id === id
Scope::Application.authorize_action?(resource._id, :admin, permission, resource, other_resources, user)
end
when :edit
case resource
when Domain
# Edit on the domain itself only covers creating applications.
resource._id === id && [:create_application, :create_builder_application].include?(permission)
when Application
return false unless resource.domain_id === id
# Destroy/update are granted outright; everything else is delegated
# to the application scope's :edit rules.
return true if [:destroy, :update_application].include?(permission)
Scope::Application.authorize_action?(resource._id, :edit, permission, resource, other_resources, user)
end
end
end
# Narrows a query criteria object so lookups only see what this scope
# permits, mutating criteria.options in place and returning +criteria+.
# NOTE(review): ||= means an option already set by another scope is not
# overwritten — presumably scopes are combined permissively; confirm.
def limits_access(criteria)
case criteria.klass
when Application then (criteria.options[:conditions] ||= []).concat([{:domain_id => @id}])
when Domain then (criteria.options[:for_ids] ||= []) << @id
when CloudUser then (criteria.options[:visible] ||= domain_scope == :admin)
when Team then (criteria.options[:visible] ||= domain_scope == :admin)
else criteria.options[:visible] ||= false
end
criteria
end
# Describes every available domain scope as
# [scope instance, description, default expiration, maximum expiration],
# skipping entries whose description is nil.
def self.describe
DOMAIN_SCOPES.map{ |k,v| s = with_params(nil, k); [s, v, default_expiration(s), maximum_expiration(s)] unless v.nil? }.compact
end
protected
# Validates and assigns the domain id, coercing it to a BSON ObjectId.
#
# @param s [#to_s] raw id value from the scope string
# @raise [Scope::Invalid] when the id is 40 or more characters long
def id=(s)
  value = s.to_s
  raise Scope::Invalid, "id must be less than 40 characters" if value.length >= 40
  @id = Moped::BSON::ObjectId.from_string(value)
end
# Validates and stores the scope level as one of the DOMAIN_SCOPES keys.
#
# @param s [String, Symbol] requested scope level
# @raise [Scope::Invalid] when +s+ names no known scope
def domain_scope=(s)
  match = DOMAIN_SCOPES.keys.find { |key| key.to_s == s.to_s }
  raise Scope::Invalid, "'#{s}' is not a valid domain scope" if match.nil?
  @domain_scope = match
end
end
| 34.357143 | 130 | 0.671102 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.