hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5deead6b023249673c72bbc8fdcfcc072c04c4e9 | 1,563 | SeattleIo::Application.configure do
  # Rails test-environment configuration for the SeattleIo application.
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Configure static asset server for tests with Cache-Control for performance.
  config.serve_static_assets = true
  config.static_cache_control = "public, max-age=3600"

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr
end
| 42.243243 | 85 | 0.774792 |
b957412815768ea894977158fab7333f38354107 | 7,236 | class RubyAT20 < Formula
  # Homebrew formula for a keg-only, versioned Ruby 2.0 installation with a
  # vendored (newer) Rubygems dropped in on top of the bundled one.
  desc "Powerful, clean, object-oriented scripting language"
  homepage "https://www.ruby-lang.org/"
  url "https://cache.ruby-lang.org/pub/ruby/2.0/ruby-2.0.0-p648.tar.bz2"
  sha256 "087ad4dec748cfe665c856dbfbabdee5520268e94bb81a1d8565d76c3cc62166"
  revision 5

  bottle do
    sha256 "05e1cbc036d1c06e517525674ab04a88f6272d8191c8bb6abb3fe5034ec7a37f" => :high_sierra
    sha256 "f8410ba6a21b6c160d6440271bb4808dc20beeb60cbd910e01952e5294645386" => :sierra
    sha256 "ac812a9ce7c713bab2279d8faa7cf36c1912e088d4ffe72f7194ee1fb1dc0f2a" => :el_capitan
  end

  keg_only :versioned_formula

  option "with-suffix", "Suffix commands with '20'"
  option "with-doc", "Install documentation"
  option "with-tcltk", "Install with Tcl/Tk support"

  depends_on "pkg-config" => :build
  depends_on "readline" => :recommended
  depends_on "gdbm" => :optional
  depends_on "libffi" => :optional
  depends_on "libyaml"
  depends_on "openssl"
  depends_on :x11 if build.with? "tcltk"

  # This should be kept in sync with the main Ruby formula
  # but a revision bump should not be forced every update
  # unless there are security fixes in that Rubygems release.
  resource "rubygems" do
    url "https://rubygems.org/rubygems/rubygems-2.7.4.tgz"
    sha256 "bbe35ce6646e4168fcb1071d5f83b2d1154924f5150df0f5fca0f37d2583a182"
  end

  # Suffix appended to installed executables when built --with-suffix.
  def program_suffix
    build.with?("suffix") ? "20" : ""
  end

  # Absolute path of the installed ruby executable (suffix-aware).
  def ruby
    "#{bin}/ruby#{program_suffix}"
  end

  # Ruby API version used in library/gem paths.
  def api_version
    "2.0.0"
  end

  # Cellar-independent bin directory where gem executables are linked,
  # so they survive reinstalls of this formula.
  def rubygems_bindir
    HOMEBREW_PREFIX/"lib/ruby/gems/#{api_version}/bin"
  end

  def install
    args = %W[
      --prefix=#{prefix}
      --enable-shared
      --with-sitedir=#{HOMEBREW_PREFIX}/lib/ruby/site_ruby
      --with-vendordir=#{HOMEBREW_PREFIX}/lib/ruby/vendor_ruby
    ]
    args << "--program-suffix=#{program_suffix}" if build.with? "suffix"
    args << "--with-out-ext=tk" if build.without? "tcltk"
    args << "--disable-install-doc" if build.without? "doc"
    args << "--disable-dtrace" unless MacOS::CLT.installed?

    # Point configure at the keg locations of our dependencies.
    paths = [
      Formula["libyaml"].opt_prefix,
      Formula["openssl"].opt_prefix,
    ]
    %w[readline gdbm libffi].each do |dep|
      paths << Formula[dep].opt_prefix if build.with? dep
    end
    args << "--with-opt-dir=#{paths.join(":")}"

    system "./configure", *args

    # Ruby has been configured to look in the HOMEBREW_PREFIX for the
    # sitedir and vendordir directories; however we don't actually want to create
    # them during the install.
    #
    # These directories are empty on install; sitedir is used for non-rubygems
    # third party libraries, and vendordir is used for packager-provided libraries.
    inreplace "tool/rbinstall.rb" do |s|
      s.gsub! 'prepare "extension scripts", sitelibdir', ""
      s.gsub! 'prepare "extension scripts", vendorlibdir', ""
      s.gsub! 'prepare "extension objects", sitearchlibdir', ""
      s.gsub! 'prepare "extension objects", vendorarchlibdir', ""
    end

    system "make"
    system "make", "install"

    # This is easier than trying to keep both current & versioned Ruby
    # formulae repeatedly updated with Rubygem patches.
    resource("rubygems").stage do
      ENV.prepend_path "PATH", bin

      system ruby, "setup.rb", "--prefix=#{buildpath}/vendor_gem"
      rg_in = lib/"ruby/#{api_version}"

      # Remove bundled Rubygem version.
      rm_rf rg_in/"rubygems"
      rm_f rg_in/"rubygems.rb"
      rm_f rg_in/"ubygems.rb"
      rm_f bin/"gem#{program_suffix}"

      # Drop in the new version.
      rg_in.install Dir[buildpath/"vendor_gem/lib/*"]
      bin.install buildpath/"vendor_gem/bin/gem" => "gem#{program_suffix}"
      (libexec/"gembin").install buildpath/"vendor_gem/bin/bundle" => "bundle#{program_suffix}"
      (libexec/"gembin").install_symlink "bundle#{program_suffix}" => "bundler#{program_suffix}"
    end
  end

  def post_install
    # Since Gem ships Bundle we want to provide that full/expected installation
    # but to do so we need to handle the case where someone has previously
    # installed bundle manually via `gem install`.
    rm_f %W[
      #{rubygems_bindir}/bundle
      #{rubygems_bindir}/bundle#{program_suffix}
      #{rubygems_bindir}/bundler
      #{rubygems_bindir}/bundler#{program_suffix}
    ]
    rm_rf Dir[HOMEBREW_PREFIX/"lib/ruby/gems/#{api_version}/gems/bundler-*"]
    rubygems_bindir.install_symlink Dir[libexec/"gembin/*"]

    # Customize rubygems to look/install in the global gem directory
    # instead of in the Cellar, making gems last across reinstalls
    config_file = lib/"ruby/#{api_version}/rubygems/defaults/operating_system.rb"
    config_file.unlink if config_file.exist?
    config_file.write rubygems_config

    # Create the sitedir and vendordir that were skipped during install
    %w[sitearchdir vendorarchdir].each do |dir|
      mkdir_p `#{ruby} -rrbconfig -e 'print RbConfig::CONFIG["#{dir}"]'`
    end
  end

  # Body of rubygems' operating_system.rb override. Interpolations here are
  # evaluated at install time; the result is written verbatim to disk.
  def rubygems_config; <<~EOS
    module Gem
      class << self
        alias :old_default_dir :default_dir
        alias :old_default_path :default_path
        alias :old_default_bindir :default_bindir
        alias :old_ruby :ruby
      end

      def self.default_dir
        path = [
          "#{HOMEBREW_PREFIX}",
          "lib",
          "ruby",
          "gems",
          "#{api_version}"
        ]

        @default_dir ||= File.join(*path)
      end

      def self.private_dir
        path = if defined? RUBY_FRAMEWORK_VERSION then
                 [
                   File.dirname(RbConfig::CONFIG['sitedir']),
                   'Gems',
                   RbConfig::CONFIG['ruby_version']
                 ]
               elsif RbConfig::CONFIG['rubylibprefix'] then
                 [
                   RbConfig::CONFIG['rubylibprefix'],
                   'gems',
                   RbConfig::CONFIG['ruby_version']
                 ]
               else
                 [
                   RbConfig::CONFIG['libdir'],
                   ruby_engine,
                   'gems',
                   RbConfig::CONFIG['ruby_version']
                 ]
               end

        @private_dir ||= File.join(*path)
      end

      def self.default_path
        if Gem.user_home && File.exist?(Gem.user_home)
          [user_dir, default_dir, private_dir]
        else
          [default_dir, private_dir]
        end
      end

      def self.default_bindir
        "#{rubygems_bindir}"
      end

      def self.ruby
        "#{opt_bin}/ruby#{program_suffix}"
      end
    end
  EOS
  end

  def caveats; <<~EOS
    By default, binaries installed by gem will be placed into:
      #{rubygems_bindir}

    You may want to add this to your PATH.
  EOS
  end

  test do
    hello_text = shell_output("#{bin}/ruby#{program_suffix} -e 'puts :hello'")
    assert_equal "hello\n", hello_text

    ENV["GEM_HOME"] = testpath
    system "#{bin}/gem#{program_suffix}", "install", "json"

    (testpath/"Gemfile").write <<~EOS
      source 'https://rubygems.org'
      gem 'gemoji'
    EOS
    system rubygems_bindir/"bundle#{program_suffix}", "install", "--binstubs=#{testpath}/bin"
    assert_predicate testpath/"bin/gemoji", :exist?, "gemoji is not installed in #{testpath}/bin"
  end
end
| 31.736842 | 97 | 0.642344 |
4a61abb7e4c5bc44b137449972a70697118fe610 | 170 | class SceneLocationship < ApplicationRecord
  # Join model linking a user's scene to a location.
  include HasContentLinking

  belongs_to :user
  belongs_to :scene
  # Column is scene_location_id but the target model is Location.
  belongs_to :scene_location, class_name: 'Location'
end
| 18.888889 | 52 | 0.805882 |
03c841071b5ea76342fb0fa1f77349e22de4a845 | 2,459 | require 'serverspec'
require 'net/ssh'
require 'yaml'

# Serverspec bootstrap: run specs over SSH against the host named by the
# TARGET_HOST environment variable, using credentials from user/keypath.
set :backend, :ssh

# Resolve the sudo password either interactively (when ASK_SUDO_PASSWORD is
# set) or from the SUDO_PASSWORD environment variable.
if ENV['ASK_SUDO_PASSWORD']
  begin
    require 'highline/import'
  rescue LoadError
    fail "highline is not available. Try installing it."
  end
  set :sudo_password, ask("Enter sudo password: ") { |q| q.echo = false }
else
  set :sudo_password, ENV['SUDO_PASSWORD']
end

# Start from ~/.ssh/config for the target host, then apply env overrides.
host = ENV['TARGET_HOST']
options = Net::SSH::Config.for(host)
options[:user] = ENV['user']
options[:keys] = ENV['keypath']
set :host, options[:host_name] || host
set :ssh_options, options

# Disable sudo
set :disable_sudo, true

# Set shell
set :shell, '/bin/bash'

# Set environment variables
# set :env, :LANG => 'C', :LC_MESSAGES => 'C'

# Set PATH
# set :path, '/sbin:/usr/local/sbin:$PATH'
# Count how many hosts are defined under the [role] section of the Ansible
# inventory file named by ENV['inventory'].
#
# role - inventory group name, without the surrounding brackets.
#
# Returns the number of 'ansible_host' entries in that section, or 0 when
# the section is absent.
def count_inventory_roles(role)
  # Block form closes the file even if the read raises.
  input = File.open(ENV['inventory'], "rb", &:read)
  return 0 unless input.include?("[#{role}]")

  # Everything between "[#{role}]" and the next "[" is the section body.
  rows = input.split("[#{role}]")[1].split("[")[0]
  rows.scan(/ansible_host/).count
end
# Is ENV['TARGET_HOST'] listed in the [role] section of the inventory file
# (ENV['inventory'])?
#
# Returns false when the section is missing; otherwise the result of a
# word-boundary regex match against the section body (match index or nil).
def hostInGroups?(role)
  contents = File.open(ENV['inventory'], "rb") { |f| f.read }
  return false unless contents.include?("[#{role}]")

  # Section body: text between "[#{role}]" and the next "[".
  section = contents.split("[#{role}]")[1].split("[")[0]
  section =~ /\b#{ENV['TARGET_HOST']}\b/
end
# Load the multi-document manifest.yml that sits next to the inventory file
# (derived by substituting 'inventory' -> 'manifest.yml' in ENV['inventory'])
# and return the first YAML document whose "kind" field matches.
def readDataYaml(kind)
  manifest_path = ENV['inventory'].sub('inventory', 'manifest.yml')
  documents = []
  YAML.load_stream(File.read(manifest_path)) { |doc| documents << doc }
  documents.find { |doc| doc["kind"] == kind }
end
# List host names (the first whitespace-separated token of each entry) from
# the [role] section of the inventory file (ENV['inventory']).
# Commented-out lines and lines without an IPv4 address are skipped.
#
# Returns an Array of host-name strings (empty when the section is absent).
def listInventoryHosts(role)
  # Block form closes the file even if the read raises.
  input = File.open(ENV['inventory'], "rb", &:read)
  list = []
  if input.include?("[#{role}]")
    rows = input.split("[#{role}]")[1].split("[")[0]
    rows.each_line do |line|
      # Use && (not `and`) to avoid precedence surprises.
      if line[0] != '#' && line =~ /(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
        list << line.split.first
      end
    end
  end
  list
end
# List IP addresses (the text after the last '=' of each entry, stripped)
# from the [role] section of the inventory file (ENV['inventory']).
# Commented-out lines and lines without an IPv4 address are skipped.
#
# Returns an Array of IP strings (empty when the section is absent).
def listInventoryIPs(role)
  # Block form closes the file even if the read raises.
  input = File.open(ENV['inventory'], "rb", &:read)
  list = []
  if input.include?("[#{role}]")
    rows = input.split("[#{role}]")[1].split("[")[0]
    rows.each_line do |line|
      # Use && (not `and`) to avoid precedence surprises.
      if line[0] != '#' && line =~ /(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
        list << line.split('=').last.strip
      end
    end
  end
  list
end
| 23.198113 | 76 | 0.576251 |
bb145cb1d350cbc5a0e02a48c9e73db77b13554f | 1,718 | require 'test_helper'
class UsersSignupTest < ActionDispatch::IntegrationTest
  # Integration tests for the signup flow, including account activation
  # via the token emailed to the new user.

  def setup
    # Start each test with an empty mail queue.
    ActionMailer::Base.deliveries.clear
  end

  test "invalid signup information" do
    get signup_path
    assert_no_difference 'User.count' do
      post users_path, params: { user: { name: "",
                                         email: "user@invalid",
                                         password: "foo",
                                         password_confirmation: "bar" } }
    end
    assert_template 'users/new'
    assert_select 'div#error_explanation'
    assert_select 'div.field_with_errors'
  end

  test "valid signup information with account acctivation" do
    get signup_path
    assert_difference 'User.count', 1 do
      post users_path, params: { user: { name: "Example User",
                                         email: "[email protected]",
                                         password: "password",
                                         password_confirmation: "password" } }
    end
    # Exactly one activation email should have been sent.
    assert_equal 1, ActionMailer::Base.deliveries.size
    user = assigns(:user)
    assert_not user.activated?
    # Try logging in before the account has been activated.
    log_in_as(user)
    assert_not is_logged_in?
    # Activation fails with an invalid token.
    get edit_account_activation_path("invalid token", email: user.email)
    assert_not is_logged_in?
    # Activation fails when the token is right but the email is wrong.
    get edit_account_activation_path(user.activation_token, email: 'wrong')
    assert_not is_logged_in?
    # Activation succeeds with a valid token and matching email.
    get edit_account_activation_path(user.activation_token, email: user.email)
    assert user.reload.activated?
    follow_redirect!
    assert_template 'users/show'
    assert is_logged_in?
  end
end
| 33.038462 | 78 | 0.614086 |
4a9e93adfce5b62a0f6fbe8c343261e84c46b4a3 | 6,244 | property :name, String, required: true, name_property: true
# Chef custom resource properties for managing a Route53 resource record set.
property :value, [String, Array]
property :type, String, required: true
property :ttl, Integer, default: 3600
property :weight, String
property :set_identifier, String
# Geo routing: supply either a prebuilt geo_location value or the individual
# country/continent/subdivision codes below.
property :geo_location, String
property :geo_location_country, String
property :geo_location_continent, String
property :geo_location_subdivision, String
property :zone_id, String
# Explicit credentials; when omitted the AWS SDK falls back to IAM/ENV.
property :aws_access_key_id, String
property :aws_secret_access_key, String
property :aws_region, String, default: 'us-east-1'
property :overwrite, [true, false], default: true
property :alias_target, Hash
# When true, the AWS client is created with stubbed responses (for tests).
property :mock, [true, false], default: false
property :fail_on_error, [true, false], default: false
# Create/update the record set: skip when the live record already matches,
# UPSERT when overwriting is allowed, otherwise attempt a plain CREATE.
action :create do
  require 'aws-sdk'
  if current_resource_record_set == resource_record_set
    Chef::Log.info "Record has not changed, skipping: #{name}[#{type}]"
  elsif overwrite?
    change_record 'UPSERT'
    Chef::Log.info "Record created/modified: #{name}[#{type}]"
  else
    change_record 'CREATE'
    Chef::Log.info "Record created: #{name}[#{type}]"
  end
end
# Delete the record set if one currently exists. In mock mode the AWS client
# is primed with a fake record so the lookup/delete path is exercisable in
# tests without touching Route53.
action :delete do
  require 'aws-sdk'
  if mock?
    # Make some fake data so that we can successfully delete when testing.
    mock_resource_record_set = {
      name: 'pdb_test.example.com.',
      type: 'A',
      ttl: 300,
      resource_records: [{ value: '192.168.1.2' }],
    }
    route53.stub_responses(
      :list_resource_record_sets,
      resource_record_sets: [mock_resource_record_set],
      is_truncated: false,
      max_items: 1
    )
  end
  # Fixed: current_resource_record_set returns {} (never nil) when no record
  # exists, so the old `.nil?` guard could never fire and a doomed DELETE was
  # issued for absent records. Treat an empty hash as "nothing to delete" too.
  if current_resource_record_set.nil? || current_resource_record_set.empty?
    Chef::Log.info 'There is nothing to delete.'
  else
    change_record 'DELETE'
    Chef::Log.info "Record deleted: #{name}"
  end
end
action_class do
  # Helper methods shared by the :create and :delete actions.

  # Record name, normalized to fully-qualified form with a trailing dot.
  def name
    @name ||= begin
      return new_resource.name + '.' if new_resource.name !~ /\.$/
      new_resource.name
    end
  end

  # Record values, always as an Array.
  def value
    @value ||= Array(new_resource.value)
  end

  def type
    @type ||= new_resource.type
  end

  def ttl
    @ttl ||= new_resource.ttl
  end

  def geo_location_country
    @geo_location_country ||= new_resource.geo_location_country
  end

  def geo_location_continent
    @geo_location_continent ||= new_resource.geo_location_continent
  end

  def geo_location_subdivision
    @geo_location_subdivision ||= new_resource.geo_location_subdivision
  end

  # Build the geo_location hash from whichever code property was supplied.
  # NOTE(review): the subdivision branch is only reached when
  # geo_location_country is nil, so its country_code entry is always nil —
  # confirm whether subdivision should take precedence over country.
  def geo_location
    if geo_location_country
      { country_code: geo_location_country }
    elsif geo_location_continent
      { continent_code: geo_location_continent }
    elsif geo_location_subdivision
      { country_code: geo_location_country, subdivision_code: geo_location_subdivision }
    else
      @geo_location ||= new_resource.geo_location
    end
  end

  def set_identifier
    @set_identifier ||= new_resource.set_identifier
  end

  # NOTE: `||=` on boolean properties re-reads the resource when the value is
  # false; harmless here because the underlying value does not change.
  def overwrite?
    @overwrite ||= new_resource.overwrite
  end

  def alias_target
    @alias_target ||= new_resource.alias_target
  end

  def mock?
    @mock ||= new_resource.mock
  end

  def zone_id
    @zone_id ||= new_resource.zone_id
  end

  def fail_on_error
    @fail_on_error ||= new_resource.fail_on_error
  end

  # Lazily build the Route53 client: stubbed in mock mode, with explicit
  # credentials when given, otherwise the SDK's automatic credential chain.
  def route53
    @route53 ||= begin
      if mock?
        @route53 = Aws::Route53::Client.new(stub_responses: true)
      elsif new_resource.aws_access_key_id && new_resource.aws_secret_access_key
        credentials = Aws::Credentials.new(new_resource.aws_access_key_id, new_resource.aws_secret_access_key)
        @route53 = Aws::Route53::Client.new(
          credentials: credentials,
          region: new_resource.aws_region
        )
      else
        Chef::Log.info 'No AWS credentials supplied, going to attempt to use automatic credentials from IAM or ENV'
        @route53 = Aws::Route53::Client.new(
          region: new_resource.aws_region
        )
      end
    end
  end

  # Desired record set, shaped for the change_resource_record_sets API.
  def resource_record_set
    rr_set = {
      name: name,
      type: type,
    }
    if alias_target
      rr_set[:alias_target] = alias_target
    elsif geo_location
      rr_set[:set_identifier] = set_identifier
      rr_set[:geo_location] = geo_location
      rr_set[:ttl] = ttl
      rr_set[:resource_records] = value.sort.map { |v| { value: v } }
    else
      rr_set[:ttl] = ttl
      rr_set[:resource_records] = value.sort.map { |v| { value: v } }
    end
    rr_set
  end

  # Current live record set for (name, type), as a plain hash; {} when none.
  def current_resource_record_set
    # List all the resource records for this zone:
    lrrs = route53
           .list_resource_record_sets(
             hosted_zone_id: "/hostedzone/#{zone_id}",
             start_record_name: name
           )

    # Select current resource record set by name
    current = lrrs[:resource_record_sets]
              .select { |rr| rr[:name] == name && rr[:type] == type }.first

    # return as hash, converting resource record
    # array of structs to array of hashes
    if current
      crr_set = {
        name: current[:name],
        type: current[:type],
      }
      crr_set[:alias_target] = current[:alias_target].to_h unless current[:alias_target].nil?
      crr_set[:ttl] = current[:ttl] unless current[:ttl].nil?
      crr_set[:resource_records] = current[:resource_records].sort_by(&:value).map(&:to_h) unless current[:resource_records].empty?
      crr_set
    else
      {}
    end
  end

  # Issue a CREATE/UPSERT/DELETE change batch; on AWS errors, raise when
  # fail_on_error is set, otherwise log and continue.
  def change_record(action)
    request = {
      hosted_zone_id: "/hostedzone/#{zone_id}",
      change_batch: {
        comment: "Chef Route53 Resource: #{name}",
        changes: [
          {
            action: action,
            resource_record_set: resource_record_set,
          },
        ],
      },
    }
    converge_by("#{action} record #{new_resource.name} ") do
      response = route53.change_resource_record_sets(request)
      Chef::Log.debug "Changed record - #{action}: #{response.inspect}"
    end
  rescue Aws::Route53::Errors::ServiceError => e
    raise if fail_on_error
    # Fixed: missing space between the action verb and "request" in the log.
    Chef::Log.error "Error with #{action} request: #{request.inspect} ::: "
    Chef::Log.error e.message
  end
end
| 28.381818 | 131 | 0.642857 |
d59c2a7e6a04867370c5cc9a95d43f3bc2d658a4 | 149 | class RemoveMeasurementsFromTanks < ActiveRecord::Migration[5.2]
  # Drops the tank reference (tank_id column and its foreign key) from
  # the measurements table; reversible via change.
  def change
    remove_reference :measurements, :tank, foreign_key: true
  end
end
| 24.833333 | 64 | 0.785235 |
e204336fcb6901c41bba7c907b9391df358626c9 | 3,807 | # frozen_string_literal: true
require "spec_helper"
describe Lita::Authorization, lita: true do
  # Specs for Lita's authorization-group management; user "1" is configured
  # as an admin in the registry before each example.
  let(:requesting_user) { instance_double("Lita::User", id: "1") }
  let(:robot) { Lita::Robot.new(registry) }
  let(:user) { instance_double("Lita::User", id: "2") }

  subject { described_class.new(robot) }

  before do
    registry.config.robot.admins = ["1"]
  end

  describe "#add_user_to_group" do
    it "adds users to an auth group" do
      subject.add_user_to_group(requesting_user, user, "employees")

      expect(subject.user_in_group?(user, "employees")).to be true
    end

    it "can only be called by admins" do
      # Clearing admins makes the requesting user unauthorized.
      registry.config.robot.admins = nil

      result = subject.add_user_to_group(
        requesting_user,
        user,
        "employees"
      )

      expect(result).to eq(:unauthorized)
      expect(subject.user_in_group?(user, "employees")).to be false
    end

    it "normalizes the group name" do
      subject.add_user_to_group(requesting_user, user, "eMPLoYeeS")

      expect(subject.user_in_group?(user, " EmplOyEEs ")).to be true
    end
  end

  describe "#remove_user_from_group" do
    it "removes users from an auth group" do
      subject.add_user_to_group(requesting_user, user, "employees")
      subject.remove_user_from_group(requesting_user, user, "employees")

      expect(subject.user_in_group?(user, "employees")).to be false
    end

    it "can only be called by admins" do
      subject.add_user_to_group(requesting_user, user, "employees")
      registry.config.robot.admins = nil

      result = subject.remove_user_from_group(
        requesting_user,
        user,
        "employees"
      )

      expect(result).to eq(:unauthorized)
      expect(subject.user_in_group?(user, "employees")).to be true
    end

    it "normalizes the group name" do
      subject.add_user_to_group(requesting_user, user, "eMPLoYeeS")
      subject.remove_user_from_group(requesting_user, user, "EmployeeS")

      expect(subject.user_in_group?(user, " EmplOyEEs ")).to be false
    end
  end

  describe "#user_in_group?" do
    it "returns false if the user is in the group" do
      expect(subject.user_in_group?(user, "employees")).to be false
    end

    it "delegates to .user_is_admin? if the group is admins" do
      expect(subject).to receive(:user_is_admin?)

      subject.user_in_group?(user, "admins")
    end
  end

  describe "#user_is_admin?" do
    it "returns true if the user's ID is in the config" do
      expect(subject.user_is_admin?(requesting_user)).to be true
    end

    it "returns false if the user's ID is not in the config" do
      registry.config.robot.admins = nil

      expect(subject.user_is_admin?(user)).to be false
    end
  end

  describe "#groups" do
    before do
      %i[foo bar baz].each do |group|
        subject.add_user_to_group(requesting_user, user, group)
      end
    end

    it "returns a list of all authorization groups" do
      expect(subject.groups).to match_array(%i[foo bar baz])
    end
  end

  describe "#groups_with_users" do
    before do
      %i[foo bar baz].each do |group|
        subject.add_user_to_group(requesting_user, user, group)
        subject.add_user_to_group(
          requesting_user,
          requesting_user,
          group
        )
      end
      # Stub user lookup so group membership resolves back to user doubles.
      allow(Lita::User).to receive(:find_by_id).with("1").and_return(requesting_user)
      allow(Lita::User).to receive(:find_by_id).with("2").and_return(user)
    end

    it "returns a hash of all authorization groups and their members" do
      groups = %i[foo bar baz]
      groups_with_users = subject.groups_with_users

      expect(groups_with_users.keys).to match_array(groups)

      groups.each do |group|
        expect(groups_with_users[group]).to match_array([user, requesting_user])
      end
    end
  end
end
| 30.95122 | 85 | 0.673496 |
e91e22f3527fa3fd91f2efd4a69572e6df9032d1 | 447 | require 'thor/group'
module Foodie
module Generators
# Thor generator that creates a group directory and renders the recipe
# template into it as <group>/<name>.txt.
class Recipe < Thor::Group
  include Thor::Actions

  argument :group, :type => :string
  argument :name, :type => :string

  def create_group
    empty_directory(group)
  end

  def copy_recipe
    template("recipe.txt", "#{group}/#{name}.txt")
  end

  # Directory Thor resolves templates against.
  def self.source_root
    File.dirname(__FILE__) + "/recipe"
  end
end
end
end | 17.88 | 54 | 0.595078 |
bb3eb856ed30d82cf9d4029074d0fe7f5162e497 | 848 | require 'hashie'
require 'faraday'
require 'gmxcheckout/version'
module GmxCheckout
  # Top-level namespace for the gmxCheckout API client; components are
  # autoloaded on first reference.
  autoload :API, 'gmxcheckout/api'
  autoload :Base, 'gmxcheckout/base'
  autoload :JsonMiddleware, 'gmxcheckout/json_middleware'
  autoload :Model, 'gmxcheckout/model'
  autoload :Response, 'gmxcheckout/response'
  autoload :Subscriptions, 'gmxcheckout/subscriptions'

  module Models
    autoload :Transaction, 'gmxcheckout/models/transaction'
    autoload :Notification, 'gmxcheckout/models/notification'
  end

  class << self
    # Global API key used to authenticate requests.
    attr_accessor :api_key
  end

  # Module-level configuration entry point, e.g.
  #   GmxCheckout.configure { self.api_key = '...' }
  def self.configure(&block)
    instance_eval(&block)
  end

  # Transaction status names mapped to the service's numeric codes.
  def self.statuses
    @statuses ||= {
      initiated: 0,
      captured: 1,
      cancelled: 3,
      suspended: 4,
      approved: 5,
      rejected: 6,
      incommunicable: 7
    }
  end
end
| 21.74359 | 61 | 0.665094 |
6a3a71e99986ff9973675189b5ca59199effeb11 | 973 | #!/usr/bin/env ruby
# This script only support ASCII format
# and tick download.
require 'uri'
require 'net/http'
require 'mechanize'
# Download monthly HistData tick archives for the configured instrument,
# skipping any archive already present on disk.
agent = Mechanize.new
# Treat every response as a file download (we fetch zip archives).
agent.pluggable_parser.default = Mechanize::Download

# Use each-style range iteration instead of `for` (which leaks loop vars)
(2020..2023).each do |i_date| # change date
  date = i_date.to_s
  (1..12).each do |i_month| # change month
    month = '%02d' % i_month
    datemonth = date + month
    fxpair = 'SPXUSD' # change your instrument
    platform = 'ASCII'
    timeframe = 'T'
    saved_name = 'HISTDATA_COM_' + [platform, fxpair, timeframe, datemonth].join('_') + '.zip'
    referer_uri = "http://www.histdata.com/download-free-forex-historical-data/?/#{platform.downcase}/tick-data-quotes/#{fxpair.downcase}/#{date}/#{month}"
    puts "Downloading: #{saved_name}"
    # Skip archives that were already downloaded (fixed: parenthesized call).
    if File.exist?(saved_name)
      puts "Downloaded: #{saved_name}, skipping."
      next
    end
    # The download form must be submitted from the referer page.
    p = agent.get(referer_uri)
    next_p = p.form_with(:name => 'file_down').submit
    next_p.save(saved_name)
  end
end
| 26.297297 | 155 | 0.679342 |
bbe5397d9c1f3b76b47f0e0841e3112f02cdb176 | 442 | # InSpec test for recipe pola::default
# The InSpec reference, with examples and extensive documentation, can be
# found at https://www.inspec.io/docs/reference/resources/
# Placeholder InSpec controls; both are marked :skip so they never run.
unless os.windows?
  # This is an example test, replace with your own test.
  describe user('root'), :skip do
    it { should exist }
  end
end

# This is an example test, replace it with your own test.
describe port(80), :skip do
  it { should_not be_listening }
end
| 26 | 73 | 0.728507 |
1da791c0d22e79b4c531ee7d4f920e01725b485f | 862 | class LmSensors < Formula
  # Homebrew formula (Linux-only) for the lm-sensors hardware monitoring tools.
  desc "Tools for monitoring the temperatures, voltages, and fans"
  homepage "https://github.com/groeck/lm-sensors"
  url "https://github.com/lm-sensors/lm-sensors/archive/V3-6-0.tar.gz"
  version "3.6.0"
  sha256 "0591f9fa0339f0d15e75326d0365871c2d4e2ed8aa1ff759b3a55d3734b7d197"
  license any_of: ["GPL-2.0-or-later", "LGPL-2.1-or-later"]

  bottle do
    sha256 x86_64_linux: "bf3fea16c4ebf78f4234e9c7d00088fb4990433d135e5bb958a1c107dcbf63cd" # linuxbrew-core
  end

  depends_on "bison" => :build
  depends_on "flex" => :build
  depends_on :linux

  def install
    # The upstream Makefile is driven entirely by variables; no configure step.
    args = %W[
      PREFIX=#{prefix}
      BUILD_STATIC_LIB=0
      MANDIR=#{man}
      ETCDIR=#{prefix}/etc
    ]
    system "make", *args
    system "make", *args, "install"
  end

  test do
    assert_match("Usage", shell_output("#{bin}/sensors --help"))
  end
end
| 26.9375 | 108 | 0.696056 |
f8d045d4fbb0978e13dd0881d4157a626f3d2b4e | 557 | class User < ActiveRecord::Base
has_many :lightbulbs
belongs_to :family
# Look up the user by OmniAuth uid (building a new record when absent),
# sync profile fields and OAuth credentials from the auth hash, and save.
# Note: last_name is only the second whitespace-separated word of the name.
def self.from_omniauth(auth)
  where({uid: auth.uid}).first_or_initialize.tap do |user|
    info = auth.info
    given_name, family_name = info.name.split(' ')
    user.uid = auth.uid
    user.name = info.name
    user.first_name = given_name
    user.image_url = info.image
    user.last_name = family_name
    user.email = info.email
    user.oauth_token = auth.credentials.token
    user.oauth_expires_at = Time.at(auth.credentials.expires_at)
    user.save!
  end
end
end
| 29.315789 | 66 | 0.671454 |
1a55b0924a1727f0da4dca99d1a132a94ebc8185 | 27,697 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_09_01
#
# ExpressRouteGateways
#
class ExpressRouteGateways
include MsRestAzure
#
# Creates and initializes a new instance of the ExpressRouteGateways class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
  # Keep a reference to the shared NetworkManagementClient used by all calls.
  @client = client
end
# @return [NetworkManagementClient] reference to the NetworkManagementClient
attr_reader :client
#
# Lists ExpressRoute gateways under a given subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGatewayList] operation results.
#
def list_by_subscription(custom_headers:nil)
  # Run the async variant to completion and return just the parsed body.
  operation_response = list_by_subscription_async(custom_headers:custom_headers).value!
  operation_response.body unless operation_response.nil?
end
#
# Lists ExpressRoute gateways under a given subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_subscription_with_http_info(custom_headers:nil)
  # Like #list_by_subscription, but returns the full operation response
  # wrapper (status, headers, body) instead of just the body.
  list_by_subscription_async(custom_headers:custom_headers).value!
end
#
# Lists ExpressRoute gateways under a given subscription.
#
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_subscription_async(custom_headers:nil)
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteGateways'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Any status other than 200 is surfaced as an AzureOperationError.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate Azure request-tracing headers onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGatewayList.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Lists ExpressRoute gateways in a given resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGatewayList] operation results.
#
def list_by_resource_group(resource_group_name, custom_headers:nil)
  # Run the async variant to completion and return just the parsed body.
  operation_response = list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value!
  operation_response.body unless operation_response.nil?
end
#
# Lists ExpressRoute gateways in a given resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_with_http_info(resource_group_name, custom_headers:nil)
  # Like #list_by_resource_group, but returns the full operation response
  # wrapper (status, headers, body) instead of just the body.
  list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value!
end
#
# Lists ExpressRoute gateways in a given resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_async(resource_group_name, custom_headers:nil)
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Any status other than 200 is surfaced as an AzureOperationError.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate Azure request-tracing headers onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGatewayList.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGateway] operation results.
#
def create_or_update(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
response = create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
# Send request
promise = begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGateway.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Fetches the details of a ExpressRoute gateway in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGateway] operation results.
#
def get(resource_group_name, express_route_gateway_name, custom_headers:nil)
response = get_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Fetches the details of a ExpressRoute gateway in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, express_route_gateway_name, custom_headers:nil)
get_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
end
#
# Fetches the details of a ExpressRoute gateway in a resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
    # @param custom_headers [Hash{String => String}] A hash of custom headers
    # that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, express_route_gateway_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'express_route_gateway_name is nil' if express_route_gateway_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'expressRouteGatewayName' => express_route_gateway_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGateway.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(resource_group_name, express_route_gateway_name, custom_headers:nil)
response = delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
nil
end
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(resource_group_name, express_route_gateway_name, custom_headers:nil)
# Send request
promise = begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExpressRouteGateway] operation results.
#
def begin_create_or_update(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
response = begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_or_update_with_http_info(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:custom_headers).value!
end
#
# Creates or updates a ExpressRoute gateway in a specified resource group.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param put_express_route_gateway_parameters [ExpressRouteGateway] Parameters
# required in an ExpressRoute gateway PUT operation.
    # @param custom_headers [Hash{String => String}] A hash of custom headers
    # that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_or_update_async(resource_group_name, express_route_gateway_name, put_express_route_gateway_parameters, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'express_route_gateway_name is nil' if express_route_gateway_name.nil?
fail ArgumentError, 'put_express_route_gateway_parameters is nil' if put_express_route_gateway_parameters.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGateway.mapper()
request_content = @client.serialize(request_mapper, put_express_route_gateway_parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'expressRouteGatewayName' => express_route_gateway_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 201
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGateway.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::ExpressRouteGateway.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete(resource_group_name, express_route_gateway_name, custom_headers:nil)
response = begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
nil
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(resource_group_name, express_route_gateway_name, custom_headers:nil)
begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:custom_headers).value!
end
#
# Deletes the specified ExpressRoute gateway in a resource group. An
# ExpressRoute gateway resource can only be deleted when there are no
# connection subresources.
#
# @param resource_group_name [String] The name of the resource group.
# @param express_route_gateway_name [String] The name of the ExpressRoute
# gateway.
    # @param custom_headers [Hash{String => String}] A hash of custom headers
    # that will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_async(resource_group_name, express_route_gateway_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'express_route_gateway_name is nil' if express_route_gateway_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'expressRouteGatewayName' => express_route_gateway_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 202 || status_code == 200 || status_code == 204
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
end
end
| 46.008306 | 170 | 0.716937 |
0835199231dba5d61b72ace9b21bed1d33c71825 | 220 | require 'spec_helper'
RSpec.describe ArrayPrinter do
  it 'can properly format its output' do
    # Downcases each letter and joins with single spaces.
    letters = %w[A B C D H L K J I E F G]
    printed = described_class.new(letters).print
    expect(printed).to eq('a b c d h l k j i e f g')
  end
end
| 24.444444 | 74 | 0.663636 |
1c3cd5d3dc5abca84966616d68fe972c8a14e983 | 152 | require_relative './mugatu/version'
require_relative './mugatu/entity'
require_relative './mugatu/attribute'
require_relative './mugatu/attribute_type'
| 30.4 | 42 | 0.815789 |
f7c0438691c4f55c5dcf00153bc8feceac023086 | 2,832 | class RequestsController < ApplicationController
before_action :set_request, only: [:show, :edit, :update, :destroy]
before_action :authenticate_user!
before_action :require_permission, only: [:show, :edit, :update]
# GET /requests
# GET /requests.json
def index
@incoming_requests = Request.where(:user_id => current_user.id).where.not(:ignore => 'true', :accept => 'true')
@outgoing_requests = Request.where(:passenger_id => current_user.id).where.not(:ignore => 'true', :accept => 'true')
end
# GET /requests/1
# GET /requests/1.json
def show
end
# GET /requests/new
def new
@request = Request.new
end
# GET /requests/1/edit
# def edit
# end
# POST /requests
# POST /requests.json
def create
@request = Request.new(request_params)
respond_to do |format|
if @request.save
format.html { redirect_to rides_path, notice: 'Ride request was sent successfully.' }
format.json { render :show, status: :created, location: @request}
else
format.html { render :new }
format.json { render json: @request.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /requests/1
# PATCH/PUT /requests/1.json
def update
respond_to do |format|
if @request.update(request_params)
format.html { redirect_to requests_path, notice: 'Request was successfully updated.' }
format.json { render :show, status: :ok, location: @request }
else
format.html { render :edit }
format.json { render json: @request.errors, status: :unprocessable_entity }
end
end
end
# DELETE /requests/1
# DELETE /requests/1.json
def destroy
@request.destroy
respond_to do |format|
format.html { redirect_to requests_url, notice: 'Request was successfully destroyed.' }
format.json { head :no_content }
end
end
def driver_bookings
@rides = Ride.where(:user_id => current_user.id)
@driver_bookings = Request.where(:user_id => current_user.id, :accept => 'true')
end
def passenger_bookings
@rides = Ride.where(:passenger_id => current_user.id)
@passenger_bookings = Request.where(:passenger_id => current_user.id, :accept => 'true')
end
private
# Use callbacks to share common setup or constraints between actions.
def set_request
@request = Request.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def request_params
params.require(:request).permit(:user_id, :ride_id, :passenger, :ignore, :passenger_id, :accept)
end
def require_permission
if current_user.id != Request.find(params[:id]).user_id
redirect_to requests_path, alert: "You're only allowed to edit your own requests"
end
end
end
| 29.5 | 120 | 0.671963 |
01ade74f7b7281e8ca1facd8a1b6439f21d394d3 | 1,851 | #
# Be sure to run `pod lib lint CAFSequenceImageView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  # Identity
  s.name    = 'CAFSequenceImageView'
  s.version = '0.2.0'
  s.summary = 'CAFSequenceImageView allows you to easily display sequence of images'
  # Longer description shown on the CocoaPods index (indentation is stripped).
  s.description = <<-DESC
Image view used to display a sequence of image. You can customize the image sequence and the speed of the change.
  DESC

  # Project metadata
  s.homepage = 'https://github.com/Fourni-j/CAFSequenceImageView'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'Fourni-j' => '[email protected]' }
  s.source   = { :git => 'https://github.com/Fourni-j/CAFSequenceImageView.git', :tag => s.version.to_s }

  # Platform / toolchain
  s.ios.deployment_target = '10.0'
  s.pod_target_xcconfig   = { 'SWIFT_VERSION' => '4.0' }
  s.swift_version         = '4.0'

  # Sources
  s.source_files = 'CAFSequenceImageView/Classes/**/*'
end
| 39.382979 | 115 | 0.659103 |
f725866fce8612b0263c9f90f6ae1915cb3e46b1 | 106 | Pollett.configure do |config|
config.reset_url = ->(token) { "https://example.com/#{token}/reset" }
end
| 26.5 | 71 | 0.679245 |
ab2f00ca1ae3e30648827f09faa8c5d54e248c01 | 228 | # frozen_string_literal: true
require 'graphql'
require_relative 'base_object'
module Types
  # GraphQL object type exposing a game record to the schema.
  class Game < BaseObject
    description 'Games item'
    # Unique identifier of the game (non-null).
    field :id, ID, null: false
    # Human-readable game name (non-null).
    field :name, String, null: false
  end
end
| 16.285714 | 36 | 0.719298 |
e97a2bd26202a192ed70e5ae357db470d72a8cf8 | 272 | module Tomo
class Runtime
class TaskAbortedError < Tomo::Error
attr_accessor :task, :host
def to_console
<<~ERROR
The #{yellow(task)} task failed on #{yellow(host)}.
#{red(message)}
ERROR
end
end
end
end
| 17 | 61 | 0.5625 |
267fda77b5d99a232fc4087efd68a65f1b76c28b | 5,547 | #
# Author:: AJ Christensen (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "spec_helper"
require "ostruct"
describe Chef::Daemon do
  before do
    if windows?
      # Etc/Process privilege APIs are not fully implemented on Windows, so
      # stand in a fake passwd/group entry and stub the missing calls.
      # Bug fix: this was previously written as a dangling `mock_struct =`
      # followed by a commented-out expression, silently relying on Ruby's
      # line continuation into the next assignment.
      mock_struct = OpenStruct.new(:uid => 2342, :gid => 2342)
      allow(Etc).to receive(:getpwnam).and_return mock_struct
      allow(Etc).to receive(:getgrnam).and_return mock_struct
      # mock unimplemented methods
      allow(Process).to receive(:initgroups).and_return nil
      allow(Process::GID).to receive(:change_privilege).and_return 11
      allow(Process::UID).to receive(:change_privilege).and_return 11
    end
  end

  describe ".pid_file" do
    describe "when the pid_file option has been set" do
      before do
        Chef::Config[:pid_file] = "/var/run/chef/chef-client.pid"
      end
      it "should return the supplied value" do
        expect(Chef::Daemon.pid_file).to eql("/var/run/chef/chef-client.pid")
      end
    end
    describe "without the pid_file option set" do
      before do
        Chef::Daemon.name = "chef-client"
      end
      it "should return a valued based on @name" do
        expect(Chef::Daemon.pid_file).to eql("/tmp/chef-client.pid")
      end
    end
  end

  describe ".pid_from_file" do
    before do
      Chef::Config[:pid_file] = "/var/run/chef/chef-client.pid"
    end
    it "should suck the pid out of pid_file" do
      expect(File).to receive(:read).with("/var/run/chef/chef-client.pid").and_return("1337")
      Chef::Daemon.pid_from_file
    end
  end

  describe ".change_privilege" do
    before do
      allow(Chef::Application).to receive(:fatal!).and_return(true)
      Chef::Config[:user] = "aj"
      allow(Dir).to receive(:chdir)
    end
    it "changes the working directory to root" do
      expect(Dir).to receive(:chdir).with("/").and_return(0)
      Chef::Daemon.change_privilege
    end
    describe "when the user and group options are supplied" do
      before do
        Chef::Config[:group] = "staff"
      end
      it "should log an appropriate info message" do
        expect(Chef::Log).to receive(:info).with("About to change privilege to aj:staff")
        Chef::Daemon.change_privilege
      end
      it "should call _change_privilege with the user and group" do
        expect(Chef::Daemon).to receive(:_change_privilege).with("aj", "staff")
        Chef::Daemon.change_privilege
      end
    end
    describe "when just the user option is supplied" do
      it "should log an appropriate info message" do
        expect(Chef::Log).to receive(:info).with("About to change privilege to aj")
        Chef::Daemon.change_privilege
      end
      it "should call _change_privilege with just the user" do
        expect(Chef::Daemon).to receive(:_change_privilege).with("aj")
        Chef::Daemon.change_privilege
      end
    end
  end

  describe "._change_privilege" do
    before do
      # Default to running as root; individual contexts override euid/egid.
      allow(Process).to receive(:euid).and_return(0)
      allow(Process).to receive(:egid).and_return(0)
      allow(Process::UID).to receive(:change_privilege).and_return(nil)
      allow(Process::GID).to receive(:change_privilege).and_return(nil)
      @pw_user = double("Struct::Passwd", :uid => 501)
      @pw_group = double("Struct::Group", :gid => 20)
      allow(Process).to receive(:initgroups).and_return(true)
      allow(Etc).to receive(:getpwnam).and_return(@pw_user)
      allow(Etc).to receive(:getgrnam).and_return(@pw_group)
    end
    describe "with sufficient privileges" do
      before do
        allow(Process).to receive(:euid).and_return(0)
        allow(Process).to receive(:egid).and_return(0)
      end
      it "should initialize the supplemental group list" do
        expect(Process).to receive(:initgroups).with("aj", 20)
        Chef::Daemon._change_privilege("aj")
      end
      it "should attempt to change the process GID" do
        expect(Process::GID).to receive(:change_privilege).with(20).and_return(20)
        Chef::Daemon._change_privilege("aj")
      end
      it "should attempt to change the process UID" do
        expect(Process::UID).to receive(:change_privilege).with(501).and_return(501)
        Chef::Daemon._change_privilege("aj")
      end
    end
    describe "with insufficient privileges" do
      before do
        allow(Process).to receive(:euid).and_return(999)
        allow(Process).to receive(:egid).and_return(999)
      end
      it "should log an appropriate error message and fail miserably" do
        allow(Process).to receive(:initgroups).and_raise(Errno::EPERM)
        # Errno::EPERM's message text differs on Solaris and AIX.
        error = "Operation not permitted"
        if RUBY_PLATFORM.match("solaris2") || RUBY_PLATFORM.match("aix")
          error = "Not owner"
        end
        expect(Chef::Application).to receive(:fatal!).with("Permission denied when trying to change 999:999 to 501:20. #{error}")
        Chef::Daemon._change_privilege("aj")
      end
    end
  end
end
| 31.697143 | 129 | 0.667027 |
01d92b9ade8bfc2fd517e3d2496140927b5618bd | 1,109 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Cv
  # Rails application configuration for the Cv app. Environment-specific
  # settings in config/environments/* take precedence over values set here.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Do not swallow errors in after_commit/after_rollback callbacks.
    # (Rails 4.2 opt-in; later Rails versions make this the only behavior.)
    config.active_record.raise_in_transactional_callbacks = true
  end
end
| 41.074074 | 99 | 0.732191 |
bb2240da8055388cb64219e7850699b80f84f4a1 | 3,144 | module TournamentSystem
module Algorithm
# This module provides utility functions for helping implement other
# algorithms.
module Util
extend self
# @deprecated Please use {#padd_teams_even} instead.
def padd_teams(teams)
message = 'NOTE: padd_teams is now deprecated in favour of padd_teams_even. '\
'It will be removed in the next major version.'\
"Util.padd_teams called from #{Gem.location_of_caller.join(':')}"
warn message unless Gem::Deprecate.skip
padd_teams_even(teams)
end
# Padd an array of teams to be even.
#
# @param teams [Array<team>]
# @return [Array<team, nil>]
def padd_teams_even(teams)
if teams.length.odd?
teams + [nil]
else
teams
end
end
# @deprecated Please use {#padded_teams_even_count}
def padded_teams_count(teams_count)
message = 'Node: padded_teams_count is now deprecated in favour of padded_teams_even_count. '\
'It will be removed in the next major version.'\
"Util.padded_teams_count called from #{Gem.location_of_caller.join(':')}"
warn message unless Gem::Deprecate.skip
padded_teams_even_count(teams_count)
end
# Padd the count of teams to be even.
#
# @example
# padded_teams_even_count(teams.length) == padd_teams_even(teams).length
#
# @param teams_count [Integer] the number of teams
# @return [Integer]
def padded_teams_even_count(teams_count)
(teams_count / 2.0).ceil * 2
end
# pow2 is not uncommunicative
# :reek:UncommunicativeMethodName
# Padd an array of teams to the next power of 2.
#
# @param teams [Array<team>]
# @return [Array<team, nil>]
def padd_teams_pow2(teams)
required = padded_teams_pow2_count(teams.length)
Array.new(required) { |index| teams[index] }
end
# Padd the count of teams to be a power of 2.
#
# @example
# padded_teams_pow2_count(teams.length) == padd_teams_pow2(teams).length
#
# @param teams_count [Integer] the number of teams
# @return [Integer]
def padded_teams_pow2_count(teams_count)
2**Math.log2(teams_count).ceil
end
# rubocop:disable Metrics/MethodLength
# Collect all values in an array with a minimum value.
#
# @param array [Array<element>]
# @yieldparam element an element of the array
# @yieldreturn [#<, #==] some value to find the minimum of
# @return [Array<element>] all elements with the minimum value
def all_min_by(array)
min_elements = []
min_value = nil
array.each do |element|
value = yield element
if !min_value || value < min_value
min_elements = [element]
min_value = value
elsif value == min_value
min_elements << element
end
end
min_elements
end
# rubocop:enable Metrics/MethodLength
end
end
end
| 30.230769 | 102 | 0.611323 |
9123546c03c84d3a92039fc247d3ed84d4c55876 | 5,815 | require File.expand_path("../lib/reptar", File.dirname(__FILE__))
require "ostruct"
# Test doubles: Reptar only needs objects that respond to the declared
# attribute readers, so OpenStruct stands in for real models.
User = OpenStruct
Post = OpenStruct
Company = OpenStruct
# NOTE(review): each test reassigns the same top-level constants
# (UserReptar/PostReptar/CompanyReptar), which emits "already initialized
# constant" warnings; assertions also depend on hash-literal insertion order
# matching Reptar's to_json output order.
test "single attribute" do
  UserReptar = Class.new(Reptar) do
    attribute :name
  end
  user = User.new(name: "Julio")
  result = {name: "Julio"}.to_json
  assert_equal UserReptar.new(user).to_json, result
end
test "nil attribute" do
  UserReptar = Class.new(Reptar) do
    attribute :name
  end
  user = User.new(name: nil)
  result = {name: nil}.to_json
  assert_equal UserReptar.new(user).to_json, result
end
test "changing the key attribute" do
  PostReptar = Class.new(Reptar) do
    attribute :slug, key: :url
  end
  post = Post.new(slug: "this-is-cool")
  result = {url: "this-is-cool"}.to_json
  assert_equal PostReptar.new(post).to_json, result
end
test "multiple attributes" do
  UserReptar = Class.new(Reptar) do
    attributes :name, :email
  end
  user = User.new(name: "Julio", email: "[email protected]")
  result = {name: "Julio", email: "[email protected]"}.to_json
  assert_equal UserReptar.new(user).to_json, result
end
# A method defined on the representer shadows the delegated attribute.
test "method as attribute" do
  PostReptar = Class.new(Reptar) do
    attribute :slug
    def slug
      "#{name}-#{id}"
    end
  end
  post = Post.new(name: "a-demo-post", id: 1)
  result = {slug: "a-demo-post-1"}.to_json
  assert_equal PostReptar.new(post).to_json, result
end
test "aplying root to single element" do
  UserReptar = Class.new(Reptar) do
    attribute :name
  end
  user = User.new(name: "Julio")
  result = {user: {name: "Julio"}}.to_json
  assert_equal UserReptar.new(user).to_json(root: :user), result
end
test "aplying root to a multiple elements" do
  UserReptar = Class.new(Reptar) do
    attribute :name
  end
  users = [User.new(name: "Julio"), User.new(name: "Piero")]
  result = {
    users: [
      { name: "Julio" },
      { name: "Piero" }
    ]
  }.to_json
  assert_equal UserReptar.new(users).to_json(root: :users), result
end
test "initialize with an array" do
  UserReptar = Class.new(Reptar) do
    attribute :name
  end
  users = [User.new(name: "Julio"), User.new(name: "Piero")]
  result = [
    { name: "Julio" },
    { name: "Piero" }
  ].to_json
  assert_equal UserReptar.new(users).to_json, result
end
test "array collection" do
  UserReptar = Class.new(Reptar) do
    attribute :name
    collection :languages
  end
  user = User.new(name: "Julio", languages: ["Ruby", "Js", "Go"])
  result = {
    name: "Julio",
    languages: ["Ruby", "Js", "Go"]
  }.to_json
  assert_equal UserReptar.new(user).to_json, result
end
# Nested representers are referenced by class name string via `with:`.
test "a single representable association" do
  UserReptar = Class.new(Reptar) do
    attribute :name
    attribute :company, with: "CompanyReptar"
  end
  CompanyReptar = Class.new(Reptar) do
    attribute :name
  end
  user = User.new(name: "Julio")
  user.company = Company.new(name: "Codalot")
  result = {
    name: "Julio",
    company: {
      name: "Codalot"
    }
  }.to_json
  assert_equal UserReptar.new(user).to_json, result
end
test "single representable association as nil" do
  UserReptar = Class.new(Reptar) do
    attribute :name
    attribute :company, with: "CompanyReptar"
  end
  user = User.new(name: "Julio")
  user.company = nil
  result = {
    name: "Julio",
    company: nil
  }.to_json
  assert_equal UserReptar.new(user).to_json, result
end
test "array with association" do
UserReptar = Class.new(Reptar) do
attribute :name
attribute :company, with: "CompanyReptar"
end
CompanyReptar = Class.new(Reptar) do
attribute :name
end
users = [
User.new(name: "Julio", company: Company.new(name: "Codalot")),
User.new(name: "Piero", company: Company.new(name: "Velocis"))
]
result = [
{
name: "Julio",
company: {
name: "Codalot"
}
},
{
name: "Piero",
company: {
name: "Velocis"
}
}
].to_json
assert_equal UserReptar.new(users).to_json, result
end
test "serializes object with collection" do
UserReptar = Class.new(Reptar) do
attribute :name
collection :posts, with: "PostReptar"
end
PostReptar = Class.new(Reptar) do
attribute :title
end
user = User.new(name: "Julio")
user.posts = [
Post.new(title: "Awesome title"),
Post.new(title: "Cats are dominating the world right now")
]
result = {
name: "Julio",
posts: [
{
title: "Awesome title"
},
{
title: "Cats are dominating the world right now"
}
]
}.to_json
assert_equal UserReptar.new(user).to_json, result
end
test "array with representable collections" do
UserReptar = Class.new(Reptar) do
attribute :name
collection :posts, with: "PostReptar"
end
PostReptar = Class.new(Reptar) do
attribute :title
end
users = [
User.new(
name: "Julio",
posts: [
Post.new(title: "Hi, I'm a dog")
]
),
User.new(
name: "Piero",
posts: [
Post.new(title: "I like turtles"),
Post.new(title: "Please come back PT!"),
]
)
]
result = [
{
name: "Julio",
posts: [
{
title: "Hi, I'm a dog"
}
]
},
{
name: "Piero",
posts: [
{
title: "I like turtles"
},
{
title: "Please come back PT!"
}
]
}
].to_json
assert_equal UserReptar.new(users).to_json, result
end
test "custom association method" do
PostReptar = Class.new(Reptar) do
attribute :title
end
UserReptar = Class.new(Reptar) do
collection :posts, with: "PostReptar"
def posts
[Post.new(title: "I like turtles")]
end
end
user = User.new
result = { posts: [{title: "I like turtles" }] }.to_json
assert_equal UserReptar.new(user).to_json, result
end | 20.261324 | 67 | 0.624764 |
87de446dd3306d8da7c99d98f84d724fa24cc96f | 1,792 | #
# Author:: Vasundhara Jagdale (<[email protected]>)
# Copyright:: Copyright 2008-2016, Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
# Unit specs for the cab_package resource: class hierarchy, defaults, and the
# name -> package_name/source coercions.
describe Chef::Resource::CabPackage do
  let(:resource) { Chef::Resource::CabPackage.new("test_pkg") }
  it "is a subclass of Chef::Resource::Package" do
    expect(resource).to be_a_kind_of(Chef::Resource::Package)
  end
  it "sets resource name as :cab_package" do
    expect(resource.resource_name).to eql(:cab_package)
  end
  it "sets the default action as :install" do
    expect(resource.action).to eql([:install])
  end
  it "coerces name property to package_name property" do
    expect(resource.package_name).to eql("test_pkg")
  end
  it "coerces name property to a source property if source not provided" do
    expect(resource.source).to end_with("test_pkg")
  end
  it "coerces name property to a source property if source not provided and package_name is" do
    resource.package_name("package.cab")
    expect(resource.source).to end_with("package.cab")
  end
  # Description previously read "does not looks like" (grammar fix only).
  it "coerces source property if it does not look like a path" do
    resource.source("package.cab")
    expect(resource.source).not_to eq("package.cab")
  end
end
| 32.581818 | 95 | 0.739955 |
08a911d049ae8221cae3be60d6a088bc11dcd876 | 7,117 | module FormulaCellarChecks
def check_PATH(bin)
# warn the user if stuff was installed outside of their PATH
return unless bin.directory?
return unless bin.children.length > 0
prefix_bin = (HOMEBREW_PREFIX/bin.basename)
return unless prefix_bin.directory?
prefix_bin = prefix_bin.realpath
return if ORIGINAL_PATHS.include? prefix_bin
<<-EOS.undent
#{prefix_bin} is not in your PATH
You can amend this by altering your #{shell_profile} file
EOS
end
def check_manpages
# Check for man pages that aren't in share/man
return unless (formula.prefix+"man").directory?
<<-EOS.undent
A top-level "man" directory was found
Homebrew requires that man pages live under share.
This can often be fixed by passing "--mandir=\#{man}" to configure.
EOS
end
def check_infopages
# Check for info pages that aren't in share/info
return unless (formula.prefix+"info").directory?
<<-EOS.undent
A top-level "info" directory was found
Homebrew suggests that info pages live under share.
This can often be fixed by passing "--infodir=\#{info}" to configure.
EOS
end
def check_jars
return unless formula.lib.directory?
jars = formula.lib.children.select { |g| g.extname == ".jar" }
return if jars.empty?
<<-EOS.undent
JARs were installed to "#{formula.lib}"
Installing JARs to "lib" can cause conflicts between packages.
For Java software, it is typically better for the formula to
install to "libexec" and then symlink or wrap binaries into "bin".
See "activemq", "jruby", etc. for examples.
The offending files are:
#{jars * "\n "}
EOS
end
def check_non_libraries
return unless formula.lib.directory?
valid_extensions = %w[.a .dylib .framework .jnilib .la .o .so
.jar .prl .pm .sh]
non_libraries = formula.lib.children.select do |g|
next if g.directory?
!(valid_extensions.include?(g.extname) || g.basename.to_s.include?(".so."))
end
return if non_libraries.empty?
<<-EOS.undent
Non-libraries were installed to "#{formula.lib}"
Installing non-libraries to "lib" is discouraged.
The offending files are:
#{non_libraries * "\n "}
EOS
end
def check_non_executables(bin)
return unless bin.directory?
non_exes = bin.children.select { |g| g.directory? || !g.executable? }
return if non_exes.empty?
<<-EOS.undent
Non-executables were installed to "#{bin}"
The offending files are:
#{non_exes * "\n "}
EOS
end
def check_generic_executables(bin)
return unless bin.directory?
generic_names = %w[run service start stop]
generics = bin.children.select { |g| generic_names.include? g.basename.to_s }
return if generics.empty?
<<-EOS.undent
Generic binaries were installed to "#{bin}"
Binaries with generic names are likely to conflict with other software,
and suggest that this software should be installed to "libexec" and then
symlinked as needed.
The offending files are:
#{generics * "\n "}
EOS
end
def check_shadowed_headers
["libtool", "subversion", "berkeley-db"].each do |formula_name|
return if formula.name.start_with?(formula_name)
end
return if MacOS.version < :mavericks && formula.name.start_with?("postgresql")
return if MacOS.version < :yosemite && formula.name.start_with?("memcached")
return if formula.keg_only? || !formula.include.directory?
files = relative_glob(formula.include, "**/*.h")
files &= relative_glob("#{MacOS.sdk_path}/usr/include", "**/*.h")
files.map! { |p| File.join(formula.include, p) }
return if files.empty?
<<-EOS.undent
Header files that shadow system header files were installed to "#{formula.include}"
The offending files are:
#{files * "\n "}
EOS
end
def check_easy_install_pth(lib)
pth_found = Dir["#{lib}/python{2.7,3}*/site-packages/easy-install.pth"].map { |f| File.dirname(f) }
return if pth_found.empty?
<<-EOS.undent
easy-install.pth files were found
These .pth files are likely to cause link conflicts. Please invoke
setup.py using Language::Python.setup_install_args.
The offending files are
#{pth_found * "\n "}
EOS
end
def check_openssl_links
return unless formula.prefix.directory?
keg = Keg.new(formula.prefix)
system_openssl = keg.mach_o_files.select do |obj|
dlls = obj.dynamically_linked_libraries
dlls.any? { |dll| /\/usr\/lib\/lib(crypto|ssl).(\d\.)*dylib/.match dll }
end
return if system_openssl.empty?
<<-EOS.undent
object files were linked against system openssl
These object files were linked against the deprecated system OpenSSL.
Adding `depends_on "openssl"` to the formula may help.
#{system_openssl * "\n "}
EOS
end
def check_python_framework_links(lib)
python_modules = Pathname.glob lib/"python*/site-packages/**/*.so"
framework_links = python_modules.select do |obj|
dlls = obj.dynamically_linked_libraries
dlls.any? { |dll| /Python\.framework/.match dll }
end
return if framework_links.empty?
<<-EOS.undent
python modules have explicit framework links
These python extension modules were linked directly to a Python
framework binary. They should be linked with -undefined dynamic_lookup
instead of -lpython or -framework Python.
#{framework_links * "\n "}
EOS
end
def check_emacs_lisp(share, name)
return unless (share/"emacs/site-lisp").directory?
# Emacs itself can do what it wants
return if name == "emacs"
elisps = (share/"emacs/site-lisp").children.select { |file| %w[.el .elc].include? file.extname }
return if elisps.empty?
<<-EOS.undent
Emacs Lisp files were linked directly to #{HOMEBREW_PREFIX}/share/emacs/site-lisp
This may cause conflicts with other packages; install to a subdirectory instead, such as
#{share}/emacs/site-lisp/#{name}
The offending files are:
#{elisps * "\n "}
EOS
end
def audit_installed
audit_check_output(check_manpages)
audit_check_output(check_infopages)
audit_check_output(check_jars)
audit_check_output(check_non_libraries)
audit_check_output(check_non_executables(formula.bin))
audit_check_output(check_generic_executables(formula.bin))
audit_check_output(check_non_executables(formula.sbin))
audit_check_output(check_generic_executables(formula.sbin))
audit_check_output(check_shadowed_headers)
audit_check_output(check_easy_install_pth(formula.lib))
audit_check_output(check_openssl_links)
audit_check_output(check_python_framework_links(formula.lib))
audit_check_output(check_emacs_lisp(formula.share, formula.name))
end
private
def relative_glob(dir, pattern)
File.directory?(dir) ? Dir.chdir(dir) { Dir[pattern] } : []
end
end
| 32.646789 | 103 | 0.677392 |
4abe83c943f34561a2b410904efe2e21437d14ed | 132 | ##Patterns: Lint_BlockAlignment
variable = lambda do |i|
i
end
variable = lambda do |i|
i
##Warn: Lint_BlockAlignment
end
| 12 | 31 | 0.69697 |
03d3a8ac1f18ec8defb00b6cdf2c66719aa9c797 | 5,492 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.comlaude.com/status_registered.expected
#
# and regenerate the tests with the following rake task
#
# $ rake spec:generate
#
require 'spec_helper'
require 'whois/record/parser/whois.comlaude.com.rb'
describe Whois::Record::Parser::WhoisComlaudeCom, "status_registered.expected" do
subject do
file = fixture("responses", "whois.comlaude.com/status_registered.txt")
part = Whois::Record::Part.new(body: File.read(file))
described_class.new(part)
end
describe "#status" do
it do
expect { subject.status }.to raise_error(Whois::AttributeNotSupported)
end
end
describe "#available?" do
it do
expect(subject.available?).to eq(false)
end
end
describe "#registered?" do
it do
expect(subject.registered?).to eq(true)
end
end
describe "#created_on" do
it do
expect(subject.created_on).to be_a(Time)
expect(subject.created_on).to eq(Time.parse("2005-01-30"))
end
end
describe "#updated_on" do
it do
expect { subject.updated_on }.to raise_error(Whois::AttributeNotSupported)
end
end
describe "#expires_on" do
it do
expect(subject.expires_on).to be_a(Time)
expect(subject.expires_on).to eq(Time.parse("2020-01-30"))
end
end
describe "#registrar" do
it do
expect(subject.registrar).to be_a(Whois::Record::Registrar)
expect(subject.registrar.id).to eq(nil)
expect(subject.registrar.name).to eq("NOM IQ LTD (DBA COM LAUDE)")
expect(subject.registrar.url).to eq("http://www.comlaude.com")
end
end
describe "#registrant_contacts" do
it do
expect(subject.registrant_contacts).to be_a(Array)
expect(subject.registrant_contacts.size).to eq(1)
expect(subject.registrant_contacts[0]).to be_a(Whois::Record::Contact)
expect(subject.registrant_contacts[0].type).to eq(Whois::Record::Contact::TYPE_REGISTRANT)
expect(subject.registrant_contacts[0].name).to eq("Domain Manager")
expect(subject.registrant_contacts[0].organization).to eq("Nom-IQ Ltd dba Com Laude")
expect(subject.registrant_contacts[0].address).to eq(nil)
expect(subject.registrant_contacts[0].city).to eq(nil)
expect(subject.registrant_contacts[0].zip).to eq(nil)
expect(subject.registrant_contacts[0].state).to eq(nil)
expect(subject.registrant_contacts[0].country).to eq(nil)
expect(subject.registrant_contacts[0].phone).to eq("+44.2078360070")
expect(subject.registrant_contacts[0].fax).to eq(nil)
expect(subject.registrant_contacts[0].email).to eq("[email protected]")
end
end
describe "#admin_contacts" do
it do
expect(subject.admin_contacts).to be_a(Array)
expect(subject.admin_contacts.size).to eq(1)
expect(subject.admin_contacts[0]).to be_a(Whois::Record::Contact)
expect(subject.admin_contacts[0].type).to eq(Whois::Record::Contact::TYPE_ADMINISTRATIVE)
expect(subject.admin_contacts[0].name).to eq("Domain Manager")
expect(subject.admin_contacts[0].organization).to eq("Nom-IQ Ltd dba Com Laude")
expect(subject.admin_contacts[0].address).to eq(nil)
expect(subject.admin_contacts[0].city).to eq(nil)
expect(subject.admin_contacts[0].zip).to eq(nil)
expect(subject.admin_contacts[0].state).to eq(nil)
expect(subject.admin_contacts[0].country).to eq(nil)
expect(subject.admin_contacts[0].phone).to eq("+44.2078360070")
expect(subject.admin_contacts[0].fax).to eq(nil)
expect(subject.admin_contacts[0].email).to eq("[email protected]")
end
end
describe "#technical_contacts" do
it do
expect(subject.technical_contacts).to be_a(Array)
expect(subject.technical_contacts.size).to eq(1)
expect(subject.technical_contacts[0]).to be_a(Whois::Record::Contact)
expect(subject.technical_contacts[0].type).to eq(Whois::Record::Contact::TYPE_TECHNICAL)
expect(subject.technical_contacts[0].name).to eq("Technical Manager")
expect(subject.technical_contacts[0].organization).to eq("Com Laude")
expect(subject.technical_contacts[0].address).to eq(nil)
expect(subject.technical_contacts[0].city).to eq(nil)
expect(subject.technical_contacts[0].zip).to eq(nil)
expect(subject.technical_contacts[0].state).to eq(nil)
expect(subject.technical_contacts[0].country).to eq(nil)
expect(subject.technical_contacts[0].phone).to eq("+44.2074218250")
expect(subject.technical_contacts[0].fax).to eq("+44.8700118187")
expect(subject.technical_contacts[0].email).to eq("[email protected]")
end
end
describe "#nameservers" do
it do
expect(subject.nameservers).to be_a(Array)
expect(subject.nameservers.size).to eq(4)
expect(subject.nameservers[0]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[0].name).to eq("dns1.comlaude-dns.com")
expect(subject.nameservers[1]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[1].name).to eq("dns2.comlaude-dns.net")
expect(subject.nameservers[2]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[2].name).to eq("dns3.comlaude-dns.co.uk")
expect(subject.nameservers[3]).to be_a(Whois::Record::Nameserver)
expect(subject.nameservers[3].name).to eq("dns4.comlaude-dns.eu")
end
end
end
| 41.293233 | 96 | 0.708485 |
26486da1ed8423d4fe570f6a20bec1bb0461ae2c | 12,812 | # -*- coding: binary -*-
# toybox
module Msf
autoload :OptionContainer, 'msf/core/option_container'
###
#
# The module base class is responsible for providing the common interface
# that is used to interact with modules at the most basic levels, such as
# by inspecting a given module's attributes (name, description, version,
# authors, etc) and by managing the module's data store.
#
###
class Module
autoload :Alert, 'msf/core/module/alert'
autoload :Arch, 'msf/core/module/arch'
autoload :Auth, 'msf/core/module/auth'
autoload :Author, 'msf/core/module/author'
autoload :AuxiliaryAction, 'msf/core/module/auxiliary_action'
autoload :Compatibility, 'msf/core/module/compatibility'
autoload :DataStore, 'msf/core/module/data_store'
autoload :Deprecated, 'msf/core/module/deprecated'
autoload :Failure, 'msf/core/module/failure'
autoload :FullName, 'msf/core/module/full_name'
autoload :HasActions, 'msf/core/module/has_actions'
autoload :ModuleInfo, 'msf/core/module/module_info'
autoload :ModuleStore, 'msf/core/module/module_store'
autoload :Network, 'msf/core/module/network'
autoload :Options, 'msf/core/module/options'
autoload :Platform, 'msf/core/module/platform'
autoload :PlatformList, 'msf/core/module/platform_list'
autoload :Privileged, 'msf/core/module/privileged'
autoload :Ranking, 'msf/core/module/ranking'
autoload :Reference, 'msf/core/module/reference'
autoload :SiteReference, 'msf/core/module/reference'
autoload :Target, 'msf/core/module/target'
autoload :Type, 'msf/core/module/type'
autoload :UI, 'msf/core/module/ui'
autoload :UUID, 'msf/core/module/uuid'
autoload :SideEffects, 'msf/core/module/side_effects'
autoload :Stability, 'msf/core/module/stability'
autoload :Reliability, 'msf/core/module/reliability'
# toybox
autoload :Rpcredis,'msf/core/module/rpcredis'
include Msf::Module::Alert
include Msf::Module::Arch
include Msf::Module::Auth
include Msf::Module::Author
include Msf::Module::Compatibility
include Msf::Module::DataStore
include Msf::Module::FullName
include Msf::Module::ModuleInfo
include Msf::Module::ModuleStore
include Msf::Module::Network
include Msf::Module::Options
include Msf::Module::Privileged
include Msf::Module::Ranking
include Msf::Module::Type
include Msf::Module::UI
include Msf::Module::UUID
include Msf::Module::SideEffects
include Msf::Module::Stability
include Msf::Module::Reliability
# toybox
include Msf::Module::Rpcredis
# The key where a comma-separated list of Ruby module names will live in the
# datastore, consumed by #replicant to allow clean override of MSF module methods.
REPLICANT_EXTENSION_DS_KEY = 'ReplicantExtensions'
# Make include public so we can runtime extend
public_class_method :include
class << self
include Framework::Offspring
#
# This attribute holds the non-duplicated copy of the module
# implementation. This attribute is used for reloading purposes so that
# it can be re-duplicated.
#
attr_accessor :orig_cls
#
# The path from which the module was loaded.
#
attr_accessor :file_path
end
#
# Returns the class reference to the framework
#
def framework
self.class.framework
end
#
# Creates an instance of an abstract module using the supplied information
# hash.
#
def initialize(info = {})
@module_info_copy = info.dup
self.module_info = info
generate_uuid
set_defaults
# Initialize module compatibility hashes
init_compat
# Fixup module fields as needed
info_fixups
# Transform some of the fields to arrays as necessary
self.author = Msf::Author.transform(module_info['Author'])
self.arch = Rex::Transformer.transform(module_info['Arch'], Array, [ String ], 'Arch')
self.platform = PlatformList.transform(module_info['Platform'])
self.references = Rex::Transformer.transform(module_info['References'], Array, [ SiteReference, Reference ], 'Ref')
# Create and initialize the option container for this module
self.options = Msf::OptionContainer.new
self.options.add_options(info['Options'], self.class)
self.options.add_advanced_options(info['AdvancedOptions'], self.class)
self.options.add_evasion_options(info['EvasionOptions'], self.class)
# Create and initialize the data store for this module
self.datastore = ModuleDataStore.new(self)
# Import default options into the datastore
import_defaults
self.privileged = module_info['Privileged'] || false
self.license = module_info['License'] || MSF_LICENSE
# Allow all modules to track their current workspace
register_advanced_options(
[
OptString.new('WORKSPACE', [ false, "Specify the workspace for this module" ]),
OptBool.new('VERBOSE', [ false, 'Enable detailed status messages', false ])
], Msf::Module)
end
def has_check?
respond_to?(:check)
end
#
# Creates a fresh copy of an instantiated module
#
def replicant
obj = self.clone
self.instance_variables.each { |k|
v = instance_variable_get(k)
v = v.dup rescue v
obj.instance_variable_set(k, v)
}
obj.datastore = self.datastore.copy
obj.user_input = self.user_input
obj.user_output = self.user_output
obj.module_store = self.module_store.clone
obj.perform_extensions
obj
end
# Extends self with the constant list in the datastore
# @return [void]
def perform_extensions
if datastore[REPLICANT_EXTENSION_DS_KEY].present?
if datastore[REPLICANT_EXTENSION_DS_KEY].respond_to?(:each)
datastore[REPLICANT_EXTENSION_DS_KEY].each do |const|
self.extend(const)
end
else
fail "Invalid settings in datastore at key #{REPLICANT_EXTENSION_DS_KEY}"
end
end
end
# @param[Constant] One or more Ruby constants
# @return [void]
def register_extensions(*rb_modules)
datastore[REPLICANT_EXTENSION_DS_KEY] = [] unless datastore[REPLICANT_EXTENSION_DS_KEY].present?
rb_modules.each do |rb_mod|
datastore[REPLICANT_EXTENSION_DS_KEY] << rb_mod unless datastore[REPLICANT_EXTENSION_DS_KEY].include? rb_mod
end
end
#
# Returns the unduplicated class associated with this module.
#
def orig_cls
self.class.orig_cls
end
#
# The path to the file in which the module can be loaded from.
#
def file_path
self.class.file_path
end
#
# Returns the current workspace
#
def workspace
self.datastore['WORKSPACE'] ||
(framework.db and framework.db.active and framework.db.workspace and framework.db.workspace.name)
end
#
# Returns the username that instantiated this module, this tries a handful of methods
# to determine what actual user ran this module.
#
def owner
# Generic method to configure a module owner
username = self.datastore['MODULE_OWNER'].to_s.strip
# Specific method used by the commercial products
if username.empty?
username = self.datastore['PROUSER'].to_s.strip
end
# Fallback when neither prior method is available, common for msfconsole
if username.empty?
username = (ENV['LOGNAME'] || ENV['USERNAME'] || ENV['USER'] || "unknown").to_s.strip
end
username
end
#
# Scans the parent module reference to populate additional information. This
# is used to inherit common settings (owner, workspace, parent uuid, etc).
#
def register_parent(ref)
self.datastore['WORKSPACE'] = (ref.datastore['WORKSPACE'] ? ref.datastore['WORKSPACE'].dup : nil)
self.datastore['PROUSER'] = (ref.datastore['PROUSER'] ? ref.datastore['PROUSER'].dup : nil)
self.datastore['MODULE_OWNER'] = ref.owner.dup
self.datastore['ParentUUID'] = ref.uuid.dup
end
#
# Return a comma separated list of supported platforms, if any.
#
def platform_to_s
platform.all? ? "All" : platform.names.join(", ")
end
#
# Checks to see if this module is compatible with the supplied platform
#
def platform?(what)
(platform & what).empty? == false
end
#
# Returns true if this module is being debugged.
#
def debugging?
datastore['DEBUG']
end
#
# Raises a RuntimeError failure message. This is meant to be used for all non-exploits,
# and allows specific classes to override.
#
# @param reason [String] A reason about the failure.
# @param msg [String] (Optional) A message about the failure.
# @raise [RuntimeError]
# @return [void]
# @note If you are writing an exploit, you don't use this API. Instead, please refer to the
# API documentation from lib/msf/core/exploit.rb.
# @see Msf::Exploit#fail_with
# @example
# fail_with('No Access', 'Unable to login')
#
def fail_with(reason, msg=nil)
raise RuntimeError, "#{reason.to_s}: #{msg}"
end
##
#
# Just some handy quick checks
#
##
#
# Returns false since this is the real module
#
def self.cached?
false
end
def required_cred_options
@required_cred_options ||= lambda {
self.options.select { |name, opt|
(
opt.type?('string') &&
opt.required &&
(opt.name.match(/user(name)*$/i) || name.match(/pass(word)*$/i))
) ||
(
opt.type?('bool') &&
opt.required &&
opt.name.match(/^allow_guest$/i)
)
}
}.call
end
def black_listed_auth_filenames
@black_listed_auth_filenames ||= lambda {
[
'fileformat',
'browser'
]
}.call
end
def post_auth?
if self.kind_of?(Msf::Auxiliary::AuthBrute)
return true
else
# Some modules will never be post auth, so let's not waste our time
# determining it and create more potential false positives.
# If these modules happen to be post auth for some reason, then we it
# should manually override the post_auth? method as true.
directory_name = self.fullname.split('/')[0..-2]
black_listed_auth_filenames.each do |black_listed_name|
return false if directory_name.include?(black_listed_name)
end
# Some modules create their own username and password datastore
# options, not relying on the AuthBrute mixin. In that case we
# just have to go through the options and try to identify them.
!required_cred_options.empty?
end
end
def default_cred?
return false unless post_auth?
required_cred_options.all? do |name, opt|
if opt.type == 'string'
if !opt.default.blank?
true
else
false
end
else
true
end
end
false
end
#
# The array of zero or more platforms.
#
attr_reader :platform
#
# The reference count for the module.
#
attr_reader :references
#
# The license under which this module is provided.
#
attr_reader :license
#
# The job identifier that this module is running as, if any.
#
attr_accessor :job_id
#
# The last exception to occur using this module
#
attr_accessor :error
# An opaque bag of data to attach to a module. This is useful for attaching
# some piece of identifying info on to a module before calling
# {Msf::Simple::Exploit#exploit_simple} or
# {Msf::Simple::Auxiliary#run_simple} for correlating where modules came
# from.
#
attr_accessor :user_data
protected
#
# Sets the modules unsupplied info fields to their default values.
#
def set_defaults
self.module_info = {
'Name' => 'No module name',
'Description' => 'No module description',
'Version' => '0',
'Author' => nil,
'Arch' => nil, # No architectures by default.
'Platform' => [], # No platforms by default.
'Ref' => nil,
'Privileged' => false,
'License' => MSF_LICENSE,
'Notes' => {}
}.update(self.module_info)
self.module_store = {}
end
attr_writer :platform, :references # :nodoc:
attr_writer :privileged # :nodoc:
attr_writer :license # :nodoc:
end
end
| 30.004684 | 121 | 0.644864 |
bf0c46fbbc7886bf2eda0c6fce4bc3aee93c0539 | 218 | FactoryGirl.define do
factory :comment do
user
text { FFaker::CheesyLingo.paragraph }
factory :team_comment do
team
end
factory :application_comment do
application
end
end
end
| 14.533333 | 42 | 0.665138 |
26f06a4e8ce31c54c23c52595d3c5650e1fdac7c | 7,711 | # frozen_string_literal: true
require 'rake'
require 'resque/tasks'
#
# NOTE: This is an entrypoint for workers. DO NOT require this file from rails
# application
#
require "#{__dir__}/../../../../baw-app/lib/baw_app"
namespace :baw do
def init(is_worker: false, settings_file: nil, is_scheduler: false)
BawApp.setup(settings_file)
# initialize the app
# baw-app, the workers code, and all other requires are done through rails
# initialization in application.rb and then in other initializers.
# Be VERY careful changing the order of things here. It breaks in very
# subtle ways.
# For example, requiring baw_app will mean the ruby-config settings won't
# detect the rails constant and won't add the Rails railtie, and thus the settings
# won't load! ... but only for workers and not the rails server!
# We now force load the config railtie in application.rb!
require "#{__dir__}/../../../../../../config/application"
# set time zone
Time.zone = 'UTC'
BawWorkers::Config.set(is_resque_worker: is_worker, is_scheduler: is_scheduler)
# Initialize the Rails application.
Rails.application.initialize!
# which in turns run BawWorkers::Config.run from an initializer
end
namespace :worker do
# run a worker. Passes parameter to prerequisite 'setup_worker'. Takes one argument: settings_file
# start examples:
# bundle exec rake baw_workers:run_worker
# bundle exec rake baw_workers:run_worker['/home/user/folder/workers/settings.media.yml']
# stopping workers:
# kill -s QUIT $(/home/user/folder/workers/media.pid)
desc 'Run a resque:work with the specified settings file.'
task :setup, [:settings_file] do |_t, args|
init(is_worker: true, settings_file: args.settings_file)
end
desc 'Run a resque:work with the specified settings file.'
task :run, [:settings_file] => [:setup] do |_t, _args|
BawWorkers::Config.logger_worker.info('rake_task:baw:worker:run') do
'Resque worker starting...'
end
# invoke the resque rake task
Rake::Task['resque:work'].invoke
end
desc 'Run the resque scheduler with the specified settings file.'
task :run_scheduler, [:settings_file] do |_t, args|
init(is_worker: false, settings_file: args.settings_file, is_scheduler: true)
BawWorkers::Config.logger_worker.info('rake_task:baw:worker:run_scheduler') do
'Resque scheduler starting...'
end
require 'resque/scheduler/tasks'
require 'resque-scheduler'
# invoke the resque rake task
Rake::Task['resque:scheduler'].invoke
end
desc 'List running workers'
task :current, [:settings_file] do |_t, args|
init(settings_file: args.settings_file)
BawWorkers::ResqueApi.workers_running
end
desc 'Quit running workers'
task :stop_all, [:settings_file] do |_t, args|
init(settings_file: args.settings_file)
BawWorkers::ResqueApi.workers_running
BawWorkers::ResqueApi.workers_stop_all
end
desc 'Clear queue'
task :clear_queue, [:settings_file, :queue_name] do |_t, args|
init(settings_file: args.settings_file)
BawWorkers::ResqueApi.clear_queue(args.queue_name)
end
desc 'Clear stats'
task :clear_stats, [:settings_file] do |_t, args|
init(settings_file: args.settings_file)
BawWorkers::ResqueApi.clear_stats
end
desc 'Retry failed jobs'
task :retry_failed, [:settings_file] do |_t, args|
init(settings_file: args.settings_file)
BawWorkers::ResqueApi.retry_failed
end
end
# Audio analysis jobs: the :resque tasks enqueue work for background
# workers, the :standalone tasks perform the same actions in-process.
namespace :analysis do
  namespace :resque do
    desc 'Enqueue a file to analyse using Resque'
    task :from_files, [:settings_file, :analysis_config_file] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::Analysis::Job.action_enqueue_rake(args.analysis_config_file)
    end

    desc 'Enqueue files to analyse using Resque from a csv file'
    task :from_csv, [:settings_file, :csv_file, :config_file, :command_file] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::Analysis::Job.action_enqueue_rake_csv(args.csv_file, args.config_file, args.command_file)
    end
  end

  namespace :standalone do
    desc 'Directly analyse an audio file'
    task :from_files, [:settings_file, :analysis_config_file] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::Analysis::Job.action_perform_rake(args.analysis_config_file)
    end

    desc 'Directly analyse audio files from csv file'
    task :from_csv, [:settings_file, :csv_file, :config_file, :command_file] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::Analysis::Job.action_perform_rake_csv(args.csv_file, args.config_file, args.command_file)
    end
  end
end
# Audio file integrity checks (:resque enqueues, :standalone runs directly).
namespace :audio_check do
  namespace :resque do
    desc 'Enqueue audio recording file checks from a csv file to be processed using Resque worker'
    # real_run defaults to 'dry_run'; BawWorkers::Validation.is_real_run?
    # decides whether changes are actually made.
    task :from_csv, [:settings_file, :csv_file, :real_run] do |_t, args|
      args.with_defaults(real_run: 'dry_run')
      is_real_run = BawWorkers::Validation.is_real_run?(args.real_run)
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::AudioCheck::Action.action_enqueue_rake(args.csv_file, is_real_run)
    end
  end

  namespace :standalone do
    desc 'Directly run audio recording file checks from a csv file'
    task :from_csv, [:settings_file, :csv_file, :real_run] do |_t, args|
      args.with_defaults(real_run: 'dry_run')
      is_real_run = BawWorkers::Validation.is_real_run?(args.real_run)
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::AudioCheck::Action.action_perform_rake(args.csv_file, is_real_run)
    end

    desc 'Test reading csv files'
    # NOTE(review): :settings_file is not declared in this task's argument
    # list, so args.settings_file is always nil here — init presumably falls
    # back to default settings; confirm this is intended.
    task :test_csv, [:audio_recordings_csv, :hash_csv, :result_csv] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::AudioCheck::CsvHelper.write_audio_recordings_csv(
        args.audio_recordings_csv, args.hash_csv, args.result_csv
      )
    end

    desc 'Extract CSV lines from a log file'
    # NOTE(review): same as above — :settings_file is not a declared argument.
    task :extract_csv_from_log, [:log_file, :output_file] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::AudioCheck::CsvHelper.extract_csv_logs(args.log_file, args.output_file)
    end

    desc 'Confirm database and audio files match'
    task :compare, [:settings_file, :csv_file] do |_t, args|
      init(settings_file: args.settings_file)
      BawWorkers::Jobs::AudioCheck::CsvHelper.compare_csv_db(args.csv_file)
    end
  end
end
namespace :harvest do
  desc 'Enqueue files to harvest using Resque'
  # Scans the directory rake was invoked from for harvestable files.
  # Pass 'real_run' to actually enqueue; anything else stays a dry run.
  task :scan, [:real_run] => ['baw:worker:setup'] do |_t, args|
    args.with_defaults(real_run: 'dry_run')
    is_real_run = BawWorkers::Validation.is_real_run?(args.real_run)
    invoke_dir = Rake.original_dir
    BawWorkers::Jobs::Harvest::Enqueue.scan(invoke_dir, is_real_run)
  end
end
namespace :media do
  # No rake tasks — media cutting and spectrogram generation is done on demand for now.
  # If eager generation is needed, rake tasks can be made to enqueue jobs or run standalone.
  # Consider defaults and offsets: from start of file, or from time of day,
  # e.g. 22:54:00 / 22:54:30 for 30 second segments?
  # This could be created for eager caching.
end
end
# if no arguments, list available tasks
# Default task: configure rake to show every task (any name pattern) and
# print the task list with descriptions.
task :default do
  Rake.application.options.show_tasks = :tasks
  Rake.application.options.show_task_pattern = //
  Rake.application.display_tasks_and_comments
end
| 38.555 | 125 | 0.70367 |
38a16ce7e4222dea04322deecea1c1cd2e27d20d | 651 | class ReportsController < ApplicationController
before_action :authenticate_user!

# Find the current household to use for household scoping.
# These three lines should be present in every controller (except the User
# controller) so the tenant is set to the current household for each request.
set_current_tenant_through_filter
before_action do
  set_current_tenant current_household
end
# Net-worth report for the current household, rendered as JSON.
def net_worth
  render json: Report.net_worth(current_household)
end
# Spending-by-category report for the current household, rendered as JSON.
def category_spending
  render json: Report.category_spending(current_household)
end
# Needs-vs-wants breakdown for the current household, rendered as JSON.
def needs_vs_wants
  render json: Report.needs_vs_wants(current_household)
end
end
| 23.25 | 83 | 0.778802 |
015db19c54974d3adcd4565460134be6e0600200 | 109 | require_relative 'range_value'
module MSFLVisitors
  module Nodes
    # Visitor-tree node for datetime values. All behavior is inherited from
    # the sibling Date node class (presumably loaded via the range_value
    # require above — verify).
    class DateTime < Date
    end
  end
end
e22e80e4b6aafbebf20b56567ddece331f42e10b | 9,030 | require 'plist'
module Fastlane
module Actions
class VerifyBuildAction < Action
  # Verifies the codesigning/provisioning details of a built app against
  # the expectations supplied in +params+, printing a summary table and
  # raising a user error on the first mismatch.
  def self.run(params)
    Dir.mktmpdir do |dir|
      app_path = self.app_path(params, dir)
      values = self.gather_cert_info(app_path)
      values = self.update_with_profile_info(app_path, values)
      self.print_values(values)
      self.evaluate(params, values)
    end
  end

  # Resolves the configured input (ipa/zip/xcarchive/app) to a concrete
  # .app path, unpacking archives into the temporary directory +dir+.
  def self.app_path(params, dir)
    build_path = params[:ipa_path] || params[:build_path] || Actions.lane_context[SharedValues::IPA_OUTPUT_PATH] || ''
    UI.user_error!("Unable to find file '#{build_path}'") unless File.exist?(build_path)
    build_path = File.expand_path(build_path)

    case File.extname(build_path)
    when ".ipa", ".zip"
      `unzip #{build_path.shellescape} -d #{dir.shellescape} -x '__MACOSX/*' '*.DS_Store'`
      UI.user_error!("Unable to unzip ipa") unless $? == 0
      # Adding extra ** for edge-case ipas where Payload directory is nested.
      app_path = Dir["#{dir}/**/Payload/*.app"].first
    when ".xcarchive"
      app_path = Dir["#{build_path}/Products/Applications/*.app"].first
    else
      app_path = build_path # Assume that input is an app file.
    end
    UI.user_error!("Unable to find app file") unless app_path && File.exist?(app_path)
    app_path
  end

  # Parses `codesign -vv -d` output into a hash with keys such as
  # 'provisioning_type', 'authority', 'team_identifier', 'bundle_identifier'.
  def self.gather_cert_info(app_path)
    cert_info = `codesign -vv -d #{app_path.shellescape} 2>&1`
    UI.user_error!("Unable to verify code signing") unless $? == 0
    values = {}
    parts = cert_info.strip.split(/\r?\n/)
    parts.each do |part|
      if part =~ /\AAuthority=(iPhone|iOS|Apple)\s(Distribution|Development)/
        type = part.split('=')[1].split(':')[0]
        values['provisioning_type'] = type.downcase =~ /distribution/i ? "distribution" : "development"
      end
      if part.start_with?("Authority")
        values['authority'] ||= []
        values['authority'] << part.split('=')[1]
      end
      if part.start_with?("TeamIdentifier")
        values['team_identifier'] = part.split('=')[1]
      end
      if part.start_with?("Identifier")
        values['bundle_identifier'] = part.split('=')[1]
      end
    end
    values
  end

  # Decodes the embedded provisioning profile, merges its details into
  # +values+ and cross-checks them against the codesign identity.
  def self.update_with_profile_info(app_path, values)
    profile = `cat #{app_path.shellescape}/embedded.mobileprovision | security cms -D`
    UI.user_error!("Unable to extract profile") unless $? == 0

    plist = Plist.parse_xml(profile)
    values['app_name'] = plist['AppIDName']
    values['provisioning_uuid'] = plist['UUID']
    values['team_name'] = plist['TeamName']
    values['team_identifier'] = plist['TeamIdentifier'].first

    application_identifier_prefix = plist['ApplicationIdentifierPrefix'][0]
    full_bundle_identifier = "#{application_identifier_prefix}.#{values['bundle_identifier']}"

    UI.user_error!("Inconsistent identifier found; #{plist['Entitlements']['application-identifier']}, found in the embedded.mobileprovision file, should match #{full_bundle_identifier}, which is embedded in the codesign identity") unless plist['Entitlements']['application-identifier'] == full_bundle_identifier
    UI.user_error!("Inconsistent identifier found") unless plist['Entitlements']['com.apple.developer.team-identifier'] == values['team_identifier']

    values
  end

  def self.print_values(values)
    FastlaneCore::PrintTable.print_values(config: values,
                                          title: "Summary for verify_build #{Fastlane::VERSION}")
  end

  # Compares every expectation present in +params+ with the gathered
  # +values+ and raises a user error on the first mismatch.
  def self.evaluate(params, values)
    if params[:provisioning_type]
      UI.user_error!("Mismatched provisioning_type. Required: '#{params[:provisioning_type]}'; Found: '#{values['provisioning_type']}'") unless params[:provisioning_type] == values['provisioning_type']
    end

    if params[:provisioning_uuid]
      UI.user_error!("Mismatched provisioning_uuid. Required: '#{params[:provisioning_uuid]}'; Found: '#{values['provisioning_uuid']}'") unless params[:provisioning_uuid] == values['provisioning_uuid']
    end

    if params[:team_identifier]
      UI.user_error!("Mismatched team_identifier. Required: '#{params[:team_identifier]}'; Found: '#{values['team_identifier']}'") unless params[:team_identifier] == values['team_identifier']
    end

    if params[:team_name]
      # BUGFIX: the found value used to be printed literally as
      # "values['team_name']" because the interpolation braces were missing.
      UI.user_error!("Mismatched team_name. Required: '#{params[:team_name]}'; Found: '#{values['team_name']}'") unless params[:team_name] == values['team_name']
    end

    if params[:app_name]
      UI.user_error!("Mismatched app_name. Required: '#{params[:app_name]}'; Found: '#{values['app_name']}'") unless params[:app_name] == values['app_name']
    end

    if params[:bundle_identifier]
      UI.user_error!("Mismatched bundle_identifier. Required: '#{params[:bundle_identifier]}'; Found: '#{values['bundle_identifier']}'") unless params[:bundle_identifier] == values['bundle_identifier']
    end

    UI.success("Build is verified, have a 🍪.")
  end

  class << self
    # Keep the historical (misspelled) method name working for any callers.
    alias_method :evaulate, :evaluate
  end

  #####################################################
  # @!group Documentation
  #####################################################

  def self.description
    "Able to verify various settings in ipa file"
  end

  def self.details
    "Verifies that the built app was built using the expected build resources. This is relevant for people who build on machines that are used to build apps with different profiles, certificates and/or bundle identifiers to guard against configuration mistakes."
  end

  def self.available_options
    [
      FastlaneCore::ConfigItem.new(key: :provisioning_type,
                                   env_name: "FL_VERIFY_BUILD_PROVISIONING_TYPE",
                                   description: "Required type of provisioning",
                                   optional: true,
                                   verify_block: proc do |value|
                                     av = %w(distribution development)
                                     UI.user_error!("Unsupported provisioning_type, must be: #{av}") unless av.include?(value)
                                   end),
      FastlaneCore::ConfigItem.new(key: :provisioning_uuid,
                                   env_name: "FL_VERIFY_BUILD_PROVISIONING_UUID",
                                   description: "Required UUID of provisioning profile",
                                   optional: true),
      FastlaneCore::ConfigItem.new(key: :team_identifier,
                                   env_name: "FL_VERIFY_BUILD_TEAM_IDENTIFIER",
                                   description: "Required team identifier",
                                   optional: true),
      FastlaneCore::ConfigItem.new(key: :team_name,
                                   env_name: "FL_VERIFY_BUILD_TEAM_NAME",
                                   description: "Required team name",
                                   optional: true),
      FastlaneCore::ConfigItem.new(key: :app_name,
                                   env_name: "FL_VERIFY_BUILD_APP_NAME",
                                   description: "Required app name",
                                   optional: true),
      FastlaneCore::ConfigItem.new(key: :bundle_identifier,
                                   env_name: "FL_VERIFY_BUILD_BUNDLE_IDENTIFIER",
                                   description: "Required bundle identifier",
                                   optional: true),
      FastlaneCore::ConfigItem.new(key: :ipa_path,
                                   env_name: "FL_VERIFY_BUILD_IPA_PATH",
                                   description: "Explicitly set the ipa path",
                                   conflicting_options: [:build_path],
                                   optional: true),
      FastlaneCore::ConfigItem.new(key: :build_path,
                                   env_name: "FL_VERIFY_BUILD_BUILD_PATH",
                                   description: "Explicitly set the ipa, app or xcarchive path",
                                   conflicting_options: [:ipa_path],
                                   optional: true)
    ]
  end

  def self.output
  end

  def self.return_value
  end

  def self.authors
    ["CodeReaper"]
  end

  def self.is_supported?(platform)
    platform == :ios
  end

  def self.example_code
    [
      'verify_build(
        provisioning_type: "distribution",
        bundle_identifier: "com.example.myapp"
      )'
    ]
  end

  def self.category
    :misc
  end
end
end
end
| 45.15 | 316 | 0.561462 |
6a78efde225391e579948ae04b88ad85c10b6940 | 1,044 | Trestle.resource(:batteries) do
# Register this admin resource in the sidebar: inside the :categories
# group (pinned last), shown with a battery icon.
menu do
  group :categories, priority: :last do
    item :batteries, icon: "fa fa-battery"
  end
end
# Customize the table columns shown on the index view.
#
# table do
# column :name
# column :created_at, align: :center
# actions
# end
# Customize the form fields shown on the new/edit views.
#
# form do |battery|
# text_field :name
#
# row do
# col(xs: 6) { datetime_field :updated_at }
# col(xs: 6) { datetime_field :created_at }
# end
# end
# By default, all parameters passed to the update and create actions will be
# permitted. If you do not have full trust in your users, you should explicitly
# define the list of permitted parameters.
#
# For further information, see the Rails documentation on Strong Parameters:
# http://guides.rubyonrails.org/action_controller_overview.html#strong-parameters
#
# params do |params|
# params.require(:battery).permit(:name, ...)
# end
end
| 27.473684 | 86 | 0.645594 |
e90fa684ef4f46813fcb693782eaaf6233287c03 | 1,926 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
  module XPack
    module API
      module Rollup
        module Actions
          # Creates a rollup job.
          #
          # @option arguments [String] :id The ID of the job to create
          # @option arguments [Hash] :headers Custom HTTP headers
          # @option arguments [Hash] :body The job configuration (*Required*)
          #
          # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-put-job.html
          #
          def put_job(arguments = {})
            # Validate required arguments up front (:body first, matching the
            # historical error ordering).
            %i[body id].each do |required|
              raise ArgumentError, "Required argument '#{required}' missing" unless arguments[required]
            end

            # NOTE: :headers is deliberately removed from the caller's hash
            # before the defensive clone below.
            headers = arguments.delete(:headers) || {}
            arguments = arguments.clone
            job_id = arguments.delete(:id)

            perform_request(
              Elasticsearch::API::HTTP_PUT,
              "_rollup/job/#{Elasticsearch::API::Utils.__listify(job_id)}",
              {},
              arguments[:body],
              headers
            ).body
          end
        end
      end
    end
  end
end
| 36.339623 | 99 | 0.658879 |
21627b2fd38fca49c5d83df7833e6d46fd8bcbf7 | 103 | # frozen_string_literal: true
# Model for the DropdownMenu Contentful content type; all fields and
# behavior are inherited from the WCC::Contentful::Model base class.
class WCC::Contentful::Model::DropdownMenu < WCC::Contentful::Model
end
| 20.6 | 67 | 0.786408 |
035e284b5a7c4bcc47c99584e512c5bab9c00e47 | 2,846 | module Fog
module Network
class OpenStack
class Real
  # Attributes accepted on network creation by every Neutron deployment.
  CREATE_OPTIONS = [
    :name,
    :shared,
    :admin_state_up,
    :qos_policy_id,
    :port_security_enabled,
    :tenant_id,
  ].freeze

  # Advanced Features through API Extensions
  #
  # Not strictly required but commonly found in OpenStack
  # installs with Quantum networking.
  # (Constant name kept with its historical misspelling for compatibility.)
  #
  # @see http://docs.openstack.org/trunk/openstack-network/admin/content/provider_attributes.html
  EXTENTED_OPTIONS = [
    :provider_network_type,
    :provider_segmentation_id,
    :provider_physical_network,
    :router_external,
  ].freeze

  # Map Fog::Network::OpenStack::Network
  # model attributes to OpenStack provider attributes
  ALIASES = {
    :provider_network_type => 'provider:network_type',
    # Not applicable to the "local" or "gre" network types
    :provider_physical_network => 'provider:physical_network',
    :provider_segmentation_id => 'provider:segmentation_id',
    :router_external => 'router:external'
  }.freeze

  # Builds the request payload from +options+: nil values are skipped,
  # plain options are stringified, extension options are translated to
  # their wire names via ALIASES.
  def self.create(options)
    data = {}
    CREATE_OPTIONS.each do |key|
      value = options[key]
      data[key.to_s] = value unless value.nil?
    end
    EXTENTED_OPTIONS.each do |key|
      value = options[key]
      data[ALIASES.fetch(key, key)] = value unless value.nil?
    end
    data
  end

  # POST /networks — create a network with the given attributes.
  def create_network(options = {})
    request(
      :body => Fog::JSON.encode('network' => self.class.create(options)),
      :expects => [201],
      :method => 'POST',
      :path => 'networks'
    )
  end
end
class Mock
  # Mimics POST /networks: fabricates a network record with defaults,
  # overlays the caller-supplied attributes, stores the record in the
  # in-memory datastore and returns a 201 response.
  def create_network(options = {})
    response = Excon::Response.new
    response.status = 201

    network = {
      'id' => Fog::Mock.random_numbers(6).to_s,
      'name' => options[:name],
      'shared' => options[:shared] || false,
      'subnets' => [],
      'status' => 'ACTIVE',
      'admin_state_up' => options[:admin_state_up] || false,
      'tenant_id' => options[:tenant_id],
      'qos_policy_id' => options[:qos_policy_id],
      'port_security_enabled' => options[:port_security_enabled] || false
    }
    # Attributes the caller actually supplied win over the defaults above.
    network.merge!(Fog::Network::OpenStack::Real.create(options))

    self.data[:networks][network['id']] = network
    response.body = { 'network' => network }
    response
  end
end
end
end
end
| 31.977528 | 103 | 0.517217 |
9120de3b509b52e77e49fbf3332243b5eccabba3 | 4,606 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ServiceFabric::V6_5_0_36
module Models
#
# Application Upgrade Started event.
#
class ApplicationUpgradeStartedEvent < ApplicationEvent

  include MsRestAzure

  def initialize
    # Fixed discriminator value identifying this event type in the payload.
    @Kind = "ApplicationUpgradeStarted"
  end

  attr_accessor :Kind

  # @return [String] Application type name.
  attr_accessor :application_type_name

  # @return [String] Current Application type version.
  attr_accessor :current_application_type_version

  # @return [String] Target Application type version.
  attr_accessor :application_type_version

  # @return [String] Type of upgrade.
  attr_accessor :upgrade_type

  # @return [String] Mode of upgrade.
  attr_accessor :rolling_upgrade_mode

  # @return [String] Action if failed.
  attr_accessor :failure_action


  #
  # Mapper for ApplicationUpgradeStartedEvent class as Ruby Hash.
  # This will be used for serialization/deserialization.
  #
  # NOTE: this file is generated by AutoRest (see the file header); change
  # the generator rather than hand-editing this mapping.
  #
  def self.mapper()
    {
      client_side_validation: true,
      required: false,
      serialized_name: 'ApplicationUpgradeStarted',
      type: {
        name: 'Composite',
        class_name: 'ApplicationUpgradeStartedEvent',
        model_properties: {
          event_instance_id: {
            client_side_validation: true,
            required: true,
            serialized_name: 'EventInstanceId',
            type: {
              name: 'String'
            }
          },
          category: {
            client_side_validation: true,
            required: false,
            serialized_name: 'Category',
            type: {
              name: 'String'
            }
          },
          time_stamp: {
            client_side_validation: true,
            required: true,
            serialized_name: 'TimeStamp',
            type: {
              name: 'DateTime'
            }
          },
          has_correlated_events: {
            client_side_validation: true,
            required: false,
            serialized_name: 'HasCorrelatedEvents',
            type: {
              name: 'Boolean'
            }
          },
          Kind: {
            client_side_validation: true,
            required: true,
            serialized_name: 'Kind',
            type: {
              name: 'String'
            }
          },
          application_id: {
            client_side_validation: true,
            required: true,
            serialized_name: 'ApplicationId',
            type: {
              name: 'String'
            }
          },
          application_type_name: {
            client_side_validation: true,
            required: true,
            serialized_name: 'ApplicationTypeName',
            type: {
              name: 'String'
            }
          },
          current_application_type_version: {
            client_side_validation: true,
            required: true,
            serialized_name: 'CurrentApplicationTypeVersion',
            type: {
              name: 'String'
            }
          },
          application_type_version: {
            client_side_validation: true,
            required: true,
            serialized_name: 'ApplicationTypeVersion',
            type: {
              name: 'String'
            }
          },
          upgrade_type: {
            client_side_validation: true,
            required: true,
            serialized_name: 'UpgradeType',
            type: {
              name: 'String'
            }
          },
          rolling_upgrade_mode: {
            client_side_validation: true,
            required: true,
            serialized_name: 'RollingUpgradeMode',
            type: {
              name: 'String'
            }
          },
          failure_action: {
            client_side_validation: true,
            required: true,
            serialized_name: 'FailureAction',
            type: {
              name: 'String'
            }
          }
        }
      }
    }
  end
end
end
end
| 29.33758 | 70 | 0.47373 |
796ff029bf086beab850f3c5eff2f83a26d17975 | 1,964 | require File.expand_path(File.dirname(__FILE__) + '/edgecase')
class AboutScope < EdgeCase::Koan
  # NOTE: this is a Ruby-koans exercise. The __(...), _n_(...) and ___(...)
  # calls are koan helper methods that carry the expected answers, so the
  # unusual assertion style below is intentional.

  module Jims
    class Dog
      def identify
        :jims_dog
      end
    end
  end

  module Joes
    class Dog
      def identify
        :joes_dog
      end
    end
  end

  # Dog is only defined inside the Jims/Joes namespaces above.
  def test_dog_is_not_available_in_the_current_scope
    assert_raise(___(NameError)) do
      fido = Dog.new
    end
  end

  def test_you_can_reference_nested_classes_using_the_scope_operator
    fido = Jims::Dog.new
    rover = Joes::Dog.new
    assert_equal __(:jims_dog), fido.identify
    assert_equal __(:joes_dog), rover.identify

    assert_not_equal fido.class, rover.class
    assert_not_equal Jims::Dog, Joes::Dog
  end

  # ------------------------------------------------------------------

  # Shadows ::String within this class's namespace.
  class String
  end

  def test_bare_bones_class_names_assume_the_current_scope
    assert_equal __(true), AboutScope::String == String
  end

  def test_nested_string_is_not_the_same_as_the_system_string
    assert_equal __(false), String == "HI".class
  end

  def test_use_the_prefix_scope_operator_to_force_the_global_scope
    assert_equal __(true), ::String == "HI".class
  end

  # ------------------------------------------------------------------

  PI = 3.1416

  def test_constants_are_defined_with_an_initial_uppercase_letter
    assert_equal __(3.1416), PI
  end

  # ------------------------------------------------------------------

  MyString = ::String

  def test_class_names_are_just_constants
    assert_equal __(true), MyString == ::String
    assert_equal __(true), MyString == "HI".class
  end

  def test_constants_can_be_looked_up_explicitly
    assert_equal __(true), PI == AboutScope.const_get("PI")
    assert_equal __(true), MyString == AboutScope.const_get("MyString")
  end

  def test_you_can_get_a_list_of_constants_for_any_class_or_module
    assert_equal __(["Dog"], [:Dog]), Jims.constants
    assert Object.constants.size > _n_(10)
  end
end
| 24.55 | 71 | 0.651222 |
336ead4cc6c8db8f523f71ed5651eeb59f6c7d5d | 2,392 | # This class represents a todo item and its associated
# data: name and description. There's also a "done"
# flag to show whether this todo item is done.
# This class represents a todo item and its associated
# data: name and description. There's also a "done"
# flag to show whether this todo item is done.
class Todo
  DONE_MARKER = 'X'
  UNDONE_MARKER = ' '

  attr_accessor :title, :description, :done

  def initialize(title, description='')
    @title = title
    @description = description
    @done = false
  end

  # Marks the item as completed.
  def done!
    self.done = true
  end

  def done?
    done
  end

  # Marks the item as not completed.
  def undone!
    self.done = false
  end

  # e.g. "[X] Buy milk" when done, "[ ] Buy milk" otherwise.
  def to_s
    "[#{done? ? DONE_MARKER : UNDONE_MARKER}] #{title}"
  end

  # Value equality on title, description and done state.
  # BUGFIX: comparing against a non-Todo (e.g. `todo == nil`) used to raise
  # NoMethodError; it now returns false, as == conventionally should.
  def ==(other)
    return false unless other.is_a?(Todo)
    title == other.title &&
      description == other.description &&
      done == other.done
  end
end
# This class represents a collection of Todo objects.
# You can perform typical collection-oriented actions
# on a TodoList object, including iteration and selection.
# This class represents a collection of Todo objects.
# You can perform typical collection-oriented actions
# on a TodoList object, including iteration and selection.
class TodoList
  attr_accessor :title

  # +title+ names the list; the list starts out empty.
  def initialize(title)
    @title = title
    @todos = []
  end

  # Appends +todo+ to the list. Accepts Todo instances — including
  # subclasses, which the previous instance_of? check rejected — and
  # raises TypeError for anything else.
  def add(todo)
    raise TypeError, 'Can only add Todo objects' unless todo.is_a?(Todo)
    todos << todo
  end
  alias << add

  def size
    todos.size
  end

  def first
    todos.first
  end

  def last
    todos.last
  end

  # Returns a shallow copy of the internal array.
  def to_a
    todos.clone
  end

  # True when every item is done (vacuously true for an empty list).
  def done?
    todos.all?(&:done?)
  end

  # Fetches the item at +index+; raises IndexError when out of bounds.
  def item_at(index)
    todos.fetch(index)
  end

  def mark_done_at(index)
    item_at(index).done!
  end

  def mark_undone_at(index)
    item_at(index).undone!
  end

  def done!
    todos.each(&:done!)
  end

  def shift
    todos.shift
  end

  def pop
    todos.pop
  end

  def remove_at(index)
    todos.delete(item_at(index))
  end

  def to_s
    text = "---- #{title} ----\n"
    text << @todos.map(&:to_s).join("\n")
    text
  end

  # Yields each todo in order; returns self.
  def each
    todos.each do |todo|
      yield(todo)
    end
    self
  end

  # Returns a new TodoList (same title) containing the items for which
  # the block is truthy.
  def select
    selection = TodoList.new(title)
    each do |todo|
      selection.add(todo) if yield(todo)
    end
    selection
  end

  def find_by_title(string)
    select { |todo| todo.title == string }.first
  end

  def all_done
    select(&:done?)
  end

  def all_not_done
    select { |todo| !todo.done? }
  end

  # Marks the first todo titled +string+ as done; no-op when absent.
  # (Previously performed the title lookup twice.)
  def mark_done(string)
    todo = find_by_title(string)
    todo.done! if todo
  end

  def mark_all_done
    each(&:done!)
  end

  def mark_all_undone
    each(&:undone!)
  end

  private

  attr_accessor :todos
end
| 15.044025 | 78 | 0.639214 |
f7ffe288d571c4d9d06b2528101f11cca447ef43 | 3,987 | # frozen_string_literal: true
module ResourceAccessTokens
class CreateService < BaseService
def initialize(current_user, resource, params = {})
@resource_type = resource.class.name.downcase
@resource = resource
@current_user = current_user
@params = params.dup
end
def execute
return error("User does not have permission to create #{resource_type} access token") unless has_permission_to_create?
user = create_user
return error(user.errors.full_messages.to_sentence) unless user.persisted?
user.update!(external: true) if current_user.external?
access_level = params[:access_level] || Gitlab::Access::MAINTAINER
member = create_membership(resource, user, access_level)
unless member.persisted?
delete_failed_user(user)
return error("Could not provision #{Gitlab::Access.human_access(access_level).downcase} access to project access token")
end
token_response = create_personal_access_token(user)
if token_response.success?
log_event(token_response.payload[:personal_access_token])
success(token_response.payload[:personal_access_token])
else
delete_failed_user(user)
error(token_response.message)
end
end
private
attr_reader :resource_type, :resource
def has_permission_to_create?
%w(project group).include?(resource_type) && can?(current_user, :create_resource_access_tokens, resource)
end
def create_user
# Even project maintainers can create project access tokens, which in turn
# creates a bot user, and so it becomes necessary to have `skip_authorization: true`
# since someone like a project maintainer does not inherently have the ability
# to create a new user in the system.
::Users::AuthorizedCreateService.new(current_user, default_user_params).execute
end
def delete_failed_user(user)
DeleteUserWorker.perform_async(current_user.id, user.id, hard_delete: true, skip_authorization: true)
end
def default_user_params
{
name: params[:name] || "#{resource.name.to_s.humanize} bot",
email: generate_email,
username: generate_username,
user_type: :project_bot,
skip_confirmation: true # Bot users should always have their emails confirmed.
}
end
def generate_username
base_username = "#{resource_type}_#{resource.id}_bot"
uniquify.string(base_username) { |s| User.find_by_username(s) }
end
def generate_email
email_pattern = "#{resource_type}#{resource.id}_bot%s@noreply.#{Gitlab.config.gitlab.host}"
uniquify.string(-> (n) { Kernel.sprintf(email_pattern, n) }) do |s|
User.find_by_email(s)
end
end
def uniquify
Uniquify.new
end
def create_personal_access_token(user)
PersonalAccessTokens::CreateService.new(
current_user: user, target_user: user, params: personal_access_token_params
).execute
end
def personal_access_token_params
{
name: params[:name] || "#{resource_type}_bot",
impersonation: false,
scopes: params[:scopes] || default_scopes,
expires_at: params[:expires_at] || nil
}
end
def default_scopes
Gitlab::Auth.resource_bot_scopes
end
def create_membership(resource, user, access_level)
resource.add_user(user, access_level, expires_at: params[:expires_at])
end
def log_event(token)
::Gitlab::AppLogger.info "PROJECT ACCESS TOKEN CREATION: created_by: #{current_user.username}, project_id: #{resource.id}, token_user: #{token.user.name}, token_id: #{token.id}"
end
def error(message)
ServiceResponse.error(message: message)
end
def success(access_token)
ServiceResponse.success(payload: { access_token: access_token })
end
end
end
ResourceAccessTokens::CreateService.prepend_mod_with('ResourceAccessTokens::CreateService')
| 31.393701 | 183 | 0.700527 |
ac3ad18c7106a6754f37c8aa7d4f5d7a907b653c | 711 | class BoardPrinter
# File labels printed as the footer row (chess files A-H).
FILE_LETTERS = ("A".."H").to_a
# Target encoding for the unicode chess glyphs.
ENCODING = 'utf-8'

# Builds the unicode lookup table used when rendering pieces.
def initialize
  @unicode_dict = UnicodeChess.new
end
# Renders +board+ into a printable string: ranks top-to-bottom (reversed),
# each row flanked by "<rank>|" ... "|", pieces shown as unicode glyphs and
# empty squares as blanks, with the file letters as a footer row.
def print(board)
  board_string = ""
  # FIX: the block previously took |rank, index|; the second parameter was
  # never used (and destructuring left it nil), so it has been removed.
  Board::RANKS.reverse.each do |rank|
    board_string << "#{rank}|"
    Board::FILES.each do |file|
      piece = board.find_piece_by_position("#{file}#{rank}")
      board_string << (piece ? unicode_character_for_piece([piece.color[0], piece.short_name].join) : ' ')
    end
    board_string << "|\n"
  end
  board_string << " " + FILE_LETTERS.join(' ')
  board_string
end
# Looks up the unicode glyph for a piece code (color initial + short name,
# as built in #print) and returns it with a trailing space for board spacing.
def unicode_character_for_piece(input_string)
  "#{@unicode_dict.char_for_piece(input_string).encode(ENCODING)} "
end
end
| 26.333333 | 109 | 0.64557 |
acdc6bdc6704b14e6476ce512b21510a50afa142 | 16,765 | module Roby
module Distributed
class << self
  # The block which is called when a new transaction has been proposed to us.
  attr_accessor :transaction_handler

  # Sets up the transaction handler. The given block will be called
  # in a separate thread whenever a remote peer proposes a new
  # transaction. (Stored in Distributed.transaction_handler.)
  def on_transaction(&block)
    Distributed.transaction_handler = block
  end
end

# Raised when an operation needs the edition token, while the local
# plan manager does not have it.
class NotEditor < RuntimeError; end

# Raised when a commit is attempted while the transaction is not ready,
# i.e. the token should be passed once more in the edition ring.
class NotReady < RuntimeError; end
# An implementation of a transaction distributed over multiple plan
# managers. The transaction modification protocol is based on an
# edition token, which is passed through all the transaction owners by
# #edit and #release.
#
# Most operations on this distributed transaction must be done outside
# the control thread, as they are blocking.
#
# See DistributedObject for a list of operations valid on distributed objects.
class Transaction < Roby::Transaction
# The plan managers that own this transaction (always includes the
# local manager, see #initialize).
attr_reader :owners
# Mutex and condition variable guarding the edition-token handover.
attr_reader :token_lock, :token_lock_signal

include DistributedObject

# Create a new distributed transaction based on the given plan. The
# transaction sole owner is the local plan manager, which is also
# the owner of the edition token.
def initialize(plan, options = {})
  @owners = [Distributed]
  @editor = true
  @token_lock = Mutex.new
  @token_lock_signal = ConditionVariable.new

  super
end
def do_wrap(base_object, create) # :nodoc:
  # It is allowed to add objects in a transaction only if
  # * the object is not distribuable. It means that we are
  #   annotating *locally* remote tasks (like it is done for
  #   ConnectionTask for instance).
  # * the object is owned by the transaction owners
  if create && (base_object.distribute? && !(base_object.owners - owners).empty?)
    raise OwnershipError, "plan owners #{owners} do not own #{base_object}: #{base_object.owners}"
  end

  # Subscribe to the remote object for the duration of the wrap if we
  # are not already receiving updates for it (undone in the ensure).
  temporarily_subscribed = !base_object.updated?
  if temporarily_subscribed
    peer = base_object.owners.first
    base_object = peer.subscribe(base_object)
  end

  if object = super
    object.extend DistributedObject
    if !Distributed.updating?(self) && object.root_object? && base_object.distribute?
      # The new proxy has been sent to remote hosts since it
      # has been discovered in the transaction. Nonetheless,
      # we don't want to return from #wrap until we know its
      # sibling. Add a synchro point to wait for that
      updated_peers.each do |peer|
        peer.synchro_point
      end
    end
  end
  object

ensure
  if temporarily_subscribed
    peer.unsubscribe(base_object)
  end
end
def copy_object_relations(object, proxy) # :nodoc:
  # If the transaction is being updated, it means that we are
  # discovering the new transaction. In that case, no need to
  # discover the plan relations since our peer will send us all
  # transaction relations
  unless Distributed.updating?(self)
    super
  end
end

# Checks that +peer+ can be removed from the list of owners.
# Raises OwnershipError if +peer+ still owns a distributed task in the
# transaction; returns nil otherwise.
def prepare_remove_owner(peer)
  known_tasks.each do |t|
    t = t.__getobj__ if t.respond_to?(:__getobj__)
    if peer.owns?(t) && t.distribute?
      raise OwnershipError, "#{peer} still owns tasks in the transaction (#{t})"
    end
  end
  nil
end

# Announces the transaction on +peer+ or, if +peer+ is nil, to all
# owners who don't know about it yet. This operation is
# asynchronous, so the block, if given, will be called for each
# remote peer which has processed the message.
#
# See Peer#transaction_propose
def propose(peer = nil, &block)
  if !self_owned?
    raise OwnershipError, "cannot propose a transaction we don't own"
  end

  if peer
    peer.transaction_propose(self, &block)
  else
    # Recurse once per owner that does not yet have a sibling.
    (owners - remote_siblings.keys).each do |peer|
      if peer != Roby::Distributed
        Distributed.debug "proposing #{self} to #{peer}"
        propose(peer) do
          yield(peer)
        end
      end
    end
  end
end

def add(objects) # :nodoc:
  if objects
    events, tasks = partition_event_task(objects)
    # Refuse objects owned by peers outside the transaction's owner set.
    for object in (events || []) + (tasks || [])
      unless Distributed.updating?(object) ||
        Distributed.owns?(object) ||
        (object.owners - owners).empty?
        raise OwnershipError, "#{object} is not owned by #{owners.to_a} (#{object.owners.to_a})"
      end
    end
    super(events) if events
    super(tasks) if tasks
  else
    super
  end
end
# call-seq:
#   commit_transaction => self
#
# Commits the transaction. This method can only be called by the
# first editor of the transaction, once all owners have requested
# no additional modifications.
#
# Distributed commits are done in two steps, to make sure that all
# owners agree to actually perform it. First, the
# PeerServer#transaction_prepare_commit message is sent, which can
# return either nil or an error object.
#
# If all peers return nil, the actual commit is performed by
# sending the PeerServer#transaction_commit message. Otherwise, the
# commit is abandonned by sending the
# PeerServer#transaction_abandon_commit message to the transaction
# owners.
def commit_transaction(synchro = true)
  # Check that we are allowed to commit at all.
  if !self_owned?
    raise OwnershipError, "cannot commit a transaction which is not owned locally. #{self} is owned by #{owners.to_a}"
  elsif synchro
    if !editor?
      raise NotEditor, "not editor of this transaction"
    elsif !first_editor?
      raise NotEditor, "transactions are committed by their first editor"
    elsif edition_reloop
      raise NotReady, "transaction still needs editing"
    end
  end

  if synchro
    # Two-phase commit: prepare on all owners, then commit or abandon.
    result = call_owners(:transaction_prepare_commit, self)
    error = result.find_all { |_, returned| returned }
    if !error.empty?
      call_owners(:transaction_abandon_commit, self, error)
      return false
    else
      call_owners(:transaction_commit, self)
      return true
    end
  else
    all_objects = known_tasks.dup
    proxy_objects.each_key { |o| all_objects << o }
    Distributed.update(self) do
      Distributed.update_all(all_objects) do
        super() { yield if block_given? }
      end
    end
  end
  self
end
# Hook called when the transaction commit has been abandoned
# because a owner refused it. +reason+ is the value returned by
# this peer.
def abandoned_commit(error)
Distributed.debug { "abandoned commit of #{self} because of #{error}" }
super if defined? super
end
# call-seq:
# discard_transaction => self
#
# Discards the transaction. Unlike #commit_transaction, this can be
# called by any of the owners.
def discard_transaction(synchro = true) # :nodoc:
unless Distributed.owns?(self)
raise OwnershipError, "cannot discard a transaction which is not owned locally. #{self} is owned by #{owners}"
end
if synchro
call_siblings(:transaction_discard, self)
else super()
end
self
end
# True if we currently have the edition token
attr_predicate :editor?
# True if one of the editors request that the token is passed to
# them once more. The transaction can be committed only when all
# peers did not request that.
#
# See #release
attr_reader :edition_reloop
# True if this plan manager is the first editor, i.e. the plan
# manager whose responsibility is to manage the edition protocol.
def first_editor?
owners.first == Distributed
end
# Returns the peer which is after this plan manager in the edition
# order. The edition token will be sent to this peer by #release
def next_editor
if owners.last == Distributed
return owners.first
end
owners.each_cons(2) do |first, second|
if first == Distributed
return second
end
end
end
def edit!(reloop)
token_lock.synchronize do
@editor = true
@edition_reloop = reloop
token_lock_signal.broadcast
end
end
# Waits for the edition token. If a block is given, it is called
# when the token is achieved, and releases the token when the
# blocks returns.
def edit(reloop = false)
if Thread.current[:control_mutex_locked]
raise "cannot call #edit with the control mutex taken !"
end
token_lock.synchronize do
while !editor? # not the current editor
token_lock_signal.wait(token_lock)
end
end
if block_given?
begin
yield
ensure
release(reloop)
end
end
end
# Releases the edition token, giving it to the next owner. If
# +give_back+ is true, the local plan manager announces that it
# expects the token to be given back to it once more. The commit is
# allowed only when all peers have released the edition token
# without requesting it once more.
#
# It sends the #transaction_give_token to the peer returned by
# #next_editor.
#
# Raised NotEditor if the local plan manager is not the current
# transaction editor.
def release(give_back = false)
token_lock.synchronize do
if !editor?
raise NotEditor, "not editor"
else
reloop = if first_editor?
give_back
else
edition_reloop || give_back
end
return if owners.size == 1
@editor = false
next_editor.transaction_give_token(self, reloop)
true
end
end
end
# Intermediate representation of a Roby::Distributed::Transaction
# object, suitable for representing that transaction in the dRoby
# protocol.
class DRoby < Roby::BasicObject::DRoby
attr_reader :plan, :options
def initialize(remote_siblings, owners, plan, options)
super(remote_siblings, owners)
@plan, @options = plan, options
end
# Returns the local representation of this transaction, or
# raises InvalidRemoteOperation if none exists.
def proxy(peer)
raise InvalidRemoteOperation, "the transaction #{self} does not exist on #{peer.connection_space.name}"
end
# Create a local representation for this transaction.
def sibling(peer)
plan = peer.local_object(self.plan)
trsc = Roby::Distributed::Transaction.new(plan, peer.local_object(options))
update(peer, trsc)
trsc.instance_eval do
@editor = false
end
trsc
end
# Called when a new sibling has been created locally for a
# distributed transaction present on +peer+. +trsc+ is the
# local representation of this transaction.
#
# In practice, it announces the new transaction by calling the
# block stored in Distributed.transaction_handler (if there is
# one).
#
# See PeerServer#created_sibling
def created_sibling(peer, trsc)
Thread.new do
Thread.current.priority = 0
begin
Distributed.transaction_handler[trsc] if Distributed.transaction_handler
rescue
Roby::Distributed.warn "transaction handler for #{trsc} failed"
Roby::Distributed.warn $!.full_message
trsc.invalidate("failed transaction handler")
end
end
end
def to_s # :nodoc:
"#<dRoby:Trsc#{remote_siblings_to_s} owners=#{owners_to_s} plan=#{plan}>"
end
end
# Returns a representation of +self+ which can be used to reference
# it in our communication with +dest+.
def droby_dump(dest) # :nodoc:
if remote_siblings.has_key?(dest)
remote_id
else
DRoby.new(remote_siblings.droby_dump(dest), owners.droby_dump(dest),
plan.droby_dump(dest),
options.droby_dump(dest))
end
end
end
module Roby::Task::Proxying
def droby_dump(dest) # :nodoc:
DRoby.new(remote_siblings.droby_dump(dest), owners.droby_dump(dest),
Distributed.format(@__getobj__, dest), Distributed.format(plan, dest))
end
# A representation of a distributed transaction proxy suitable for
# communication with the remote plan managers.
class DRoby < Roby::BasicObject::DRoby
# The DRoby version of the underlying object
attr_reader :real_object
# The DRoby representation of the transaction
attr_reader :transaction
# Create a new dRoby representation for a transaction proxy.
# The proxy currently has the given set of remote siblings and
# owners, is a view on the given real object and is stored in
# the given transaction. All objects must already be formatted
# for marshalling using Distributed.format.
def initialize(remote_siblings, owners, real_object, transaction)
super(remote_siblings, owners)
@real_object, @transaction = real_object, transaction
end
# Returns the local object matching this dRoby-formatted
# representation of a remote transaction proxy present on
# +peer+.
def proxy(peer)
local_real = peer.local_object(real_object)
local_object = nil
local_transaction = peer.local_object(transaction)
Distributed.update(local_transaction) do
local_object = local_transaction[local_real]
end
local_object
end
def to_s # :nodoc:
"#<dRoby:mTrscProxy#{remote_siblings} transaction=#{transaction} real_object=#{real_object}>"
end
end
end
module Roby::TaskEventGenerator::Proxying
# A task event generator has no remote sibling. It is always
# referenced through its own task.
def has_sibling?(peer); false end
# Create an intermediate object which represent this task event
# generator in our communication with +dest+
def droby_dump(dest)
Roby::TaskEventGenerator::DRoby.new(controlable?, happened?, Distributed.format(task, dest), symbol)
end
end
class PeerServer
# Message received when the 'prepare' stage of the transaction
# commit is requested.
def transaction_prepare_commit(trsc)
trsc = peer.local_object(trsc)
peer.connection_space.transaction_prepare_commit(trsc)
trsc.freezed!
nil
end
# Message received when a transaction commit is requested.
def transaction_commit(trsc)
trsc = peer.local_object(trsc)
peer.connection_space.transaction_commit(trsc)
nil
end
# Message received when a transaction commit is to be abandonned.
def transaction_abandon_commit(trsc, error)
trsc = peer.local_object(trsc)
peer.connection_space.transaction_abandon_commit(trsc, error)
nil
end
# Message received when a transaction discard is requested.
def transaction_discard(trsc)
trsc = peer.local_object(trsc)
peer.connection_space.transaction_discard(trsc)
nil
end
# Message received when the transaction edition token is given to
# this plan manager.
def transaction_give_token(trsc, needs_edition)
trsc = peer.local_object(trsc)
trsc.edit!(needs_edition)
nil
end
end
class Peer
# Send the information related to the given transaction in the
# remote plan manager.
def transaction_propose(trsc)
synchro_point
create_sibling(trsc)
nil
end
# Give the edition token on +trsc+ to the given peer.
# +needs_edition+ is a flag which, if true, requests that the token
# is given back at least once to the local plan manager.
#
# Do not use this directly, it is part of the multi-robot
# communication protocol. Use the edition-related methods on
# Distributed::Transaction instead.
def transaction_give_token(trsc, needs_edition)
call(:transaction_give_token, trsc, needs_edition)
end
end
end
end
| 33.868687 | 120 | 0.654757 |
182306a97432603d940d021170082b4563708a9b | 3,673 | require File.expand_path('../helper', __FILE__)
# Service tests for the YouTrack integration: every HTTP exchange with the
# YouTrack server is simulated through Faraday's test adapter, and
# `@stubs.verify_stubbed_calls` asserts that every declared stub was hit.
class YouTrackTest < Service::TestCase
  def setup
    @stubs = Faraday::Adapter::Test::Stubs.new
    # Minimal service configuration: base URL, committers group and
    # credentials used by the login call.
    @data = {'base_url' => 'http://yt.com/abc', 'committers' => 'c',
             'username' => 'u', 'password' => 'p'}
  end
  # Stubs for the common happy path: login (returns a session cookie),
  # user search in the committers group, and user detail lookup.
  def valid_process_stubs
    @stubs.post "/abc/rest/user/login" do |env|
      assert_equal 'yt.com', env[:url].host
      assert_equal 'u', env[:params]["login"]
      assert_equal 'p', env[:params]["password"]
      [200, {'Set-Cookie' => 'sc'}, '']
    end
    @stubs.get "/abc/rest/admin/user" do |env|
      assert_equal 'yt.com', env[:url].host
      assert_equal 'sc', env[:request_headers]['Cookie']
      assert_equal '[email protected]', env[:params]['q']
      assert_equal 'c', env[:params]['group']
      assert_equal '0', env[:params]['start']
      [200, {}, %(<r><u login="mojombo" /></r>)]
    end
    @stubs.get "/abc/rest/admin/user/mojombo" do |env|
      assert_equal 'yt.com', env[:url].host
      assert_equal 'sc', env[:request_headers]['Cookie']
      [200, {}, %(<u email="[email protected]" />)]
    end
  end
  # Happy-path stubs plus the command execution on issue case-1.
  def valid_process_stubs_case_1
    valid_process_stubs
    @stubs.post "/abc/rest/issue/case-1/execute" do |env|
      assert_equal 'yt.com', env[:url].host
      assert_equal 'sc', env[:request_headers]['Cookie']
      assert_equal 'zomg omg', env[:params]['command']
      assert_equal 'mojombo', env[:params]['runAs']
      [200, {}, '']
    end
  end
  # A commit message referencing an issue triggers the command execution.
  def test_push
    valid_process_stubs_case_1
    hash = payload
    hash['commits'].first['message'].sub! /Case#1/, '#case-1 zomg omg'
    svc = service(@data, hash)
    svc.receive_push
    @stubs.verify_stubbed_calls
  end
  # When the push branch is listed in the 'branch' setting, processing
  # happens; a bare issue reference defaults to the 'Fixed' command.
  def test_branch_match
    valid_process_stubs
    @stubs.post "/abc/rest/issue/case-2/execute" do |env|
      assert_equal 'yt.com', env[:url].host
      assert_equal 'sc', env[:request_headers]['Cookie']
      assert_equal 'Fixed', env[:params]['command']
      assert_equal 'mojombo', env[:params]['runAs']
      [200, {}, '']
    end
    hash = payload
    hash['commits'].first['message'].sub! /Case#1/, '#case-2!! zomg omg'
    hash['ref'] = 'refs/heads/master'
    svc = service(@data.merge({'branch' => 'master dev' }), hash)
    svc.receive_push
    @stubs.verify_stubbed_calls
  end
  def test_branch_mismatch
    payload = {'ref' => 'refs/heads/master'}
    svc = service({'base_url' => '', 'branch' => 'other'}, payload)
    # Missing payload settings would lead to an exception on processing. Processing
    # should never happen with mismatched branches.
    assert_nothing_raised { svc.receive_push }
  end
  # Non-distinct commits are still processed when 'process_distinct' is off.
  def test_process_not_distinct
    valid_process_stubs_case_1
    hash = payload
    hash['commits'].each { |commit|
      commit['distinct'] = false
    }
    hash['commits'].first['message'].sub! /Case#1/, '#case-1 zomg omg'
    svc = service(@data.merge({'process_distinct' => false}), hash)
    svc.receive_push
    @stubs.verify_stubbed_calls
  end
  # Distinct commits are processed when 'process_distinct' is on.
  def test_process_distinct
    valid_process_stubs_case_1
    hash = payload
    hash['commits'].first['message'].sub! /Case#1/, '#case-1 zomg omg'
    svc = service(@data.merge({'process_distinct' => true}), hash)
    svc.receive_push
    @stubs.verify_stubbed_calls
  end
  # With 'process_distinct' on, non-distinct commits must trigger no HTTP
  # call at all: no stubs are declared, so any request would raise.
  def test_dont_process_not_distinct
    hash = payload
    hash['commits'].each { |commit|
      commit['distinct'] = false
    }
    hash['commits'].first['message'].sub! /Case#1/, '#case-1 zomg omg'
    svc = service(@data.merge({'process_distinct' => true}), hash)
    svc.receive_push
    @stubs.verify_stubbed_calls
  end
  def service(*args)
    super Service::YouTrack, *args
  end
end
| 26.42446 | 83 | 0.629186 |
9108592ec54894104b80dcac2831b03a4c36c7dd | 1,189 | require "language/node"
# Homebrew formula for the TypeScript compiler, installed from the npm
# registry tarball into libexec with its executables symlinked into bin.
class Typescript < Formula
  desc "Language for application scale JavaScript development"
  homepage "https://www.typescriptlang.org/"
  url "https://registry.npmjs.org/typescript/-/typescript-3.6.2.tgz"
  sha256 "9495625742582db7cb5cc04d34a0b56b8b4b7af1b56b87944fbf30be439a1641"
  head "https://github.com/Microsoft/TypeScript.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "27a07cd06eda691d289ff1060a3ed6b27dc345356bce6ca54c7de0a6a8dca777" => :mojave
    sha256 "8987c59e518465529117e948e46069262bfb2c78b0b25aa8e1f5c80a8e2ef57a" => :high_sierra
    sha256 "3c85ee0c66316904b79614cd7c1d1897381d2a8730ab3182def8142ac922c4f5" => :sierra
  end
  depends_on "node"
  def install
    # Standard npm-package install: unpack into libexec and expose bin/.
    system "npm", "install", *Language::Node.std_npm_install_args(libexec)
    bin.install_symlink Dir["#{libexec}/bin/*"]
  end
  test do
    # Compile a trivial TypeScript class and check that tsc emitted JS.
    (testpath/"test.ts").write <<~EOS
      class Test {
        greet() {
          return "Hello, world!";
        }
      };
      var test = new Test();
      document.body.innerHTML = test.greet();
    EOS
    system bin/"tsc", "test.ts"
    assert_predicate testpath/"test.js", :exist?, "test.js was not generated"
  end
end
| 30.487179 | 93 | 0.71741 |
ac9fa140ae41216b3e6c97470bd41327c3ad4001 | 2,321 | require File.expand_path("../../helpers", __FILE__)
class ExpressionSet < Test::Unit::TestCase
  # Asserts that parsing +pattern+ yields a character set whose members
  # expand to +short+ (character-range form) and, when the full form is
  # requested, to +full+ (unicode-property form).
  def assert_single_member_expansion(pattern, short, full)
    set = RP.parse(pattern).first
    assert_equal [short], set.expand_members
    assert_equal [full], set.expand_members(true)
  end
  private :assert_single_member_expansion
  def test_expression_set_expand_members_digit
    assert_single_member_expansion('[\d]', '0-9', '\p{Digit}')
  end
  def test_expression_set_expand_members_nondigit
    assert_single_member_expansion('[\D]', '^0-9', '\P{Digit}')
  end
  def test_expression_set_expand_members_word
    assert_single_member_expansion('[\w]', 'A-Za-z0-9_', '\p{Word}')
  end
  def test_expression_set_expand_members_nonword
    assert_single_member_expansion('[\W]', '^A-Za-z0-9_', '\P{Word}')
  end
  def test_expression_set_expand_members_space
    assert_single_member_expansion('[\s]', ' \t\f\v\n\r', '\p{Space}')
  end
  def test_expression_set_expand_members_nonspace
    assert_single_member_expansion('[\S]', '^ \t\f\v\n\r', '\P{Space}')
  end
  def test_expression_set_expand_members_xdigit
    assert_single_member_expansion('[\h]', '0-9A-Fa-f', '\p{Xdigit}')
  end
  def test_expression_set_expand_members_nonxdigit
    assert_single_member_expansion('[\H]', '^0-9A-Fa-f', '\P{Xdigit}')
  end
  def test_expression_set_include
    set = RP.parse('[ac-eh\s[:digit:]\x20[b]]').first
    # Members found directly, or recursively inside the nested set.
    ['a', 'c-e', 'h', '\s', '[:digit:]', '\x20', 'b'].each do |member|
      assert set.include?(member)
    end
    assert set.include?('a', true)
    refute set.include?('b', true) # should not include b directly
    # Delimiters and unrelated members are never reported as included.
    [']', '[', 'x', '\S'].each { |member| refute set.include?(member) }
    nested = set.last
    assert nested.include?('b')
    refute nested.include?('a')
  end
end
d56b4256cf1ed80b54685e2df6fb10010dd343ee | 660 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Railstime
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 33 | 82 | 0.765152 |
b9aa2a9da9ae166311d0a14cac674658d3d82ad0 | 318 | module TD::Types
# Contains information about saved card credentials.
#
# @attr id [String] Unique identifier of the saved credentials.
# @attr title [String] Title of the saved credentials.
class SavedCredentials < Base
attribute :id, TD::Types::String
attribute :title, TD::Types::String
end
end
| 28.909091 | 65 | 0.720126 |
ab2aabeb7cc5487cb036b2729d6c8762f04278c9 | 30 | module BuyTypeListsHelper
end
| 10 | 25 | 0.9 |
bfe69a0499e5d972c748b1016868aca1dde2c061 | 3,894 |
class Printer
def self.warning_message
puts "======================================================================================================"
puts "!!!!!!!!! The doctor, team or specialty or language that you have choosen does not exit !!!!!!!!!"
puts "======================================================================================================"
end
def self.warning_message_team
puts "======================================================================================================"
puts "!!!!!!!! You've chosen a doctor that either does not exist or is not part of any team !!!!!!!!"
puts "======================================================================================================"
end
def self.print_from_arr_of_s(array_to_print)
if array_to_print.length == 0 || array_to_print == nil
warning_message
else
puts "<<<<<<<<<<<< HERE IS THE LIST: >>>>>>>>>>>"
i = 0
while i < array_to_print.size
puts array_to_print[i]
i+=1
end
puts "^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^"
puts "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
end
end
def self.list_right_options
puts "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
puts "!!!!!! Please enter either 1, 2, 3, 4, 5, 6 or 7 !!!!!!!!!!"
puts "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
end
def self.menu_screen
puts "Choose from the following menu:"
puts "1) List of all providers"
puts "2) Details on a specific provider"
puts "3) List of providers by their team"
puts "4) List of providers by their specialty"
puts "5) List of providers by their languages"
puts "6) Get a specific provider's team?"
puts "7) Exit the program"
end
def self.print_from_arr_of_o(instances_of_objects)
if instances_of_objects == nil || instances_of_objects.size == 0
warning_message
else
puts "<<<<<<<<<<<< HERE IS THE LIST: >>>>>>>>>>>"
instances_of_objects.each do |object|
puts object.name
end
puts "^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^"
puts "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
end
end
def self.print_whole_profile(provider_instance)
puts "=============================="
if provider_instance.team != nil
puts "#{provider_instance.name}'s team: #{provider_instance.team}"
end
specialties_list = ""
provider_instance.specialites.each {|specialty| specialties_list = "#{specialties_list} #{specialty}"}
specialties_list = specialties_list.strip.gsub(" ", ", ")
languages_list = ""
provider_instance.languages.each {|languages| languages_list = "#{languages_list} #{languages}"}
languages_list = languages_list.strip.gsub(" ", ", ")
puts "#{provider_instance.name}'s specialties: #{specialties_list}"
puts "#{provider_instance.name}'s languages: #{languages_list}"
puts "#{provider_instance.name}'s qualifications: #{provider_instance.qualifications}"
if provider_instance.title != nil
puts "#{provider_instance.name}'s title: #{provider_instance.title}"
end
puts "=============================="
end
def self.get_choice_from_above
puts "Please choose from the list above to get the relevant providers:"
user_input = gets.strip
end
def self.print_this(for_printing)
puts "*****************************************************"
puts " #{for_printing} "
puts "*****************************************************"
end
end
| 33.282051 | 113 | 0.4453 |
6aa4f073b4a9f8e156f494cf5fab33263f3748bd | 2,223 | require 'rails_helper'
# Validates that OfferedCourseOptionDetailsCheck#validate! only succeeds
# when provider, course, course option and study mode are all mutually
# consistent, and raises a descriptive InvalidStateError otherwise.
RSpec.describe OfferedCourseOptionDetailsCheck do
  let(:course_option) { create(:course_option, study_mode: :full_time) }
  it 'is successful when all the provided details correspond to the course option' do
    service = described_class.new(provider_id: course_option.provider.id,
                                  course_id: course_option.course.id,
                                  course_option_id: course_option.id,
                                  study_mode: course_option.study_mode)
    expect { service.validate! }.not_to raise_error
  end
  it 'throws an InvalidProviderError when the provider does not correspond to the course option' do
    # An unrelated provider, not the one the course option belongs to.
    provider = create(:provider)
    service = described_class.new(provider_id: provider.id,
                                  course_id: course_option.course.id,
                                  course_option_id: course_option.id,
                                  study_mode: course_option.study_mode)
    expect { service.validate! }.to raise_error(OfferedCourseOptionDetailsCheck::InvalidStateError, 'Invalid provider for CourseOption')
  end
  it 'throws an InvalidCourseError when the course does not correspond to the course option' do
    # An unrelated course, not the one the course option belongs to.
    course = create(:course)
    service = described_class.new(provider_id: course_option.provider.id,
                                  course_id: course.id,
                                  course_option_id: course_option.id,
                                  study_mode: course_option.study_mode)
    expect { service.validate! }.to raise_error(OfferedCourseOptionDetailsCheck::InvalidStateError, 'Invalid course for CourseOption')
  end
  it 'throws an InvalidStudyModeError when the study mode does not correspond to the course option' do
    # The course option above is full_time, so part_time must mismatch.
    study_mode = :part_time
    service = described_class.new(provider_id: course_option.provider.id,
                                  course_id: course_option.course.id,
                                  course_option_id: course_option.id,
                                  study_mode: study_mode)
    expect { service.validate! }.to raise_error(OfferedCourseOptionDetailsCheck::InvalidStateError, 'Invalid study mode for CourseOption')
  end
end
| 49.4 | 138 | 0.651372 |
bb56349f6be9a63f5b8d079838cd017f57cfc97b | 6,637 | # Farmbot Device models all data related to an actual FarmBot in the real world.
class Device < ApplicationRecord
  DEFAULT_MAX_CONFIGS = 300
  DEFAULT_MAX_IMAGES = 100
  DEFAULT_MAX_LOGS = 1000
  TIMEZONES = TZInfo::Timezone.all_identifiers
  BAD_TZ = "%{value} is not a valid timezone"
  THROTTLE_ON = "Device is sending too many logs (%s). " \
                "Suspending log storage and display until %s."
  THROTTLE_OFF = "Cooldown period has ended. " \
                 "Resuming log storage."
  CACHE_KEY = "devices:%s"
  PLURAL_RESOURCES = %i(alerts farmware_envs farm_events farmware_installations
                        images logs peripherals pin_bindings plant_templates
                        points regimens saved_gardens sensor_readings sensors
                        sequences token_issuances tools webcam_feeds
                        diagnostic_dumps fragments)
  # Declare a has_many (destroyed with the device) for every plural resource.
  PLURAL_RESOURCES.map { |resources| has_many resources, dependent: :destroy }
  SINGULAR_RESOURCES = {
    fbos_config: FbosConfig,
    firmware_config: FirmwareConfig,
    web_app_config: WebAppConfig,
  }
  # Each singular resource gets a has_one plus a lazy-creating reader: the
  # generated method builds the record on first access if it is missing.
  SINGULAR_RESOURCES.map do |(name, klass)|
    has_one name, dependent: :destroy
    define_method(name) { super() || klass.create!(device: self) }
  end
  has_many :in_use_tools
  has_many :in_use_points
  has_many :users
  validates_presence_of :name
  validates :timezone, inclusion: {
    in: TIMEZONES,
    message: BAD_TZ,
    allow_nil: true,
  }
  # Give the user back the amount of logs they are allowed to view.
  def limited_log_list
    Log
      .order(created_at: :desc)
      .where(device_id: self.id)
      .limit(max_log_count || DEFAULT_MAX_LOGS)
  end
  # Logs beyond the device's viewing quota (candidates for deletion).
  def excess_logs
    Log
      .where
      .not(id: limited_log_list.pluck(:id))
      .where(device_id: self.id)
  end
  # Request-scoped "current device", set by authentication.
  def self.current
    RequestStore.store[:device]
  end
  def self.current=(dev)
    RequestStore.store[:device] = dev
  end
  # Sets Device.current to `self` and returns it to the previous value when
  # finished running block. Usually this is unnecessary, but may be required in
  # background jobs. If you are not receiving auto_sync data on your client,
  # you probably need to use this method.
  def auto_sync_transaction
    prev = Device.current
    Device.current = self
    yield
    Device.current = prev
  end
  # The device's UTC offset, in whole hours (0 when no timezone is set).
  def tz_offset_hrs
    Time.now.in_time_zone(self.timezone || "UTC").utc_offset / 1.hour
  end
  # Convenience scopes over the points table, filtered by pointer_type.
  def plants
    points.where(pointer_type: "Plant")
  end
  def tool_slots
    points.where(pointer_type: "ToolSlot")
  end
  def generic_pointers
    points.where(pointer_type: "GenericPointer")
  end
  TIMEOUT = 150.seconds
  # Like Device.find, but with 150 seconds of caching to avoid DB calls.
  def self.cached_find(id)
    Rails
      .cache
      .fetch(CACHE_KEY % id, expires_in: TIMEOUT) { Device.find(id) }
  end
  def refresh_cache
    # Why? Device.new(self.as_json)???
    #
    # "Some objects cannot be dumped: if the objects to be dumped include
    # bindings, procedure or method objects, instances of class IO, or singleton
    # objects, a TypeError will be raised."
    # https://ruby-doc.org/core-2.3.1/Marshal.html
    # TODO: Someone plz send help! - RC
    Rails.cache.write(CACHE_KEY % self.id, Device.new(self.as_json))
  end
  # Sets the `throttled_at` field, but only if it is unpopulated.
  # Performs no-op if `throttled_at` was already set.
  def maybe_throttle(violation)
    # Some log validation errors will result in until_time being `nil`.
    if (violation && throttled_until.nil?)
      et = violation.ends_at
      reload.update_attributes!(throttled_until: et,
                                throttled_at: Time.now)
      refresh_cache
      cooldown = et.in_time_zone(self.timezone || "UTC").strftime("%I:%M%p")
      info = [violation.explanation, cooldown]
      cooldown_notice(THROTTLE_ON % info, et, "warn")
    end
  end
  # Clears the throttle window (if any) and announces the end of cooldown.
  def maybe_unthrottle
    if throttled_until.present?
      old_time = throttled_until
      reload # <= WHY!?! TODO: Find out why it crashes without this.
        .update_attributes!(throttled_until: nil, throttled_at: nil)
      refresh_cache
      cooldown_notice(THROTTLE_OFF, old_time, "info")
    end
  end
  # Send a realtime message to a logged in user.
  def tell(message, channels = [], type = "info")
    log = Log.new({ device: self,
                    message: message,
                    created_at: Time.now,
                    channels: channels,
                    major_version: 99,
                    minor_version: 99,
                    meta: {},
                    type: type })
    json = LogSerializer.new(log).as_json.to_json
    Transport.current.amqp_send(json, self.id, "logs")
    return log
  end
  # Delivers +message+ about a throttle boundary at +throttle_time+. Long
  # cooldowns (over 2 hours away) go by email, short ones as a toast.
  def cooldown_notice(message, throttle_time, type, now = Time.current)
    hours = ((throttle_time - now) / 1.hour).round
    channels = [(hours > 2) ? "email" : "toast"]
    tell(message, channels, type).save
  end
  def regimina
    regimens # :(
  end
  # CONTEXT:
  #  * We tried to use Rails low level caching, but it hit marshalling issues.
  #  * We did a hack with Device.new(self.as_json) to get around it.
  #  * Mutations does not allow unsaved models
  #  * We converted the `model :device, class: Device` to:
  #    `duck :device, methods [:id, :is_device]`
  #
  # This method is not required, but adds a layer of safety.
  def is_device # SEE: Hack in Log::Create. TODO: Fix low level caching bug.
    true
  end
  # Routine (non-fatal) email-ish logs that have not been delivered yet.
  def unsent_routine_emails
    logs
      .where(sent_at: nil)
      .where(Log::IS_EMAIL_ISH) # `email` and `fatal_email`
      .where
      .not(Log::IS_FATAL_EMAIL) # Filter out `fatal_email`s
      .order(created_at: :desc)
  end
  # Helper method to create an auth token.
  # Used by sys admins to debug problems without performing a password reset.
  def create_token
    # If something manages to call this method, I'd like to be alerted of it.
    Rollbar.error("Someone is creating a debug user token", { device: self.id })
    fbos_version = Api::AbstractController::EXPECTED_VER
    SessionToken
      .as_json(users.first, "SUPER", fbos_version)
      .fetch(:token)
      .encoded
  end
  TOO_MANY_CONNECTIONS =
    "Your device is " +
    "reconnecting to the server too often. Please " +
    "see https://developer.farm.bot/docs/connectivity-issues"
  # Emails the device owner when the MQTT layer reports connection churn.
  # +username+ is the broker login, formatted "device_<id>".
  def self.connection_warning(username)
    device_id = username.split("_").last.to_i || 0
    self
      .find(device_id)
      .tell(TOO_MANY_CONNECTIONS, ["fatal_email"]) if self.exists?(device_id)
  end
end
| 31.755981 | 80 | 0.656471 |
39b8c536c9e105b24e3cd9576ca44c0e91e459f3 | 125 | require File.expand_path('../../lib/suitable_services', __FILE__)
# Global RSpec configuration: use Mocha as the mocking/stubbing framework.
RSpec.configure do |config|
  config.mock_with :mocha
end
| 20.833333 | 65 | 0.76 |
f88c18c888635147c25c1bbe7963e48000e72bf8 | 822 | # frozen_string_literal: true
require 'dopp/error'
module Dopp
  module Type
    # PDF type "Literal String".
    class Text
      include ::Dopp::Error

      # Characters that must be escaped inside a PDF literal string
      # (PDF 32000-1:2008, 7.3.4.2): backslash and parentheses.
      ESCAPE_PATTERN = /[\\()]/.freeze

      # Initialize.
      # @param [String] text String.
      def initialize(text)
        check_is_a!(text, String)
        @string = text
      end

      # Convert to string.
      # @return [String] Content.
      def to_s
        String.new('PDF:"').concat(@string, '"')
      end

      # Detailed description of this object.
      # @return [String] Description.
      def inspect
        String.new('#<').concat(
          self.class.name, ':',
          object_id.to_s, ' ', to_s, '>'
        )
      end

      # Render to string.
      # BUGFIX: backslashes and parentheses are now escaped so that the
      # emitted literal string is valid PDF even when the text contains
      # unbalanced parentheses.
      # @return [String] Content.
      def render
        escaped = @string.gsub(ESCAPE_PATTERN) { |char| "\\#{char}" }
        String.new('(').concat(escaped, ')')
      end
    end
  end
end
| 20.04878 | 48 | 0.537713 |
0101a91ce6640feafd396046f621359615e765b6 | 930 | # coding: utf-8
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "alohaha/settings"
Gem::Specification.new do |spec|
  spec.name          = "Alohaha"
  spec.version       = Settings::VERSION
  spec.authors       = ["jiunjiun"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Alohaha}
  spec.description   = %q{This API helper get Taoyuan airport flight info}
  spec.homepage      = ""
  spec.license       = "MIT"
  # Package every git-tracked file; executables come from bin/.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_dependency('curb')
  spec.add_dependency('iconv')
  spec.add_dependency('virtus')
end
| 33.214286 | 74 | 0.646237 |
ed4b406ec14019ef2b8c9b04137ec610e106a3d0 | 1,685 | class Codec2 < Formula
desc "Open source speech codec"
homepage "https://www.rowetel.com/?page_id=452"
# Linked from https://freedv.org/
url "https://github.com/drowe67/codec2/archive/v1.0.1.tar.gz"
sha256 "14227963940d79e0ec5af810f37101b30e1c7e8555abd96c56b3c0473abac8ef"
license "LGPL-2.1-only"
bottle do
sha256 cellar: :any, arm64_monterey: "bf90a6002cc03a2e1fe2716815f091da83e72353bf980fc64cca879cc795552d"
sha256 cellar: :any, arm64_big_sur: "dca98080fb9c5738ffcc298547ce0c92a79349b7f04fea8056d968f63c34c1ca"
sha256 cellar: :any, monterey: "b74c862b5802d2959ba8397e09bec3601f9290363fb33af1a6d53f9a27f1e1f5"
sha256 cellar: :any, big_sur: "5d4162b5b10568f57c326983cbebfe34c126bca31bd14923b0388d8f4ca785aa"
sha256 cellar: :any, catalina: "2834225209e520278515857dcada021ba2cc108f92131e8c6cc786070c336bf9"
sha256 cellar: :any, mojave: "a86e0264532c78b083ae12358ba569a43588c589c4f91569f620381e30a471b1"
sha256 cellar: :any_skip_relocation, x86_64_linux: "55802c4923f858e36c73a6d4e7488dd5a99e06b103f3e35c014f1e19f232c83d"
end
depends_on "cmake" => :build
def install
mkdir "build_osx" do
system "cmake", "..", *std_cmake_args, "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath,#{rpath}"
system "make", "install"
bin.install "demo/c2demo"
bin.install Dir["src/c2*"]
end
end
test do
# 8 bytes of raw audio data (silence).
(testpath/"test.raw").write([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].pack("C*"))
system "#{bin}/c2enc", "2400", "test.raw", "test.c2"
end
end
| 45.540541 | 123 | 0.69911 |
1a9373feeee07357bde4bfbe1476cfe627e297f6 | 5,348 | require 'spec/runner/options'
require 'spec/runner/option_parser'
require 'spec/runner/example_group_runner'
require 'spec/runner/command_line'
require 'spec/runner/drb_command_line'
require 'spec/runner/backtrace_tweaker'
require 'spec/runner/reporter'
require 'spec/runner/spec_parser'
require 'spec/runner/class_and_arguments_parser'
module Spec
# == ExampleGroups and Examples
#
# Rather than expressing examples in classes, RSpec uses a custom DSLL (DSL light) to
# describe groups of examples.
#
  # An ExampleGroup is the equivalent of a fixture in xUnit-speak. It is a metaphor for the context
  # in which you will run your executable example - a set of known objects in a known starting state.
  # We begin by describing
#
# describe Account do
#
# before do
# @account = Account.new
# end
#
# it "should have a balance of $0" do
# @account.balance.should == Money.new(0, :dollars)
# end
#
# end
#
# We use the before block to set up the Example (given), and then the #it method to
# hold the example code that expresses the event (when) and the expected outcome (then).
#
# == Helper Methods
#
# A primary goal of RSpec is to keep the examples clear. We therefore prefer
# less indirection than you might see in xUnit examples and in well factored, DRY production code. We feel
# that duplication is OK if removing it makes it harder to understand an example without
# having to look elsewhere to understand its context.
#
# That said, RSpec does support some level of encapsulating common code in helper
# methods that can exist within a context or within an included module.
#
# == Setup and Teardown
#
# You can use before and after within a Example. Both methods take an optional
# scope argument so you can run the block before :each example or before :all examples
#
# describe "..." do
# before :all do
# ...
# end
#
# before :each do
# ...
# end
#
# it "should do something" do
# ...
# end
#
# it "should do something else" do
# ...
# end
#
# after :each do
# ...
# end
#
# after :all do
# ...
# end
#
# end
#
# The <tt>before :each</tt> block will run before each of the examples, once for each example. Likewise,
# the <tt>after :each</tt> block will run after each of the examples.
#
# It is also possible to specify a <tt>before :all</tt> and <tt>after :all</tt>
# block that will run only once for each behaviour, respectively before the first <code>before :each</code>
# and after the last <code>after :each</code>. The use of these is generally discouraged, because it
# introduces dependencies between the examples. Still, it might prove useful for very expensive operations
# if you know what you are doing.
#
# == Local helper methods
#
# You can include local helper methods by simply expressing them within a context:
#
# describe "..." do
#
# it "..." do
# helper_method
# end
#
# def helper_method
# ...
# end
#
# end
#
# == Included helper methods
#
# You can include helper methods in multiple contexts by expressing them within
# a module, and then including that module in your context:
#
# module AccountExampleHelperMethods
# def helper_method
# ...
# end
# end
#
# describe "A new account" do
# include AccountExampleHelperMethods
# before do
# @account = Account.new
# end
#
# it "should have a balance of $0" do
# helper_method
# @account.balance.should eql(Money.new(0, :dollars))
# end
# end
#
# == Shared Example Groups
#
# You can define a shared Example Group, that may be used on other groups
#
# share_examples_for "All Editions" do
# it "all editions behaviour" ...
# end
#
# describe SmallEdition do
# it_should_behave_like "All Editions"
#
# it "should do small edition stuff" do
# ...
# end
# end
#
# You can also assign the shared group to a module and include that
#
# share_as :AllEditions do
# it "should do all editions stuff" ...
# end
#
# describe SmallEdition do
# it_should_behave_like AllEditions
#
# it "should do small edition stuff" do
# ...
# end
# end
#
# And, for those of you who prefer to use something more like Ruby, you
# can just include the module directly
#
# describe SmallEdition do
# include AllEditions
#
# it "should do small edition stuff" do
# ...
# end
# end
module Runner
class << self
def configuration # :nodoc:
@configuration ||= Spec::Example::Configuration.new
end
# Use this to configure various configurable aspects of
# RSpec:
#
# Spec::Runner.configure do |configuration|
# # Configure RSpec here
# end
#
# The yielded <tt>configuration</tt> object is a
# Spec::Example::Configuration instance. See its RDoc
# for details about what you can do with it.
#
def configure
yield configuration
end
end
end
end
| 28.296296 | 109 | 0.625841 |
1c26d274784b67b467a861d8e30a389d24851252 | 1,482 | class Mtools < Formula
desc "Tools for manipulating MSDOS files"
homepage "https://www.gnu.org/software/mtools/"
url "https://ftp.gnu.org/gnu/mtools/mtools-4.0.25.tar.gz"
mirror "https://ftpmirror.gnu.org/mtools/mtools-4.0.25.tar.gz"
sha256 "8b6d4a75122984350186250aaa6063665bfa69100253fd77b972d2744e07dc08"
license "GPL-3.0-or-later"
livecheck do
url :stable
end
bottle do
cellar :any_skip_relocation
sha256 "cb2c2d3fc7800cdea12f6be7ed16caba9a4c7c19ab88e3e63a5c1918997f574d" => :catalina
sha256 "6fdc5e5b10131648eeeadd8a76ceb81b6f06a0a7b8918fffae500b0d0b41d0ad" => :mojave
sha256 "753dd7f093256f2e8e0a609e9196f1365953e68661aeb4849441aef6c1b168da" => :high_sierra
end
conflicts_with "multimarkdown", because: "both install `mmd` binaries"
# 4.0.25 doesn't include the proper osx locale headers.
patch :DATA
def install
args = %W[
LIBS=-liconv
--disable-debug
--prefix=#{prefix}
--sysconfdir=#{etc}
--without-x
]
system "./configure", *args
system "make"
ENV.deparallelize
system "make", "install"
end
test do
assert_match /#{version}/, shell_output("#{bin}/mtools --version")
end
end
__END__
diff --git a/sysincludes.h b/sysincludes.h
index 056218e..ba3677b 100644
--- a/sysincludes.h
+++ b/sysincludes.h
@@ -279,6 +279,8 @@ extern int errno;
#include <pwd.h>
#endif
+#include <xlocale.h>
+#include <strings.h>
#ifdef HAVE_STRING_H
# include <string.h>
| 24.7 | 93 | 0.709177 |
bfa3bb1faa3f8e4bed0eb1e767f48ea050e744a1 | 705 | $:.push File.expand_path("../lib", __FILE__)
# Gem packaging manifest for the administrate CKEditor field plugin.
Gem::Specification.new do |spec|
  # Identity and ownership.
  spec.name    = "administrate-field-ckeditor"
  spec.version = "0.0.9"
  spec.authors = ["Rikki Pitt"]
  spec.email   = ["[email protected]"]

  # Listing shown on rubygems.org.
  spec.homepage    = "https://github.com/jemcode/administrate-field-ckeditor"
  spec.summary     = "Plugin for adding ckeditor support in Administrate"
  spec.description = spec.summary
  spec.license     = "MIT"

  # Packaged files come straight from git's index.
  spec.require_paths = ["lib"]
  spec.files         = `git ls-files`.split("\n")
  spec.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")

  # Runtime dependencies.
  spec.add_dependency "administrate", ">= 0.3", "< 1.0"
  spec.add_dependency "rails", ">= 4.2", "< 7.0"
  spec.add_dependency "ckeditor", "~> 4.1"
end
| 33.571429 | 73 | 0.66383 |
1103ee23b6411eef332103031ddde4a742f3e480 | 85 | json.array! @ag_apartments, partial: 'ag_apartments/ag_apartment', as: :ag_apartment
| 42.5 | 84 | 0.8 |
7939e7d714f9909ae66c6633e36bf1dbd1acd7f6 | 543 | class Shop < ApplicationRecord
belongs_to :user ,optional: true
validates :name, presence: true, length: { maximum: 100 }
validates :introduction, presence: true, length: { maximum: 255 }
validates :time, presence: true, length: { maximum: 50 }
validates :holiday, presence: true, length: { maximum: 50 }
validates :phone, presence: true, length: { maximum: 20 }
validates :address, presence: true, length: { maximum: 200 }
mount_uploader :picture, ImageUploader
geocoded_by :address
after_validation :geocode
end
| 33.9375 | 67 | 0.714549 |
87dd58f3f05c97107e74e89d4f9d6bc3f7173ea1 | 7,997 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160530223238) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "admin_users", force: :cascade do |t|
t.string "name", null: false
t.string "last_names", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.inet "current_sign_in_ip"
t.inet "last_sign_in_ip"
t.integer "role", default: 0, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "admin_users", ["email"], name: "index_admin_users_on_email", unique: true, using: :btree
add_index "admin_users", ["reset_password_token"], name: "index_admin_users_on_reset_password_token", unique: true, using: :btree
create_table "app_user_markers", force: :cascade do |t|
t.integer "app_user_id", null: false
t.integer "marker_id", null: false
t.integer "zone_id", null: false
t.float "lat", null: false
t.float "lng", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "app_user_markers", ["app_user_id"], name: "index_app_user_markers_on_app_user_id", using: :btree
add_index "app_user_markers", ["marker_id"], name: "index_app_user_markers_on_marker_id", using: :btree
add_index "app_user_markers", ["zone_id"], name: "index_app_user_markers_on_zone_id", using: :btree
create_table "app_user_survey_responses", force: :cascade do |t|
t.integer "app_user_survey_id", null: false
t.integer "survey_field_id", null: false
t.text "response", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "app_user_survey_responses", ["app_user_survey_id"], name: "index_app_user_survey_responses_on_app_user_survey_id", using: :btree
add_index "app_user_survey_responses", ["survey_field_id"], name: "index_app_user_survey_responses_on_survey_field_id", using: :btree
create_table "app_user_surveys", force: :cascade do |t|
t.integer "app_user_id", null: false
t.integer "survey_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "app_user_surveys", ["app_user_id"], name: "index_app_user_surveys_on_app_user_id", using: :btree
add_index "app_user_surveys", ["survey_id"], name: "index_app_user_surveys_on_survey_id", using: :btree
create_table "app_user_tokens", force: :cascade do |t|
t.integer "user_id", limit: 8, null: false
t.integer "identity", null: false
t.text "token", null: false
t.text "secret", null: false
t.datetime "expires_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "app_users", force: :cascade do |t|
t.string "name", null: false
t.string "last_names", null: false
t.string "mail", null: false
t.string "password", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "markers", force: :cascade do |t|
t.string "name", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "icon_file_name"
t.string "icon_content_type"
t.integer "icon_file_size"
t.datetime "icon_updated_at"
end
create_table "survey_field_options", force: :cascade do |t|
t.integer "survey_field_id", null: false
t.string "name", null: false
t.text "option_value", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "survey_field_options", ["survey_field_id"], name: "index_survey_field_options_on_survey_field_id", using: :btree
create_table "survey_field_validations", force: :cascade do |t|
t.integer "survey_field_id", null: false
t.integer "identity", null: false
t.text "validation_args"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "survey_field_validations", ["survey_field_id"], name: "index_survey_field_validations_on_survey_field_id", using: :btree
create_table "survey_fields", force: :cascade do |t|
t.integer "survey_id", null: false
t.integer "position", default: 0, null: false
t.integer "data_type", null: false
t.integer "identity", null: false
t.string "name", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "survey_fields", ["survey_id"], name: "index_survey_fields_on_survey_id", using: :btree
create_table "surveys", force: :cascade do |t|
t.integer "zone_id", null: false
t.boolean "is_active", default: false, null: false
t.string "name", null: false
t.string "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "surveys", ["zone_id"], name: "index_surveys_on_zone_id", using: :btree
create_table "zones", force: :cascade do |t|
t.string "name", null: false
t.float "lat", null: false
t.float "lng", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_foreign_key "app_user_markers", "app_users", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_markers", "markers", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_markers", "zones", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_survey_responses", "app_user_surveys", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_survey_responses", "survey_fields", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_surveys", "app_users", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_surveys", "surveys", on_update: :cascade, on_delete: :cascade
add_foreign_key "app_user_tokens", "app_users", column: "user_id", on_update: :cascade, on_delete: :cascade
add_foreign_key "survey_field_options", "survey_fields", on_update: :cascade, on_delete: :cascade
add_foreign_key "survey_field_validations", "survey_fields", on_update: :cascade, on_delete: :cascade
add_foreign_key "survey_fields", "surveys", on_update: :cascade, on_delete: :cascade
add_foreign_key "surveys", "zones", on_update: :cascade, on_delete: :cascade
end
| 47.60119 | 141 | 0.672377 |
e878eaae5dc3cb7c39b1121132a1f8b9696429dc | 2,367 | require 'spec_helper'
describe Mastermind::Game_Engine do
let(:start) { Mastermind::Game_Engine.new }
before :each do
allow_message_expectations_on_nil
end
it 'display welcome message' do
allow(start).to receive(:puts).and_return(nil)
expect(start.welcome).to be (nil)
end
it 'prints logo' do
allow(start).to receive(:puts).and_return(nil)
expect(start.welcome).to be nil
end
it 'asks the user to play game, quit game or read instruction' do
allow(start).to receive(:puts).and_return(nil)
allow(start).to receive(:ask).and_return(nil)
expect(start.ask).to be nil
end
it 'allows user to choose to read instructions' do
allow(start).to receive(:user_input).and_return("i")
allow(start).to receive(:puts).and_return(nil)
allow(start).to receive(:instructions).and_return(nil)
expect(start.ask).to be nil
end
it 'asks user to proceed to play after reading instructions' do
allow(start).to receive(:ask).and_return(nil)
allow(start).to receive(:puts).and_return(nil)
expect(start.instructions).to be nil
end
it 'asks users to select difficulty level' do
allow(start).to receive(:user_input).and_return("p")
allow(start).to receive(:difficulty).and_return(0)
allow(start).to receive(:puts).and_return(nil)
allow(start).to receive(:game).and_return(nil)
expect(start.ask).to eq(nil)
end
it 'asks users to view leaderboard' do
allow(start).to receive(:user_input).and_return("l")
allow(start).to receive(:puts).and_return(nil)
allow(start).to receive(:leaderboard).and_return(nil)
expect(start.ask).to eq(nil)
end
it 'allows users to quit the game' do
allow(start).to receive(:user_input).and_return("q")
allow(start).to receive(:puts).and_return(nil)
expect{start.ask}.to raise_error SystemExit
end
it 'throws an error message if invalid entry is made' do
allow(start).to receive(:user_input).and_return("k")
allow(start).to receive(:puts).and_return(nil)
allow(start).to receive(:invalid).and_return(nil)
expect(start.ask).to be nil
end
it 'rrturns invalid message' do
allow(start).to receive(:puts).and_return(nil)
allow(start).to receive(:ask).and_return(nil)
expect(start.invalid).to be nil
end
end | 33.338028 | 68 | 0.683988 |
d5fccd1c1a0ee3613a35d5fbce9b6d6b2e8afdf8 | 5,593 | module Sp2db
class BaseTable
attr_accessor :name,
:sheet_name,
:worksheet,
:find_columns,
:spreadsheet_id,
:client
def initialize opts={}
if opts[:name].blank? && opts[:sheet_name].blank?
raise "Must specify at least one of name or sheet name"
end
opts.each do |k, v|
self.send "#{k}=", v
end
self.sheet_name ||= opts[:sheet_name] = config[:sheet_name] || worksheet.try(:title)
end
def active_record?
false
end
# Table name
def name
@name ||= sheet_name.try(:to_sym) || raise("Name cannot be nil")
end
def spreadsheet_id
@spreadsheet_id ||= config[:spreadsheet_id] || Sp2db.config.spreadsheet_id
end
def name=n
@name = n&.to_sym
end
def find_columns
@find_columns ||= config[:find_columns] || Sp2db.config.default_find_columns
end
def required_columns
@required_columns ||= config[:required_columns] || []
end
def client
@client = Sp2db.client
end
def spreadsheet
client.spreadsheet spreadsheet_id
end
def sheet_name
@sheet_name ||= (config[:sheet_name] || name)&.to_sym
end
def worksheet
@worksheet = spreadsheet.worksheet_by_name(self.sheet_name.to_s)
end
def sp_data
retries = 2
begin
raw_data = CSV.parse worksheet.export_as_string
rescue Google::Apis::RateLimitError => e
retries -= 1
sleep(5)
retry if retries >= 0
raise e
end
data = process_data raw_data, source: :sp
data
end
def csv_data
raw_data = CSV.parse File.open(csv_file)
data = process_data raw_data, source: :csv
data
end
def header_row
# @header_row ||= config[:header_row] || 0
0
end
def csv_folder
folder = "#{Sp2db.config.export_location}/csv"
FileUtils.mkdir_p folder
folder
end
def csv_file
"#{csv_folder}/#{name}.csv"
end
def sp_to_csv opts={}
write_csv to_csv(sp_data)
end
def write_csv data
File.open csv_file, "wb" do |f|
f.write data
end
csv_file
end
# Array of hash data to csv format
def to_csv data
attributes = data.first&.keys || []
CSV.generate(headers: true) do |csv|
csv << attributes
data.each do |row|
csv << attributes.map do |att|
row[att]
end
end
end
end
# Global config
def config
{}.with_indifferent_access
end
def process_data raw_data, opts={}
raw_data = data_transform raw_data, opts unless opts[:skip_data_transform]
raw_data = raw_filter raw_data, opts unless opts[:skip_data_filter]
data = call_process_data raw_data, opts
data
end
# Tranform data to standard csv format
def data_transform raw_data, opts={}
if config[:data_transform].present?
config[:data_transform].call *args, &block
else
raw_data
end
end
protected
# Remove header which starts with "#"
def valid_header? h
h.present? && !h.match("^#.*")
end
# Header with "!" at the beginning or ending is required
def require_header? h
h.present? && (h.match("^!.*") || h.match(".*?!$"))
end
# Convert number string to number
def standardize_cell_val v
v = ((float = Float(v)) && (float % 1.0 == 0) ? float.to_i : float) rescue v
v = v.force_encoding("UTF-8") if v.is_a?(String)
v
end
def call_process_data raw_data, opts={}
data = raw_data
if (data_proc = config[:process_data]).present?
data = data_proc.call raw_data
end
data
end
# Remove uncessary columns and invalid rows from csv format data
def raw_filter raw_data, opts={}
raw_header = raw_data[header_row].map.with_index do |h, idx|
is_valid = valid_header?(h)
{
idx: idx,
is_remove: !is_valid,
is_required: require_header?(h),
name: is_valid && h.gsub(/\s*/, '').gsub(/!/, '').downcase
}
end
rows = raw_data[(header_row + 1)..-1].map.with_index do |raw, rdx|
row = {}.with_indifferent_access
raw_header.each do |h|
val = raw[h[:idx]]
next if h[:is_remove]
if h[:is_required] && val.blank?
row = {}
break
end
row[h[:name]] = standardize_cell_val val
end
next if row.values.all?(&:blank?)
row[:id] = rdx + 1 if find_columns.include?(:id) && row[:id].blank?
row
end.compact
.reject(&:blank?)
rows = rows.select do |row|
if required_columns.present?
required_columns.all? {|col| row[col].present? }
else
true
end
end
rows
end
class << self
def all_tables
ModelTable.all_tables + NonModelTable.all_tables
end
def table_by_names *names
all_tables = self.all_tables
if names.blank?
all_tables
else
names.map do |n|
all_tables.find {|tb| tb.name == n.to_sym} || raise("Not found: #{n}")
end
end
end
def sp_to_csv *table_names
table_by_names(*table_names).map(&__method__)
end
def model_table_class
ModelTable
end
delegate :sp_to_db, :csv_to_db, to: :model_table_class
end
end
end
| 22.643725 | 90 | 0.570177 |
08dc68646709867692f74338f1b289bc9a7abb85 | 216 | # Add your variables here
first_number = 4
second_number = 3
sum = first_number + second_number
difference = first_number - second_number
product = first_number * second_number
quotient = first_number / second_number | 30.857143 | 41 | 0.810185 |
1d268600e22630e8e4095f7f8230e2532c8ab9ee | 616 | exclude :test_feed, "needs investigation"
exclude :test_feed_before_first_next, "needs investigation"
exclude :test_feed_mixed, "needs investigation"
exclude :test_feed_yielder, "needs investigation"
exclude :test_generator, "needs investigation"
exclude :test_initialize, "needs investigation"
exclude :test_initialize_copy, "needs investigation"
exclude :test_inspect_encoding, "needs investigation"
exclude :test_peek_modify, "needs investigation"
exclude :test_peek_values, "needs investigation"
exclude :test_rewind_clear_feed, "needs investigation"
exclude :test_with_index_large_offset, "needs investigation"
| 47.384615 | 60 | 0.844156 |
21fcea6b778f4380c9f53ecfbec9f52e9097ba5c | 391 | module CandidateInterface
class Reference::RefereeNameForm
include ActiveModel::Model
attr_accessor :name
validates :name, presence: true, length: { minimum: 2, maximum: 200 }
def self.build_from_reference(reference)
new(name: reference.name)
end
def save(reference)
return false unless valid?
reference.update!(name: name)
end
end
end
| 19.55 | 73 | 0.693095 |
2675827c2515cf8853af7babf169bef137fd67f5 | 753 | module Txbr
class CampaignsApi
CAMPAIGN_BATCH_SIZE = 100 # from braze docs
CAMPAIGN_LIST_PATH = 'campaigns/list'.freeze
CAMPAIGN_DETAILS_PATH = 'campaigns/details'.freeze
attr_reader :braze_api
def initialize(braze_api)
@braze_api = braze_api
end
def each(&block)
return to_enum(__method__) unless block_given?
page = 0
loop do
campaigns = braze_api.get_json(CAMPAIGN_LIST_PATH, page: page, include_archived: false)
campaigns['campaigns'].each(&block)
break if campaigns['campaigns'].size < CAMPAIGN_BATCH_SIZE
page += 1
end
end
def details(campaign_id:)
braze_api.get_json(CAMPAIGN_DETAILS_PATH, campaign_id: campaign_id)
end
end
end
| 25.1 | 95 | 0.686587 |
f88f9d0ff8de33849ae0f59e62755a8301de9d55 | 255 | require 'test/unit'
require 'rubygems'
require 'mocha'
ENV['RAILS_ENV'] = 'test'
require 'active_support'
require File.dirname(__FILE__) + '/../lib/rails-footnotes/footnotes'
require File.dirname(__FILE__) + '/../lib/rails-footnotes/notes/abstract_note' | 28.333333 | 78 | 0.752941 |
790e917b22f9583cbc0a9cf33be2e7ba044e08fb | 1,676 | module Hbc
class CLI
class InternalAppcastCheckpoint < InternalUseBase
def self.run(*args)
calculate = args.include? "--calculate"
cask_tokens = cask_tokens_from(args)
raise CaskUnspecifiedError if cask_tokens.empty?
if cask_tokens.all? { |t| t =~ %r{^https?://} && t !~ /\.rb$/ }
appcask_checkpoint_for_url(cask_tokens)
else
appcask_checkpoint(cask_tokens, calculate)
end
end
def self.appcask_checkpoint_for_url(urls)
urls.each do |url|
appcast = DSL::Appcast.new(url)
puts appcast.calculate_checkpoint[:checkpoint]
end
end
def self.appcask_checkpoint(cask_tokens, calculate)
count = 0
cask_tokens.each do |cask_token|
cask = Hbc.load(cask_token)
if cask.appcast.nil?
opoo "Cask '#{cask}' is missing an `appcast` stanza."
else
if calculate
result = cask.appcast.calculate_checkpoint
checkpoint = result[:checkpoint]
else
checkpoint = cask.appcast.checkpoint
end
if checkpoint.nil?
onoe "Could not retrieve `appcast` checkpoint for cask '#{cask}': #{result[:command_result].stderr}"
else
puts cask_tokens.count > 1 ? "#{checkpoint} #{cask}": checkpoint
count += 1
end
end
end
count == cask_tokens.count
end
def self.help
"prints or calculates a given Cask's or URL's appcast checkpoint"
end
def self.needs_init?
true
end
end
end
end
| 27.032258 | 114 | 0.569212 |
62572b841dbfdcd8b11543b1b5e000b183aa54c8 | 123 | class AddFilenameToCharts < ActiveRecord::Migration[5.2]
def change
add_column :charts, :filename, :string
end
end
| 20.5 | 56 | 0.747967 |
bbec74911be72ec3d303e3dc150ac9d99a1b7165 | 9,769 | #
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef-utils" unless defined?(ChefUtils::CANARY)
require_relative "config"
require_relative "exceptions"
require_relative "logger"
require_relative "path_helper"
require_relative "windows"
require_relative "mixin/dot_d"
require_relative "mixin/credentials"
module ChefConfig
class WorkstationConfigLoader
include ChefConfig::Mixin::DotD
include ChefConfig::Mixin::Credentials
# Path to a config file requested by user, (e.g., via command line option). Can be nil
attr_accessor :explicit_config_file
# The name of a credentials profile. Can be nil
attr_accessor :profile
attr_reader :credentials_found
# TODO: initialize this with a logger for Chef and Knife
def initialize(explicit_config_file, logger = nil, profile: nil)
@explicit_config_file = explicit_config_file
@chef_config_dir = nil
@config_location = nil
@profile = profile
@logger = logger || NullLogger.new
@credentials_found = false
end
def no_config_found?
config_location.nil? && !credentials_found
end
def config_location
@config_location ||= (explicit_config_file || locate_local_config)
end
def chef_config_dir
if @chef_config_dir.nil?
@chef_config_dir = false
full_path = working_directory.split(File::SEPARATOR)
(full_path.length - 1).downto(0) do |i|
candidate_directory = File.join(full_path[0..i] + [ChefUtils::Dist::Infra::USER_CONF_DIR])
if File.exist?(candidate_directory) && File.directory?(candidate_directory)
@chef_config_dir = candidate_directory
break
end
end
end
@chef_config_dir
end
def load
load_credentials(profile)
# Ignore it if there's no explicit_config_file and can't find one at a
# default path.
unless config_location.nil?
if explicit_config_file && !path_exists?(config_location)
raise ChefConfig::ConfigurationError, "Specified config file #{config_location} does not exist"
end
# Have to set Config.config_file b/c other config is derived from it.
Config.config_file = config_location
apply_config(IO.read(config_location), config_location)
end
load_dot_d(Config[:config_d_dir]) if Config[:config_d_dir]
apply_defaults
end
# (Private API, public for test purposes)
def env
ENV
end
# (Private API, public for test purposes)
def path_exists?(path)
Pathname.new(path).expand_path.exist?
end
private
def have_config?(path)
if path_exists?(path)
logger.info("Using config at #{path}")
true
else
logger.debug("Config not found at #{path}, trying next option")
false
end
end
def locate_local_config
candidate_configs = []
# Look for $KNIFE_HOME/knife.rb (allow multiple knives config on same machine)
if env["KNIFE_HOME"]
candidate_configs << File.join(env["KNIFE_HOME"], "config.rb")
candidate_configs << File.join(env["KNIFE_HOME"], "knife.rb")
end
# Look for $PWD/knife.rb
if Dir.pwd
candidate_configs << File.join(Dir.pwd, "config.rb")
candidate_configs << File.join(Dir.pwd, "knife.rb")
end
# Look for $UPWARD/.chef/knife.rb
if chef_config_dir
candidate_configs << File.join(chef_config_dir, "config.rb")
candidate_configs << File.join(chef_config_dir, "knife.rb")
end
# Look for $HOME/.chef/knife.rb
PathHelper.home(ChefUtils::Dist::Infra::USER_CONF_DIR) do |dot_chef_dir|
candidate_configs << File.join(dot_chef_dir, "config.rb")
candidate_configs << File.join(dot_chef_dir, "knife.rb")
end
candidate_configs.find do |candidate_config|
have_config?(candidate_config)
end
end
def working_directory
if ChefUtils.windows?
env["CD"]
else
env["PWD"]
end || Dir.pwd
end
def apply_credentials(creds, profile)
# Store the profile used in case other things want it.
Config.profile ||= profile
# Validate the credentials data.
if creds.key?("node_name") && creds.key?("client_name")
raise ChefConfig::ConfigurationError, "Do not specify both node_name and client_name. You should prefer client_name."
end
# Load credentials data into the Chef configuration.
creds.each do |key, value|
case key.to_s
when "client_name"
# Special case because it's weird to set your username via `node_name`.
Config.node_name = value
when "validation_key", "validator_key"
extract_key(value, :validation_key, :validation_key_contents)
when "client_key"
extract_key(value, :client_key, :client_key_contents)
when "knife"
Config.knife.merge!(value.transform_keys(&:to_sym))
else
Config[key.to_sym] = value
end
end
@credentials_found = true
end
def extract_key(key_value, config_path, config_contents)
if key_value.start_with?("-----BEGIN RSA PRIVATE KEY-----")
Config.send(config_contents, key_value)
else
abs_path = Pathname.new(key_value).expand_path(home_chef_dir)
Config.send(config_path, abs_path)
end
end
def home_chef_dir
@home_chef_dir ||= PathHelper.home(ChefUtils::Dist::Infra::USER_CONF_DIR)
end
def apply_config(config_content, config_file_path)
Config.from_string(config_content, config_file_path)
rescue SignalException
raise
rescue SyntaxError => e
message = ""
message << "You have invalid ruby syntax in your config file #{config_file_path}\n\n"
message << "#{e.class.name}: #{e.message}\n"
if file_line = e.message[/#{Regexp.escape(config_file_path)}:[\d]+/]
line = file_line[/:([\d]+)$/, 1].to_i
message << highlight_config_error(config_file_path, line)
end
raise ChefConfig::ConfigurationError, message
rescue Exception => e
message = "You have an error in your config file #{config_file_path}\n\n"
message << "#{e.class.name}: #{e.message}\n"
filtered_trace = e.backtrace.grep(/#{Regexp.escape(config_file_path)}/)
filtered_trace.each { |bt_line| message << " " << bt_line << "\n" }
unless filtered_trace.empty?
line_nr = filtered_trace.first[/#{Regexp.escape(config_file_path)}:([\d]+)/, 1]
message << highlight_config_error(config_file_path, line_nr.to_i)
end
raise ChefConfig::ConfigurationError, message
end
# Apply default configuration values for workstation-style tools.
#
# Global defaults should go in {ChefConfig::Config} instead, this is only
# for things like `knife` and `chef`.
#
# @api private
# @since 14.3
# @return [void]
def apply_defaults
# If we don't have a better guess use the username.
Config[:node_name] ||= Etc.getlogin
# If we don't have a key (path or inline) check user.pem and $node_name.pem.
unless Config.key?(:client_key) || Config.key?(:client_key_contents)
key_path = find_default_key(["#{Config[:node_name]}.pem", "user.pem"])
Config[:client_key] = key_path if key_path
end
# Similarly look for a validation key file, though this should be less
# common these days.
unless Config.key?(:validation_key) || Config.key?(:validation_key_contents)
key_path = find_default_key(["#{Config[:validation_client_name]}.pem", "validator.pem", "validation.pem"])
Config[:validation_key] = key_path if key_path
end
end
# Look for a default key file.
#
# This searches for any of a list of possible default keys, checking both
# the local `.chef/` folder and the home directory `~/.chef/`. Returns `nil`
# if no matching file is found.
#
# @api private
# @since 14.3
# @param key_names [Array<String>] A list of possible filenames to check for.
# The first one found will be returned.
# @return [String, nil]
def find_default_key(key_names)
key_names.each do |filename|
path = Pathname.new(filename)
# If we have a config location (like ./.chef/), look there first.
if config_location
local_path = path.expand_path(File.dirname(config_location))
return local_path.to_s if local_path.exist?
end
# Then check ~/.chef.
home_path = path.expand_path(home_chef_dir)
return home_path.to_s if home_path.exist?
end
nil
end
def highlight_config_error(file, line)
config_file_lines = []
IO.readlines(file).each_with_index { |l, i| config_file_lines << "#{(i + 1).to_s.rjust(3)}: #{l.chomp}" }
if line == 1
lines = config_file_lines[0..3]
else
lines = config_file_lines[Range.new(line - 2, line)]
end
"Relevant file content:\n" + lines.join("\n") + "\n"
end
def logger
@logger
end
end
end
| 34.641844 | 125 | 0.660354 |
3874de6390f2edcc725ea09558dc6240f7a3ab31 | 473 | require './auth_wrap'
# Upload one scanned book to Google Drive: creates <messtin>/<title> plus a
# "tm" subfolder for thumbnails, then uploads every JPEG from the given
# directory and from its tm/ subdirectory.
#
# Usage: TITLE DIR
title, dir = ARGV.shift(2)

app = GDriveApp.new('messtin')
root_id = app.messtinFolder
puts root_id

book_id = app.createFolder(title, title, root_id)
thumb_id = app.createFolder('tm', 'thumbnail', book_id)

# Page scans go into the book folder.
Dir.glob(dir + '/*.jpg').each do |path|
  base = File.basename(path)
  puts base
  app.uploadJpeg(path, base, book_id)
end

# Thumbnails go into the tm/ folder.
Dir.glob(dir + '/tm/*.jpg').each do |path|
  base = File.basename(path)
  puts base
  app.uploadJpeg(path, base, thumb_id)
end
b98b2b186a709cdd70e5a6d1249ac38290495512 | 4,726 | #
# Be sure to run `pod spec lint KnobControl.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for the KnobControl pod. Validate with `pod spec lint
# KnobControl.podspec` after editing; the commented-out attributes below are
# standard CocoaPods template examples kept for reference.
Pod::Spec.new do |s|

  # ――― Spec Metadata ―――
  # Keep the summary tweet-length; put depth in the description.

  s.name         = "KnobControl"
  s.version      = "1.0.0"
  s.summary      = "A knob control like the UISlider, but in a circular form."
  s.description  = "The knob control is a completely customizable widget that can be used in any iOS app. It also plays a little victory fanfare."
  s.homepage     = "http://raywenderlich.com"

  # ――― Spec License ―――
  # CocoaPods auto-detects a LICENSE* file. Popular values are 'MIT', 'BSD'
  # and 'Apache License, Version 2.0'.

  s.license      = "MIT"
  # s.license = { :type => "MIT", :file => "FILE_LICENSE" }

  # ――― Author Metadata ―――

  s.author             = { "Sarvjeet Singh" => "[email protected]" }
  # Or just: s.author = "Sarvjeet Singh"
  # s.authors = { "Sarvjeet Singh" => "[email protected]" }
  # s.social_media_url = "http://twitter.com/Sarvjeet Singh"

  # ――― Platform Specifics ―――
  # iOS-only pod; raise the deployment target here if the sources require it.

  s.platform     = :ios, "12.0"
  # s.platform = :ios, "5.0"
  # When using multiple platforms
  # s.ios.deployment_target = "5.0"
  # s.osx.deployment_target = "10.7"
  # s.watchos.deployment_target = "2.0"
  # s.tvos.deployment_target = "9.0"

  # ――― Source Location ―――
  # The :tag should match s.version so linting can fetch the tagged release.

  s.source       = { :git => "https://github.com/3u4icT/KnobControl.git", :tag => "1.0.0" }

  # ――― Source Code ―――
  # All sources live in the KnobControl/ directory. Without
  # public_header_files, every header in the folder would be public.

  s.source_files  = "KnobControl"
  # s.public_header_files = "Classes/**/*.h"

  # ――― Resources ―――
  # Resources are copied into the target bundle by a build-phase script.
  # s.resource = "icon.png"
  # s.resources = "Resources/*.png"
  # s.preserve_paths = "FilesToSave", "MoreFilesToSave"

  # ――― Project Linking ―――
  # Frameworks/libraries to link (no "lib" prefix for libraries).
  # s.framework = "SomeFramework"
  # s.frameworks = "SomeFramework", "AnotherFramework"
  # s.library = "iconv"
  # s.libraries = "iconv", "xml2"

  # ――― Project Settings ―――
  # s.requires_arc = true
  # s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
  # s.dependency "JSONKit", "~> 1.4"

  # Sources are written in Swift 4.2.
  s.swift_version = "4.2"
end
| 36.921875 | 144 | 0.591832 |
1820164445321957c4474559b8357986de629474 | 115 | # frozen_string_literal: true
# Join model: one line item of an order, pairing an Order with a Beverage.
class OrderItem < ActiveRecord::Base
  # The order this line item belongs to.
  belongs_to :order
  # The beverage that was ordered.
  belongs_to :beverage
end
| 16.428571 | 36 | 0.791304 |
bbb2378f5c56becac22646212beb343549da5170 | 28,558 | # Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0
require 'can_be_an_owner'
class User < ArvadosModel
include HasUuid
include KindAndEtag
include CommonApiTemplate
include CanBeAnOwner
extend CurrentApiClient
serialize :prefs, Hash
has_many :api_client_authorizations
validates(:username,
format: {
with: /\A[A-Za-z][A-Za-z0-9]*\z/,
message: "must begin with a letter and contain only alphanumerics",
},
uniqueness: true,
allow_nil: true)
validate :must_unsetup_to_deactivate
validate :identity_url_nil_if_empty
before_update :prevent_privilege_escalation
before_update :prevent_inactive_admin
before_update :verify_repositories_empty, :if => Proc.new {
username.nil? and username_changed?
}
after_update :setup_on_activate
before_create :check_auto_admin
before_create :set_initial_username, :if => Proc.new {
username.nil? and email
}
after_create :after_ownership_change
after_create :setup_on_activate
after_create :add_system_group_permission_link
after_create :auto_setup_new_user, :if => Proc.new {
Rails.configuration.Users.AutoSetupNewUsers and
(uuid != system_user_uuid) and
(uuid != anonymous_user_uuid) and
(uuid[0..4] == Rails.configuration.ClusterID)
}
after_create :send_admin_notifications
before_update :before_ownership_change
after_update :after_ownership_change
after_update :send_profile_created_notification
after_update :sync_repository_names, :if => Proc.new {
(uuid != system_user_uuid) and
saved_change_to_username? and
(not username_before_last_save.nil?)
}
before_destroy :clear_permissions
after_destroy :remove_self_from_permissions
has_many :authorized_keys, :foreign_key => :authorized_user_uuid, :primary_key => :uuid
has_many :repositories, foreign_key: :owner_uuid, primary_key: :uuid
default_scope { where('redirect_to_user_uuid is null') }
api_accessible :user, extend: :common do |t|
t.add :email
t.add :username
t.add :full_name
t.add :first_name
t.add :last_name
t.add :identity_url
t.add :is_active
t.add :is_admin
t.add :is_invited
t.add :prefs
t.add :writable_by
end
ALL_PERMISSIONS = {read: true, write: true, manage: true}
# Map numeric permission levels (see lib/create_permission_view.sql)
# back to read/write/manage flags.
PERMS_FOR_VAL =
[{},
{read: true},
{read: true, write: true},
{read: true, write: true, manage: true}]
VAL_FOR_PERM =
{:read => 1,
:write => 2,
:unfreeze => 3,
:manage => 3}
def full_name
"#{first_name} #{last_name}".strip
end
def is_invited
!!(self.is_active ||
Rails.configuration.Users.NewUsersAreActive ||
self.groups_i_can(:read).select { |x| x.match(/-f+$/) }.first)
end
def groups_i_can(verb)
my_groups = self.group_permissions(VAL_FOR_PERM[verb]).keys
if verb == :read
my_groups << anonymous_group_uuid
end
my_groups
end
def can?(actions)
return true if is_admin
actions.each do |action, target|
unless target.nil?
if target.respond_to? :uuid
target_uuid = target.uuid
else
target_uuid = target
target = ArvadosModel.find_by_uuid(target_uuid)
end
end
next if target_uuid == self.uuid
target_owner_uuid = target.owner_uuid if target.respond_to? :owner_uuid
user_uuids_subquery = USER_UUIDS_SUBQUERY_TEMPLATE % {user: "$1", perm_level: "$3"}
unless ActiveRecord::Base.connection.
exec_query(%{
SELECT 1 FROM #{PERMISSION_VIEW}
WHERE user_uuid in (#{user_uuids_subquery}) and
((target_uuid = $2 and perm_level >= $3)
or (target_uuid = $4 and perm_level >= $3 and traverse_owned))
},
# "name" arg is a query label that appears in logs:
"user_can_query",
[[nil, self.uuid],
[nil, target_uuid],
[nil, VAL_FOR_PERM[action]],
[nil, target_owner_uuid]]
).any?
return false
end
if action == :write
if FrozenGroup.where(uuid: [target_uuid, target_owner_uuid]).any?
# self or parent is frozen
return false
end
elsif action == :unfreeze
# "unfreeze" permission means "can write, but only if
# explicitly un-freezing at the same time" (see
# ArvadosModel#ensure_owner_uuid_is_permitted). If the
# permission query above passed the permission level of
# :unfreeze (which is the same as :manage), and the parent
# isn't also frozen, then un-freeze is allowed.
if FrozenGroup.where(uuid: target_owner_uuid).any?
return false
end
end
end
true
end
def before_ownership_change
if owner_uuid_changed? and !self.owner_uuid_was.nil?
MaterializedPermission.where(user_uuid: owner_uuid_was, target_uuid: uuid).delete_all
update_permissions self.owner_uuid_was, self.uuid, REVOKE_PERM
end
end
def after_ownership_change
if saved_change_to_owner_uuid?
update_permissions self.owner_uuid, self.uuid, CAN_MANAGE_PERM
end
end
def clear_permissions
MaterializedPermission.where("user_uuid = ? and target_uuid != ?", uuid, uuid).delete_all
end
def forget_cached_group_perms
@group_perms = nil
end
def remove_self_from_permissions
MaterializedPermission.where("target_uuid = ?", uuid).delete_all
check_permissions_against_full_refresh
end
# Return a hash of {user_uuid: group_perms}
#
# note: this does not account for permissions that a user gains by
# having can_manage on another user.
def self.all_group_permissions
all_perms = {}
ActiveRecord::Base.connection.
exec_query(%{
SELECT user_uuid, target_uuid, perm_level
FROM #{PERMISSION_VIEW}
WHERE traverse_owned
},
# "name" arg is a query label that appears in logs:
"all_group_permissions").
rows.each do |user_uuid, group_uuid, max_p_val|
all_perms[user_uuid] ||= {}
all_perms[user_uuid][group_uuid] = PERMS_FOR_VAL[max_p_val.to_i]
end
all_perms
end
# Return a hash of {group_uuid: perm_hash} where perm_hash[:read]
# and perm_hash[:write] are true if this user can read and write
# objects owned by group_uuid.
def group_permissions(level=1)
@group_perms ||= {}
if @group_perms.empty?
user_uuids_subquery = USER_UUIDS_SUBQUERY_TEMPLATE % {user: "$1", perm_level: 1}
ActiveRecord::Base.connection.
exec_query(%{
SELECT target_uuid, perm_level
FROM #{PERMISSION_VIEW}
WHERE user_uuid in (#{user_uuids_subquery}) and perm_level >= 1
},
# "name" arg is a query label that appears in logs:
"User.group_permissions",
# "binds" arg is an array of [col_id, value] for '$1' vars:
[[nil, uuid]]).
rows.each do |group_uuid, max_p_val|
@group_perms[group_uuid] = PERMS_FOR_VAL[max_p_val.to_i]
end
end
case level
when 1
@group_perms
when 2
@group_perms.select {|k,v| v[:write] }
when 3
@group_perms.select {|k,v| v[:manage] }
else
raise "level must be 1, 2 or 3"
end
end
# create links
def setup(repo_name: nil, vm_uuid: nil, send_notification_email: nil)
newly_invited = Link.where(tail_uuid: self.uuid,
head_uuid: all_users_group_uuid,
link_class: 'permission',
name: 'can_read').empty?
# Add can_read link from this user to "all users" which makes this
# user "invited", and (depending on config) a link in the opposite
# direction which makes this user visible to other users.
group_perms = add_to_all_users_group
# Add git repo
repo_perm = if (!repo_name.nil? || Rails.configuration.Users.AutoSetupNewUsersWithRepository) and !username.nil?
repo_name ||= "#{username}/#{username}"
create_user_repo_link repo_name
end
# Add virtual machine
if vm_uuid.nil? and !Rails.configuration.Users.AutoSetupNewUsersWithVmUUID.empty?
vm_uuid = Rails.configuration.Users.AutoSetupNewUsersWithVmUUID
end
vm_login_perm = if vm_uuid && username
create_vm_login_permission_link(vm_uuid, username)
end
# Send welcome email
if send_notification_email.nil?
send_notification_email = Rails.configuration.Mail.SendUserSetupNotificationEmail
end
if newly_invited and send_notification_email and !Rails.configuration.Users.UserSetupMailText.empty?
begin
UserNotifier.account_is_setup(self).deliver_now
rescue => e
logger.warn "Failed to send email to #{self.email}: #{e}"
end
end
forget_cached_group_perms
return [repo_perm, vm_login_perm, *group_perms, self].compact
end
# delete user signatures, login, repo, and vm perms, and mark as inactive
def unsetup
# delete oid_login_perms for this user
#
# note: these permission links are obsolete, they have no effect
# on anything and they are not created for new users.
Link.where(tail_uuid: self.email,
link_class: 'permission',
name: 'can_login').destroy_all
# delete repo_perms for this user
Link.where(tail_uuid: self.uuid,
link_class: 'permission',
name: 'can_manage').destroy_all
# delete vm_login_perms for this user
Link.where(tail_uuid: self.uuid,
link_class: 'permission',
name: 'can_login').destroy_all
# delete "All users" group read permissions for this user
Link.where(tail_uuid: self.uuid,
head_uuid: all_users_group_uuid,
link_class: 'permission',
name: 'can_read').destroy_all
# delete any signatures by this user
Link.where(link_class: 'signature',
tail_uuid: self.uuid).destroy_all
# delete tokens for this user
ApiClientAuthorization.where(user_id: self.id).destroy_all
# delete ssh keys for this user
AuthorizedKey.where(owner_uuid: self.uuid).destroy_all
AuthorizedKey.where(authorized_user_uuid: self.uuid).destroy_all
# delete user preferences (including profile)
self.prefs = {}
# mark the user as inactive
self.is_admin = false # can't be admin and inactive
self.is_active = false
forget_cached_group_perms
self.save!
end
def must_unsetup_to_deactivate
if !self.new_record? &&
self.uuid[0..4] == Rails.configuration.Login.LoginCluster &&
self.uuid[0..4] != Rails.configuration.ClusterID
# OK to update our local record to whatever the LoginCluster
# reports, because self-activate is not allowed.
return
elsif self.is_active_changed? &&
self.is_active_was &&
!self.is_active
# When a user is set up, they are added to the "All users"
# group. A user that is part of the "All users" group is
# allowed to self-activate.
#
# It doesn't make sense to deactivate a user (set is_active =
# false) without first removing them from the "All users" group,
# because they would be able to immediately reactivate
# themselves.
#
# The 'unsetup' method removes the user from the "All users"
# group (and also sets is_active = false) so send a message
# explaining the correct way to deactivate a user.
#
if Link.where(tail_uuid: self.uuid,
head_uuid: all_users_group_uuid,
link_class: 'permission',
name: 'can_read').any?
errors.add :is_active, "cannot be set to false directly, use the 'Deactivate' button on Workbench, or the 'unsetup' API call"
end
end
end
def set_initial_username(requested: false)
if !requested.is_a?(String) || requested.empty?
email_parts = email.partition("@")
local_parts = email_parts.first.partition("+")
if email_parts.any?(&:empty?)
return
elsif not local_parts.first.empty?
requested = local_parts.first
else
requested = email_parts.first
end
end
requested.sub!(/^[^A-Za-z]+/, "")
requested.gsub!(/[^A-Za-z0-9]/, "")
unless requested.empty?
self.username = find_usable_username_from(requested)
end
end
# Move this user's (i.e., self's) owned items to new_owner_uuid and
# new_user_uuid (for things normally owned directly by the user).
#
# If redirect_auth is true, also reassign auth tokens and ssh keys,
# and redirect this account to redirect_to_user_uuid, i.e., when a
# caller authenticates to this account in the future, the account
# redirect_to_user_uuid account will be used instead.
#
# current_user must have admin privileges, i.e., the caller is
# responsible for checking permission to do this.
def merge(new_owner_uuid:, new_user_uuid:, redirect_to_new_user:)
raise PermissionDeniedError if !current_user.andand.is_admin
raise "Missing new_owner_uuid" if !new_owner_uuid
raise "Missing new_user_uuid" if !new_user_uuid
transaction(requires_new: true) do
reload
raise "cannot merge an already merged user" if self.redirect_to_user_uuid
new_user = User.where(uuid: new_user_uuid).first
raise "user does not exist" if !new_user
raise "cannot merge to an already merged user" if new_user.redirect_to_user_uuid
self.clear_permissions
new_user.clear_permissions
# If 'self' is a remote user, don't transfer authorizations
# (i.e. ability to access the account) to the new user, because
# that gives the remote site the ability to access the 'new'
# user account that takes over the 'self' account.
#
# If 'self' is a local user, it is okay to transfer
# authorizations, even if the 'new' user is a remote account,
# because the remote site does not gain the ability to access an
# account it could not before.
if redirect_to_new_user and self.uuid[0..4] == Rails.configuration.ClusterID
# Existing API tokens and ssh keys are updated to authenticate
# to the new user.
ApiClientAuthorization.
where(user_id: id).
update_all(user_id: new_user.id)
user_updates = [
[AuthorizedKey, :owner_uuid],
[AuthorizedKey, :authorized_user_uuid],
[Link, :owner_uuid],
[Link, :tail_uuid],
[Link, :head_uuid],
]
else
# Destroy API tokens and ssh keys associated with the old
# user.
ApiClientAuthorization.where(user_id: id).destroy_all
AuthorizedKey.where(owner_uuid: uuid).destroy_all
AuthorizedKey.where(authorized_user_uuid: uuid).destroy_all
user_updates = [
[Link, :owner_uuid],
[Link, :tail_uuid]
]
end
# References to the old user UUID in the context of a user ID
# (rather than a "home project" in the project hierarchy) are
# updated to point to the new user.
user_updates.each do |klass, column|
klass.where(column => uuid).update_all(column => new_user.uuid)
end
# Need to update repository names to new username
if username
old_repo_name_re = /^#{Regexp.escape(username)}\//
Repository.where(:owner_uuid => uuid).each do |repo|
repo.owner_uuid = new_user.uuid
repo_name_sub = "#{new_user.username}/"
name = repo.name.sub(old_repo_name_re, repo_name_sub)
while (conflict = Repository.where(:name => name).first) != nil
repo_name_sub += "migrated"
name = repo.name.sub(old_repo_name_re, repo_name_sub)
end
repo.name = name
repo.save!
end
end
# References to the merged user's "home project" are updated to
# point to new_owner_uuid.
ActiveRecord::Base.descendants.reject(&:abstract_class?).each do |klass|
next if [ApiClientAuthorization,
AuthorizedKey,
Link,
Log,
Repository].include?(klass)
next if !klass.columns.collect(&:name).include?('owner_uuid')
klass.where(owner_uuid: uuid).update_all(owner_uuid: new_owner_uuid)
end
if redirect_to_new_user
update_attributes!(redirect_to_user_uuid: new_user.uuid, username: nil)
end
skip_check_permissions_against_full_refresh do
update_permissions self.uuid, self.uuid, CAN_MANAGE_PERM
update_permissions new_user.uuid, new_user.uuid, CAN_MANAGE_PERM
update_permissions new_user.owner_uuid, new_user.uuid, CAN_MANAGE_PERM
end
update_permissions self.owner_uuid, self.uuid, CAN_MANAGE_PERM
end
end
def redirects_to
user = self
redirects = 0
while (uuid = user.redirect_to_user_uuid)
break if uuid.empty?
nextuser = User.unscoped.find_by_uuid(uuid)
if !nextuser
raise Exception.new("user uuid #{user.uuid} redirects to nonexistent uuid '#{uuid}'")
end
user = nextuser
redirects += 1
if redirects > 15
raise "Starting from #{self.uuid} redirect_to_user_uuid exceeded maximum number of redirects"
end
end
user
end
def self.register info
# login info expected fields, all can be optional but at minimum
# must supply either 'identity_url' or 'email'
#
# email
# first_name
# last_name
# username
# alternate_emails
# identity_url
primary_user = nil
# local database
identity_url = info['identity_url']
if identity_url && identity_url.length > 0
# Only local users can create sessions, hence uuid_like_pattern
# here.
user = User.unscoped.where('identity_url = ? and uuid like ?',
identity_url,
User.uuid_like_pattern).first
primary_user = user.redirects_to if user
end
if !primary_user
# identity url is unset or didn't find matching record.
emails = [info['email']] + (info['alternate_emails'] || [])
emails.select! {|em| !em.nil? && !em.empty?}
User.unscoped.where('email in (?) and uuid like ?',
emails,
User.uuid_like_pattern).each do |user|
if !primary_user
primary_user = user.redirects_to
elsif primary_user.uuid != user.redirects_to.uuid
raise "Ambiguous email address, directs to both #{primary_user.uuid} and #{user.redirects_to.uuid}"
end
end
end
if !primary_user
# New user registration
primary_user = User.new(:owner_uuid => system_user_uuid,
:is_admin => false,
:is_active => Rails.configuration.Users.NewUsersAreActive)
primary_user.set_initial_username(requested: info['username']) if info['username'] && !info['username'].blank?
primary_user.identity_url = info['identity_url'] if identity_url
end
primary_user.email = info['email'] if info['email']
primary_user.first_name = info['first_name'] if info['first_name']
primary_user.last_name = info['last_name'] if info['last_name']
if (!primary_user.email or primary_user.email.empty?) and (!primary_user.identity_url or primary_user.identity_url.empty?)
raise "Must have supply at least one of 'email' or 'identity_url' to User.register"
end
act_as_system_user do
primary_user.save!
end
primary_user
end
protected
def change_all_uuid_refs(old_uuid:, new_uuid:)
ActiveRecord::Base.descendants.reject(&:abstract_class?).each do |klass|
klass.columns.each do |col|
if col.name.end_with?('_uuid')
column = col.name.to_sym
klass.where(column => old_uuid).update_all(column => new_uuid)
end
end
end
end
def ensure_ownership_path_leads_to_user
true
end
def permission_to_update
if username_changed? || redirect_to_user_uuid_changed? || email_changed?
current_user.andand.is_admin
else
# users must be able to update themselves (even if they are
# inactive) in order to create sessions
self == current_user or super
end
end
def permission_to_create
current_user.andand.is_admin or
(self == current_user &&
self.redirect_to_user_uuid.nil? &&
self.is_active == Rails.configuration.Users.NewUsersAreActive)
end
def check_auto_admin
return if self.uuid.end_with?('anonymouspublic')
if (User.where("email = ?",self.email).where(:is_admin => true).count == 0 and
!Rails.configuration.Users.AutoAdminUserWithEmail.empty? and self.email == Rails.configuration.Users["AutoAdminUserWithEmail"]) or
(User.where("uuid not like '%-000000000000000'").where(:is_admin => true).count == 0 and
Rails.configuration.Users.AutoAdminFirstUser)
self.is_admin = true
self.is_active = true
end
end
def find_usable_username_from(basename)
# If "basename" is a usable username, return that.
# Otherwise, find a unique username "basenameN", where N is the
# smallest integer greater than 1, and return that.
# Return nil if a unique username can't be found after reasonable
# searching.
quoted_name = self.class.connection.quote_string(basename)
next_username = basename
next_suffix = 1
while Rails.configuration.Users.AutoSetupUsernameBlacklist[next_username]
next_suffix += 1
next_username = "%s%i" % [basename, next_suffix]
end
0.upto(6).each do |suffix_len|
pattern = "%s%s" % [quoted_name, "_" * suffix_len]
self.class.unscoped.
where("username like '#{pattern}'").
select(:username).
order('username asc').
each do |other_user|
if other_user.username > next_username
break
elsif other_user.username == next_username
next_suffix += 1
next_username = "%s%i" % [basename, next_suffix]
end
end
return next_username if (next_username.size <= pattern.size)
end
nil
end
def prevent_privilege_escalation
if current_user.andand.is_admin
return true
end
if self.is_active_changed?
if self.is_active != self.is_active_was
logger.warn "User #{current_user.uuid} tried to change is_active from #{self.is_active_was} to #{self.is_active} for #{self.uuid}"
self.is_active = self.is_active_was
end
end
if self.is_admin_changed?
if self.is_admin != self.is_admin_was
logger.warn "User #{current_user.uuid} tried to change is_admin from #{self.is_admin_was} to #{self.is_admin} for #{self.uuid}"
self.is_admin = self.is_admin_was
end
end
true
end
def prevent_inactive_admin
if self.is_admin and not self.is_active
# There is no known use case for the strange set of permissions
# that would result from this change. It's safest to assume it's
# a mistake and disallow it outright.
raise "Admin users cannot be inactive"
end
true
end
def search_permissions(start, graph, merged={}, upstream_mask=nil, upstream_path={})
nextpaths = graph[start]
return merged if !nextpaths
return merged if upstream_path.has_key? start
upstream_path[start] = true
upstream_mask ||= ALL_PERMISSIONS
nextpaths.each do |head, mask|
merged[head] ||= {}
mask.each do |k,v|
merged[head][k] ||= v if upstream_mask[k]
end
search_permissions(head, graph, merged, upstream_mask.select { |k,v| v && merged[head][k] }, upstream_path)
end
upstream_path.delete start
merged
end
def create_user_repo_link(repo_name)
# repo_name is optional
if not repo_name
logger.warn ("Repository name not given for #{self.uuid}.")
return
end
repo = Repository.where(owner_uuid: uuid, name: repo_name).first_or_create!
logger.info { "repo uuid: " + repo[:uuid] }
repo_perm = Link.where(tail_uuid: uuid, head_uuid: repo.uuid,
link_class: "permission",
name: "can_manage").first_or_create!
logger.info { "repo permission: " + repo_perm[:uuid] }
return repo_perm
end
# create login permission for the given vm_uuid, if it does not already exist
def create_vm_login_permission_link(vm_uuid, repo_name)
# vm uuid is optional
return if vm_uuid == ""
vm = VirtualMachine.where(uuid: vm_uuid).first
if !vm
logger.warn "Could not find virtual machine for #{vm_uuid.inspect}"
raise "No vm found for #{vm_uuid}"
end
logger.info { "vm uuid: " + vm[:uuid] }
login_attrs = {
tail_uuid: uuid, head_uuid: vm.uuid,
link_class: "permission", name: "can_login",
}
login_perm = Link.
where(login_attrs).
select { |link| link.properties["username"] == repo_name }.
first
login_perm ||= Link.
create(login_attrs.merge(properties: {"username" => repo_name}))
logger.info { "login permission: " + login_perm[:uuid] }
login_perm
end
def add_to_all_users_group
resp = [Link.where(tail_uuid: self.uuid,
head_uuid: all_users_group_uuid,
link_class: 'permission',
name: 'can_read').first ||
Link.create(tail_uuid: self.uuid,
head_uuid: all_users_group_uuid,
link_class: 'permission',
name: 'can_read')]
if Rails.configuration.Users.ActivatedUsersAreVisibleToOthers
resp += [Link.where(tail_uuid: all_users_group_uuid,
head_uuid: self.uuid,
link_class: 'permission',
name: 'can_read').first ||
Link.create(tail_uuid: all_users_group_uuid,
head_uuid: self.uuid,
link_class: 'permission',
name: 'can_read')]
end
return resp
end
# Give the special "System group" permission to manage this user and
# all of this user's stuff.
def add_system_group_permission_link
return true if uuid == system_user_uuid
act_as_system_user do
Link.create(link_class: 'permission',
name: 'can_manage',
tail_uuid: system_group_uuid,
head_uuid: self.uuid)
end
end
# Send admin notifications
def send_admin_notifications
AdminNotifier.new_user(self).deliver_now
if not self.is_active then
AdminNotifier.new_inactive_user(self).deliver_now
end
end
# Automatically setup if is_active flag turns on
def setup_on_activate
return if [system_user_uuid, anonymous_user_uuid].include?(self.uuid)
if is_active &&
(new_record? || saved_change_to_is_active? || will_save_change_to_is_active?)
setup
end
end
# Automatically setup new user during creation
def auto_setup_new_user
setup
end
# Send notification if the user saved profile for the first time
def send_profile_created_notification
if saved_change_to_prefs?
if prefs_before_last_save.andand.empty? || !prefs_before_last_save.andand['profile']
profile_notification_address = Rails.configuration.Users.UserProfileNotificationAddress
ProfileNotifier.profile_created(self, profile_notification_address).deliver_now if profile_notification_address and !profile_notification_address.empty?
end
end
end
def verify_repositories_empty
unless repositories.first.nil?
errors.add(:username, "can't be unset when the user owns repositories")
throw(:abort)
end
end
def sync_repository_names
old_name_re = /^#{Regexp.escape(username_before_last_save)}\//
name_sub = "#{username}/"
repositories.find_each do |repo|
repo.name = repo.name.sub(old_name_re, name_sub)
repo.save!
end
end
def identity_url_nil_if_empty
if identity_url == ""
self.identity_url = nil
end
end
end
| 34.038141 | 160 | 0.654738 |
f88106f259b67a6a10d07d54555e26a8ff00b70b | 685 | # frozen_string_literal: true
module PerformanceMonitoring
  # One "panel_group" entry of a Prometheus dashboard definition: a named
  # group of panels with an optional display priority.
  class PrometheusPanelGroup
    include ActiveModel::Model

    attr_accessor :group, :priority, :panels

    validates :group, presence: true
    validates :panels, presence: true

    class << self
      # Builds a panel group from parsed dashboard JSON and raises a
      # validation error when required attributes are missing.
      def from_json(json_content)
        instance = build_from_hash(json_content)
        instance.validate!
        instance
      end

      private

      # Returns a blank (invalid) instance for non-Hash input so that
      # validation reports the missing attributes.
      def build_from_hash(attributes)
        return new unless attributes.is_a?(Hash)

        panels = attributes['panels']&.map { |panel| PrometheusPanel.from_json(panel) }
        new(
          group: attributes['group'],
          priority: attributes['priority'],
          panels: panels
        )
      end
    end
  end
end
| 22.833333 | 88 | 0.652555 |
085d24da57d92f2d58f3386bbb85537a4c0a321f | 1,400 | # frozen_string_literal: true
# Copyright (C) 2015 Twitter, Inc.

# NOTE: this is a minimal example. Before making any stats calls, read the
# analytics best practices:
#
# https://dev.twitter.com/ads/analytics/best-practices
# https://dev.twitter.com/ads/analytics/metrics-and-segmentation
# https://dev.twitter.com/ads/analytics/metrics-derived

require 'twitter-ads'

CONSUMER_KEY = 'your consumer key'.freeze
CONSUMER_SECRET = 'your consumer secret'.freeze
ACCESS_TOKEN = 'user access token'.freeze
ACCESS_TOKEN_SECRET = 'user access token secret'.freeze
ADS_ACCOUNT = 'ads account id'.freeze

# Build an authenticated ads API client and load the account instance.
client = TwitterAds::Client.new(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
account = client.accounts(ADS_ACCOUNT)

# Limit the request count and take at most the first 10 line items from the
# TwitterAds::Cursor.
line_items = account.line_items(nil, count: 10)[0..9]

# Metric groups to fetch; for the full list see
# https://dev.twitter.com/ads/analytics/metrics-and-segmentation
metrics = %w(ENGAGEMENT VIDEO)

# Stats for a single line item...
line_items.first.stats(metrics)

# ...and for several line items at once.
TwitterAds::LineItem.stats(account, line_items.map(&:id), metrics)
18dccaf94c953265f48ff6e42fb004bdf9596bdd | 1,125 | require_dependency 'issue'
module SerialNumberField
  # Patch mixed into Redmine's Issue model: after every save, fills in each
  # "serial number" custom field on the issue that does not yet hold a value.
  module IssuePatch
    extend ActiveSupport::Concern

    included do
      after_save :assign_serial_number!
    end

    # Generates and stores a value for every empty serial-number field.
    # NOTE(review): cf.format.generate_value is invoked even when no
    # CustomValue row exists to store the result — if generation has side
    # effects (e.g. advancing a counter), they happen regardless; confirm
    # this is intended before reordering.
    def assign_serial_number!
      serial_number_fields.each do |cf|
        next if assigned_serial_number?(cf)
        target_custom_value = serial_number_custom_value(cf)
        new_serial_number = cf.format.generate_value(cf, self)
        if target_custom_value.present?
          target_custom_value.update_attributes!(
            :value => new_serial_number)
        end
      end
    end

    # True when this issue already has a non-blank value for custom field +cf+.
    def assigned_serial_number?(cf)
      serial_number_custom_value(cf).try(:value).present?
    end

    # The CustomValue row holding this issue's value for +cf+, or nil when
    # none has been created yet.
    def serial_number_custom_value(cf)
      CustomValue.where(:custom_field_id => cf.id,
                        :customized_type => 'Issue',
                        :customized_id => self.id).first
    end

    # Editable custom fields of this issue whose format is this plugin's
    # serial-number format.
    def serial_number_fields
      editable_custom_fields.select do |value|
        value.field_format == SerialNumberField::Format::NAME
      end
    end
  end
end

# Mix the patch into Issue exactly once (guards against double inclusion on
# code reload in development mode).
SerialNumberField::IssuePatch.tap do |mod|
  Issue.send :include, mod unless Issue.include?(mod)
end
| 23.4375 | 62 | 0.690667 |
62c80ba07206e99817ebebe7aa0e5b431f1d7af2 | 4,339 | # frozen_string_literal: true
module GraphQL
  class Query
    # Turn query string values into something useful for query execution
    class LiteralInput
      # Coerces one AST value node (or an already-plain Ruby value) into the
      # Ruby value required by +type+. `null` literals and absent values
      # become nil; variable identifiers are resolved through +variables+.
      # Recurses for wrapper types (non-null, list) and input objects.
      def self.coerce(type, ast_node, variables)
        case ast_node
        when nil
          nil
        when Language::Nodes::NullValue
          nil
        when Language::Nodes::VariableIdentifier
          variables[ast_node.name]
        else
          case type
          when GraphQL::ScalarType
            # TODO smell
            # This gets used for plain values during subscriber.trigger
            if variables
              type.coerce_input(ast_node, variables.context)
            else
              type.coerce_isolated_input(ast_node)
            end
          when GraphQL::EnumType
            # TODO smell
            # This gets used for plain values sometimes
            v = ast_node.is_a?(GraphQL::Language::Nodes::Enum) ? ast_node.name : ast_node
            if variables
              type.coerce_input(v, variables.context)
            else
              type.coerce_isolated_input(v)
            end
          when GraphQL::NonNullType
            LiteralInput.coerce(type.of_type, ast_node, variables)
          when GraphQL::ListType
            if ast_node.is_a?(Array)
              ast_node.map { |element_ast| LiteralInput.coerce(type.of_type, element_ast, variables) }
            else
              # A single literal for a list type is wrapped in a one-element
              # list (GraphQL input coercion rule).
              [LiteralInput.coerce(type.of_type, ast_node, variables)]
            end
          when GraphQL::InputObjectType
            # TODO smell: handling AST vs handling plain Ruby
            next_args = ast_node.is_a?(Hash) ? ast_node : ast_node.arguments
            from_arguments(next_args, type, variables)
          end
        end
      end

      # Builds the Arguments instance for +argument_owner+ (a field or input
      # object definition) from AST argument nodes or a plain hash. Fills in
      # declared defaults for missing arguments, runs each definition's
      # +prepare+ hook, and raises any GraphQL::ExecutionError that prepare
      # returns (tagging it with the offending AST node).
      def self.from_arguments(ast_arguments, argument_owner, variables)
        context = variables ? variables.context : nil
        values_hash = {}
        defaults_used = Set.new

        indexed_arguments = case ast_arguments
        when Hash
          ast_arguments
        when Array
          ast_arguments.each_with_object({}) { |a, memo| memo[a.name] = a }
        else
          raise ArgumentError, "Unexpected ast_arguments: #{ast_arguments}"
        end

        argument_defns = argument_owner.arguments
        argument_defns.each do |arg_name, arg_defn|
          ast_arg = indexed_arguments[arg_name]

          # First, check the argument in the AST.
          # If the value is a variable,
          # only add a value if the variable is actually present.
          # Otherwise, coerce the value in the AST, prepare the value and add it.
          #
          # TODO: since indexed_arguments can come from a plain Ruby hash,
          # have to check for `false` or `nil` as hash values. This is getting smelly :S
          if indexed_arguments.key?(arg_name)
            arg_value = ast_arg.is_a?(GraphQL::Language::Nodes::Argument) ? ast_arg.value : ast_arg

            value_is_a_variable = arg_value.is_a?(GraphQL::Language::Nodes::VariableIdentifier)

            if (!value_is_a_variable || (value_is_a_variable && variables.key?(arg_value.name)))

              value = coerce(arg_defn.type, arg_value, variables)
              value = arg_defn.prepare(value, context)

              if value.is_a?(GraphQL::ExecutionError)
                value.ast_node = ast_arg
                raise value
              end

              values_hash[arg_name] = value
            end
          end

          # Then, the definition for a default value.
          # If the definition has a default value and
          # a value wasn't provided from the AST,
          # then add the default value.
          if arg_defn.default_value? && !values_hash.key?(arg_name)
            value = arg_defn.default_value
            defaults_used << arg_name
            # `context` isn't present when pre-calculating defaults
            if context
              value = arg_defn.prepare(value, context)

              if value.is_a?(GraphQL::ExecutionError)
                value.ast_node = ast_arg
                raise value
              end
            end
            values_hash[arg_name] = value
          end
        end

        result = argument_owner.arguments_class.new(values_hash, context: context, defaults_used: defaults_used)
        result.prepare
      end
    end
  end
end
| 37.08547 | 112 | 0.596681 |
d5d7d80bb446ed6d9c778b76b9ef24bfc2c2231d | 133 | class User < ApplicationRecord
has_secure_password
has_many :dogs
accepts_nested_attributes_for :dogs, allow_destroy: true
end
| 22.166667 | 58 | 0.827068 |
032b3c31af9547631677f80b4862287743d6e405 | 5,997 | # -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
module NSXDriver

    # Class VirtualWire NSX-V Network
    class VirtualWire < LogicalSwitch

        # CONSTRUCTOR
        # Three modes:
        #   - ls_id given: wrap an existing virtualwire (see
        #     initialize_with_id).
        #   - tz_id + ls_data given: create a new virtualwire in the given
        #     transport zone and populate this object from the response.
        #   - neither: a bare object (used by new_from_name before lookup).
        def initialize(nsx_client, ls_id = nil, tz_id = nil, ls_data = nil)
            super(nsx_client)
            if ls_id
                initialize_with_id(ls_id)
            else
                if tz_id && ls_data
                    begin
                        @ls_id = new_logical_switch(ls_data, tz_id)
                    rescue NSXError::IncorrectResponseCodeError => e
                        raise 'VirtualWire not created in NSX Manager: ' \
                              "#{e.message}"
                    end
                    unless @ls_id
                        raise 'Virtual Wire not created in NSX Manager: ' \
                              'generic error'
                    end
                    # Construct URL of the created logical switch
                    @url_ls = NSXConstants::NSXV_LS_SECTION + @ls_id
                    @ls_vni = ls_vni
                    @ls_name = ls_name
                    @tz_id = ls_tz
                    @tenant_id = 'virtual wire tenant'
                    @guest_vlan_allowed = false
                end
            end
        end

        # Creates a VirtualWire from its name
        # Raises NSXError::ObjectNotFound when no virtualwire matches.
        def self.new_from_name(nsx_client, ls_name)
            virtualwire = new(nsx_client)
            ls_id = virtualwire.ls_id_from_name(nsx_client, ls_name)
            unless ls_id
                error_msg = "VirtualWire with name: #{ls_name} not found"
                error = NSXError::ObjectNotFound
                        .new(error_msg)
                raise error
            end

            # initialize_with_id(@ls_id)
            virtualwire.initialize_with_id(ls_id)
            virtualwire
        end

        # Creates a VirtualWire from its id
        # Verifies the id exists in NSX Manager and populates the cached
        # attributes (vni, name, transport zone, ...).
        def initialize_with_id(ls_id)
            @ls_id = ls_id
            # Construct URL of the created logical switch
            @url_ls = NSXConstants::NSXV_LS_SECTION + \
                      @ls_id
            # Raise an error if VirtualWire id doesn't exists
            unless ls?
                error_msg = "VirtualWire with id: #{ls_id} not found"
                error = NSXError::ObjectNotFound
                        .new(error_msg)
                raise error
            end
            @ls_vni =  ls_vni
            @ls_name = ls_name
            @tz_id = ls_tz
            @tenant_id = 'virtual wire tenant'
            @guest_vlan_allowed = false
        end

        # Get the logical switch id from its name
        # NOTE(review): `name` is expected in vCenter portgroup form
        # (e.g. "vxw-dvs-<n>-virtualwire-<n>-sid-<vni>-<name>"); the id and
        # display name are extracted from it and matched against every
        # virtualwire known to NSX Manager — confirm against callers.
        def ls_id_from_name(nsx_client, name)
            url = NSXConstants::NSXV_LS_SECTION
            virtualwires = nsx_client
                           .get(url)
                           .xpath(NSXConstants::NSXV_LS_XPATH)
            virtualwires.each do |virtualwire|
                lsname_arr = name.split(/-sid-/)
                lsname = lsname_arr[-1].split('-', 2)[-1]
                lsid = lsname_arr[0].split(/vxw-dvs-\w.-/)[-1]
                if virtualwire.xpath('name').text == lsname &&
                   virtualwire.xpath('objectId').text == lsid
                    return virtualwire.xpath('objectId').text
                end
            end
            nil
        end

        # METHODS

        # Check if logical switch exists
        def ls?
            @nsx_client.get(@url_ls) ? true : false
        end

        # Get logical switch's name
        def ls_name
            @nsx_client.get(@url_ls)
                       .xpath(NSXConstants::NSXV_LS_NAME_XPATH).text
        end

        # Get logical switch's vni
        def ls_vni
            @nsx_client.get(@url_ls)
                       .xpath(NSXConstants::NSXV_LS_VNI_XPATH).text
        end

        # Get the Transport Zone of the logical switch
        def ls_tz
            @nsx_client.get(@url_ls)
                       .xpath(NSXConstants::NSXV_TZ_XPATH).text
        end

        # Get the logical switch reference into vcenter
        def ls_vcenter_ref
            @nsx_client.get(@url_ls)
                       .xpath(NSXConstants::NSXV_LS_BACKING_XPATH)
                       .text
        end

        # Get the distributed virtual switch's ref associated to a LS
        def ls_dvs_ref
            @nsx_client.get(@url_ls)
                       .xpath(NSXConstants::NSXV_LS_OBJECTID_XPATH)
                       .text
        end

        # Create a new logical switch (NSX-V: virtualwire)
        # Returns the new virtualwire id (response body of the POST).
        def new_logical_switch(ls_data, tz_id)
            url = "#{NSXConstants::NSXV_TZ_SECTION}#{tz_id}" \
                  '/virtualwires'
            @nsx_client.post(url, ls_data)
        end

        # Delete a logical switch
        def delete_logical_switch
            @nsx_client.delete(@url_ls)
        end

    end

end
| 37.716981 | 78 | 0.481574 |
f7e7bb9453ce895a2e704b73572fd41590ea7513 | 1,486 | require 'rails/generators'
module SqlSearchNSort
  # Rails generator that installs the gem's assets into the host app:
  # search/sort partials, a JS helper, an initializer, a jquery require, and
  # a controller hook. Run via `rails generate sql_search_n_sort:install`.
  class InstallGenerator < Rails::Generators::Base
    # Template lookup roots for the copy_file calls below.
    source_paths << File.expand_path('../views/application', __FILE__)
    source_paths << File.expand_path('../assets/javascripts', __FILE__)
    source_paths << File.expand_path('../helpers', __FILE__)
    source_paths << File.expand_path('../config/initializers', __FILE__)

    # Copies the partials, javascript and initializer into the application.
    def copy_files
      base_path = "app/views/application"
      copy_file('_search_form.html.haml', File.join(base_path, '_search_form.html.haml'))
      copy_file('_sort_form.html.haml', File.join(base_path, '_sort_form.html.haml'))
      base_path = "app/assets/javascripts"
      copy_file('sql_search_n_sort.js', File.join(base_path, 'sql_search_n_sort.js'))
      base_path = "config/initializers"
      copy_file('sql_search_n_sort.rb', File.join(base_path, 'sql_search_n_sort.rb'))
    end

    # Adds `//= require jquery` to the sprockets manifest, if one is found.
    def require_jquery
      #could be either application.js or application.js.coffee or maybe something else in the future
      if app_js_fl = Dir["#{destination_root}/app/assets/javascripts/*"].select{|f| f =~ /application\.js/}.first
        inject_into_file app_js_fl,
          before: "\n//= require_tree ." do
            "\n//= require jquery"
        end
      end
    end

    # Wires the sort-setup concern and before_action into
    # ApplicationController (inserted just before the final `end`).
    def insert_into_app_controller
      inject_into_file "app/controllers/application_controller.rb",
        before: /^end/ do
          %Q`\n\tinclude SqlSortSetup\n
  before_action :setup_sql_sort, :only => [:index, :sort_only_index]
          \n`
      end
    end
  end
end
91d712dd7a00883e7870db67f2051a8fe2cb3f7d | 1,469 | class Pioneers < Formula
desc "Settlers of Catan clone"
homepage "http://pio.sourceforge.net/"
url "https://downloads.sourceforge.net/project/pio/Source/pioneers-15.3.tar.gz"
sha256 "69afa51b71646565536b571b0f89786d3a7616965265f196fd51656b51381a89"
bottle do
sha256 "ab5a8f58765f5121b134507c3c12e0f4f6c0bf26a5ccddb1ad07f3b8046831f0" => :sierra
sha256 "125fda4a203f876a2e58f46986e778989d7b8edfaed38069a6dea2d8f11ea4f7" => :el_capitan
sha256 "4dcd7c97726388e3175cfbf9bfe9aee17deb7d5894d7a864bdcf2b9295b7b0ed" => :yosemite
end
fails_with :clang do
build 318
cause "'#line directive requires a positive integer' argument in generated file"
end
depends_on "pkg-config" => :build
depends_on "intltool" => :build
depends_on "gettext"
depends_on "gtk+3"
depends_on "librsvg" # svg images for gdk-pixbuf
  def install
    # fix usage of echo options not supported by sh
    inreplace "Makefile.in", /\becho/, "/bin/echo"

    # GNU ld-only options (macOS ld rejects --as-needed and -z relro/now)
    inreplace Dir["configure{,.ac}"] do |s|
      s.gsub!(/ -Wl\,--as-needed/, "")
      s.gsub!(/ -Wl,-z,(relro|now)/, "")
    end

    # Standard autotools build into the Homebrew prefix.
    system "./configure", "--disable-debug", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end
end
test do
system "#{bin}/pioneers-editor", "--help"
server = fork do
system "#{bin}/pioneers-server-console"
end
sleep 5
Process.kill("TERM", server)
end
end
| 30.604167 | 92 | 0.686862 |
ed34df191db95c65380a703fd69980e3dfdb8ec8 | 15,847 | # frozen_string_literal: true
module Flexirest
# JSON API requests and responses
module JsonAPIProxy
@object ||= nil
@headers ||= {}
# Methods used across other modules
module Helpers
def singular?(word)
w = word.to_s
w.singularize == w && w.pluralize != w
end
def type(object)
# Retrieve the type value for JSON API from the Flexirest::Base class
# If `alias_type` has been defined within the class, use it
name = object.alias_type || object.class.alias_type
# If not, guess the type value from the class name itself
unless name
return object.class.name.underscore.split('/').last.pluralize
end
name
end
end
# Creating JSON API requests
module Request
# Creating and formatting JSON API parameters
module Params
extend self
extend Flexirest::JsonAPIProxy::Helpers
def create(params, object)
# Create a parameters object with the resource's type value and id
parameters = Parameters.new(object.id, type(object))
# Remove id attribute from top-level hash, this will be included
# in the resource object
params.delete(:id)
# Build the JSON API compliant parameters
parameters.create_from_hash(params)
# Return the parameters as a hash, so it can be used elsewhere
parameters.to_hash
end
def translate(params, include_associations)
# Return to caller if nothing is to be done
return params unless params.present? && include_associations.present?
# Format the linked resources array, and assign to include key
params[:include] = format_include_params(include_associations)
end
private
def format_include_params(associations)
includes = []
associations.each do |key|
# Format each association name
# if the key is a nested hash, format each nested association too
# e.g. [author, comments.likes]
if key.is_a?(Hash)
# Create a link from each association to nested association
key.each { |k, val| val.each { |v| includes << "#{k}.#{v}" } }
else
# Just convert the association to string, in case it is a Symbol
includes << key.to_s
end
end
# Join the includes array with comma separator
includes.join(',')
end
# Private class for building JSON API compliant parameters
class Parameters
include Flexirest::JsonAPIProxy::Helpers
def initialize(id, type)
@params = build(id, type)
end
def to_hash
@params
end
def create_from_hash(hash)
hash.each do |k, v|
# Build JSON API compliant parameters from each key and value
# in the standard-style parameters hash
if v.is_a?(Array)
# This is a one-to-many relationship
validate_relationships!(v)
# Add a relationship object for all related resources
v.each { |el| add_relationship(k, type(el), el.id) }
elsif v.is_a?(Flexirest::Base)
# This is a one-to-one relationship
add_relationship(k, type(v), v.id)
else
# This is a normal attribute
add_attribute(k, v)
end
end
end
def add_relationship(name, type, id)
# Use the `name` parameter to determine the type of relationship
if singular?(name)
# If `name` is a singular word (one-to-one relationship),
# add or overwrite the data object for the given `name`,
# containing a type and id value to the relationships object
@params[:data][:relationships][name] =
{ data: { type: type, id: id } }
elsif @params[:data][:relationships][name]
# If `name` is a plural word (one-to-many relationship),
# and the `name` object already exists in the relationships object,
# assume a nested data array exists, and add a new data object
# containing a type and id value to the data array
@params[:data][:relationships][name][:data] <<
{ type: type, id: id }
else
# If `name` is a plural word, but the `name` object does not exist,
# add a new `name` object containing a data array,
# which consists of exactly one data object with the type and id
@params[:data][:relationships][name] =
{ data: [{ type: type, id: id }] }
end
end
def add_attribute(key, value)
# Add a resource attribute to the attributes object
# within the resource object
@params[:data][:attributes][key] = value
end
def build(id, type)
# Build the standard resource object
pp = {}
pp[:data] = {}
pp[:data][:id] = id if id
pp[:data][:type] = type
pp[:data][:attributes] = {}
pp[:data][:relationships] = {}
pp
end
def validate_relationships!(v)
# Should always contain the same class in entire relationships array
raise_params_error! if v.map(&:class).count > 1
end
def raise_params_error!
raise Exception.new("Cannot contain different instance types!")
end
end
end
end
# Creating JSON API header
module Headers
extend self
def save(headers)
# Save headers used in a request for building lazy association
# loaders when parsing the response
@headers = headers
end
end
# Parsing JSON API responses
module Response
extend self
extend Flexirest::JsonAPIProxy::Helpers
ID_PFIX = '_id_'
def save_resource_class(object)
@resource_class = object.is_a?(Class) ? object : object.class
end
def parse(body, object)
# Save resource class for building lazy association loaders
save_resource_class(object)
# According to the spec:
# "The members data and errors MUST NOT coexist in the same document."
# Thus, if the "errors" key is present, we can return it and ignore the "data" key.
return body['errors'] if body.include?('errors')
# return early if data is an empty array
return [] if body['data'] == []
# Retrieve the resource(s) object or array from the data object
records = body['data']
# Convert the resource object to an array,
# because it is easier to work with an array than a single object
# Also keep track if record is singular or plural for the result later
is_singular_record = records.is_a?(Hash)
records = [records] if is_singular_record
# Retrieve all names of linked relationships
relationships = records.first['relationships']
relationships = relationships ? relationships.keys : []
included = body['included']
# Parse the records, and retrieve all resources in a
# (nested) array of resources that is easy to work with in Flexirest
resources = records.map do |record|
fetch_attributes_and_relationships(record, included, relationships)
end
# Pluck all attributed and associations into hashes
resources = resources.map do |resource|
pluck_attributes_and_relationships(resource, relationships)
end
# Depending on whether we got a resource object (hash) or array
# in the beginning, return to the caller with the same type
is_singular_record ? resources.first : resources
end
private
def fetch_attributes_and_relationships(record, included, rels, base: nil)
base = Array(base) unless base.is_a?(Array)
rels = rels - [base.last]
rels_object = record['relationships']
rels.each do |rel_name|
# Determine from `rel_name` (relationship name) whether the
# linked resource is a singular or plural (one-to-one or
# one-to-many, respectively)
is_singular_rel = singular?(rel_name)
if is_singular_rel
# Fetch a linked resource from the relationships object
# and add it as an association attribute in the resource hash
record[rel_name], record[ID_PFIX + rel_name], embedded =
fetch_one_to_one(base, rels_object, rel_name, included)
else
# Fetch linked resources from the relationships object
# and add it as an array into the resource hash
record[rel_name], record[ID_PFIX + rel_name], embedded =
fetch_one_to_many(base, rels_object, rel_name, included)
end
# Do not try to fetch embedded results if the response is not
# a compound document. Instead, a LazyAssociationLoader should
# have been created and inserted into the record
next record unless embedded
# Recursively fetch the relationships and embedded nested resources
linked_resources = record[rel_name].map do |nested_record|
# Find the relationships object in the linked resource
# and find whether there are any nested linked resources
nested_rels_object = nested_record['relationships']
if nested_rels_object && nested_rels_object.keys.present?
# Fetch the linked resources and its attributes recursively
fetch_attributes_and_relationships(
nested_record, included, nested_rels_object.keys,
base: base + [rel_name]
)
else
nested_record
end
end
record[rel_name] = linked_resources
end
record
end
def fetch_one_to_one(base, relationships, name, included)
# Parse the relationships object given the relationship name `name`,
# and look into the included object (in case of a compound document),
# to embed the linked resource into the response
if included.blank? || relationships[name]['data'].blank?
return build_lazy_loader(base, relationships, name), nil, false
end
# Retrieve the linked resource id and its pluralized type name
rel_id = relationships[name]['data']['id']
type_name = relationships[name]['data']['type']
plural_type_name = type_name.pluralize
# Traverse through the included object, and find the included
# linked resource, based on the given id and pluralized type name
linked_resource = included.select do |i|
i['id'] == rel_id && i['type'] == plural_type_name
end
return linked_resource, rel_id, true
end
def fetch_one_to_many(base, relationships, name, included)
# Parse the relationships object given the relationship name `name`,
# and look into the included object (in case of a compound document),
# to embed the linked resources into the response
if included.blank? || relationships[name]['data'].blank?
return build_lazy_loader(base, relationships, name), [], false
end
# Retrieve the linked resources ids
rel_ids = relationships[name]['data'].map { |r| r['id'] }
# Index the linked resources' id and types that we need to
# retrieve from the included resources
relations_to_include = relationships[name]['data'].map { |r| [r['id'], r['type']] }.to_set
# Traverse through the included object, and find the included
# linked resources, based on the given ids and type name
linked_resources = included.select do |i|
relations_to_include.include?([i['id'], i['type']])
end
return linked_resources, rel_ids, true
end
def pluck_attributes_and_relationships(record, rels)
cleaned = { id: record['id'] }
relationships = Hash[rels.map { |rel| [rel, singular?(rel)] }]
relationships.each do |rel_name, is_singular|
safe_name = rel_name.underscore
id_sfix = is_singular ? '_id' : '_ids'
cleaned[safe_name.singularize + id_sfix] = record[ID_PFIX + rel_name]
links = record[rel_name]
is_lazy_loader = links.is_a?(Flexirest::LazyAssociationLoader)
linked_resources =
if is_lazy_loader || links.blank?
# Skip this relationship if it hasn't been included
links
else
# Probe the linked resources
first_linked = links.first
# Retrieve all names of linked relationships
nested_rels =
if first_linked && first_linked['relationships']
first_linked['relationships'].keys
else
[]
end
# Recursively pluck attributes for all related resources
links.map do |linked_resource|
pluck_attributes_and_relationships(linked_resource, nested_rels)
end
end
# Depending on if the resource is singular or plural, add it as
# the original type (array or hash) into the record hash
cleaned[safe_name] =
if is_lazy_loader || !is_singular
linked_resources
else
linked_resources ? linked_resources.first : nil
end
end
# Fetch attribute keys and values from the resource object
# and insert into result record hash
record['attributes'].each do |k, v|
cleaned[k.underscore] = v
end
cleaned
end
def find_association_class(base, name)
stack = base + [name]
klass = @resource_class
until stack.empty?
shift = stack.shift
last = klass
klass = klass._associations[shift.underscore.to_sym]
if klass.nil?
raise "#{last} has no defined relation to #{shift}. " \
"Have you defined :has_one or :has_many :#{shift} in #{last}?"
end
end
klass
end
def build_lazy_loader(base, relationships, name)
is_singular = singular?(name)
# Create a new request, given the linked resource `name`,
# finding the association's class, and given the `url` to the linked
# resource
begin
# When the response is not a compound document (i.e. there is no
# includes object), build a LazyAssociationLoader for lazy loading
url = relationships[name]['links']['related']
rescue NoMethodError
# If the url for retrieving the linked resource is missing,
# we assume there is no linked resource available to fetch
# Default nulled linked resource is `nil` or `[]` for resources
return is_singular ? nil : []
end
klass = find_association_class(base, name)
request = Flexirest::Request.new({ url: url, method: :get }, klass.new)
# Also add the previous request's header, which may contain
# crucial authentication headers (or so), to connect with the service
request.headers = @headers
request.url = request.forced_url = url
Flexirest::LazyAssociationLoader.new(name, url, request)
end
end
end
end
| 36.180365 | 98 | 0.600682 |
61d11930f74479f14b93a967ec20ec1080abf822 | 1,979 | class CategoriesController < ApplicationController
before_action :set_category, only: [:show, :edit, :update, :destroy]
before_action :login_check
# GET /categories
# GET /categories.json
def index
@categories = Category.all.order(:no)
end
# GET /categories/1
# GET /categories/1.json
def show
end
# GET /categories/new
def new
@category = Category.new
end
# GET /categories/1/edit
def edit
end
# POST /categories
# POST /categories.json
def create
@category = Category.new(category_params)
respond_to do |format|
if @category.save
format.html { redirect_to @category, notice: 'Category was successfully created.' }
format.json { render :show, status: :created, location: @category }
else
format.html { render :new }
format.json { render json: @category.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /categories/1
# PATCH/PUT /categories/1.json
def update
respond_to do |format|
if @category.update(category_params)
format.html { redirect_to @category, notice: 'Category was successfully updated.' }
format.json { render :show, status: :ok, location: @category }
else
format.html { render :edit }
format.json { render json: @category.errors, status: :unprocessable_entity }
end
end
end
# DELETE /categories/1
# DELETE /categories/1.json
def destroy
@category.destroy
respond_to do |format|
format.html { redirect_to categories_url, notice: 'Category was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_category
@category = Category.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def category_params
params.require(:category).permit(:id,:no,:name)
end
end
| 26.039474 | 96 | 0.676604 |
7a95fb8081255a1498841cb0f0e71d5f99a81ae5 | 537 | require 'test_helper'
require 'vcr_setup'
# Exercises CqlToElmHelper against a recorded translation-service response.
class CqlToElmHelperTest < ActiveSupport::TestCase
  include WebMock::API

  setup do
    # Fresh DB + the CMS160v6 measure fixture; the test uses its CQL source.
    dump_database
    load_measure_fixtures_from_folder(File.join('measures', 'CMS160v6'))
    @measure = CQM::Measure.first
  end

  test 'translate_cql_to_elm produces json' do
    # VCR replays the canned translation-service HTTP response (no network).
    VCR.use_cassette('valid_translation_json_response') do
      elm_json, elm_xml = CqlToElmHelper.translate_cql_to_elm(@measure[:cql])
      assert_equal 1, elm_json.count
      assert_equal 1, elm_xml.count
    end
  end
end
| 24.409091 | 77 | 0.748603 |
bf33cb9b526c39cc6bcb70b47a73274ff7188ad0 | 14,718 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Seth Falcon (<[email protected]>)
# Author:: Kyle Goodwin (<[email protected]>)
# Copyright:: Copyright 2008-2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Chef
# == Chef::Exceptions
# Chef's custom exceptions are all contained within the Chef::Exceptions
# namespace.
class Exceptions
# Backcompat with Chef::ShellOut code:
require 'mixlib/shellout/exceptions'
def self.const_missing(const_name)
if const_name == :ShellCommandFailed
Chef::Log.warn("Chef::Exceptions::ShellCommandFailed is deprecated, use Mixlib::ShellOut::ShellCommandFailed")
called_from = caller[0..3].inject("Called from:\n") {|msg, trace_line| msg << " #{trace_line}\n" }
Chef::Log.warn(called_from)
Mixlib::ShellOut::ShellCommandFailed
else
super
end
end
class Application < RuntimeError; end
class Cron < RuntimeError; end
class Env < RuntimeError; end
class Exec < RuntimeError; end
class ErlCall < RuntimeError; end
class FileNotFound < RuntimeError; end
class Package < RuntimeError; end
class Service < RuntimeError; end
class Script < RuntimeError; end
class Route < RuntimeError; end
class SearchIndex < RuntimeError; end
class Override < RuntimeError; end
class UnsupportedAction < RuntimeError; end
class MissingLibrary < RuntimeError; end
class CannotDetermineNodeName < RuntimeError
def initialize
super "Unable to determine node name: configure node_name or configure the system's hostname and fqdn"
end
end
class User < RuntimeError; end
class Group < RuntimeError; end
class Link < RuntimeError; end
class Mount < RuntimeError; end
class PrivateKeyMissing < RuntimeError; end
class CannotWritePrivateKey < RuntimeError; end
class RoleNotFound < RuntimeError; end
class DuplicateRole < RuntimeError; end
class ValidationFailed < ArgumentError; end
class InvalidPrivateKey < ArgumentError; end
class ConfigurationError < ArgumentError; end
class RedirectLimitExceeded < RuntimeError; end
class AmbiguousRunlistSpecification < ArgumentError; end
class CookbookFrozen < ArgumentError; end
class CookbookNotFound < RuntimeError; end
# Cookbook loader used to raise an argument error when cookbook not found.
# for back compat, need to raise an error that inherits from ArgumentError
class CookbookNotFoundInRepo < ArgumentError; end
class RecipeNotFound < ArgumentError; end
class AttributeNotFound < RuntimeError; end
class MissingCookbookDependency < StandardError; end # CHEF-5120
class InvalidCommandOption < RuntimeError; end
class CommandTimeout < RuntimeError; end
class RequestedUIDUnavailable < RuntimeError; end
class InvalidHomeDirectory < ArgumentError; end
class DsclCommandFailed < RuntimeError; end
class PlistUtilCommandFailed < RuntimeError; end
class UserIDNotFound < ArgumentError; end
class GroupIDNotFound < ArgumentError; end
class ConflictingMembersInGroup < ArgumentError; end
class InvalidResourceReference < RuntimeError; end
class ResourceNotFound < RuntimeError; end
# Can't find a Resource of this type that is valid on this platform.
class NoSuchResourceType < NameError
def initialize(short_name, node)
super "Cannot find a resource for #{short_name} on #{node[:platform]} version #{node[:platform_version]}"
end
end
class InvalidResourceSpecification < ArgumentError; end
class SolrConnectionError < RuntimeError; end
class IllegalChecksumRevert < RuntimeError; end
class CookbookVersionNameMismatch < ArgumentError; end
class MissingParentDirectory < RuntimeError; end
class UnresolvableGitReference < RuntimeError; end
class InvalidRemoteGitReference < RuntimeError; end
class InvalidEnvironmentRunListSpecification < ArgumentError; end
class InvalidDataBagItemID < ArgumentError; end
class InvalidDataBagName < ArgumentError; end
class EnclosingDirectoryDoesNotExist < ArgumentError; end
# Errors originating from calls to the Win32 API
class Win32APIError < RuntimeError; end
# Thrown when Win32 API layer binds to non-existent Win32 function. Occurs
# when older versions of Windows don't support newer Win32 API functions.
class Win32APIFunctionNotImplemented < NotImplementedError; end
# Attempting to run windows code on a not-windows node
class Win32NotWindows < RuntimeError; end
class WindowsNotAdmin < RuntimeError; end
# Attempting to access a 64-bit only resource on a 32-bit Windows system
class Win32ArchitectureIncorrect < RuntimeError; end
class ObsoleteDependencySyntax < ArgumentError; end
class InvalidDataBagPath < ArgumentError; end
class DuplicateDataBagItem < RuntimeError; end
class PowershellCmdletException < RuntimeError; end
class CannotDetermineHomebrewOwner < Package; end
# Can not create staging file during file deployment
class FileContentStagingError < RuntimeError
def initialize(errors)
super "Staging tempfile can not be created during file deployment.\n Errors: #{errors.join('\n')}!"
end
end
# A different version of a cookbook was added to a
# VersionedRecipeList than the one already there.
class CookbookVersionConflict < ArgumentError ; end

# does not follow X.Y.Z format. ArgumentError?
class InvalidPlatformVersion < ArgumentError; end
class InvalidCookbookVersion < ArgumentError; end

# version constraint should be a string or array, or it doesn't
# match OP VERSION. ArgumentError?
class InvalidVersionConstraint < ArgumentError; end

# Version constraints are not allowed in chef-solo.
# NOTE: NotImplementedError is a ScriptError, so a bare `rescue` (which only
# catches StandardError) will NOT catch this.
class IllegalVersionConstraint < NotImplementedError; end

class MetadataNotValid < StandardError; end

# File operation attempted but no permissions to perform it
class InsufficientPermissions < RuntimeError; end

# Ifconfig failed
class Ifconfig < RuntimeError; end

# Invalid "source" parameter to a remote_file resource
class InvalidRemoteFileURI < ArgumentError; end
# Node::Attribute computes the merged version of attributes
# and makes it read-only. Attempting to modify a read-only
# attribute will cause this error.
class ImmutableAttributeModification < NoMethodError
  def initialize
    # NOTE(review): the %Q literal ends with `"value"'` — the trailing single
    # quote is part of the user-visible message. Confirm whether it is
    # intentional before changing it; tools/tests may match the exact text.
    super "Node attributes are read-only when you do not specify which precedence level to set. " +
      %Q(To set an attribute use code like `node.default["key"] = "value"')
  end
end
# Merged node attributes are invalidated when the component
# attributes are updated. Attempting to read from a stale copy
# of merged attributes will trigger this error.
class StaleAttributeRead < StandardError; end

# Registry Helper throws the following errors.
# Note Win32RegArchitectureIncorrect inherits the more general
# Win32ArchitectureIncorrect defined above.
class Win32RegArchitectureIncorrect < Win32ArchitectureIncorrect; end
class Win32RegHiveMissing < ArgumentError; end
class Win32RegKeyMissing < RuntimeError; end
class Win32RegValueMissing < RuntimeError; end
class Win32RegDataMissing < RuntimeError; end
class Win32RegValueExists < RuntimeError; end
class Win32RegNoRecursive < ArgumentError; end
class Win32RegTypeDoesNotExist < ArgumentError; end
class Win32RegBadType < ArgumentError; end
class Win32RegBadValueSize < ArgumentError; end
class Win32RegTypesMismatch < ArgumentError; end

class InvalidEnvironmentPath < ArgumentError; end
class EnvironmentNotFound < RuntimeError; end

# File-like resource found a non-file (socket, pipe, directory, etc) at its destination
class FileTypeMismatch < RuntimeError; end

# File (or descendent) resource configured to manage symlink source, but
# the symlink that is there either loops or points to a nonexistent file
class InvalidSymlink < RuntimeError; end

class ChildConvergeError < RuntimeError; end
class NoProviderAvailable < RuntimeError; end
# Raised when run_list expansion references roles that do not exist.
# Can be constructed with no argument, a plain message string, or a
# failed RunList::RunListExpansion (whose errors become the message).
class MissingRole < RuntimeError
  # Sentinel distinguishing "no argument given" from an explicit message.
  NULL = Object.new

  # The failed RunListExpansion, when one was supplied; otherwise nil.
  attr_reader :expansion

  def initialize(message_or_expansion = NULL)
    @expansion = nil

    if message_or_expansion.equal?(NULL)
      super()
    elsif message_or_expansion.is_a?(String)
      super(message_or_expansion)
    elsif message_or_expansion.is_a?(RunList::RunListExpansion)
      @expansion = message_or_expansion
      super("The expanded run list includes nonexistent roles: #{@expansion.errors.join(', ')}")
    end
  end
end
# Exception class for collecting multiple failures. Used when running
# delayed notifications so that chef can process each delayed
# notification even if chef client or other notifications fail.
class MultipleFailures < StandardError
  def initialize(*args)
    super
    @all_failures = []
  end

  # One line per recorded failure, prefixed with a summary header.
  def message
    header = "Multiple failures occurred:\n"
    details = @all_failures.map do |location, error|
      "* #{error.class} occurred in #{location}: #{error.message}\n"
    end
    header + details.join
  end

  # Records a failure of the main chef run and adopts its backtrace.
  def client_run_failure(exception)
    set_backtrace(exception.backtrace)
    @all_failures << ["chef run", exception]
  end

  # Records a failure of a delayed notification.
  def notification_failure(exception)
    @all_failures << ["delayed notification", exception]
  end

  # Raises the most appropriate exception, or nothing when none recorded.
  def raise!
    raise for_raise unless empty?
  end

  def empty?
    @all_failures.empty?
  end

  # A single recorded failure is raised directly; otherwise raise self so
  # the combined message is reported.
  def for_raise
    @all_failures.size == 1 ? @all_failures.first.last : self
  end
end
# Groups compound exceptions raised while solving cookbook version
# constraints during run_list expansion and resolution.
class CookbookVersionSelection
  # Raised when run_list items refer to cookbooks that don't exist and/or
  # cookbooks with no versions satisfying the given constraints.
  class InvalidRunListItems < StandardError
    attr_reader :non_existent_cookbooks, :cookbooks_with_no_matching_versions

    def initialize(message, missing_cookbooks, unmatched_cookbooks)
      super(message)
      @non_existent_cookbooks = missing_cookbooks
      @cookbooks_with_no_matching_versions = unmatched_cookbooks
    end

    # Serializes the failure for API clients. The string keys (including the
    # shorter "cookbooks_with_no_versions") are part of the wire format and
    # must not be renamed.
    def to_json(*a)
      payload = {
        "message" => message,
        "non_existent_cookbooks" => non_existent_cookbooks,
        "cookbooks_with_no_versions" => cookbooks_with_no_matching_versions
      }
      Chef::JSONCompat.to_json(payload, *a)
    end
  end

  # Raised when a run_list item's version constraint is unsatisfiable.
  #
  # This exception may not be the complete error report: resolving the
  # misconfiguration it describes and re-solving can surface further
  # unsatisfiable constraints deeper in the dependency graph.
  class UnsatisfiableRunListItem < StandardError
    attr_reader :run_list_item
    attr_reader :non_existent_cookbooks, :most_constrained_cookbooks

    # most_constrained_cookbooks: cookbooks whose constraints, if removed,
    # would yield a solution or move on to the next (deeper) error. An item
    # in this list may itself be unsatisfiable once its constraints are
    # resolved; that condition is not reported here.
    def initialize(message, run_list_item, missing_cookbooks, constrained_cookbooks)
      super(message)
      @run_list_item = run_list_item
      @non_existent_cookbooks = missing_cookbooks
      @most_constrained_cookbooks = constrained_cookbooks
    end

    # Serializes the failure for API clients; key names are wire format.
    def to_json(*a)
      payload = {
        "message" => message,
        "unsatisfiable_run_list_item" => run_list_item,
        "non_existent_cookbooks" => non_existent_cookbooks,
        "most_constrained_cookbooks" => most_constrained_cookbooks
      }
      Chef::JSONCompat.to_json(payload, *a)
    end
  end
end # CookbookVersionSelection
# When the server sends a redirect, RFC 2616 states a user-agent should
# not follow it with a method other than GET or HEAD, unless a specific
# action is taken by the user. A redirect received as response to a
# non-GET and non-HEAD request will thus raise an InvalidRedirect.
# (See also RFC 7231, which obsoletes RFC 2616.)
class InvalidRedirect < StandardError; end
# Raised when the number of bytes actually received in a download does not
# match the Content-Length declared in the HTTP response headers.
class ContentLengthMismatch < RuntimeError
  # response_length - bytes actually read from the response body
  # content_length  - value of the HTTP Content-Length header
  def initialize(response_length, content_length)
    details = format(
      'Response body length %s does not match HTTP Content-Length header %s.',
      response_length, content_length
    )
    super(details)
  end
end
# Raised when a feature is invoked on a platform that does not support it.
class UnsupportedPlatform < RuntimeError
  # platform - name of the unsupported platform (interpolated via to_s)
  def initialize(platform)
    super(format('This functionality is not supported on platform %s.', platform))
  end
end
# Raised when Chef::Config[:run_lock_timeout] is set and another client run
# fails to release the run lock before the timeout elapses.
class RunLockTimeout < RuntimeError
  # duration     - seconds this run waited for the lock
  # blocking_pid - pid of the process holding the lock
  def initialize(duration, blocking_pid)
    details = "Unable to acquire lock. " \
              "Waited #{duration} seconds for #{blocking_pid} to release."
    super(details)
  end
end
# Raised when a resource's declared checksum differs from the checksum
# computed over the actual content.
class ChecksumMismatch < RuntimeError
  def initialize(res_cksum, cont_cksum)
    super(format('Checksum on resource (%s) does not match checksum on content (%s)',
                 res_cksum, cont_cksum))
  end
end
# NOTE(review): presumably raised for a malformed HTTP proxy URI — confirm
# at the raise sites before documenting further.
class BadProxyURI < RuntimeError; end

# Raised by Chef::JSONCompat.
# NOTE: within the enclosing namespace this constant shadows the top-level
# ::JSON module; refer to the standard library explicitly as ::JSON here.
class JSON
  class EncodeError < RuntimeError; end
  class ParseError < RuntimeError; end
end

class InvalidSearchQuery < ArgumentError; end
# Raised by Chef::ProviderResolver when more than one provider class claims
# to handle the same resource, so no single provider can be chosen.
class AmbiguousProviderResolution < RuntimeError
  # resource - the resource being resolved (must respond to #resource_name)
  # classes  - the competing provider classes (rendered via Array#to_s)
  def initialize(resource, classes)
    super(format('Found more than one provider for %s resource: %s',
                 resource.resource_name, classes))
  end
end
end
end
| 38.328125 | 118 | 0.71416 |
26d5413920698b70ee209d8a6c4c18567953c5b5 | 1,432 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-sagemaker/types'
require_relative 'aws-sdk-sagemaker/client_api'
require_relative 'aws-sdk-sagemaker/client'
require_relative 'aws-sdk-sagemaker/errors'
require_relative 'aws-sdk-sagemaker/waiters'
require_relative 'aws-sdk-sagemaker/resource'
require_relative 'aws-sdk-sagemaker/customizations'
# This module provides support for Amazon SageMaker Service. This module is available in the
# `aws-sdk-sagemaker` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# sage_maker = Aws::SageMaker::Client.new
# resp = sage_maker.add_tags(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from Amazon SageMaker Service are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::SageMaker::Errors::ServiceError
# # rescues all Amazon SageMaker Service API errors
# end
#
# See {Errors} for more information.
#
# @!group service
# Root namespace for the Amazon SageMaker SDK. Per the warning in the file
# header, this file is generated — do not edit by hand.
module Aws::SageMaker
  # Version of the aws-sdk-sagemaker gem this code was generated for.
  GEM_VERSION = '1.66.0'
end
| 26.518519 | 92 | 0.75 |
e2d398897092e8be1781a065118ee94278953a4e | 1,546 | require 'rails_helper'
# View spec for the account-deletion confirmation page.
describe 'users/delete/show.html.slim' do
  let(:user) { build_stubbed(:user, :signed_up) }
  let(:decorated_user) { user.decorate }

  before do
    allow(user).to receive(:decorate).and_return(decorated_user)
    allow(view).to receive(:current_user).and_return(user)
  end

  it 'displays headings' do
    render

    expect(rendered).to have_content(t('users.delete.heading', app: APP_NAME))
    expect(rendered).to have_content(t('users.delete.subheading', app: APP_NAME))
  end

  it 'displays bullets' do
    render

    expect(rendered).to have_content(t('users.delete.bullet_1', app: APP_NAME))
    expect(rendered).to have_content(t(user.decorate.delete_account_bullet_key, app: APP_NAME))
    expect(rendered).to have_content(t('users.delete.bullet_3', app: APP_NAME))
  end

  it 'uses the LOA1 bullet key when the identity is not verified' do
    allow(decorated_user).to receive(:identity_verified?).and_return(false)

    expect(user.decorate.delete_account_bullet_key).to eq 'users.delete.bullet_2_loa1'
  end

  # BUGFIX: this example previously duplicated the LOA1 description even
  # though it exercises the verified (LOA3) branch.
  it 'uses the LOA3 bullet key when the identity is verified' do
    allow(decorated_user).to receive(:identity_verified?).and_return(true)

    expect(user.decorate.delete_account_bullet_key).to eq 'users.delete.bullet_2_loa3'
  end

  it 'contains link to delete account button' do
    render

    # BUGFIX: the original passed `href:` into `t()`, where it was silently
    # consumed as an unused I18n interpolation argument — Capybara's
    # have_button matcher takes no :href option. Assert on the button text.
    expect(rendered).to have_button(t('users.delete.actions.delete'))
  end

  it 'contains link to cancel delete account link' do
    render

    expect(rendered).to have_link(t('users.delete.actions.cancel'), href: account_path)
  end
end
| 31.55102 | 95 | 0.743208 |
61e70c8238e0b3879178c6d2bfb4dd5785392003 | 4,319 | # -*- coding: binary -*-
require 'rex/java/serialization'
require 'rex/text'
module Msf
  module Java
    module Rmi
      module Util
        # Computes the 64-bit method hash used by JDK 1.2 RMI stubs for a
        # remote method signature (method name + JVM method descriptor):
        # the UTF-encoded signature is hashed with SHA-1 and the first 8
        # bytes are read as a little-endian unsigned 64-bit integer.
        #
        # @param signature [String] method name plus method descriptor
        # @return [Integer] the method hash
        # @see http://docs.oracle.com/javase/8/docs/platform/rmi/spec/rmi-stubs24.html
        def calculate_method_hash(signature)
          encoded = Rex::Java::Serialization::Model::Utf.new(nil, signature).encode
          Rex::Text.sha1_raw(encoded).unpack('Q<')[0]
        end

        # Computes the interface hash used by JDK 1.1 RMI stubs: SHA-1 over
        # the stub version number followed by every method's name, descriptor
        # and declared exception names, truncated to 64 bits (little-endian).
        #
        # @param methods [Array<Hash>] each entry carries :name, :descriptor
        #   and :exceptions (Array<String> of declared exception names)
        # @return [Integer] the interface hash
        # @see http://docs.oracle.com/javase/8/docs/platform/rmi/spec/rmi-stubs24.html
        def calculate_interface_hash(methods)
          stream = [1].pack('N') # stub version number
          methods.each do |method|
            stream << Rex::Java::Serialization::Model::Utf.new(nil, method[:name]).encode
            stream << Rex::Java::Serialization::Model::Utf.new(nil, method[:descriptor]).encode
            method[:exceptions].each do |exception_name|
              stream << Rex::Java::Serialization::Model::Utf.new(nil, exception_name).encode
            end
          end
          Rex::Text.sha1_raw(stream).unpack('Q<')[0]
        end

        # Reads a length-prefixed string (signed 16-bit big-endian length)
        # from io.
        #
        # @param io [IO] stream to read from
        # @return [String, nil] the string, or nil on truncated input
        def extract_string(io)
          prefix = io.read(2)
          return nil unless prefix && prefix.length == 2

          length = prefix.unpack('s>')[0]
          data = io.read(length)
          return nil unless data && data.length == length

          data
        end

        # Reads a signed 32-bit big-endian integer from io.
        #
        # @param io [IO] stream to read from
        # @return [Integer, nil] the value, or nil on truncated input
        def extract_int(io)
          bytes = io.read(4)
          return nil unless bytes && bytes.length == 4

          bytes.unpack('l>')[0]
        end

        # Reads a signed 64-bit big-endian integer from io.
        #
        # @param io [IO] stream to read from
        # @return [Integer, nil] the value, or nil on truncated input
        def extract_long(io)
          bytes = io.read(8)
          return nil unless bytes && bytes.length == 8

          bytes.unpack('q>')[0]
        end

        # Parses a 'UnicastRef' RMI remote reference from io: the literal
        # type tag, endpoint address and port, object number and UID.
        #
        # @param io [IO] stream positioned at the serialized reference
        # @return [Hash, nil] {:address, :port, :object_number, :uid} on
        #   success, nil when the tag or leading fields are malformed
        def extract_reference(io)
          return nil unless extract_string(io) == 'UnicastRef'

          address = extract_string(io)
          return nil unless address

          port = extract_int(io)
          return nil unless port

          object_number = extract_long(io)
          uid = Rex::Proto::Rmi::Model::UniqueIdentifier.decode(io)

          { address: address, port: port, object_number: object_number, uid: uid }
        end
      end
    end
  end
end
| 34.830645 | 174 | 0.59273 |
0386601d8926cb1ccfabfd6ed0586c6440f59acf | 1,430 | require 'rails_helper'
# Model spec for TimelineEvent: factory validity, validations, cached-value
# behavior, and the package_event? predicate.
RSpec.describe TimelineEvent do
  let(:timeline_event) { FactoryGirl.build(:timeline_event) }

  # Leftover shoulda-style macro; the presence validation is covered
  # explicitly by the 'requires an event_type' example below.
  #should_validate_presence_of :event_type

  before(:each) do
    # NOTE(review): a package rating is created before every example;
    # presumably some factories/associations depend on it — confirm before
    # removing.
    FactoryGirl.create :package_rating
  end

  it 'has a valid factory' do
    expect(timeline_event).to be_valid
  end

  describe 'validations' do
    it 'requires an event_type' do
      timeline_event.event_type = nil
      expect(timeline_event).to_not be_valid
    end

    it 'allows actor to be blank' do
      timeline_event.actor = nil
      expect(timeline_event).to be_valid
    end

    it 'allows secondary_subject to be blank' do
      timeline_event.secondary_subject = nil
      expect(timeline_event).to be_valid
    end
  end

  it "should cache package ratings" do
    event = FactoryGirl.create(:package_rating_event)
    expect(event.cached_value.to_i).to eq(event.subject.rating.to_i)
  end

  it "should cache task view versions" do
    event = FactoryGirl.create(:task_view).update_version("2009-09-09")
    expect(event.cached_value).to eq(TaskView.first.version)
  end

  # Table-driven check of TimelineEvent#package_event? per event type.
  {
    "new_package" => true,
    "new_version" => true,
    "new_review" => false,
  }.each do |event_type, is_package_event|
    it "knows if #{event_type} is #{is_package_event ? "a" : "not a" } package event" do
      timeline_event.event_type = event_type
      expect(timeline_event.package_event?).to eq(is_package_event)
    end
  end
end
bba145c8bdb15750a0c20a1ee1c386801ec65499 | 114 | class AddMassToStars < ActiveRecord::Migration[5.1]
def change
add_column :stars, :mass, :decimal
end
end
| 19 | 51 | 0.72807 |
0874fcc577337fce059410316a67faf6ae4d0e8d | 5,303 | module Roadworker
class Client
  include Roadworker::Log
  include Roadworker::Utils::Helper

  # Reconciles Route 53 hosted zones and record sets with a Routefile DSL.
  #
  # @param options [Hash] CLI options (:logger, :color, :force,
  #   :health_check_gc, ...). Wrapped in an OpenStruct so collaborators can
  #   read and set them as attributes (e.g. @options.updated).
  def initialize(options = {})
    @options = OpenStruct.new(options)
    @options.logger ||= Logger.new($stdout)
    String.colorize = @options.color
    @options.route53 = Aws::Route53::Client.new
    @health_checks = HealthCheck.health_checks(@options.route53, :extended => true)
    @options.health_checks = @health_checks
    @route53 = Route53Wrapper.new(@options)
  end

  # Applies the Routefile to Route 53.
  #
  # @param file [String, IO] path to, or open handle of, a Routefile
  # @return [Boolean] true when any change was applied
  def apply(file)
    dsl = load_file(file)
    updated = false

    if dsl.hosted_zones.empty? and not @options.force
      log(:warn, "Nothing is defined (pass `--force` if you want to remove)", :yellow)
    else
      walk_hosted_zones(dsl)
      updated = @options.updated
    end

    # Orphaned health checks can only appear after an actual update.
    if updated and @options.health_check_gc
      HealthCheck.gc(@options.route53, :logger => @options.logger)
    end

    return updated
  end

  # Exports the current Route 53 state. With a block, yields the raw export
  # and the DSL converter so callers can customize output; otherwise returns
  # the converted DSL text.
  def export
    exported = @route53.export

    if block_given?
      yield(exported, DSL.method(:convert))
    else
      DSL.convert(exported)
    end
  end

  # Runs the DSL's test mode (assertions against live DNS) for the file.
  def test(file)
    dsl = load_file(file)
    DSL.test(dsl, @options)
  end

  private

  # Loads and evaluates a Routefile.
  #
  # @param file [String, IO] path or already-open handle
  # @return [Object] the evaluated DSL result
  def load_file(file)
    dsl = nil

    if file.kind_of?(String)
      # SECURITY/BUGFIX: use File.open instead of Kernel#open — with
      # Kernel#open a path beginning with "|" would be executed as a
      # shell command.
      File.open(file) do |f|
        dsl = DSL.define(f.read, file).result
      end
    else
      dsl = DSL.define(file.read, file.path).result
    end

    return dsl
  end

  # Creates, updates and deletes hosted zones so live state matches the DSL.
  def walk_hosted_zones(dsl)
    expected = collection_to_hash(dsl.hosted_zones) {|i| [normalize_name(i.name), i.vpcs.empty?, normalize_id(i.id)] }
    actual = collection_to_hash(@route53.hosted_zones) {|i| [normalize_name(i.name), i.vpcs.empty?, normalize_id(i.id)] }

    expected.each do |keys, expected_zone|
      name, private_zone, id = keys
      next unless matched_zone?(name)

      if id
        actual_zone = actual.delete(keys)

        unless actual_zone
          log(:warn, "Hosted zone not found", :yellow, "#{name} (#{id})")
          next
        end
      else
        # No id given in the DSL: match by name + public/private flag.
        actual_keys, actual_zone = actual.find {|(n, p, _), _| n == name && p == private_zone }
        actual.delete(actual_keys) if actual_keys
      end

      actual_zone ||= @route53.hosted_zones.create(name, :vpc => expected_zone.vpcs.first)

      walk_vpcs(expected_zone, actual_zone)
      walk_rrsets(expected_zone, actual_zone)
    end

    # Zones that remain exist in Route 53 but not in the DSL: delete them.
    actual.each do |(name, _, _), zone|
      next unless matched_zone?(name)
      zone.delete
    end
  end

  # Associates/disassociates VPCs so a private zone matches the DSL.
  def walk_vpcs(expected_zone, actual_zone)
    expected_vpcs = expected_zone.vpcs || []
    actual_vpcs = actual_zone.vpcs || []

    if not expected_vpcs.empty? and actual_vpcs.empty?
      log(:warn, "Cannot associate VPC to public zone", :yellow, expected_zone.name)
    else
      (expected_vpcs - actual_vpcs).each do |vpc|
        actual_zone.associate_vpc(vpc)
      end

      unexpected_vpcs = actual_vpcs - expected_vpcs

      # A private zone must keep at least one VPC association.
      if unexpected_vpcs.length.nonzero? and expected_vpcs.length.zero?
        log(:warn, "Private zone requires one or more of VPCs", :yellow, expected_zone.name)
      else
        unexpected_vpcs.each do |vpc|
          actual_zone.disassociate_vpc(vpc)
        end
      end
    end
  end

  # Creates, updates and deletes record sets in a zone to match the DSL.
  def walk_rrsets(expected_zone, actual_zone)
    expected = collection_to_hash(expected_zone.rrsets, :name, :type, :set_identifier)
    actual = collection_to_hash(actual_zone.rrsets, :name, :type, :set_identifier)

    expected.each do |(name, type, set_identifier), expected_record|
      actual_record = actual.delete([name, type, set_identifier])

      # A and CNAME are interchangeable for an update (e.g. switching an
      # alias target): try the other type before treating it as missing.
      if not actual_record and %w(A CNAME).include?(type)
        actual_type = (type == 'A' ? 'CNAME' : 'A')
        actual_record = actual.delete([name, actual_type, set_identifier])
      end

      if expected_zone.ignore_patterns.any? { |pattern| pattern === name }
        log(:warn, "Ignoring defined record in DSL, because it is ignored record", :yellow) do
          "#{name} #{type}" + (set_identifier ? " (#{set_identifier})" : '')
        end
        next
      end

      if actual_record
        unless actual_record.eql?(expected_record)
          actual_record.update(expected_record)
        end
      else
        actual_zone.rrsets.create(name, type, expected_record)
      end
    end

    # Records that remain exist live but not in the DSL: delete unless
    # matched by an ignore pattern.
    actual.each do |(name, _, _), record|
      if expected_zone.ignore_patterns.any? { |pattern| pattern === name }
        next
      end

      record.delete
    end
  end

  # Indexes a collection by key tuple. With a block, the block computes the
  # key list; otherwise +keys+ are method names sent to each item (the :name
  # value is normalized for case/trailing-dot-insensitive comparison).
  def collection_to_hash(collection, *keys)
    hash = {}

    collection.each do |item|
      if block_given?
        key_list = yield(item)
      else
        key_list = keys.map do |k|
          value = item.send(k)
          (k == :name && value) ? normalize_name(value) : value
        end
      end

      hash[key_list] = item
    end

    return hash
  end

  # Lower-cases a DNS name and strips the trailing dot for comparison.
  def normalize_name(name)
    name.downcase.sub(/\.\z/, '')
  end

  # Strips the "/hostedzone/" prefix from a hosted zone id, if present.
  def normalize_id(id)
    id.sub(%r!^/hostedzone/!, '') if id
  end
end # Client
end # Roadworker
| 27.910526 | 125 | 0.600226 |
1ae106a30a8730360223e9bf527121d40673df66 | 4,389 | require "rails_helper"
# Feature spec for browsing organisations: listing, detail tabs, pagination,
# rollover redirect and the empty/forbidden cases. All API calls are stubbed.
feature "View providers", type: :feature do
  let(:organisation_page) { PageObjects::Page::Organisations::OrganisationPage.new }
  let(:current_recruitment_cycle) { build(:recruitment_cycle) }
  let(:provider_1) { build :provider, provider_code: "A0", include_counts: [:courses] }
  # Overridden to true in the "Rollover" context below.
  let(:rollover) { false }
  let(:user) { build(:user, :transitioned) }

  before do
    stub_omniauth(user: user)
    stub_api_v2_request(
      "/recruitment_cycles/#{current_recruitment_cycle.year}",
      current_recruitment_cycle.to_jsonapi,
    )
    allow(Settings.features.rollover).to receive(:can_edit_current_and_next_cycles).and_return(rollover)
  end

  context "with two providers" do
    let(:provider_2) { build :provider, provider_code: "A1", include_counts: [:courses] }
    let(:provider_response) { provider_1.to_jsonapi(include: %i[courses accrediting_provider]) }

    scenario "Navigate to /organisations" do
      stub_api_v2_request(
        "/recruitment_cycles/#{current_recruitment_cycle.year}" \
        "/providers?page[page]=1",
        resource_list_to_jsonapi([provider_1, provider_2], meta: { count: 2 }),
      )

      visit providers_path

      expect(find("h1")).to have_content("Organisations")
      expect(first(".govuk-list li")).to have_content(provider_1.provider_name.to_s)
    end

    scenario "Navigate to /organisations/A0" do
      stub_api_v2_request(
        "/recruitment_cycles/#{current_recruitment_cycle.year}" \
        "/providers/#{provider_1.provider_code}",
        provider_response,
      )

      visit provider_path(provider_1.provider_code)

      expect(find("h1")).to have_content(provider_1.provider_name.to_s)
      expect(organisation_page).not_to have_selector(".govuk-breadcrumbs")
      expect(organisation_page).not_to have_current_cycle
      expect(organisation_page).not_to have_next_cycle
      expect(organisation_page).to have_link("Locations", href: "/organisations/A0/#{Settings.current_cycle}/locations")
      expect(organisation_page).to have_link("Courses", href: "/organisations/A0/#{Settings.current_cycle}/courses")
      expect(organisation_page).to have_link("UCAS contacts", href: "/organisations/A0/ucas-contacts")
      expect(organisation_page).to have_link("Users", href: "/organisations/A0/users")
    end

    context "Rollover" do
      let(:rollover) { true }

      # With rollover enabled the provider page redirects to /rollover.
      scenario "Navigate to /organisations/A0" do
        stub_api_v2_request(
          "/recruitment_cycles/#{current_recruitment_cycle.year}" \
          "/providers/#{provider_1.provider_code}",
          provider_response,
        )
        stub_api_v2_request("/users/#{user.id}", user.to_jsonapi)

        visit provider_path(provider_1.provider_code)

        expect(page.current_path).to eql("/rollover")
        expect(find("h1")).to have_content("Prepare for the next cycle")
      end
    end
  end

  context "with more than ten providers" do
    # NOTE(review): this `let` appears unused in this context (the providers
    # below are built in the loop) — confirm and consider removing.
    let(:provider_2) { build :provider, provider_code: "A1", include_counts: [:courses] }

    it "displays pagination navigation" do
      providers = []
      11.times do
        providers << build(:provider, provider_code: "A1", include_counts: [:courses])
      end

      stub_api_v2_request(
        "/recruitment_cycles/#{current_recruitment_cycle.year}" \
        "/providers?page[page]=1",
        resource_list_to_jsonapi(providers, meta: { count: 11 }),
      )

      visit providers_path

      expect(organisation_page.pagination).to have_next_page
    end
  end

  context "with no providers" do
    let(:no_providers_page) { PageObjects::Page::Organisations::NoProviders.new }
    let(:forbidden_page) { PageObjects::Page::Forbidden.new }

    scenario "Navigate to /organisations" do
      stub_api_v2_request(
        "/recruitment_cycles/#{current_recruitment_cycle.year}" \
        "/providers?page[page]=1",
        resource_list_to_jsonapi([], meta: { count: 0 }),
      )

      visit providers_path

      expect(no_providers_page.no_providers_text).to be_visible
    end

    # A 403 from the provider endpoint renders the forbidden page.
    scenario "Navigate to /organisations/A0" do
      stub_api_v2_request(
        "/recruitment_cycles/#{current_recruitment_cycle.year}" \
        "/providers/#{provider_1.provider_code}",
        "",
        :get,
        403,
      )

      visit provider_path(provider_1.provider_code)

      expect(forbidden_page.forbidden_text).to be_visible
    end
  end
end
| 34.833333 | 120 | 0.689451 |
08636e10553ce61f8e75946a894a52e3af00b10f | 260 | if RUBY_VERSION < '3'
# Load the Ractor backport plus String#-@ (both needed by the test suite).
require './lib/backports/3.0.0/ractor.rb'
require './lib/backports/2.3.0/string/uminus.rb'
PATH = './test/test_ractor.rb'
# Provides exec_test, a patched copy of MRI's test runner.
require './test/mri_runner_patched.rb'
# MRI's test_ractor.rb assumes these singleton literals are frozen (always
# true on Ruby 3); make it explicit when running the backport on Ruby 2.x.
nil.freeze
true.freeze
false.freeze
exec_test [PATH]
end
| 17.333333 | 50 | 0.688462 |
4ac489520aee6c565b1deb48aa4bf4afb5c2f4e8 | 6,399 | require 'rexml/document'
require 'html/document'
module ActionController
module Assertions
# Pair of assertions to testing elements in the HTML output of the response.
module TagAssertions
# Asserts that there is a tag/node/element in the body of the response
# that meets all of the given conditions. The +conditions+ parameter must
# be a hash of any of the following keys (all are optional):
#
# * <tt>:tag</tt>: the node type must match the corresponding value
# * <tt>:attributes</tt>: a hash. The node's attributes must match the
# corresponding values in the hash.
# * <tt>:parent</tt>: a hash. The node's parent must match the
# corresponding hash.
# * <tt>:child</tt>: a hash. At least one of the node's immediate children
# must meet the criteria described by the hash.
# * <tt>:ancestor</tt>: a hash. At least one of the node's ancestors must
# meet the criteria described by the hash.
# * <tt>:descendant</tt>: a hash. At least one of the node's descendants
# must meet the criteria described by the hash.
# * <tt>:sibling</tt>: a hash. At least one of the node's siblings must
# meet the criteria described by the hash.
# * <tt>:after</tt>: a hash. The node must be after any sibling meeting
# the criteria described by the hash, and at least one sibling must match.
# * <tt>:before</tt>: a hash. The node must be before any sibling meeting
# the criteria described by the hash, and at least one sibling must match.
# * <tt>:children</tt>: a hash, for counting children of a node. Accepts
# the keys:
# * <tt>:count</tt>: either a number or a range which must equal (or
# include) the number of children that match.
# * <tt>:less_than</tt>: the number of matching children must be less
# than this number.
# * <tt>:greater_than</tt>: the number of matching children must be
# greater than this number.
# * <tt>:only</tt>: another hash consisting of the keys to use
# to match on the children, and only matching children will be
# counted.
# * <tt>:content</tt>: the textual content of the node must match the
# given value. This will not match HTML tags in the body of a
# tag--only text.
#
# Conditions are matched using the following algorithm:
#
# * if the condition is a string, it must be a substring of the value.
# * if the condition is a regexp, it must match the value.
# * if the condition is a number, the value must match number.to_s.
# * if the condition is +true+, the value must not be +nil+.
# * if the condition is +false+ or +nil+, the value must be +nil+.
#
# === Examples
#
# # Assert that there is a "span" tag
# assert_tag :tag => "span"
#
# # Assert that there is a "span" tag with id="x"
# assert_tag :tag => "span", :attributes => { :id => "x" }
#
# # Assert that there is a "span" tag using the short-hand
# assert_tag :span
#
# # Assert that there is a "span" tag with id="x" using the short-hand
# assert_tag :span, :attributes => { :id => "x" }
#
# # Assert that there is a "span" inside of a "div"
# assert_tag :tag => "span", :parent => { :tag => "div" }
#
# # Assert that there is a "span" somewhere inside a table
# assert_tag :tag => "span", :ancestor => { :tag => "table" }
#
# # Assert that there is a "span" with at least one "em" child
# assert_tag :tag => "span", :child => { :tag => "em" }
#
# # Assert that there is a "span" containing a (possibly nested)
# # "strong" tag.
# assert_tag :tag => "span", :descendant => { :tag => "strong" }
#
# # Assert that there is a "span" containing between 2 and 4 "em" tags
# # as immediate children
# assert_tag :tag => "span",
# :children => { :count => 2..4, :only => { :tag => "em" } }
#
# # Get funky: assert that there is a "div", with an "ul" ancestor
# # and an "li" parent (with "class" = "enum"), and containing a
# # "span" descendant that contains text matching /hello world/
# assert_tag :tag => "div",
# :ancestor => { :tag => "ul" },
# :parent => { :tag => "li",
# :attributes => { :class => "enum" } },
# :descendant => { :tag => "span",
# :child => /hello world/ }
#
# <b>Please note</b>: #assert_tag and #assert_no_tag only work
# with well-formed XHTML. They recognize a few tags as implicitly self-closing
# (like br and hr and such) but will not work correctly with tags
# that allow optional closing tags (p, li, td). <em>You must explicitly
# close all of your tags to use these assertions.</em>
def assert_tag(*opts)
clean_backtrace do
opts = opts.size > 1 ? opts.last.merge({ :tag => opts.first.to_s }) : opts.first
tag = find_tag(opts)
assert tag, "expected tag, but no tag found matching #{opts.inspect} in:\n#{@response.body.inspect}"
end
end
# Identical to #assert_tag, but asserts that a matching tag does _not_
# exist. (See #assert_tag for a full discussion of the syntax.)
#
# === Examples
# # Assert that there is not a "div" containing a "p"
# assert_no_tag :tag => "div", :descendant => { :tag => "p" }
#
# # Assert that an unordered list is empty
# assert_no_tag :tag => "ul", :descendant => { :tag => "li" }
#
# # Assert that there is not a "p" tag with between 1 to 3 "img" tags
# # as immediate children
# assert_no_tag :tag => "p",
# :children => { :count => 1..3, :only => { :tag => "img" } }
def assert_no_tag(*opts)
clean_backtrace do
opts = opts.size > 1 ? opts.last.merge({ :tag => opts.first.to_s }) : opts.first
tag = find_tag(opts)
assert !tag, "expected no tag, but found tag matching #{opts.inspect} in:\n#{@response.body.inspect}"
end
end
end
end
end | 49.223077 | 111 | 0.571183 |
Subsets and Splits