hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e2b9bd185757eabd9c296b1f40b84c17c4afc0e1 | 661 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Onebitflix
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 33.05 | 82 | 0.765507 |
28ae68a08aec509f1cd70a0848d9c326365fd03b | 538 | class Job < ActiveRecord::Base
# Both associations point at Member records; explicit foreign keys keep
# the column names unambiguous.
belongs_to :created_by, class_name: "Member", foreign_key: :created_by_id
belongs_to :approved_by, class_name: "Member", foreign_key: :approved_by_id

scope :approved, -> { where(approved: true) }
scope :submitted, -> { where(submitted: true, approved: false) }
scope :not_submitted, -> { where(submitted: false, approved: false) }
# Hash form instead of a raw SQL fragment: quoted correctly by the adapter.
scope :ordered, -> { order(created_at: :desc) }

# Hide expired jobs everywhere by default. Date.current respects the
# configured Rails time zone, whereas Date.today uses the server's system
# zone and can be off by a day around midnight.
default_scope -> { where('expiry_date > ?', Date.current) }
# True once the job's expiry date lies in the past.
def expired?
  expiry_date.past?
end
end
| 31.647059 | 77 | 0.697026 |
bb0e7060202b278ccbaca50506c47d8577114fc1 | 1,217 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_model/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RailsSaml
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
| 39.258065 | 99 | 0.743632 |
ac6b5c2cb51dff865b61489991b535820f4b32bb | 747 | require 'spec_helper'
describe XClarityClient do
before :all do
WebMock.allow_net_connect! #-- Uncomment this line if you're testing with a external mock.
conf = XClarityClient::Configuration.new(
:username => ENV['LXCA_USERNAME'],
:password => ENV['LXCA_PASSWORD'],
:host => ENV['LXCA_HOST'],
:port => ENV['LXCA_PORT'],
:auth_type => ENV['LXCA_AUTH_TYPE'],
:verify_ssl => ENV['LXCA_VERIFY_SSL']
)
@client = XClarityClient::Client.new(conf)
end
it 'has a version number' do
expect(XClarityClient::VERSION).not_to be nil
end
describe 'GET /discovery' do
it 'should respond with an array' do
expect(@client.discover_devices_by_slp.class).to eq(Array)
end
end
end
| 23.34375 | 94 | 0.668005 |
1d60cf2243fd8a57c203fe22d557caf6ecd14f7b | 424 | # frozen_string_literal: true
# Load the Rails application.
require_relative 'application'
# Initialize the Rails application.
Rails.application.initialize!
ActionMailer::Base.smtp_settings = {
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => 'yourdomain.com',
:address => 'smtp.sendgrid.net',
:port => 587,
:authentication => :plain,
:enable_starttls_auto => true
} | 24.941176 | 41 | 0.724057 |
28c58f3a334c7e90a12517b493d3b5261a8c3782 | 7,845 | # frozen_string_literal: true
module Webdrone
class MethodLogger < Module
class << self
attr_accessor :last_time, :screenshot
end
# @param methods [Array<Symbol>, nil] instance methods to wrap with tracing;
#   when nil, #included resolves it to all own instance methods of the
#   including class.
def initialize(methods = nil)
super()
@methods = methods
end
if Gem::Version.new(RUBY_VERSION) < Gem::Version.new('2.7')
def included(base)
@methods ||= base.instance_methods(false)
method_list = @methods
base.class_eval do
method_list.each do |method_name|
original_method = instance_method(method_name)
define_method method_name do |*args, &block|
caller_location = Kernel.caller_locations[0]
cl_path = caller_location.path
cl_line = caller_location.lineno
if @a0.conf.logger && Gem.path.none? { |path| cl_path.include? path }
ini = ::Webdrone::MethodLogger.last_time ||= Time.new
::Webdrone::MethodLogger.screenshot = nil
args_log = [args].compact.reject(&:empty?).map(&:to_s).join(' ')
begin
result = original_method.bind(self).call(*args, &block)
fin = ::Webdrone::MethodLogger.last_time = Time.new
@a0.logs.trace(ini, fin, cl_path, cl_line, base, method_name, args_log, result, nil, ::Webdrone::MethodLogger.screenshot)
result
rescue StandardError => exception
fin = ::Webdrone::MethodLogger.last_time = Time.new
@a0.logs.trace(ini, fin, cl_path, cl_line, base, method_name, args_log, nil, exception, ::Webdrone::MethodLogger.screenshot)
raise exception
end
else
original_method.bind(self).call(*args, &block)
end
end
end
end
end
else
def included(base)
@methods ||= base.instance_methods(false)
method_list = @methods
base.class_eval do
method_list.each do |method_name|
original_method = instance_method(method_name)
define_method method_name do |*args, **kwargs, &block|
caller_location = Kernel.caller_locations[0]
cl_path = caller_location.path
cl_line = caller_location.lineno
if @a0.conf.logger && Gem.path.none? { |path| cl_path.include? path }
ini = ::Webdrone::MethodLogger.last_time ||= Time.new
::Webdrone::MethodLogger.screenshot = nil
args_log = [args, kwargs].compact.reject(&:empty?).map(&:to_s).join(' ')
begin
result = original_method.bind(self).call(*args, **kwargs, &block)
fin = ::Webdrone::MethodLogger.last_time = Time.new
@a0.logs.trace(ini, fin, cl_path, cl_line, base, method_name, args_log, result, nil, ::Webdrone::MethodLogger.screenshot)
result
rescue StandardError => exception
fin = ::Webdrone::MethodLogger.last_time = Time.new
@a0.logs.trace(ini, fin, cl_path, cl_line, base, method_name, args_log, nil, exception, ::Webdrone::MethodLogger.screenshot)
raise exception
end
else
original_method.bind(self).call(*args, **kwargs, &block)
end
end
end
end
end
end
end
class Browser
  # Lazily build and memoize the trace logger for this browser.
  def logs
    return @logs if @logs
    @logs = Logs.new(self)
  end
end
class Logs
attr_reader :a0
# @param a0 [Webdrone::Browser] the owning browser facade; its conf/driver
#   are consulted when formatting and writing trace output.
def initialize(a0)
@a0 = a0
@group_trace_count = [] # stack of per-with_group counters of traced calls
setup_format
setup_trace
end
# Record one traced call: prints a formatted console line (suppressed when
# the logger is configured as 'quiet') and appends a row to the CSV trace
# file. Every counter on the open with_group stack is incremented.
def trace(ini, fin, from, lineno, base, method_name, args, result, exception, screenshot)
# Collapse the exception into "Class: message" for the log row.
exception = "#{exception.class}: #{exception}" if exception
printf @format, (fin - ini), base, method_name, args, (result || exception) unless a0.conf.logger.to_s == 'quiet'
CSV.open(@path, "a+") do |csv|
csv << [ini.strftime('%Y-%m-%d %H:%M:%S.%L %z'), (fin - ini), from, lineno, base, method_name, args, result, exception, screenshot]
end
@group_trace_count = @group_trace_count.map { |x| x + 1 }
end
# Run the given block as a named trace group. Exceptions raised by the
# block are captured (not re-raised); the first non-gem frame of the
# failure is recorded in the trace row, and the process exits when
# abort_error is true and an exception occurred.
def with_group(name, abort_error: false)
ini = Time.new
caller_location = Kernel.caller_locations[0]
cl_path = caller_location.path
cl_line = caller_location.lineno
result = {}
@group_trace_count << 0
exception = nil
begin
yield
rescue StandardError => e
exception = e
# Walk the caller bindings to find the first frame outside any gem —
# that is the user-code line we report for the failure.
bindings = Kernel.binding.callers
bindings[0..].each do |binding|
location = { path: binding.source_location[0], lineno: binding.source_location[1] }
next unless Gem.path.none? { |path| location[:path].include? path }
result[:exception] = {}
result[:exception][:line] = location[:lineno]
result[:exception][:path] = location[:path]
break
end
end
result[:trace_count] = @group_trace_count.pop
fin = Time.new
trace(ini, fin, cl_path, cl_line, Logs, :with_group, [name, { abort_error: abort_error }], result, exception, nil)
# NOTE(review): looks like leftover debug output — consider removing or
# routing through the logger instead of stdout.
puts "abort_error: #{abort_error} exception: #{exception}"
exit if abort_error == true && exception
end
# Build the printf template used by #trace, sized to the current terminal
# width (falls back to 120 columns when the size cannot be determined,
# e.g. when HighLine is unavailable or not attached to a TTY).
def setup_format
  begin
    cols, _line = HighLine.default_instance.terminal.terminal_size
  rescue StandardError => error
    puts "ignoring error: #{error}"
  end
  cols ||= 120
  # Fixed-width portions of the template: duration, module, method, glue.
  fixed = 6 + 15 + 11 + 5
  left = (cols - fixed) / 2
  right = (cols - fixed) - left
  left = 20 if left < 20
  right = 20 if right < 20
  @format = "%5.3f %14.14s %10s %#{left}.#{left}s => %#{right}.#{right}s\n"
end
# Open (or create) the CSV trace file in the configured output directory
# and append two header rows: one describing the environment (OS, browser,
# webdrone versions) and one naming the per-call trace columns.
def setup_trace
@path = File.join(a0.conf.outdir, 'a0_webdrone_trace.csv')
CSV.open(@path, "a+") do |csv|
os = "Windows" if OS.windows?
os = "Linux" if OS.linux?
os = "OS X" if OS.osx?
bits = OS.bits
hostname = Socket.gethostname
browser_name = a0.driver.capabilities[:browser_name]
browser_version = a0.driver.capabilities[:version]
browser_platform = a0.driver.capabilities[:platform]
webdrone_version = Webdrone::VERSION
webdrone_platform = "#{RUBY_ENGINE}-#{RUBY_VERSION} #{RUBY_PLATFORM}"
# %w.…. uses '.' as the literal delimiter; embedded spaces are escaped.
csv << %w.OS ARCH HOSTNAME BROWSER\ NAME BROWSER\ VERSION BROWSER\ PLATFORM WEBDRONE\ VERSION WEBDRONE\ PLATFORM.
csv << [os, bits, hostname, browser_name, browser_version, browser_platform, webdrone_version, webdrone_platform]
end
CSV.open(@path, "a+") do |csv|
csv << %w.DATE DUR FROM LINENO MODULE CALL PARAMS RESULT EXCEPTION SCREENSHOT.
end
end
end
# Instrument the click API: each listed method is wrapped by MethodLogger
# so calls are traced when logging is enabled.
class Clic
include MethodLogger.new %i[id css link button on option xpath]
end
class Conf
include MethodLogger.new %i[timeout= outdir= error= developer= logger=]
end
class Ctxt
include MethodLogger.new %i[create_tab close_tab with_frame reset with_alert ignore_alert with_conf]
end
class Find
include MethodLogger.new %i[id css link button on option xpath]
end
class Form
include MethodLogger.new %i[with_xpath save set get clic mark submit xlsx]
end
class Html
include MethodLogger.new %i[id css link button on option xpath]
end
class Mark
include MethodLogger.new %i[id css link button on option xpath]
end
class Open
include MethodLogger.new %i[url reload]
end
class Shot
include MethodLogger.new %i[screen]
end
class Text
include MethodLogger.new %i[id css link button on option xpath]
end
class Vrfy
include MethodLogger.new %i[id css link button on option xpath]
end
class Wait
include MethodLogger.new %i[for time]
end
class Xlsx
include MethodLogger.new %i[dict rows both save reset]
end
end
| 34.108696 | 142 | 0.604207 |
79aac960c0073de37d7bcd979560b4a43fbcdca7 | 1,368 | describe Pantograph do
describe Pantograph::PantFile do
describe "git_submodule_update" do
it "runs git submodule update without options by default" do
result = Pantograph::PantFile.new.parse("lane :test do
git_submodule_update
end").runner.execute(:test)
expect(result).to eq("git submodule update")
end
it "updates the submodules recursively if requested" do
result = Pantograph::PantFile.new.parse("lane :test do
git_submodule_update(
recursive: true
)
end").runner.execute(:test)
expect(result).to eq("git submodule update --recursive")
end
it "initialize the submodules if requested" do
result = Pantograph::PantFile.new.parse("lane :test do
git_submodule_update(
init: true
)
end").runner.execute(:test)
expect(result).to eq("git submodule update --init")
end
it "initialize the submodules and updates them recursively if requested" do
result = Pantograph::PantFile.new.parse("lane :test do
git_submodule_update(
recursive: true,
init: true
)
end").runner.execute(:test)
expect(result).to eq("git submodule update --init --recursive")
end
end
end
end
| 30.4 | 81 | 0.604532 |
6258a355bb980f49a3a253b83a3e20f8c7757a82 | 4,575 | class AddTriggerForEventToRolledupEventQueue < ActiveRecord::Migration
def up
execute <<-SQL
CREATE OR REPLACE FUNCTION flush_rolledup_event_queue()
RETURNS bool
LANGUAGE plpgsql
AS $body$
DECLARE
v_inserts int;
v_prunes int;
BEGIN
IF NOT pg_try_advisory_xact_lock('rolledup_event_queue'::regclass::oid::bigint) THEN
RAISE NOTICE 'skipping queue flush';
RETURN false;
END IF;
WITH
aggregated_queue AS (
SELECT created_at, dashboard_id, bot_user_id, bot_instance_id, (bot_instance_id::text || ':' || bot_user_id::text) AS bot_instance_id_bot_user_id, SUM(diff) AS value
FROM rolledup_event_queue
GROUP BY created_at, bot_instance_id, bot_user_id, dashboard_id
),
perform_inserts AS (
INSERT INTO rolledup_events(created_at, dashboard_id, bot_instance_id, bot_user_id, bot_instance_id_bot_user_id, count)
SELECT created_at, dashboard_id, bot_instance_id, bot_user_id, (bot_instance_id || ':' || coalesce(bot_user_id, 0)::text), value AS count
FROM aggregated_queue
ON CONFLICT (dashboard_id, bot_instance_id_bot_user_id, created_at) DO UPDATE SET
count = rolledup_events.count + EXCLUDED.count
RETURNING 1),
perform_prune AS (
DELETE FROM rolledup_event_queue
RETURNING 1
)
SELECT
(SELECT count(*) FROM perform_inserts) inserts,
(SELECT count(*) FROM perform_prune) prunes
INTO v_inserts, v_prunes;
RAISE NOTICE 'performed queue (hourly) flush: % prunes, % inserts', v_prunes, v_inserts;
RETURN true;
END;
$body$;
CREATE OR REPLACE FUNCTION custom_append_to_rolledup_events_queue()
RETURNS TRIGGER LANGUAGE plpgsql
AS $body$
DECLARE
__bot_instance_id int;
__bot_user_id int;
__created_at timestamp;
BEGIN
CASE TG_OP
WHEN 'INSERT' THEN
SELECT events.bot_instance_id, events.bot_user_id, events.created_at INTO __bot_instance_id, __bot_user_id, __created_at FROM events WHERE events.id = NEW.event_id LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
INSERT INTO rolledup_event_queue(bot_instance_id, bot_user_id, dashboard_id, diff, created_at)
VALUES (__bot_instance_id, __bot_user_id, NEW.dashboard_id, +1, date_trunc('hour', __created_at));
END CASE;
RETURN NULL;
END;
$body$;
CREATE OR REPLACE FUNCTION append_to_rolledup_events_queue()
RETURNS TRIGGER LANGUAGE plpgsql
AS $body$
DECLARE
__bot_id int;
__dashboard_id int;
BEGIN
CASE TG_OP
WHEN 'INSERT' THEN
SELECT bot_instances.bot_id INTO __bot_id FROM bot_instances WHERE bot_instances.id = NEW.bot_instance_id LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
IF NEW.event_type = 'message' AND NEW.is_from_bot = 't' THEN
SELECT dashboards.id FROM dashboards INTO __dashboard_id WHERE dashboards.dashboard_type = 'messages-from-bot' AND dashboards.bot_id = __bot_id LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
ELSIF NEW.event_type = 'message' AND NEW.is_for_bot = 't' THEN
SELECT dashboards.id FROM dashboards INTO __dashboard_id WHERE dashboards.dashboard_type = 'messages-to-bot' AND dashboards.bot_id = __bot_id LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
ELSE
SELECT dashboards.id FROM dashboards INTO __dashboard_id WHERE dashboards.event_type = NEW.event_type AND dashboards.bot_id = __bot_id LIMIT 1;
IF NOT FOUND THEN
RETURN NULL;
END IF;
END IF;
INSERT INTO rolledup_event_queue(bot_instance_id, bot_user_id, dashboard_id, diff, created_at)
VALUES (NEW.bot_instance_id, NEW.bot_user_id, __dashboard_id, +1, date_trunc('hour', NEW.created_at));
IF random() < 0.0001 THEN /* 1/10,000 probability */
PERFORM flush_rolledup_event_queue();
END IF;
END CASE;
RETURN NULL;
END;
$body$;
DROP TRIGGER IF EXISTS event_insert ON events;
CREATE TRIGGER event_insert after
INSERT
ON events FOR each row
EXECUTE PROCEDURE append_to_rolledup_events_queue();
DROP TRIGGER IF EXISTS custom_event_insert ON dashboard_events;
CREATE TRIGGER custom_event_insert after
INSERT
ON dashboard_events FOR each row
EXECUTE PROCEDURE custom_append_to_rolledup_events_queue();
SQL
end
# Roll back: drop both triggers and their backing functions. Note this
# does not restore any earlier definitions these functions replaced.
def down
execute <<-SQL
DROP TRIGGER IF EXISTS event_insert ON events;
DROP FUNCTION IF EXISTS append_to_rolledup_events_queue();
DROP TRIGGER IF EXISTS custom_event_insert ON dashboard_events;
DROP FUNCTION IF EXISTS custom_append_to_rolledup_events_queue();
SQL
end
end
| 33.639706 | 178 | 0.730929 |
ed4655a58493ac2eb57764f4a7fa3f3f81516371 | 67 | module Public
# Rails uses this prefix when deriving table names for models in this
# namespace, e.g. Public::User -> "public_users".
def self.table_name_prefix
'public_'
end
end
| 11.166667 | 28 | 0.731343 |
7a5fae1f78fa44ddae7aa314d15ca3adb8abb239 | 214 | require "spec_helper"
RSpec.describe PriceComposer do
it "has a version number" do
expect(PriceComposer::VERSION).not_to be nil
end
# NOTE(review): bundler-gem generator boilerplate that always fails;
# replace with a real expectation or delete it.
it "does something useful" do
expect(false).to eq(true)
end
end
| 17.833333 | 48 | 0.724299 |
e83772c63cadf907b1093b2cd2fd952e7982cd2d | 586 | module Mugen
# Account endpoints of the Mugen API. Both endpoints share identical
# request/unwrap logic, so it lives in one private helper.
class Account < Client
  class << self
    #
    # /account/stats (GET)
    # Retrieves account stats, such as orders made.
    #
    def stats(options = {})
      api_get "/account/stats", options
    end

    #
    # /account/balance (GET)
    # Retrieves account balance in credits
    #
    def balance(options = {})
      api_get "/account/balance", options
    end

    private

    # Issue a GET with the given query options, raise on API errors and
    # return the unwrapped 'response' payload.
    def api_get(path, options)
      res = self.get path, :query => options
      check_for_errors(res)
      res['response']
    end
  end
end
end | 21.703704 | 58 | 0.539249 |
f7dc6d4311d618e5980ac74b29462f946bc1df08 | 416 | require 'spec_helper'
module SamlIdp
describe Fingerprint do
describe "certificate_digest" do
let(:cert) { sp_x509_cert }
let(:fingerprint) { "a2:cb:f6:6b:bc:2a:33:b9:4f:f3:c3:7e:26:a4:21:cd:41:83:ef:26:88:fa:ba:71:37:40:07:3e:d5:76:04:b7" }
it "returns the fingerprint string" do
expect(Fingerprint.certificate_digest(cert, :sha256)).to eq(fingerprint)
end
end
end
end
| 27.733333 | 125 | 0.673077 |
bbebb7e3d1d23711088978e902b9522949226f13 | 457 | module Isomorfeus
module Preact
# Component cache backed by Redis; entries are serialized with Oj.
class RedisComponentCache
  # Forward the constructor arguments to the Redis client.
  #
  # Bug fix: previously passed the never-assigned instance variable @args
  # (always nil) to Redis.new instead of the splatted method arguments.
  def initialize(*args)
    @redis_client = Redis.new(*args)
  end

  # Return the deserialized entry for +key+, or nil on a cache miss
  # (Redis#get returns nil for unknown keys; Oj.load would raise on nil).
  def fetch(key)
    json = @redis_client.get(key)
    return nil if json.nil?
    Oj.load(json, mode: :strict)
  end

  # Store the rendered tree, HTTP response status and styles under +key+.
  def store(key, rendered_tree, response_status, styles)
    json = Oj.dump([rendered_tree, response_status, styles], mode: :strict)
    @redis_client.set(key, json)
  end
end
end
end
| 22.85 | 79 | 0.623632 |
f70e2032f367a702fc3b91456875ba21fca64092 | 1,559 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'thread'
require 'msf/core'
require 'rex/proto/proxy/socks4a'
class Metasploit3 < Msf::Auxiliary
include Msf::Auxiliary::Report
# Register module metadata and the SRVHOST/SRVPORT options for the
# socks4a proxy. 'Proxy' is the only (passive) action.
def initialize
super(
'Name' => 'Socks4a Proxy Server',
'Description' => 'This module provides a socks4a proxy server that uses the builtin Metasploit routing to relay connections.',
'Author' => 'sf',
'License' => MSF_LICENSE,
'Actions' =>
[
[ 'Proxy' ]
],
'PassiveActions' =>
[
'Proxy'
],
'DefaultAction' => 'Proxy'
)
register_options(
[
OptString.new( 'SRVHOST', [ true, "The address to listen on", '0.0.0.0' ] ),
OptPort.new( 'SRVPORT', [ true, "The port to listen on.", 1080 ] )
], self.class )
end
# Initialize module state: a mutex guarding the proxy handle (so cleanup
# cannot race) and the not-yet-started server reference.
def setup
super
@mutex = ::Mutex.new
@socks4a = nil
end
# Stop the proxy if it is running. The mutex + nil-out ensure the stop
# sequence executes at most once even under concurrent cleanup.
def cleanup
@mutex.synchronize do
if( @socks4a )
print_status( "Stopping the socks4a proxy server" )
@socks4a.stop
@socks4a = nil
end
end
super
end
# Start the socks4a proxy on the configured host/port and block until
# the server thread exits.
def run
opts = {
'ServerHost' => datastore['SRVHOST'],
'ServerPort' => datastore['SRVPORT'],
'Context' => {'Msf' => framework, 'MsfExploit' => self}
}
@socks4a = Rex::Proto::Proxy::Socks4a.new( opts )
print_status( "Starting the socks4a proxy server" )
@socks4a.start
@socks4a.join
end
end
| 21.652778 | 132 | 0.574727 |
5d348902549485a633e7f94ccf5b19fee6188da0 | 1,028 | # frozen_string_literal: true
require 'spec_helper'
describe Ethon::Curl do
describe ".init" do
before { Ethon::Curl.send(:class_variable_set, :@@initialized, false) }
context "when global_init fails" do
it "raises global init error" do
expect(Ethon::Curl).to receive(:global_init).and_return(1)
expect { Ethon::Curl.init }.to raise_error(Ethon::Errors::GlobalInit)
end
end
context "when global_init works" do
before { expect(Ethon::Curl).to receive(:global_init).and_return(0) }
it "doesn't raises global init error" do
expect { Ethon::Curl.init }.to_not raise_error
end
it "logs" do
expect(Ethon.logger).to receive(:debug)
Ethon::Curl.init
end
end
context "when global_cleanup is called" do
before { expect(Ethon::Curl).to receive(:global_cleanup) }
it "logs" do
expect(Ethon.logger).to receive(:debug).twice
Ethon::Curl.init
Ethon::Curl.cleanup
end
end
end
end
| 25.7 | 77 | 0.642996 |
e864b632ac889c17f9cf43e3187da4a1d90f6ae6 | 6,490 | require 'timecop'
describe Kafka::OffsetManager do
let(:cluster) { double(:cluster) }
let(:group) { double(:group) }
let(:logger) { LOGGER }
let(:offset_manager) {
Kafka::OffsetManager.new(
cluster: cluster,
group: group,
logger: logger,
commit_interval: commit_interval,
commit_threshold: 0,
offset_retention_time: offset_retention_time
)
}
let(:offset_retention_time) { nil }
let(:commit_interval) { 0 }
before do
allow(group).to receive(:commit_offsets)
end
describe "#commit_offsets" do
it "commits the processed offsets to the group" do
offset_manager.mark_as_processed("greetings", 0, 42)
offset_manager.mark_as_processed("greetings", 1, 13)
offset_manager.commit_offsets
expected_offsets = {
"greetings" => {
0 => 43,
1 => 14,
}
}
expect(group).to have_received(:commit_offsets).with(expected_offsets)
end
end
describe "#commit_offsets_if_necessary" do
let(:fetched_offsets_response) do
Kafka::Protocol::OffsetFetchResponse.new(topics: {
"greetings" => {
0 => partition_offset_info(-1),
1 => partition_offset_info(24),
2 => partition_offset_info(4)
}
})
end
before do
allow(group).to receive(:fetch_offsets).and_return(fetched_offsets_response)
end
context "at the first commit" do
it "re-commits previously committed offsets" do
fetch_committed_offsets
offset_manager.mark_as_processed("greetings", 1, 25)
offset_manager.commit_offsets_if_necessary
expected_offsets = {
"greetings" => {
1 => 26,
2 => 4
}
}
expect(group).to have_received(:commit_offsets).with(expected_offsets)
end
end
context "commit intervals" do
let(:commit_interval) { 10 }
let(:offset_retention_time) { 300 }
let(:commits) { [] }
before do
allow(group).to receive(:commit_offsets) do |offsets|
commits << offsets
end
Timecop.freeze(Time.now)
# initial commit
fetch_committed_offsets
offset_manager.mark_as_processed("greetings", 0, 0)
offset_manager.commit_offsets_if_necessary
expect(commits.size).to eq(1)
end
after do
Timecop.return
end
context "before the commit timeout" do
before do
Timecop.travel(commit_interval - 1)
end
it "does not commit processed offsets to the group" do
expect do
offset_manager.mark_as_processed("greetings", 0, 1)
offset_manager.commit_offsets_if_necessary
end.not_to change(commits, :size)
end
end
context "after the commit timeout" do
before do
Timecop.travel(commit_interval + 1)
end
it "commits processed offsets without recommitting previously committed offsets" do
expect do
offset_manager.mark_as_processed("greetings", 0, 1)
offset_manager.commit_offsets_if_necessary
end.to change(commits, :size).by(1)
expected_offsets = {
"greetings" => { 0 => 2 }
}
expect(commits.last).to eq(expected_offsets)
end
end
context "after the recommit timeout" do
before do
Timecop.travel(offset_retention_time / 2 + 1)
end
it "commits processed offsets and recommits previously committed offsets" do
expect do
offset_manager.mark_as_processed("greetings", 0, 1)
offset_manager.commit_offsets_if_necessary
end.to change(commits, :size).by(1)
expected_offsets = {
"greetings" => {
0 => 2,
1 => 24,
2 => 4
}
}
expect(commits.last).to eq(expected_offsets)
end
end
end
def fetch_committed_offsets
offset_manager.next_offset_for("greetings", 1)
end
def partition_offset_info(offset)
Kafka::Protocol::OffsetFetchResponse::PartitionOffsetInfo.new(offset: offset, metadata: nil, error_code: 0)
end
end
describe "#next_offset_for" do
let(:fetched_offsets) { double(:fetched_offsets) }
before do
allow(group).to receive(:fetch_offsets).and_return(fetched_offsets)
end
it "returns the last committed offset" do
allow(fetched_offsets).to receive(:offset_for).with("greetings", 0) { 41 }
offset = offset_manager.next_offset_for("greetings", 0)
expect(offset).to eq 41
end
it "returns the default offset if none have been committed" do
allow(group).to receive(:assigned_partitions) { { "greetings" => [0] } }
allow(fetched_offsets).to receive(:offset_for).with("greetings", 0) { -1 }
allow(cluster).to receive(:resolve_offsets).with("greetings", [0], :latest) { { 0 => 42 } }
offset_manager.set_default_offset("greetings", :latest)
offset = offset_manager.next_offset_for("greetings", 0)
expect(offset).to eq 42
end
end
describe "#clear_offsets_excluding" do
it "clears offsets except for the partitions in the exclusion list" do
offset_manager.mark_as_processed("x", 0, 42)
offset_manager.mark_as_processed("x", 1, 13)
offset_manager.clear_offsets_excluding("x" => [0])
offset_manager.commit_offsets
expected_offsets = {
"x" => {
0 => 43,
}
}
expect(group).to have_received(:commit_offsets).with(expected_offsets)
end
end
describe "#seek_to" do
it "seeks to given topic-partition/offset" do
topic = "greetings"
partition = 0
seek_offset = 14
offset_manager.seek_to(topic, partition, seek_offset)
offset = offset_manager.next_offset_for(topic, partition)
expect(offset).to eq seek_offset
end
end
describe "#seek_to_default" do
it "seeks default offset" do
topic = "greetings"
partition = 0
offset_manager.set_default_offset(topic, :latest)
allow(group).to receive(:assigned_partitions) { { topic => [0] } }
allow(cluster).to receive(:resolve_offsets).with(topic, [0], :latest) { { 0 => 42 } }
offset_manager.seek_to_default(topic, partition)
offset = offset_manager.next_offset_for(topic, partition)
expect(offset).to eq 42
end
end
end
| 27.383966 | 113 | 0.626502 |
87c0f0d1c4cea8aa07bf703ab1b64f144f2e04ff | 10,819 | #-- copyright
# OpenProject Backlogs Plugin
#
# Copyright (C)2013-2014 the OpenProject Foundation (OPF)
# Copyright (C)2011 Stephan Eckardt, Tim Felgentreff, Marnen Laibow-Koser, Sandro Munda
# Copyright (C)2010-2011 friflaj
# Copyright (C)2010 Maxime Guilbot, Andrew Vit, Joakim Kolsjö, ibussieres, Daniel Passos, Jason Vasquez, jpic, Emiliano Heyns
# Copyright (C)2009-2010 Mark Maglana
# Copyright (C)2009 Joe Heck, Nate Lowrie
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3.
#
# OpenProject Backlogs is a derivative work based on ChiliProject Backlogs.
# The copyright follows:
# Copyright (C) 2010-2011 - Emiliano Heyns, Mark Maglana, friflaj
# Copyright (C) 2011 - Jens Ulferts, Gregor Schmidt - Finn GmbH - Berlin, Germany
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
Then /^(.+) should be in the (\d+)(?:st|nd|rd|th) position of the sprint named (.+)$/ do |story_subject, position, sprint_name|
position = position.to_i
story = Story.where(subject: story_subject, versions: { name: sprint_name }).joins(:fixed_version).first
story_position(story).should == position.to_i
end
Then /^I should see (\d+) (?:product )?owner backlogs$/ do |count|
sprint_backlogs = page.all(:css, '#owner_backlogs_container .sprint')
sprint_backlogs.length.should == count.to_i
end
Then /^I should see (\d+) sprint backlogs$/ do |count|
sprint_backlogs = page.all(:css, '#sprint_backlogs_container .sprint')
sprint_backlogs.length.should == count.to_i
end
Then /^I should see the burndown chart for sprint "(.+?)"$/ do |sprint|
sprint = Sprint.find_by(name: sprint)
page.should have_css("#burndown_#{sprint.id}")
end
Then /^I should see the WorkPackages page$/ do
page.should have_css('.workpackages-table')
end
Then /^I should see the taskboard$/ do
page.should have_css('#taskboard')
end
Then /^I should see the product backlog$/ do
page.should have_css('#owner_backlogs_container')
end
Then /^I should see (\d+) stories in (?:the )?"(.+?)"$/ do |count, backlog_name|
sprint = Sprint.find_by(name: backlog_name)
page.all(:css, "#backlog_#{sprint.id} .story").size.should == count.to_i
end
Then /^the velocity of "(.+?)" should be "(.+?)"$/ do |backlog_name, velocity|
sprint = Sprint.find_by(name: backlog_name)
page.find(:css, "#backlog_#{sprint.id} .velocity").text.should == velocity
end
Then /^show me the list of sprints$/ do
sprints = Sprint.where(project_id: @project.id)
puts "\n"
puts "\t| #{'id'.ljust(3)} | #{'name'.ljust(18)} | #{'start_date'.ljust(18)} | #{'effective_date'.ljust(18)} | #{'updated_on'.ljust(20)}"
sprints.each do |sprint|
puts "\t| #{sprint.id.to_s.ljust(3)} | #{sprint.name.to_s.ljust(18)} | #{sprint.start_date.to_s.ljust(18)} | #{sprint.effective_date.to_s.ljust(18)} | #{sprint.updated_on.to_s.ljust(20)} |"
end
puts "\n\n"
end
Then /^show me the list of stories$/ do
stories = Story.where(project_id: @project.id).order(Arel.sql('position ASC'))
subject_max = (stories.map(&:subject) << 'subject').sort { |a, b| a.length <=> b.length }.last.length
sprints = @project.versions
sprint_max = (sprints.map(&:name) << 'sprint').sort { |a, b| a.length <=> b.length }.last.length
puts "\n"
puts "\t| #{'id'.ljust(5)} | #{'position'.ljust(8)} | #{'status'.ljust(12)} | #{'rank'.ljust(4)} | #{'subject'.ljust(subject_max)} | #{'sprint'.ljust(sprint_max)} |"
stories.each do |story|
puts "\t| #{story.id.to_s.ljust(5)} | #{story.position.to_s.ljust(8)} | #{story.status.name[0, 12].ljust(12)} | #{story.rank.to_s.ljust(4)} | #{story.subject.ljust(subject_max)} | #{(story.fixed_version_id.nil? ? Sprint.new : Sprint.find(story.fixed_version_id)).name.ljust(sprint_max)} |"
end
puts "\n\n"
end
Then /^(.+) should be the higher (story|item|task) of (.+)$/ do |higher_subject, type, lower_subject|
work_package_class = (type == 'task') ? Task : Story
higher = work_package_class.where(subject: higher_subject)
higher.length.should == 1
lower = work_package_class.where(subject: lower_subject)
lower.length.should == 1
lower.first.higher_item.id.should == higher.first.id
end
Then /^the request should complete successfully$/ do
page.driver.response.status.should == 200
end
Then /^the request should fail$/ do
page.driver.response.status.should == 401
end
Then /^the (\d+)(?:st|nd|rd|th) story in (?:the )?"(.+?)" should be "(.+)"$/ do |position, version_name, subject|
version = Version.find_by(name: version_name)
story = Story.at_rank(@project.id, version.id, position.to_i)
story.should_not be_nil
story.subject.should == subject
end
Then /^the (\d+)(?:st|nd|rd|th) story in (?:the )?"(.+?)" should be in the "(.+?)" type$/ do |position, version_name, type_name|
version = Version.find_by(name: version_name)
type = Type.find_by(name: type_name)
story = Story.at_rank(@project.id, version.id, position.to_i)
story.should_not be_nil
story.type.should == type
end
Then /^the (\d+)(?:st|nd|rd|th) story in (?:the )?"(.+?)" should have the ID of "(.+?)"$/ do |position, version_name, subject|
version = Version.find_by(name: version_name)
actual_story = WorkPackage.find_by(subject: subject, fixed_version_id: version.id)
step %%I should see "#{actual_story.id}" within "#backlog_#{version.id} .story:nth-child(#{position}) .id div.t"%
end
Then /^all positions should be unique for each version$/ do
Story.find_by_sql("select project_id, fixed_version_id, position, count(*) as dups from #{WorkPackage.table_name} where not position is NULL group by project_id, fixed_version_id, position having count(*) > 1").length.should == 0
end
Then /^the (\d+)(?:st|nd|rd|th) task for (.+) should be (.+)$/ do |position, story_subject, task_subject|
story = Story.find_by(subject: story_subject)
expect(story.children.order(position: :asc)[position.to_i - 1].subject)
.to eql(task_subject)
end
Then /^the server should return an update error$/ do
page.driver.response.status.should == 400
end
Then /^the server should return (\d+) updated (.+)$/ do |count, object_type|
page.all("##{object_type.pluralize} .#{object_type.singularize}").length.should == count.to_i
end
Then /^the sprint named (.+) should have (\d+) impediments? named (.+)$/ do |sprint_name, count, impediment_subject|
sprints = Sprint.where(name: sprint_name)
sprints.length.should == 1
sprints.first.impediments.map { |i| i.subject == impediment_subject }.length.should == count.to_i
end
# Checks the save indicator on an impediment row. The capture group matches
# " " (successful) or " un" (unsuccessful): a non-blank capture means the
# error selector must be present, a blank one means it must be absent.
Then /^the impediment "(.+)" should signal( | un)successful saving$/ do |impediment_subject, negative|
  pos_or_neg_should = !negative.blank? ? :should : :should_not
  page.send(pos_or_neg_should, have_selector('div.impediment.error', text: impediment_subject))
end

# Compares every persisted sprint attribute against the params submitted in
# a previous step. Timestamp columns are skipped, and *_date columns are
# normalized to YYYY-MM-DD before comparing.
Then /^the sprint should be updated accordingly$/ do
  sprint = Sprint.find(@sprint_params['id'])
  sprint.attributes.each_key do |key|
    unless ['updated_on', 'created_on'].include?(key)
      (key.include?('_date') ? sprint[key].strftime('%Y-%m-%d') : sprint[key]).should == @sprint_params[key]
    end
  end
end

# Reloads the story created earlier and checks its status name; the expected
# value from the feature file is compared against the lowercased name.
Then /^the status of the story should be set as (.+)$/ do |status|
  @story.reload
  @story.status.name.downcase.should == status
end

# Asserts that exactly one story with the subject exists and that it has
# exactly one child task with the given subject.
Then /^the story named (.+) should have 1 task named (.+)$/ do |story_subject, task_subject|
  stories = Story.where(subject: story_subject)
  stories.length.should == 1
  tasks = Task
          .children_of(stories.first)
          .where(subject: task_subject)
  tasks.length.should == 1
end

# Verifies the backlog position of @story: 1 when at the top, or the total
# number of stories (@story_ids.length) when at the bottom.
Then /^the story should be at the (top|bottom)$/ do |position|
  if position == 'top'
    story_position(@story).should == 1
  else
    story_position(@story).should == @story_ids.length
  end
end

# Verifies the story sits at an explicit 1-based backlog position.
Then /^the story should be at position (.+)$/ do |position|
  story_position(@story).should == position.to_i
end
# Checks a single attribute on the reloaded story. "type" is translated to
# the type_id foreign key, with the expected value looked up by type name.
Then /^the story should have a (.+) of (.+)$/ do |attribute, value|
  @story.reload
  if attribute == 'type'
    attribute = 'type_id'
    value = Type.find_by(name: value).id
  end
  @story[attribute].should == value
end

# Asserts a wiki page exists and that its text matches +content+.
# NOTE(review): +content+ is interpolated into a regexp unescaped, so feature
# files can pass regex fragments — confirm whether that is intentional.
Then /^the wiki page (.+) should contain (.+)$/ do |title, content|
  page = @project.wiki.find_page(title)
  page.should_not be_nil
  raise "\"#{content}\" not found on page \"#{title}\"" unless page.content.text.match(/#{content}/)
end

# Compares a work package attribute; the expected value is always coerced
# to an integer, so this step only works for numeric columns.
Then /^(work_package|task|story) (.+) should have (.+) set to (.+)$/ do |_type, subject, attribute, value|
  work_package = WorkPackage.find_by(subject: subject)
  work_package[attribute].should == value.to_i
end

# Expects the given message inside the shared error message box.
Then /^the error alert should show "(.+?)"$/ do |msg|
  step %{I should see "#{msg}" within "#msgBox"}
end

# Checks the rendered start date of a sprint in the backlogs view.
Then /^the start date of "(.+?)" should be "(.+?)"$/ do |sprint_name, date|
  version = Version.find_by(name: sprint_name)
  step %{I should see "#{date}" within "div#sprint_#{version.id} div.start_date"}
end

# Checks that a task row is rendered under its parent story's row.
Then /^I should see "(.+?)" as a task to story "(.+?)"$/ do |task_name, story_name|
  story = Story.find_by(subject: story_name)
  step %{I should see "#{task_name}" within "tr.story_#{story.id}"}
end

# Verifies the fixed (target) version association on a work package.
Then /^the (?:work_package|task|story) "(.+?)" should have "(.+?)" as its target version$/ do |task_name, version_name|
  work_package = WorkPackage.find_by(subject: task_name)
  version = Version.find_by(name: version_name)
  work_package.fixed_version.should eql version
end

# Finds the div rendered for the task and asserts it does not carry the
# "error" CSS class that marks a failed save.
Then /^there should not be a saving error on task "(.+?)"$/ do |task_name|
  elements = all(:xpath, "//*[contains(., \"#{task_name}\")]")
  task_div = elements.find { |e| e.tag_name == 'div' && e[:class].include?('task') }
  task_div[:class].should_not include('error')
end

# Expects the cross-project parent validation message for the child work
# package. NOTE(review): the parent_name capture is currently unused.
Then /^I should be notified that the work_package "(.+?)" is an invalid parent to the work_package "(.+?)" because of cross project limitations$/ do |parent_name, child_name|
  step %{I should see "Parent is invalid because the work package '#{child_name}' is a backlog task and therefore cannot have a parent outside of the current project." within "#errorExplanation"}
end

# Intentionally empty: a browser's native download dialog cannot be
# inspected from the test driver.
Then /^the PDF download dialog should be displayed$/ do
  # As far as I'm aware there's nothing that can be done here to check for this.
end
| 39.922509 | 293 | 0.698216 |
03aa2d8ba659aa8b8c935ceb1a5b259fadad387d | 708 | require_relative "boot"
require "rails/all"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RailsDocker
  # Application-wide (all environments) Rails configuration.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.1

    # Configuration for the application, engines, and railties goes here.
    #
    # These settings can be overridden in specific environments using the files
    # in config/environments, which are processed later.
    #
    # config.time_zone = "Central Time (US & Canada)"
    # config.eager_load_paths << Rails.root.join("extras")
  end
end
| 30.782609 | 79 | 0.738701 |
ff9d299dbb0285bbbb73174e231b80bc7b3e421a | 1,691 | require 'i18n'
module Enumerize
  # A single enumerated value. It inherits from String so it behaves like the
  # raw value in comparisons and serialization, while adding i18n text lookup,
  # lazily-defined query methods (e.g. +active?+) and access to extra
  # per-value data supplied by the owning attribute.
  class Value < String
    # +attr+ is the owning attribute object (provides +values+, +data+,
    # +name+ and +i18n_suffix+); +value+ is coerced to a String.
    def initialize(attr, value)
      @attr = attr
      super(value.to_s)
    end

    # Translated, human-readable label for this value; falls back through
    # the i18n key chain built in #i18n_keys.
    def text
      I18n.t(i18n_keys[0], :default => i18n_keys[1..-1])
    end

    # Handles two dynamic call families:
    # * "<value>?" predicates — defined lazily on first use, then re-sent;
    # * readers for keys stored in the attribute's per-value data hash.
    def method_missing(method, *args, &block)
      if method[-1] == '?' && @attr.values.include?(method[0..-2])
        define_query_methods
        send(method, *args, &block)
      elsif @attr.data && @attr.data[self] && @attr.data[self].is_a?(Hash) && @attr.data[self].has_key?(method.to_s)
        @attr.data[self].fetch(method.to_s)
      else
        super
      end
    end

    # Mirrors method_missing so respond_to? agrees with what can be called.
    # NOTE(review): overriding respond_to? directly predates the
    # respond_to_missing? convention — consider migrating.
    def respond_to?(method, include_private=false)
      if super
        true
      elsif @attr.data && @attr.data[self] && @attr.data[self].is_a?(Hash) && @attr.data[self].has_key?(method.to_s)
        true
      elsif method[-1] == '?' && @attr.values.include?(method[0..-2])
        define_query_methods
        super
      end
    end

    private

    # Defines one singleton "<value>?" predicate per enumerated value; each
    # returns a literal boolean baked in at definition time (true only for
    # the value equal to self).
    def define_query_methods
      @attr.values.each do |value|
        unless singleton_methods.include?(:"#{value}?")
          singleton_class.class_eval <<-RUBY, __FILE__, __LINE__ + 1
            def #{value}?
              #{value == self}
            end
          RUBY
        end
      end
    end

    # Ordered i18n lookup chain: plain scope, suffixed scope, humanized value.
    def i18n_keys
      @i18n_keys ||= begin
        i18n_keys = []
        i18n_keys << i18n_scope
        i18n_keys << i18n_scope(i18n_suffix)
        i18n_keys << self.humanize # humanize value if there are no translations
      end
    end

    # Builds the i18n key, optionally prefixed with the attribute's suffix.
    def i18n_scope(suffix = nil)
      :"enumerize.#{suffix}#{@attr.name}.#{self}"
    end

    # Dot-terminated suffix fragment, or nil when none is configured.
    def i18n_suffix
      "#{@attr.i18n_suffix}." if @attr.i18n_suffix
    end
  end
end
| 24.507246 | 116 | 0.573034 |
3341522456246735923897238ffcbf586ce601d6 | 244 | module Jekyll
class MarkdownConverter
alias :old_convert :convert
# This adds the gist #1 syntax.
def convert(content)
old_convert content.gsub(/gist #(\d+)/, '[gist #\1](https://gist.github.com/\1)')
end
end
end
| 22.181818 | 87 | 0.639344 |
e9ef2a8dc6c02e3bfdf970a43b546ee15c053bf9 | 4,573 | # Research Methods
# I spent [] hours on this challenge.
i_want_pets = ["I", "want", 3, "pets", "but", "only", "have", 2]
my_family_pets_ages = {"Evi" => 6, "Ditto" => 3, "Hoobie" => 3, "George" => 12, "Bogart" => 4, "Poly" => 4, "Annabelle" => 0}
# Person 1's solution
def my_array_finding_method(source, thing_to_find)
source # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
def my_hash_finding_method(source, thing_to_find)
source # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
# Identify and describe the Ruby method(s) you implemented.
#
#
#
# Person 2
def my_array_modification_method!(source, thing_to_modify)
source.dup # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
def my_hash_modification_method!(source, thing_to_modify)
source.dup # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
# Identify and describe the Ruby method(s) you implemented.
#
#
#
# Person 3
def my_array_sorting_method(source)
source # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
def my_hash_sorting_method(source)
source # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
# Identify and describe the Ruby method(s) you implemented.
#
#
#
# Person 4
def my_array_deletion_method!(source, thing_to_delete)
source.dup # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
def my_hash_deletion_method!(source, thing_to_delete)
source.dup # This line is here to make sure all tests initially fail. Delete it when you begin coding.
end
# Identify and describe the Ruby method(s) you implemented.
#
#
#
# .find_all is the same as .select
# Person 5
# Splits a mixed array into [integers, strings], preserving the original
# order within each group. Elements that are neither Integer nor String are
# dropped, and +source+ itself is never mutated.
def my_array_splitting_method(source)
  integers = source.select { |element| element.is_a?(Integer) }
  strings = source.select { |element| element.is_a?(String) }
  [integers, strings]
end
print my_array_splitting_method(i_want_pets)
# Splits a hash of {name => age} into two arrays of [name, age] pairs: the
# first with ages at or below +age+, the second with ages above it. Pair
# order follows the hash's insertion order; +source+ is left unmodified.
def my_hash_splitting_method(source, age)
  source.partition { |_name, pet_age| pet_age <= age }
end
print my_hash_splitting_method(my_family_pets_ages, 4)
# Identify and describe the Ruby method(s) you implemented.
# Quick summary: After entirely too much initial overthinking/sweating/screaming, I realized we needed something very simple: to iterate through the arrays here, saying "If something is an integer, shove it into the first little array. If it's a string, shove it into the second. Also make sure there's a giant array that you can shove both little guys into."
# The big 3 methods I used were 1) .dup, 2) .is_a? <Class>, and 3) .push
# 1) .dup method ~ creates a "shallow copy" of the object (the variable is copied, but not the object it points to). So, er... I'll try to regurgitate what the guide told me:
# => If we just made a new variable and pointed it at the object, the object is still getting "destroyed" in the process of the method. Since we can't be destructive with this split method, duplicating lets us hack away at a shallow copy -- "A shallow copy means that only the fundamental datatypes actually get copied into new objects, everything else gets copied as a reference to the original object!"
# 2) .is_a? <Class> ~ simpler one -- just checks if the object you call it on is that class type you listed! (eg is it a string, integer, float, etc)
# 3) .push ~ "Pushes the given object(s) on to the end of this array." Cool. I believe this is inherently destructive? Anyway, the return value is the array itself, so you can "chain together several appends."
# Release 1: Identify and describe the Ruby method you implemented. Teach your
# accountability group how to use the methods.
#
#
#
# Release 3: Reflect!
# What did you learn about researching and explaining your research to others?
# Its bark is bigger than its bite! As with everything we've been doing, you have to start small (pseudocode?): ask what the basic steps are that you need from the computer and work from there. Looking might take awhile but you're bound to find your method (or few methods!). | 38.108333 | 404 | 0.741745 |
f78112bcd166246bf50baae3a15d3c69bb31cc67 | 493 | # frozen_string_literal: true
module Vnstat
  class Result
    ##
    # Mixin for result types that are ordered by their particular time
    # information, falling back to transmitted bytes as a tie-breaker.
    module TimeComparable
      ##
      # Compares this result with another by [time, bytes_transmitted].
      #
      # @return [Integer, nil] -1, 0 or 1, or nil when +other+ does not
      #   expose both #time and #bytes_transmitted.
      def <=>(other)
        comparable = other.respond_to?(:bytes_transmitted) &&
                     other.respond_to?(:time)
        return nil unless comparable

        [time, bytes_transmitted] <=> [other.time, other.bytes_transmitted]
      end
    end
  end
end
| 24.65 | 75 | 0.659229 |
1c76e4cf2bcd27430fd26c5c4416a01b3a7af8f7 | 1,550 | class Docker < Formula
desc "Pack, ship and run any application as a lightweight container"
homepage "https://www.docker.com/"
url "https://github.com/docker/docker.git",
:tag => "v17.03.1-ce",
:revision => "c6d412e329c85f32a4b2269b49aaa0794affcf88"
head "https://github.com/docker/docker.git"
bottle do
cellar :any_skip_relocation
sha256 "1e8b89941439abf2c0d34e58bf64c8a581463e95499c6bf01c7634fbc65374a7" => :sierra
sha256 "3ff83f93694db2ab9ed58bf0a60da07cf3ade9a60e378b68c77d42de3de02348" => :el_capitan
sha256 "0cdac342b9bf4785ef1cc680c37434a555383a81be33cfd798656ddfbd0582e3" => :yosemite
end
option "with-experimental", "Enable experimental features"
option "without-completions", "Disable bash/zsh completions"
depends_on "go" => :build
if build.with? "experimental"
depends_on "libtool" => :run
depends_on "yubico-piv-tool" => :recommended
end
def install
ENV["AUTO_GOPATH"] = "1"
ENV["DOCKER_EXPERIMENTAL"] = "1" if build.with? "experimental"
system "hack/make.sh", "dynbinary-client"
build_version = build.head? ? File.read("VERSION").chomp : "#{version}-ce"
bin.install "bundles/#{build_version}/dynbinary-client/docker-#{build_version}" => "docker"
if build.with? "completions"
bash_completion.install "contrib/completion/bash/docker"
fish_completion.install "contrib/completion/fish/docker.fish"
zsh_completion.install "contrib/completion/zsh/_docker"
end
end
test do
system "#{bin}/docker", "--version"
end
end
| 32.978723 | 95 | 0.723226 |
e265a05a9570ee40bf481cb9f4fb9df073e87e61 | 121 | class RenameActionsTable < ActiveRecord::Migration[5.2]
def change
rename_table :action_tables, :actions
end
end
| 20.166667 | 55 | 0.768595 |
e2d5f43b443a4cdb45e7eefcfbe7c79a9e38cc76 | 320 | require_relative './config/env'
# Rack middleware stack. Rack::MethodOverride must come first so the _method
# form parameter can rewrite POSTs into PUT/PATCH/DELETE before any
# controller sees the request.
use Rack::MethodOverride
# Each controller is mounted as middleware; requests cascade through them
# until one handles the route (presumably Sinatra-style controllers — see
# the classes required via config/env).
use SessionsController
use UserController
use BookController
use PodcastController
use ArticleController
use RatingController
use CommentController
use FaveQuoteController
use ReactionController
use AuthorController
# ApplicationController is the terminal Rack application.
run ApplicationController
87a079597df0a4dbf582b517bc36898e346e06c6 | 516 | cask "mweb" do
version "3.4.4,592"
sha256 :no_check
# dl.devmate.com/com.coderforart.MWeb3/ was verified as official when first introduced to the cask
url "https://dl.devmate.com/com.coderforart.MWeb3/MWeb3-ProMarkdownwriting,notetakingandstaticbloggeneratorApp.dmg"
appcast "https://updates.devmate.com/com.coderforart.MWeb3.xml"
name "MWeb"
desc "Markdown writing, note taking, and static blog generator app"
homepage "https://www.mweb.im/"
depends_on macos: ">= :sierra"
app "MWeb.app"
end
| 32.25 | 117 | 0.748062 |
d59564f6707d08a8e74ab178fea1326e1e46f7f0 | 1,909 | require File.expand_path(File.join(File.dirname(__FILE__), 'spec_helper'))
describe 'Jpmobile::Mobile::valid_ip?' do
  # Addresses inside each carrier's IP range; valid_ip? must accept them.
  [
    ['210.153.84.1',    :Docomo],
    ['210.153.84.254',  :Docomo],
    ['210.230.128.225', :Au],
    ['210.230.128.238', :Au],
    ['123.108.237.1',   :Softbank],
    ['123.108.237.31',  :Softbank],
    ['123.108.237.1',   :Vodafone],
    ['123.108.237.31',  :Vodafone],
    ['61.198.142.1',    :Willcom],
    ['61.198.142.254',  :Willcom],
    ['61.198.142.1',    :Ddipocket],
    ['61.198.142.254',  :Ddipocket],
    ['117.55.1.224',    :Emobile],
    ['117.55.1.254',    :Emobile],
  ].each do |remote_ip, carrier|
    # Bug fix: the description previously interpolated the literal symbol
    # (`#{:carrier}`) instead of the loop variable, so every example was
    # titled "... in carrier address".
    it "should return true if #{remote_ip} is in #{carrier} address" do
      expect(Jpmobile::Mobile.const_get(carrier).valid_ip?(remote_ip)).to eq(true)
    end
  end

  # Addresses outside each carrier's range; valid_ip? must reject them.
  [
    ['127.0.0.1',      :Docomo],
    ['210.153.83.1',   :Docomo],
    ['210.153.83.254', :Docomo],
    ['127.0.0.1',      :Au],
    ['210.169.41.1',   :Au],
    ['210.169.41.254', :Au],
    ['127.0.0.1',      :Softbank],
    ['123.108.238.1',  :Softbank],
    ['123.108.238.254', :Softbank],
    ['127.0.0.1',      :Vodafone],
    ['123.108.238.1',  :Vodafone],
    ['123.108.238.254', :Vodafone],
    ['127.0.0.1',      :Willcom],
    ['61.198.144.1',   :Willcom],
    ['61.198.144.254', :Willcom],
    ['127.0.0.1',      :Ddipocket],
    ['61.198.144.1',   :Ddipocket],
    ['61.198.144.254', :Ddipocket],
    ['127.0.0.1',      :Emobile],
    ['117.55.1.223',   :Emobile],
  ].each do |remote_ip, carrier|
    # Bug fix: the description used single quotes, so nothing interpolated
    # (every example was literally titled '... #{:remote_ip} ...'), and the
    # two interpolations referenced the wrong names.
    it "should not return true if #{remote_ip} is in #{carrier} address" do
      expect(Jpmobile::Mobile.const_get(carrier).valid_ip?(remote_ip)).not_to eq(true)
    end
  end
end
| 36.711538 | 86 | 0.486642 |
7a92bb50aba2304708abeeed0191e5b4e83d3d38 | 866 | require 'date'
describe "Date#>>" do
  # Date#>> returns a new Date the given number of months later.
  it "adds the number of months to a Date" do
    d = Date.civil(2007,2,27) >> 10
    d.should == Date.civil(2007, 12, 27)
  end

  # When the target month is shorter, the day clamps to that month's last day.
  it "sets the day to the last day of a month if the day doesn't exist" do
    d = Date.civil(2008,3,31) >> 1
    d.should == Date.civil(2008, 4, 30)
  end

  # The right-hand operand must be an integer month count; anything else
  # raises TypeError.
  it "raise a TypeError when passed a Symbol" do
    lambda { Date.civil(2007,2,27) >> :hello }.should raise_error(TypeError)
  end

  it "raise a TypeError when passed a String" do
    lambda { Date.civil(2007,2,27) >> "hello" }.should raise_error(TypeError)
  end

  it "raise a TypeError when passed a Date" do
    lambda { Date.civil(2007,2,27) >> Date.new }.should raise_error(TypeError)
  end

  it "raise a TypeError when passed an Object" do
    lambda { Date.civil(2007,2,27) >> Object.new }.should raise_error(TypeError)
  end
end
e91f97bdbe1903645d550e315c6bb506a2548b2b | 89 | require File.expand_path('../application', __FILE__)
Copycopter::Application.initialize!
| 29.666667 | 52 | 0.808989 |
037bfc3a67b8662f99f96e24751ab008c26d6182 | 1,963 | Pod::Spec.new do |s|
s.name = "FontAwesomeIconFactory"
s.version = "1.0"
s.summary = "Create icons using the resolution-independent Font Awesome."
s.description = <<-DESC
A factory for turning Font Awesome pictograms into icon
images for user interface controls and buttons. Works for
iOS and OS X.
Font Awesome by Dave Gandy -
http://fortawesome.github.com/Font-Awesome
For iOS apps, edit your app's Info.plist to contain the key:
"Fonts provided by application" (UIAppFonts). Then add
"FontAwesome.otf" to the list under that key.
<key>UIAppFonts</key>
<array>
<string>FontAwesome.otf</string>
</array>
For OSX apps, edit your app's Info.plist to contain the key:
"Application fonts resource path" (ATSApplicationFontsPath).
Set the value to ".".
<key>ATSApplicationFontsPath</key>
<string>.</string>
DESC
s.homepage = "http://nschum.github.com/FontAwesomeIconFactory/"
s.license = 'OFL/MIT'
s.author = { "Nikolaj Schumacher" => "[email protected]" }
s.source = {
:git => 'https://github.com/nschum/FontAwesomeIconFactory.git',
:tag => '1.0',
:submodules => 'true'
}
s.ios.deployment_target = '5.0'
s.osx.deployment_target = '10.7'
s.source_files = '*.{h,m}'
s.ios.exclude_files = '**/*+OSX.*'
s.osx.exclude_files = '**/*+iOS.*'
s.public_header_files = ['NIKFontAwesomeIconFactory*.h', 'NIKFontAwesomeIcon.h']
s.resource = 'Font-Awesome/font/FontAwesome.otf'
s.ios.frameworks = 'CoreText', 'CoreGraphics'
s.osx.frameworks = 'CoreText', 'CoreGraphics', 'ApplicationServices'
s.requires_arc = true
end
| 35.690909 | 82 | 0.560876 |
5df58bfe772cc268e9fcb8b91787e7be614b2ca0 | 26,376 | # = net/pop.rb
#
# Copyright (c) 1999-2007 Yukihiro Matsumoto.
#
# Copyright (c) 1999-2007 Minero Aoki.
#
# Written & maintained by Minero Aoki <[email protected]>.
#
# Documented by William Webber and Minero Aoki.
#
# This program is free software. You can re-distribute and/or
# modify this program under the same terms as Ruby itself,
# Ruby Distribute License.
#
# NOTE: You can find Japanese version of this document at:
# http://www.ruby-lang.org/ja/man/html/net_pop.html
#
# $Id: pop.rb 22002 2009-02-03 05:35:56Z shyouhei $
#
# See Net::POP3 for documentation.
#
require 'net/protocol'
require 'digest/md5'
require 'timeout'
begin
require "openssl/ssl"
rescue LoadError
end
module Net
# Non-authentication POP3 protocol error
# (reply code "-ERR", except authentication).
class POPError < ProtocolError; end
# POP3 authentication error.
class POPAuthenticationError < ProtoAuthError; end
# Unexpected response from the server.
class POPBadResponse < POPError; end
#
# = Net::POP3
#
# == What is This Library?
#
# This library provides functionality for retrieving
# email via POP3, the Post Office Protocol version 3. For details
# of POP3, see [RFC1939] (http://www.ietf.org/rfc/rfc1939.txt).
#
# == Examples
#
# === Retrieving Messages
#
# This example retrieves messages from the server and deletes them
# on the server.
#
# Messages are written to files named 'inbox/1', 'inbox/2', ....
# Replace 'pop.example.com' with your POP3 server address, and
# 'YourAccount' and 'YourPassword' with the appropriate account
# details.
#
# require 'net/pop'
#
# pop = Net::POP3.new('pop.example.com')
# pop.start('YourAccount', 'YourPassword') # (1)
# if pop.mails.empty?
# puts 'No mail.'
# else
# i = 0
# pop.each_mail do |m| # or "pop.mails.each ..." # (2)
# File.open("inbox/#{i}", 'w') do |f|
# f.write m.pop
# end
# m.delete
# i += 1
# end
# puts "#{pop.mails.size} mails popped."
# end
# pop.finish # (3)
#
# 1. Call Net::POP3#start and start POP session.
# 2. Access messages by using POP3#each_mail and/or POP3#mails.
# 3. Close POP session by calling POP3#finish or use the block form of #start.
#
# === Shortened Code
#
# The example above is very verbose. You can shorten the code by using
# some utility methods. First, the block form of Net::POP3.start can
# be used instead of POP3.new, POP3#start and POP3#finish.
#
# require 'net/pop'
#
# Net::POP3.start('pop.example.com', 110,
# 'YourAccount', 'YourPassword') do |pop|
# if pop.mails.empty?
# puts 'No mail.'
# else
# i = 0
# pop.each_mail do |m| # or "pop.mails.each ..."
# File.open("inbox/#{i}", 'w') do |f|
# f.write m.pop
# end
# m.delete
# i += 1
# end
# puts "#{pop.mails.size} mails popped."
# end
# end
#
# POP3#delete_all is an alternative for #each_mail and #delete.
#
# require 'net/pop'
#
# Net::POP3.start('pop.example.com', 110,
# 'YourAccount', 'YourPassword') do |pop|
# if pop.mails.empty?
# puts 'No mail.'
# else
# i = 1
# pop.delete_all do |m|
# File.open("inbox/#{i}", 'w') do |f|
# f.write m.pop
# end
# i += 1
# end
# end
# end
#
# And here is an even shorter example.
#
# require 'net/pop'
#
# i = 0
# Net::POP3.delete_all('pop.example.com', 110,
# 'YourAccount', 'YourPassword') do |m|
# File.open("inbox/#{i}", 'w') do |f|
# f.write m.pop
# end
# i += 1
# end
#
# === Memory Space Issues
#
# All the examples above get each message as one big string.
# This example avoids this.
#
# require 'net/pop'
#
# i = 1
# Net::POP3.delete_all('pop.example.com', 110,
# 'YourAccount', 'YourPassword') do |m|
# File.open("inbox/#{i}", 'w') do |f|
# m.pop do |chunk| # get a message little by little.
# f.write chunk
# end
# i += 1
# end
# end
#
# === Using APOP
#
# The net/pop library supports APOP authentication.
# To use APOP, use the Net::APOP class instead of the Net::POP3 class.
# You can use the utility method, Net::POP3.APOP(). For example:
#
# require 'net/pop'
#
# # Use APOP authentication if $isapop == true
# pop = Net::POP3.APOP($is_apop).new('apop.example.com', 110)
# pop.start(YourAccount', 'YourPassword') do |pop|
# # Rest of the code is the same.
# end
#
# === Fetch Only Selected Mail Using 'UIDL' POP Command
#
# If your POP server provides UIDL functionality,
# you can grab only selected mails from the POP server.
# e.g.
#
# def need_pop?( id )
# # determine if we need pop this mail...
# end
#
# Net::POP3.start('pop.example.com', 110,
# 'Your account', 'Your password') do |pop|
# pop.mails.select { |m| need_pop?(m.unique_id) }.each do |m|
# do_something(m.pop)
# end
# end
#
# The POPMail#unique_id() method returns the unique-id of the message as a
# String. Normally the unique-id is a hash of the message.
#
class POP3 < Protocol
Revision = %q$Revision: 22002 $.split[1]
#
# Class Parameters
#
def POP3.default_port
default_pop3_port()
end
# The default port for POP3 connections, port 110
def POP3.default_pop3_port
110
end
# The default port for POP3S connections, port 995
def POP3.default_pop3s_port
995
end
def POP3.socket_type #:nodoc: obsolete
Net::InternetMessageIO
end
#
# Utilities
#
# Returns the APOP class if +isapop+ is true; otherwise, returns
# the POP class. For example:
#
# # Example 1
# pop = Net::POP3::APOP($is_apop).new(addr, port)
#
# # Example 2
# Net::POP3::APOP($is_apop).start(addr, port) do |pop|
# ....
# end
#
def POP3.APOP(isapop)
isapop ? APOP : POP3
end
# Starts a POP3 session and iterates over each POPMail object,
# yielding it to the +block+.
# This method is equivalent to:
#
# Net::POP3.start(address, port, account, password) do |pop|
# pop.each_mail do |m|
# yield m
# end
# end
#
# This method raises a POPAuthenticationError if authentication fails.
#
# === Example
#
# Net::POP3.foreach('pop.example.com', 110,
# 'YourAccount', 'YourPassword') do |m|
# file.write m.pop
# m.delete if $DELETE
# end
#
def POP3.foreach(address, port = nil,
account = nil, password = nil,
isapop = false, &block) # :yields: message
start(address, port, account, password, isapop) {|pop|
pop.each_mail(&block)
}
end
# Starts a POP3 session and deletes all messages on the server.
# If a block is given, each POPMail object is yielded to it before
# being deleted.
#
# This method raises a POPAuthenticationError if authentication fails.
#
# === Example
#
# Net::POP3.delete_all('pop.example.com', 110,
# 'YourAccount', 'YourPassword') do |m|
# file.write m.pop
# end
#
def POP3.delete_all(address, port = nil,
account = nil, password = nil,
isapop = false, &block)
start(address, port, account, password, isapop) {|pop|
pop.delete_all(&block)
}
end
# Opens a POP3 session, attempts authentication, and quits.
#
# This method raises POPAuthenticationError if authentication fails.
#
# === Example: normal POP3
#
# Net::POP3.auth_only('pop.example.com', 110,
# 'YourAccount', 'YourPassword')
#
# === Example: APOP
#
# Net::POP3.auth_only('pop.example.com', 110,
# 'YourAccount', 'YourPassword', true)
#
def POP3.auth_only(address, port = nil,
account = nil, password = nil,
isapop = false)
new(address, port, isapop).auth_only account, password
end
# Starts a pop3 session, attempts authentication, and quits.
# This method must not be called while POP3 session is opened.
# This method raises POPAuthenticationError if authentication fails.
def auth_only(account, password)
raise IOError, 'opening previously opened POP session' if started?
start(account, password) {
;
}
end
#
# SSL
#
@ssl_params = nil
# call-seq:
# Net::POP.enable_ssl(params = {})
#
# Enable SSL for all new instances.
# +params+ is passed to OpenSSL::SSLContext#set_params.
def POP3.enable_ssl(*args)
@ssl_params = create_ssl_params(*args)
end
def POP3.create_ssl_params(verify_or_params = {}, certs = nil)
begin
params = verify_or_params.to_hash
rescue NoMethodError
params = {}
params[:verify_mode] = verify_or_params
if certs
if File.file?(certs)
params[:ca_file] = certs
elsif File.directory?(certs)
params[:ca_path] = certs
end
end
end
return params
end
# Disable SSL for all new instances.
def POP3.disable_ssl
@ssl_params = nil
end
def POP3.ssl_params
return @ssl_params
end
def POP3.use_ssl?
return !@ssl_params.nil?
end
def POP3.verify
return @ssl_params[:verify_mode]
end
def POP3.certs
return @ssl_params[:ca_file] || @ssl_params[:ca_path]
end
#
# Session management
#
# Creates a new POP3 object and open the connection. Equivalent to
#
# Net::POP3.new(address, port, isapop).start(account, password)
#
# If +block+ is provided, yields the newly-opened POP3 object to it,
# and automatically closes it at the end of the session.
#
# === Example
#
# Net::POP3.start(addr, port, account, password) do |pop|
# pop.each_mail do |m|
# file.write m.pop
# m.delete
# end
# end
#
def POP3.start(address, port = nil,
account = nil, password = nil,
isapop = false, &block) # :yield: pop
new(address, port, isapop).start(account, password, &block)
end
# Creates a new POP3 object.
#
# +address+ is the hostname or ip address of your POP3 server.
#
# The optional +port+ is the port to connect to.
#
# The optional +isapop+ specifies whether this connection is going
# to use APOP authentication; it defaults to +false+.
#
# This method does *not* open the TCP connection.
def initialize(addr, port = nil, isapop = false)
@address = addr
@ssl_params = POP3.ssl_params
@port = port
@apop = isapop
@command = nil
@socket = nil
@started = false
@open_timeout = 30
@read_timeout = 60
@debug_output = nil
@mails = nil
@n_mails = nil
@n_bytes = nil
end
# Does this instance use APOP authentication?
def apop?
@apop
end
# does this instance use SSL?
def use_ssl?
return !@ssl_params.nil?
end
# call-seq:
# Net::POP#enable_ssl(params = {})
#
# Enables SSL for this instance. Must be called before the connection is
# established to have any effect.
# +params[:port]+ is port to establish the SSL connection on; Defaults to 995.
# +params+ (except :port) is passed to OpenSSL::SSLContext#set_params.
def enable_ssl(verify_or_params = {}, certs = nil, port = nil)
begin
@ssl_params = verify_or_params.to_hash.dup
@port = @ssl_params.delete(:port) || @port
rescue NoMethodError
@ssl_params = POP3.create_ssl_params(verify_or_params, certs)
@port = port || @port
end
end
def disable_ssl
@ssl_params = nil
end
# Provide human-readable stringification of class state.
def inspect
"#<#{self.class} #{@address}:#{@port} open=#{@started}>"
end
# *WARNING*: This method causes a serious security hole.
# Use this method only for debugging.
#
# Set an output stream for debugging.
#
# === Example
#
# pop = Net::POP.new(addr, port)
# pop.set_debug_output $stderr
# pop.start(account, passwd) do |pop|
# ....
# end
#
def set_debug_output(arg)
@debug_output = arg
end
# The address to connect to.
attr_reader :address
# The port number to connect to.
def port
return @port || (use_ssl? ? POP3.default_pop3s_port : POP3.default_pop3_port)
end
# Seconds to wait until a connection is opened.
# If the POP3 object cannot open a connection within this time,
# it raises a TimeoutError exception.
attr_accessor :open_timeout
# Seconds to wait until reading one block (by one read(1) call).
# If the POP3 object cannot complete a read() within this time,
# it raises a TimeoutError exception.
attr_reader :read_timeout
# Set the read timeout.
def read_timeout=(sec)
@command.socket.read_timeout = sec if @command
@read_timeout = sec
end
# +true+ if the POP3 session has started.
def started?
@started
end
alias active? started? #:nodoc: obsolete
# Starts a POP3 session.
#
# When called with block, gives a POP3 object to the block and
# closes the session after block call finishes.
#
# This method raises a POPAuthenticationError if authentication fails.
def start(account, password) # :yield: pop
raise IOError, 'POP session already started' if @started
if block_given?
begin
do_start account, password
return yield(self)
ensure
do_finish
end
else
do_start account, password
return self
end
end
def do_start(account, password)
s = timeout(@open_timeout) { TCPSocket.open(@address, port) }
if use_ssl?
raise 'openssl library not installed' unless defined?(OpenSSL)
context = OpenSSL::SSL::SSLContext.new
context.set_params(@ssl_params)
s = OpenSSL::SSL::SSLSocket.new(s, context)
s.sync_close = true
s.connect
if context.verify_mode != OpenSSL::SSL::VERIFY_NONE
s.post_connection_check(@address)
end
end
@socket = InternetMessageIO.new(s)
logging "POP session started: #{@address}:#{@port} (#{@apop ? 'APOP' : 'POP'})"
@socket.read_timeout = @read_timeout
@socket.debug_output = @debug_output
on_connect
@command = POP3Command.new(@socket)
if apop?
@command.apop account, password
else
@command.auth account, password
end
@started = true
ensure
# Authentication failed, clean up connection.
unless @started
s.close if s and not s.closed?
@socket = nil
@command = nil
end
end
private :do_start
def on_connect
end
private :on_connect
# Finishes a POP3 session and closes TCP connection.
def finish
raise IOError, 'POP session not yet started' unless started?
do_finish
end
def do_finish
@mails = nil
@n_mails = nil
@n_bytes = nil
@command.quit if @command
ensure
@started = false
@command = nil
@socket.close if @socket and not @socket.closed?
@socket = nil
end
private :do_finish
def command
raise IOError, 'POP session not opened yet' \
if not @socket or @socket.closed?
@command
end
private :command
#
# POP protocol wrapper
#
# Returns the number of messages on the POP server.
def n_mails
return @n_mails if @n_mails
@n_mails, @n_bytes = command().stat
@n_mails
end
# Returns the total size in bytes of all the messages on the POP server.
def n_bytes
return @n_bytes if @n_bytes
@n_mails, @n_bytes = command().stat
@n_bytes
end
# Returns an array of Net::POPMail objects, representing all the
# messages on the server. This array is renewed when the session
# restarts; otherwise, it is fetched from the server the first time
# this method is called (directly or indirectly) and cached.
#
# This method raises a POPError if an error occurs.
def mails
return @mails.dup if @mails
if n_mails() == 0
# some popd raises error for LIST on the empty mailbox.
@mails = []
return []
end
@mails = command().list.map {|num, size|
POPMail.new(num, size, self, command())
}
@mails.dup
end
# Yields each message to the passed-in block in turn.
# Equivalent to:
#
# pop3.mails.each do |popmail|
# ....
# end
#
# This method raises a POPError if an error occurs.
def each_mail(&block) # :yield: message
mails().each(&block)
end
alias each each_mail
# Deletes all messages on the server.
#
# If called with a block, yields each message in turn before deleting it.
#
# === Example
#
#     n = 1
#     pop.delete_all do |m|
#       File.open("inbox/#{n}") do |f|
#         f.write m.pop
#       end
#       n += 1
#     end
#
# This method raises a POPError if an error occurs.
#
def delete_all # :yield: message
  mails().each do |mail|
    yield mail if block_given?
    # Skip messages that are already marked for deletion.
    next if mail.deleted?
    mail.delete
  end
end
# Resets the session. This clears all "deleted" marks from messages.
#
# This method raises a POPError if an error occurs.
def reset
  command().rset
  mails().each do |m|
    # instance_eval pokes POPMail's @deleted flag directly; POPMail
    # exposes no public API for un-deleting a message.
    m.instance_eval {
      @deleted = false
    }
  end
end
# Fetches unique-ids for every message in one UIDL round-trip and writes
# each onto the corresponding POPMail object via #uid=.
#
# Fix: UIDL entries whose message number has no matching entry in @mails
# are now ignored instead of raising NoMethodError on nil.
def set_all_uids #:nodoc: internal use only (called from POPMail#uidl)
  command().uidl.each do |num, uid|
    mail = @mails.find {|m| m.number == num }
    mail.uid = uid if mail
  end
end
# Appends +msg+ (newline-terminated) to the debug output stream, if one
# has been configured.  No-op when debugging is disabled.
def logging(msg)
  return unless @debug_output
  @debug_output << "#{msg}\n"
end
end # class POP3
# class aliases
POP = POP3
POPSession = POP3
POP3Session = POP3
#
# This class is equivalent to POP3, except that it uses APOP authentication.
#
class APOP < POP3
  # Always returns true, marking this session as one that authenticates
  # with APOP instead of plain USER/PASS.
  def apop?
    true
  end
end
# class aliases
APOPSession = APOP
#
# This class represents a message which exists on the POP server.
# Instances of this class are created by the POP3 class; they should
# not be directly created by the user.
#
class POPMail
  # +num+:: the message's 1-based sequence number on the server.
  # +len+:: the message size in octets, as reported by the server.
  # +pop+:: the owning POP3 session (used to resolve unique-ids).
  # +cmd+:: the POP3Command protocol wrapper used to issue commands.
  def initialize(num, len, pop, cmd) #:nodoc:
    @number = num
    @length = len
    @pop = pop
    @command = cmd
    @deleted = false
    @uid = nil
  end

  # The sequence number of the message on the server.
  attr_reader :number

  # The length of the message in octets.
  attr_reader :length
  alias size length

  # Provide human-readable stringification of class state.
  def inspect
    "#<#{self.class} #{@number}#{@deleted ? ' deleted' : ''}>"
  end

  #
  # This method fetches the message.  If called with a block, the
  # message is yielded to the block one chunk at a time.  If called
  # without a block, the message is returned as a String.  The optional
  # +dest+ argument will be prepended to the returned String; this
  # argument is essentially obsolete.
  #
  # === Example without block
  #
  #     POP3.start('pop.example.com', 110,
  #                'YourAccount', 'YourPassword') do |pop|
  #       n = 1
  #       pop.mails.each do |popmail|
  #         File.open("inbox/#{n}", 'w') do |f|
  #           f.write popmail.pop
  #         end
  #         popmail.delete
  #         n += 1
  #       end
  #     end
  #
  # === Example with block
  #
  #     POP3.start('pop.example.com', 110,
  #                'YourAccount', 'YourPassword') do |pop|
  #       n = 1
  #       pop.mails.each do |popmail|
  #         File.open("inbox/#{n}", 'w') do |f|
  #           popmail.pop do |chunk|            ####
  #             f.write chunk
  #           end
  #         end
  #         n += 1
  #       end
  #     end
  #
  # This method raises a POPError if an error occurs.
  #
  def pop( dest = '', &block ) # :yield: message_chunk
    if block_given?
      # Streaming mode: each RETR chunk goes straight to the caller's block.
      @command.retr(@number, &block)
      nil
    else
      # Buffering mode: append every chunk onto +dest+ and return it.
      @command.retr(@number) do |chunk|
        dest << chunk
      end
      dest
    end
  end
  alias all pop #:nodoc: obsolete
  alias mail pop #:nodoc: obsolete

  # Fetches the message header and +lines+ lines of body.
  #
  # The optional +dest+ argument is obsolete.
  #
  # This method raises a POPError if an error occurs.
  def top(lines, dest = '')
    @command.top(@number, lines) do |chunk|
      dest << chunk
    end
    dest
  end

  # Fetches the message header.
  #
  # The optional +dest+ argument is obsolete.
  #
  # This method raises a POPError if an error occurs.
  def header(dest = '')
    # TOP with zero body lines returns just the header.
    top(0, dest)
  end

  # Marks a message for deletion on the server.  Deletion does not
  # actually occur until the end of the session; deletion may be
  # cancelled for _all_ marked messages by calling POP3#reset().
  #
  # This method raises a POPError if an error occurs.
  #
  # === Example
  #
  #     POP3.start('pop.example.com', 110,
  #                'YourAccount', 'YourPassword') do |pop|
  #       n = 1
  #       pop.mails.each do |popmail|
  #         File.open("inbox/#{n}", 'w') do |f|
  #           f.write popmail.pop
  #         end
  #         popmail.delete         ####
  #         n += 1
  #       end
  #     end
  #
  def delete
    @command.dele @number
    @deleted = true
  end
  alias delete! delete #:nodoc: obsolete

  # True if the mail has been deleted.
  def deleted?
    @deleted
  end

  # Returns the unique-id of the message.
  # Normally the unique-id is a hash string of the message.
  #
  # This method raises a POPError if an error occurs.
  def unique_id
    return @uid if @uid
    # One UIDL round-trip populates the uid of every message in the
    # session; the session writes them back through #uid=.
    @pop.set_all_uids
    @uid
  end
  alias uidl unique_id

  def uid=(uid) #:nodoc: internal use only (written by POP3#set_all_uids)
    @uid = uid
  end
end # class POPMail
class POP3Command #:nodoc: internal use only
  # Low-level POP3 protocol wrapper.  +sock+ must be a socket wrapper
  # providing writeline/readline and the each_list_item /
  # each_message_chunk helpers used below.  The server greeting is read
  # immediately; an APOP timestamp (<...@...>) in the greeting, if any,
  # is kept for use by #apop.
  def initialize(sock)
    @socket = sock
    @error_occured = false   # sic: historical spelling kept
    res = check_response(critical { recv_response() })
    @apop_stamp = res.slice(/<[!-~]+@[!-~]+>/)
  end

  # Provide human-readable stringification of class state.
  def inspect
    "#<#{self.class} socket=#{@socket}>"
  end

  # Plain USER/PASS authentication; raises POPAuthenticationError if
  # either command is rejected.
  def auth(account, password)
    check_response_auth(critical {
      check_response_auth(get_response('USER %s', account))
      get_response('PASS %s', password)
    })
  end

  # APOP digest authentication: sends MD5(timestamp + password).
  # Requires the server greeting to have contained a timestamp.
  def apop(account, password)
    raise POPAuthenticationError, 'not APOP server; cannot login' \
                                                    unless @apop_stamp
    check_response_auth(critical {
      get_response('APOP %s %s',
                   account,
                   Digest::MD5.hexdigest(@apop_stamp + password))
    })
  end

  # LIST: returns an array of [message_number, size_in_octets] pairs.
  def list
    critical {
      getok 'LIST'
      list = []
      @socket.each_list_item do |line|
        m = /\A(\d+)[ \t]+(\d+)/.match(line) or
                raise POPBadResponse, "bad response: #{line}"
        list.push [m[1].to_i, m[2].to_i]
      end
      return list
    }
  end

  # STAT: returns [number_of_messages, total_size_in_octets].
  def stat
    res = check_response(critical { get_response('STAT') })
    m = /\A\+OK\s+(\d+)\s+(\d+)/.match(res) or
            raise POPBadResponse, "wrong response format: #{res}"
    [m[1].to_i, m[2].to_i]
  end

  # RSET: clears all deletion marks on the server.
  def rset
    check_response(critical { get_response('RSET') })
  end

  # TOP: yields the header plus +lines+ body lines of message +num+ to
  # the block, chunk by chunk.
  def top(num, lines = 0, &block)
    critical {
      getok('TOP %d %d', num, lines)
      @socket.each_message_chunk(&block)
    }
  end

  # RETR: yields the full text of message +num+ chunk by chunk.
  def retr(num, &block)
    critical {
      getok('RETR %d', num)
      @socket.each_message_chunk(&block)
    }
  end

  # DELE: marks message +num+ for deletion.
  def dele(num)
    check_response(critical { get_response('DELE %d', num) })
  end

  # UIDL: with +num+, returns that message's unique-id string; without,
  # returns a hash mapping message number => unique-id.
  def uidl(num = nil)
    if num
      res = check_response(critical { get_response('UIDL %d', num) })
      return res.split(/ /)[1]
    else
      critical {
        getok('UIDL')
        table = {}
        @socket.each_list_item do |line|
          num, uid = line.split
          table[num.to_i] = uid
        end
        return table
      }
    end
  end

  # QUIT: ends the session.
  def quit
    check_response(critical { get_response('QUIT') })
  end

  private

  # Sends a command and raises POPError unless the server answers +OK.
  def getok(fmt, *fargs)
    @socket.writeline sprintf(fmt, *fargs)
    check_response(recv_response())
  end

  # Sends a command and returns the raw response line (no checking).
  def get_response(fmt, *fargs)
    @socket.writeline sprintf(fmt, *fargs)
    recv_response()
  end

  # Reads one response line from the server.
  def recv_response
    @socket.readline
  end

  # Raises POPError unless +res+ is a +OK response; returns +res+.
  def check_response(res)
    raise POPError, res unless /\A\+OK/i =~ res
    res
  end

  # Like check_response, but raises POPAuthenticationError instead.
  def check_response_auth(res)
    raise POPAuthenticationError, res unless /\A\+OK/i =~ res
    res
  end

  # Runs the block, recording any raised exception.  Once an exception
  # has escaped, every later call short-circuits with a dummy +OK
  # response so the already-broken connection is not used again.
  # Rescuing Exception is deliberate here: the flag must be set no
  # matter what was raised, and the exception is immediately re-raised.
  def critical
    return '+OK dummy ok response' if @error_occured
    begin
      return yield()
    rescue Exception
      @error_occured = true
      raise
    end
  end
end # class POP3Command
end # module Net
| 26.376 | 85 | 0.565059 |
03c42fae5c539ba82fde70defe313816ef212acc | 322 | class TestJIRAService < MiniTest::Unit::TestCase
def setup; end
def teardown; end
# instance_with_token should store the endpoint URL, user name and auth
# token exactly as given.
def test_token_constructor
  inst = JIRA::JIRAService.instance_with_token 'url', 'user', 'token'
  assert_equal 'url', inst.endpoint_url
  assert_equal 'user', inst.user
  assert_equal 'token', inst.auth_token
end
end
| 23 | 71 | 0.720497 |
bb0a06955634ce3d6ce73ee17a5ddfcaaab3acc4 | 6,006 | ######################################################################
# Copyright (c) 2008-2014, Alliance for Sustainable Energy.
# All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
######################################################################
######################################################################
# == Synopsis
#
# Run a specific osm file, passing it through ModelToIdf and EnergyPlusJob
# using runmanager to run the simulation
#
# == Usage
#
# ruby ModelToIdfToEnergyPlus.rb ARGV[0]
#
# ARGV[0] - osm job to run
#
# == Examples
#
# ruby RunExampleFiles.rb mytest.osm
#
######################################################################
require 'openstudio'
require 'openstudio/energyplus/find_energyplus'
osm_path = OpenStudio::Path.new(ARGV[0])
# find energyplus
ep_hash = OpenStudio::EnergyPlus::find_energyplus(8,1)
ep_path = OpenStudio::Path.new(ep_hash[:energyplus_exe].to_s)
idd_path = OpenStudio::Path.new(ep_hash[:energyplus_idd].to_s)
ep_dir = File.dirname(ep_path.to_s)
expandobjects_path = ''
if (/mswin/.match(RUBY_PLATFORM) or /mingw/.match(RUBY_PLATFORM))
expandobjects_path = ep_dir + '/ExpandObjects.exe'
else
expandobjects_path = ep_dir + '/expandobjects'
end
readvars_path = ""
if (/mswin/.match(RUBY_PLATFORM) or /mingw/.match(RUBY_PLATFORM))
readvars_path = ep_dir + '/PostProcess/ReadVarsESO.exe'
else
readvars_path = ep_dir + '/readvars'
end
# just find first weather file for now
weatherDir = OpenStudio::Path.new(ep_hash[:energyplus_weatherdata].to_s)
weatherGlobSearch = weatherDir / OpenStudio::Path.new("*.epw")
weatherFilePath = Dir.glob(weatherGlobSearch.to_s).first
epw_path = OpenStudio::Path.new(weatherFilePath)
# make a run manager
run_manager = OpenStudio::Runmanager::RunManager.new(OpenStudio::Path.new("runmanager.db"))
parentpath = OpenStudio::Path.new();
# develop an intelligent output directory location
if osm_path.has_parent_path
parentpath = OpenStudio::Path.new(osm_path.parent_path().filename())
end
outdir = OpenStudio::tempDir() / OpenStudio::Path.new("ModelToIdfExample") / (osm_path.stem().to_s == "in"? parentpath : OpenStudio::Path.new(osm_path.stem()))
# our out dir is now something like:
# /tmp/ModelToIdfExample/InputFile
#
# Our overall created directory tree will be something like:
# /tmp/ModelToIdfExample/InputFile/
# ModelToIdf/
# in.osm
# in.idf
# in.epw
# EnergyPlusJob/
# in.idf
# in.epw
# Energy+.idd
# eplusout.*
# Show the jobs status as it processes
run_manager.showStatusDialog()
# create the "set of tools" needed.
tools = OpenStudio::Runmanager::Tools.new()
# NOTE(review): the tool is registered as EnergyPlus 7.2 while
# find_energyplus(8,1) above locates EnergyPlus 8.1 -- confirm whether
# the ToolVersion should be (8,1) to match.
tools.append(OpenStudio::Runmanager::ToolInfo.new("energyplus", OpenStudio::Runmanager::ToolVersion.new(7,2), ep_path))
tools.append(OpenStudio::Runmanager::ToolInfo.new("expandobjects", OpenStudio::Runmanager::ToolVersion.new(), OpenStudio::Path.new(expandobjects_path)))
tools.append(OpenStudio::Runmanager::ToolInfo.new("readvars", OpenStudio::Runmanager::ToolVersion.new(), OpenStudio::Path.new(readvars_path)))
# Create the modelToIdf job and give it the osm path and the output directory
modeltoidfjob = OpenStudio::Runmanager::JobFactory::createModelToIdfJob(osm_path, outdir)
# expandbjects job
expandobjectsjob = OpenStudio::Runmanager::JobFactory::createExpandObjectsJob(tools, OpenStudio::Runmanager::JobParams.new(), OpenStudio::Runmanager::Files.new())
# now, create an EnergyPlusJob, passing it the set of tools
# It does not need any files or params, because it is getting those (in.idf and outdir, respectively)
# from its parent job
eplusjob = OpenStudio::Runmanager::JobFactory::createEnergyPlusJob(tools, OpenStudio::Runmanager::JobParams.new(), OpenStudio::Runmanager::Files.new())
# make the readvars job, could take an arbitrary list of rvi files to use
rvi_files = OpenStudio::Runmanager::Files.new()
readvarsjob = OpenStudio::Runmanager::JobFactory::createReadVarsJob(tools, OpenStudio::Runmanager::JobParams.new(), rvi_files)
# now that both jobs are created, establish the parent/child relationship
modeltoidfjob.addChild(expandobjectsjob)
expandobjectsjob.addChild(eplusjob)
eplusjob.addChild(readvarsjob)
# Queue up the parent, which also queues any children
run_manager.enqueue(modeltoidfjob, true)
# wait for jobs to complete
while run_manager.workPending()
sleep 0.1
OpenStudio::Application::instance().processEvents()
end
run_manager.hideStatusDialog()
# now let's see what output files we generated
# maybe there should be another way to do this, say to ask the parent for all output files generated by the children
puts "Output files generated by child: "
files = eplusjob.outputFiles()
files.each do |f|
puts f.fullPath.to_s
end
| 40.308725 | 163 | 0.654845 |
613bda4f8446e5011173d3fb6e0d7b04ba80ef1c | 9,684 | # frozen_string_literal: true
module Bundler
class Injector
INJECTED_GEMS = "injected gems".freeze
def self.inject(new_deps, options = {})
injector = new(new_deps, options)
injector.inject(Bundler.default_gemfile, Bundler.default_lockfile)
end
def self.remove(gems, options = {})
injector = new(gems, options)
injector.remove(Bundler.default_gemfile, Bundler.default_lockfile)
end
def initialize(deps, options = {})
@deps = deps
@options = options
end
# @param [Pathname] gemfile_path The Gemfile in which to inject the new dependency.
# @param [Pathname] lockfile_path The lockfile in which to inject the new dependency.
# @return [Array]
def inject(gemfile_path, lockfile_path)
if Bundler.frozen_bundle?
# ensure the lock and Gemfile are synced
Bundler.definition.ensure_equivalent_gemfile_and_lockfile(true)
end
# temporarily unfreeze
Bundler.settings.temporary(:deployment => false, :frozen => false) do
# evaluate the Gemfile we have now
builder = Dsl.new
builder.eval_gemfile(gemfile_path)
# don't inject any gems that are already in the Gemfile
@deps -= builder.dependencies
# add new deps to the end of the in-memory Gemfile
# Set conservative versioning to false because
# we want to let the resolver resolve the version first
builder.eval_gemfile(INJECTED_GEMS, build_gem_lines(false)) if @deps.any?
# resolve to see if the new deps broke anything
@definition = builder.to_definition(lockfile_path, {})
@definition.resolve_remotely!
# since nothing broke, we can add those gems to the gemfile
append_to(gemfile_path, build_gem_lines(@options[:conservative_versioning])) if @deps.any?
# since we resolved successfully, write out the lockfile
@definition.lock(Bundler.default_lockfile)
# invalidate the cached Bundler.definition
Bundler.reset_paths!
# return an array of the deps that we added
@deps
end
end
# @param [Pathname] gemfile_path The Gemfile from which to remove dependencies.
# @param [Pathname] lockfile_path The lockfile from which to remove dependencies.
# @return [Array]
def remove(gemfile_path, lockfile_path)
# remove gems from each gemfiles we have
Bundler.definition.gemfiles.each do |path|
deps = remove_deps(path)
show_warning("No gems were removed from the gemfile.") if deps.empty?
deps.each {|dep| Bundler.ui.confirm "#{SharedHelpers.pretty_dependency(dep, false)} was removed." }
end
end
private
def conservative_version(spec)
version = spec.version
return ">= 0" if version.nil?
segments = version.segments
seg_end_index = version >= Gem::Version.new("1.0") ? 1 : 2
prerelease_suffix = version.to_s.gsub(version.release.to_s, "") if version.prerelease?
"#{version_prefix}#{segments[0..seg_end_index].join(".")}#{prerelease_suffix}"
end
def version_prefix
if @options[:strict]
"= "
elsif @options[:optimistic]
">= "
else
"~> "
end
end
def build_gem_lines(conservative_versioning)
@deps.map do |d|
name = d.name.dump
requirement = if conservative_versioning
", \"#{conservative_version(@definition.specs[d.name][0])}\""
else
", #{d.requirement.as_list.map(&:dump).join(", ")}"
end
if d.groups != Array(:default)
group = d.groups.size == 1 ? ", :group => #{d.groups.first.inspect}" : ", :groups => #{d.groups.inspect}"
end
source = ", :source => \"#{d.source}\"" unless d.source.nil?
git = ", :git => \"#{d.git}\"" unless d.git.nil?
branch = ", :branch => \"#{d.branch}\"" unless d.branch.nil?
%(gem #{name}#{requirement}#{group}#{source}#{git}#{branch})
end.join("\n")
end
def append_to(gemfile_path, new_gem_lines)
gemfile_path.open("a") do |f|
f.puts
f.puts new_gem_lines
end
end
# evaluates a gemfile to remove the specified gem
# from it.
def remove_deps(gemfile_path)
initial_gemfile = File.readlines(gemfile_path)
Bundler.ui.info "Removing gems from #{gemfile_path}"
# evaluate the Gemfile we have
builder = Dsl.new
builder.eval_gemfile(gemfile_path)
removed_deps = remove_gems_from_dependencies(builder, @deps, gemfile_path)
# abort the operation if no gems were removed
# no need to operate on gemfile further
return [] if removed_deps.empty?
cleaned_gemfile = remove_gems_from_gemfile(@deps, gemfile_path)
SharedHelpers.write_to_gemfile(gemfile_path, cleaned_gemfile)
# check for errors
# including extra gems being removed
# or some gems not being removed
# and return the actual removed deps
cross_check_for_errors(gemfile_path, builder.dependencies, removed_deps, initial_gemfile)
end
# @param [Dsl] builder Dsl object of current Gemfile.
# @param [Array] gems Array of names of gems to be removed.
# @param [Pathname] gemfile_path Path of the Gemfile.
# @return [Array] Array of removed dependencies.
def remove_gems_from_dependencies(builder, gems, gemfile_path)
removed_deps = []
gems.each do |gem_name|
deleted_dep = builder.dependencies.find {|d| d.name == gem_name }
if deleted_dep.nil?
raise GemfileError, "`#{gem_name}` is not specified in #{gemfile_path} so it could not be removed."
end
builder.dependencies.delete(deleted_dep)
removed_deps << deleted_dep
end
removed_deps
end
# @param [Array] gems Array of names of gems to be removed.
# @param [Pathname] gemfile_path The Gemfile from which to remove dependencies.
def remove_gems_from_gemfile(gems, gemfile_path)
patterns = /gem\s+(['"])#{Regexp.union(gems)}\1|gem\s*\((['"])#{Regexp.union(gems)}\2\)/
new_gemfile = []
multiline_removal = false
File.readlines(gemfile_path).each do |line|
match_data = line.match(patterns)
if match_data && is_not_within_comment?(line, match_data)
multiline_removal = line.rstrip.end_with?(",")
# skip lines which match the regex
next
end
# skip followup lines until line does not end with ','
new_gemfile << line unless multiline_removal
multiline_removal = line.rstrip.end_with?(",") if multiline_removal
end
# remove line \n and append them with other strings
new_gemfile.each_with_index do |_line, index|
if new_gemfile[index + 1] == "\n"
new_gemfile[index] += new_gemfile[index + 1]
new_gemfile.delete_at(index + 1)
end
end
%w[group source env install_if].each {|block| remove_nested_blocks(new_gemfile, block) }
new_gemfile.join.chomp
end
# @param [String] line Individual line of gemfile content.
# @param [MatchData] match_data Data about Regex match.
def is_not_within_comment?(line, match_data)
match_start_index = match_data.offset(0).first
!line[0..match_start_index].include?("#")
end
# @param [Array] gemfile Array of gemfile contents.
# @param [String] block_name Name of block name to look for.
def remove_nested_blocks(gemfile, block_name)
nested_blocks = 0
# count number of nested blocks
gemfile.each_with_index {|line, index| nested_blocks += 1 if !gemfile[index + 1].nil? && gemfile[index + 1].include?(block_name) && line.include?(block_name) }
while nested_blocks >= 0
nested_blocks -= 1
gemfile.each_with_index do |line, index|
next unless !line.nil? && line.strip.start_with?(block_name)
if gemfile[index + 1] =~ /^\s*end\s*$/
gemfile[index] = nil
gemfile[index + 1] = nil
end
end
gemfile.compact!
end
end
# @param [Pathname] gemfile_path The Gemfile from which to remove dependencies.
# @param [Array] original_deps Array of original dependencies.
# @param [Array] removed_deps Array of removed dependencies.
# @param [Array] initial_gemfile Contents of original Gemfile before any operation.
def cross_check_for_errors(gemfile_path, original_deps, removed_deps, initial_gemfile)
# evaluate the new gemfile to look for any failure cases
builder = Dsl.new
builder.eval_gemfile(gemfile_path)
# record gems which were removed but not requested
extra_removed_gems = original_deps - builder.dependencies
# if some extra gems were removed then raise error
# and revert Gemfile to original
unless extra_removed_gems.empty?
SharedHelpers.write_to_gemfile(gemfile_path, initial_gemfile.join)
raise InvalidOption, "Gems could not be removed. #{extra_removed_gems.join(", ")} would also have been removed. Bundler cannot continue."
end
# record gems which could not be removed due to some reasons
errored_deps = builder.dependencies.select {|d| d.gemfile == gemfile_path } & removed_deps.select {|d| d.gemfile == gemfile_path }
show_warning "#{errored_deps.map(&:name).join(", ")} could not be removed." unless errored_deps.empty?
# return actual removed dependencies
removed_deps - errored_deps
end
def show_warning(message)
Bundler.ui.info Bundler.ui.add_color(message, :yellow)
end
end
end
| 35.343066 | 165 | 0.656547 |
6a52253b2a4858695e8fa7febdbe7d57f67badd8 | 1,446 | # frozen_string_literal: true
module RailFeeds
module NationalRail
# A wrapper class for ::Net::HTTP
class HTTPClient < RailFeeds::HTTPClient
  # credentials:: the credentials object used to authenticate; defaults
  #               to RailFeeds::NationalRail::Credentials when nil.
  # Remaining keyword arguments are forwarded to the parent class
  # (bare +super+ re-passes the same arguments with credentials filled in).
  def initialize(credentials: nil, **args)
    credentials ||= RailFeeds::NationalRail::Credentials
    super
  end

  # Fetch path from server.
  # @param [String] path The path to fetch.
  # @yield contents
  # @yieldparam [IO] file Either a Tempfile or StringIO.
  def fetch(path)
    # Every request carries the (possibly refreshed) auth token header.
    super "https://opendata.nationalrail.co.uk/#{path}", 'X-Auth-Token' => auth_token
  end

  private

  # Returns a cached auth token, requesting a fresh one from the
  # authenticate endpoint when no token is held or the cached one has
  # expired.
  # rubocop:disable Metrics/AbcSize
  def auth_token
    return @auth_token if !@auth_token.nil? && @auth_token_expires_at >= Time.now
    logger.info 'Getting an auth token for national rail.'
    response = Net::HTTP.post_form(
      URI('https://opendata.nationalrail.co.uk/authenticate'),
      credentials.to_h
    )
    response.value # Raise an exception if not successful
    data = JSON.parse(response.body)
    logger.debug "Got auth token data: #{data.inspect}"
    token = data.fetch('token')
    # Token expires in 1 hour. Using 55 minutes provides a safety margin.
    @auth_token_expires_at = Time.now + (55 * 60)
    logger.debug "Auth token expires at #{@auth_token_expires_at}."
    @auth_token = token
  end
  # rubocop:enable Metrics/AbcSize
end
end
end
| 31.434783 | 89 | 0.636929 |
f7fe5cb3cdc24d2320286199af9869e6a937f58d | 1,463 | class Admin::ProgramsController < AdminController
before_action :authenticate_user!
before_action :set_program, only: [:edit,:show,:destroy,:update]
def index
@programs = Program.all
end
def new
@program = Program.new
end
# POST /admin/programs
# Builds a Program from the whitelisted params; on success redirects to
# the programs index, otherwise re-renders the new form.
def create
  @program = Program.new(program_params)
  respond_to do |format|
    if @program.save
      flash[:notice] = 'Program was successfully created.'
      # Fix: the original block only evaluated admin_programs_path and
      # never issued a redirect, so a successful save fell through to
      # the default (missing) create template.
      format.html { redirect_to admin_programs_path }
    else
      format.html { render action: "new" }
    end
  end
end
def show
redirect_to action: :edit
end
def edit
end
def update
respond_to do |format|
if @program.update(program_params)
format.html { redirect_to admin_program_path(@program.id), notice: 'program was successfully updated.' }
format.json { render :show, status: :ok, location: @program }
else
format.html { render :edit }
format.json { render json: @program.errors, status: :unprocessable_entity }
end
end
end
def destroy
@program.destroy
respond_to do |format|
format.html { redirect_to admin_programs_path, notice: 'program was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Loads the Program addressed by params[:id] into @program
# (before_action for edit/show/destroy/update).
def set_program
  @program = Program.find(params[:id])
end
# Strong-parameters whitelist for mass-assigning Program attributes.
def program_params
  params.require(:program).permit(:camp_id,:name,:info,:nickname,:price,:start_date,:end_date,:old_link)
end
end
| 22.507692 | 112 | 0.664388 |
6add7643d062c03bbc3de8e58b7675082cb2f6da | 12,218 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
# Updated change widget.
class ChangeWidgetRequest
# whether the object has unparsed attributes
attr_accessor :_unparsed
attr_accessor :apm_query
attr_accessor :change_type
attr_accessor :compare_to
attr_accessor :event_query
# List of formulas that operate on queries. **This feature is currently in beta.**
attr_accessor :formulas
# Whether to show increase as good.
attr_accessor :increase_good
attr_accessor :log_query
attr_accessor :network_query
attr_accessor :order_by
attr_accessor :order_dir
attr_accessor :process_query
attr_accessor :profile_metrics_query
# Query definition.
attr_accessor :q
# List of queries that can be returned directly or used in formulas. **This feature is currently in beta.**
attr_accessor :queries
attr_accessor :response_format
attr_accessor :rum_query
attr_accessor :security_query
# Whether to show the present value.
attr_accessor :show_present
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'apm_query' => :'apm_query',
:'change_type' => :'change_type',
:'compare_to' => :'compare_to',
:'event_query' => :'event_query',
:'formulas' => :'formulas',
:'increase_good' => :'increase_good',
:'log_query' => :'log_query',
:'network_query' => :'network_query',
:'order_by' => :'order_by',
:'order_dir' => :'order_dir',
:'process_query' => :'process_query',
:'profile_metrics_query' => :'profile_metrics_query',
:'q' => :'q',
:'queries' => :'queries',
:'response_format' => :'response_format',
:'rum_query' => :'rum_query',
:'security_query' => :'security_query',
:'show_present' => :'show_present'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'apm_query' => :'LogQueryDefinition',
:'change_type' => :'WidgetChangeType',
:'compare_to' => :'WidgetCompareTo',
:'event_query' => :'LogQueryDefinition',
:'formulas' => :'Array<WidgetFormula>',
:'increase_good' => :'Boolean',
:'log_query' => :'LogQueryDefinition',
:'network_query' => :'LogQueryDefinition',
:'order_by' => :'WidgetOrderBy',
:'order_dir' => :'WidgetSort',
:'process_query' => :'ProcessQueryDefinition',
:'profile_metrics_query' => :'LogQueryDefinition',
:'q' => :'String',
:'queries' => :'Array<FormulaAndFunctionQueryDefinition>',
:'response_format' => :'FormulaAndFunctionResponseFormat',
:'rum_query' => :'LogQueryDefinition',
:'security_query' => :'LogQueryDefinition',
:'show_present' => :'Boolean'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::ChangeWidgetRequest` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::ChangeWidgetRequest`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'apm_query')
self.apm_query = attributes[:'apm_query']
end
if attributes.key?(:'change_type')
self.change_type = attributes[:'change_type']
end
if attributes.key?(:'compare_to')
self.compare_to = attributes[:'compare_to']
end
if attributes.key?(:'event_query')
self.event_query = attributes[:'event_query']
end
if attributes.key?(:'formulas')
if (value = attributes[:'formulas']).is_a?(Array)
self.formulas = value
end
end
if attributes.key?(:'increase_good')
self.increase_good = attributes[:'increase_good']
end
if attributes.key?(:'log_query')
self.log_query = attributes[:'log_query']
end
if attributes.key?(:'network_query')
self.network_query = attributes[:'network_query']
end
if attributes.key?(:'order_by')
self.order_by = attributes[:'order_by']
end
if attributes.key?(:'order_dir')
self.order_dir = attributes[:'order_dir']
end
if attributes.key?(:'process_query')
self.process_query = attributes[:'process_query']
end
if attributes.key?(:'profile_metrics_query')
self.profile_metrics_query = attributes[:'profile_metrics_query']
end
if attributes.key?(:'q')
self.q = attributes[:'q']
end
if attributes.key?(:'queries')
if (value = attributes[:'queries']).is_a?(Array)
self.queries = value
end
end
if attributes.key?(:'response_format')
self.response_format = attributes[:'response_format']
end
if attributes.key?(:'rum_query')
self.rum_query = attributes[:'rum_query']
end
if attributes.key?(:'security_query')
self.security_query = attributes[:'security_query']
end
if attributes.key?(:'show_present')
self.show_present = attributes[:'show_present']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
apm_query == o.apm_query &&
change_type == o.change_type &&
compare_to == o.compare_to &&
event_query == o.event_query &&
formulas == o.formulas &&
increase_good == o.increase_good &&
log_query == o.log_query &&
network_query == o.network_query &&
order_by == o.order_by &&
order_dir == o.order_dir &&
process_query == o.process_query &&
profile_metrics_query == o.profile_metrics_query &&
q == o.q &&
queries == o.queries &&
response_format == o.response_format &&
rum_query == o.rum_query &&
security_query == o.security_query &&
show_present == o.show_present
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[apm_query, change_type, compare_to, event_query, formulas, increase_good, log_query, network_query, order_by, order_dir, process_query, profile_metrics_query, q, queries, response_format, rum_query, security_query, show_present].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when :Array
# generic array, return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = DatadogAPIClient::V1.const_get(type)
res = klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
if res.instance_of? DatadogAPIClient::V1::UnparsedObject
self._unparsed = true
end
res
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 30.931646 | 240 | 0.62367 |
ab1c2deb0eef7b9529213cc0bbf0b7ef80965da4 | 1,772 | class BondsController < ApplicationController
before_action :set_bond, only: [:show, :edit, :update, :destroy]
# GET /bonds
# GET /bonds.json
def index
@bonds = Bond.all
end
# GET /bonds/1
# GET /bonds/1.json
def show
end
# GET /bonds/new
def new
@bond = Bond.new
end
# GET /bonds/1/edit
def edit
end
# POST /bonds
# POST /bonds.json
def create
@bond = Bond.new(bond_params)
respond_to do |format|
if @bond.save
format.html { redirect_to @bond, notice: 'Bond was successfully created.' }
format.json { render :show, status: :created, location: @bond }
else
format.html { render :new }
format.json { render json: @bond.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /bonds/1
# PATCH/PUT /bonds/1.json
def update
respond_to do |format|
if @bond.update(bond_params)
format.html { redirect_to @bond, notice: 'Bond was successfully updated.' }
format.json { render :show, status: :ok, location: @bond }
else
format.html { render :edit }
format.json { render json: @bond.errors, status: :unprocessable_entity }
end
end
end
# DELETE /bonds/1
# DELETE /bonds/1.json
def destroy
@bond.destroy
respond_to do |format|
format.html { redirect_to bonds_url, notice: 'Bond was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_bond
@bond = Bond.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def bond_params
params.require(:bond).permit(:name)
end
end
| 23.626667 | 88 | 0.637133 |
# Test bootstrap: put lib/ on the load path, load the test dependencies,
# and switch Minitest to the spec-style reporter.
# (Garbled dataset metadata removed from the $LOAD_PATH line.)
$LOAD_PATH.unshift File.dirname(__FILE__) + "/../lib"

require "yaml"
require "minitest/autorun"
require "minitest/reporters"
require "secret_config"
require "amazing_print"

Minitest::Reporters.use! Minitest::Reporters::SpecReporter.new
| 23.8 | 62 | 0.785714 |
module KhipuChaski
  # API model: a successful-operation response carrying a user-facing message.
  # (Garbled dataset metadata removed from the module declaration line.)
  class SuccessResponse < BaseObject
    attr_accessor :message

    # attribute mapping from ruby-style variable name to JSON key
    def self.attribute_map
      {
        # Message to display to the user
        :'message' => :'message'
      }
    end

    # attribute type
    def self.swagger_types
      {
        :'message' => :'String'
      }
    end

    # @param [Hash] attributes model attributes; non-hash/empty input leaves
    #   the object blank
    def initialize(attributes = {})
      return if !attributes.is_a?(Hash) || attributes.empty?
      # convert string to symbol for hash key
      attributes = attributes.inject({}){|memo,(k,v)| memo[k.to_sym] = v; memo}
      if attributes[:'message']
        self.message = attributes[:'message']
      end
    end
  end
end
| 19.947368 | 79 | 0.568602 |
# Gem-skeleton spec for OverwatchInfo.
# (Garbled dataset metadata removed from the describe line.)
RSpec.describe OverwatchInfo do
  it "has a version number" do
    expect(OverwatchInfo::VERSION).not_to be nil
  end

  # Bundler-generated placeholder: intentionally failing until replaced
  # with a real test.
  it "does something useful" do
    expect(false).to eq(true)
  end
end
| 19.1 | 48 | 0.717277 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Chef cookbook metadata: c2d-config targets Debian and needs Chef >= 12.1.
name 'c2d-config'
chef_version '>= 12.1'
supports 'debian'
| 35.277778 | 74 | 0.755906 |
ab7262929e0ffa19c32ca406b269e3a0544dc3e0 | 1,863 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Batch::Mgmt::V2019_08_01
module Models
#
# The definition of the user identity under which the task is run.
# Specify either the userName or autoUser property, but not both.
#
class UserIdentity
include MsRestAzure
# @return [String] The name of the user identity under which the task is
# run. The userName and autoUser properties are mutually exclusive; you
# must specify one but not both.
attr_accessor :user_name
# @return [AutoUserSpecification] The auto user under which the task is
# run. The userName and autoUser properties are mutually exclusive; you
# must specify one but not both.
attr_accessor :auto_user
#
# Mapper for UserIdentity class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'UserIdentity',
type: {
name: 'Composite',
class_name: 'UserIdentity',
model_properties: {
user_name: {
client_side_validation: true,
required: false,
serialized_name: 'userName',
type: {
name: 'String'
}
},
auto_user: {
client_side_validation: true,
required: false,
serialized_name: 'autoUser',
type: {
name: 'Composite',
class_name: 'AutoUserSpecification'
}
}
}
}
}
end
end
end
end
| 29.109375 | 78 | 0.56146 |
61d66ba4980fed96566d10d0f689e1af784a569a | 1,095 | class Admin::UserController < Admin::AbstractModelController
model_class User
only_allow_access_to :index, :new, :edit, :remove, :when => :admin,
:denied_url => {:controller => 'page', :action => :index},
:denied_message => 'You must have administrative privileges to perform this action.'
def preferences
@user = User.find(session[:user].id)
if valid_params?
handle_new_or_edit_post(
:redirect_to => page_index_url,
:saved_message => 'Your preferences have been saved.'
)
else
announce_bad_data
end
end
def remove
if session[:user].id.to_s == params[:id].to_s
announce_cannot_delete_self
redirect_to user_index_url
else
super
end
end
private
def announce_cannot_delete_self
flash[:error] = 'You cannot delete yourself.'
end
def announce_bad_data
flash[:error] = 'Bad form data.'
end
def valid_params?
hash = (params[:user] || {}).symbolize_keys
(hash.keys - [:password, :password_confirmation, :email]).size == 0
end
end
| 24.886364 | 88 | 0.644749 |
d564448bc4cb0d5e8ed471bcd1a8028254dad3eb | 6,506 | require 'weixin_rails_middleware/helpers/reply_weixin_message_helper'
module WeixinPam
class PublicAccountReply
class KeyEventCallback
attr_accessor :key, :callback, :description
def execute(service)
callback.call(service)
end
end
module KeyEventMethods
extend ActiveSupport::Concern
def find_key_event(key)
key = key.to_s.intern
self.class.key_event_callbacks.find { |ke| ke.key == key }
end
class_methods do
def key_event_callbacks
@key_event_callbacks ||= []
end
def key_event_desc(desc)
@current_key_event = KeyEventCallback.new
@current_key_event.description = desc
end
def define_key_event(key, &block)
@current_key_event ||= KeyEventCallback.new
@current_key_event.key = key.intern
@current_key_event.callback = block
key_event_callbacks.push(@current_key_event)
@current_key_event = nil
end
end
end
include KeyEventMethods
include WeixinRailsMiddleware::ReplyWeixinMessageHelper
attr_reader :weixin_public_account, :weixin_message, :keyword, :weixin_user_account
NO_CONTENT = :no_content
def initialize(public_account, message, keyword)
@weixin_public_account = public_account
@weixin_user_account = public_account.user_accounts.find_or_create_by!(uid: message.FromUserName)
@weixin_message = message
@keyword = keyword
end
def reply
send("response_#{@weixin_message.MsgType}_message")
end
def response_text_message
reply_with_dev_message(reply_text_message("Your Message: #{@keyword}"))
end
# <Location_X>23.134521</Location_X>
# <Location_Y>113.358803</Location_Y>
# <Scale>20</Scale>
# <Label><![CDATA[位置信息]]></Label>
def response_location_message
@lx = @weixin_message.Location_X
@ly = @weixin_message.Location_Y
@scale = @weixin_message.Scale
@label = @weixin_message.Label
reply_with_dev_message(reply_text_message("Your Location: #{@lx}, #{@ly}, #{@scale}, #{@label}"))
end
# <PicUrl><![CDATA[this is a url]]></PicUrl>
# <MediaId><![CDATA[media_id]]></MediaId>
def response_image_message
@media_id = @weixin_message.MediaId # 可以调用多媒体文件下载接口拉取数据。
@pic_url = @weixin_message.PicUrl # 也可以直接通过此链接下载图片, 建议使用carrierwave.
reply_with_dev_message(generate_image(@media_id))
end
# <Title><![CDATA[公众平台官网链接]]></Title>
# <Description><![CDATA[公众平台官网链接]]></Description>
# <Url><![CDATA[url]]></Url>
def response_link_message
@title = @weixin_message.Title
@desc = @weixin_message.Description
@url = @weixin_message.Url
reply_with_dev_message(reply_text_message("回复链接信息"))
end
# <MediaId><![CDATA[media_id]]></MediaId>
# <Format><![CDATA[Format]]></Format>
def response_voice_message
@media_id = @weixin_message.MediaId # 可以调用多媒体文件下载接口拉取数据。
@format = @weixin_message.Format
# 如果开启了语音翻译功能,@keyword则为翻译的结果
# reply_text_message("回复语音信息: #{@keyword}")
reply_with_dev_message(generate_voice(@media_id))
end
# <MediaId><![CDATA[media_id]]></MediaId>
# <ThumbMediaId><![CDATA[thumb_media_id]]></ThumbMediaId>
def response_video_message
@media_id = @weixin_message.MediaId # 可以调用多媒体文件下载接口拉取数据。
# 视频消息缩略图的媒体id,可以调用多媒体文件下载接口拉取数据。
@thumb_media_id = @weixin_message.ThumbMediaId
reply_with_dev_message(reply_text_message("回复视频信息"))
end
def response_event_message
event_type = @weixin_message.Event
case event_type.downcase
when 'unsubscribe'
@weixin_public_account.user_accounts.where(uid: @weixin_message.FromUserName).limit(1).update_all(subscribed: false)
when 'subscribe'
@weixin_public_account.user_accounts.where(uid: @weixin_message.FromUserName).limit(1).update_all(subscribed: true)
end
send("handle_#{event_type.downcase}_event")
end
# 关注公众账号
def handle_subscribe_event
if @keyword.present?
# 扫描带参数二维码事件: 1. 用户未关注时,进行关注后的事件推送
return reply_with_dev_message(reply_text_message("扫描带参数二维码事件: 1. 用户未关注时,进行关注后的事件推送, keyword: #{@keyword}"))
end
reply_with_dev_message(reply_text_message("关注公众账号"))
end
# 取消关注
def handle_unsubscribe_event
Rails.logger.info("取消关注")
NO_CONTENT
end
# 扫描带参数二维码事件: 2. 用户已关注时的事件推送
def handle_scan_event
reply_with_dev_message(reply_text_message("扫描带参数二维码事件: 2. 用户已关注时的事件推送, keyword: #{@keyword}"))
end
def handle_location_event # 上报地理位置事件
@lat = @weixin_message.Latitude
@lgt = @weixin_message.Longitude
@precision = @weixin_message.Precision
reply_with_dev_message(reply_text_message("Your Location: #{@lat}, #{@lgt}, #{@precision}"))
end
# 点击菜单拉取消息时的事件推送
def handle_click_event
key_event = find_key_event(@keyword)
if key_event
key_event.execute(self)
else
reply_with_dev_message(reply_text_message("你点击了: #{@keyword}"))
end
end
# 点击菜单跳转链接时的事件推送
def handle_view_event
Rails.logger.info("你点击了: #{@keyword}")
NO_CONTENT
end
# 弹出系统拍照发图
def handle_pic_sysphoto_event
NO_CONTENT
end
# 弹出拍照或者相册发图的事件推送
def handle_pic_photo_or_album_event
NO_CONTENT
end
# 扫码事件
def handle_scancode_push_event
NO_CONTENT
end
# 帮助文档: https://github.com/lanrion/weixin_authorize/issues/22
# 由于群发任务提交后,群发任务可能在一定时间后才完成,因此,群发接口调用时,仅会给出群发任务是否提交成功的提示,若群发任务提交成功,则在群发任务结束时,会向开发者在公众平台填写的开发者URL(callback URL)推送事件。
# 推送的XML结构如下(发送成功时):
# <xml>
# <ToUserName><![CDATA[gh_3e8adccde292]]></ToUserName>
# <FromUserName><![CDATA[oR5Gjjl_eiZoUpGozMo7dbBJ362A]]></FromUserName>
# <CreateTime>1394524295</CreateTime>
# <MsgType><![CDATA[event]]></MsgType>
# <Event><![CDATA[MASSSENDJOBFINISH]]></Event>
# <MsgID>1988</MsgID>
# <Status><![CDATA[sendsuccess]]></Status>
# <TotalCount>100</TotalCount>
# <FilterCount>80</FilterCount>
# <SentCount>75</SentCount>
# <ErrorCount>5</ErrorCount>
# </xml>
def handle_masssendjobfinish_event
Rails.logger.info("回调事件处理")
NO_CONTENT
end
def handle_templatesendjobfinish_event
Rails.logger.info("回调模板任务")
NO_CONTENT
end
def reply_with_dev_message(msg)
Rails.env.development? ? msg : NO_CONTENT
end
end
end
| 30.401869 | 124 | 0.678758 |
7a133a503c9faf89ca96de45cd2536ec3134608d | 6,662 | =begin
#Ory APIs
#Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers.
The version of the OpenAPI document: v0.0.1-alpha.19
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.2.1
=end
require 'date'
require 'time'
module OryClient
  # ContainerWaitOKBodyError container waiting error, if any
  class ContainerWaitOKBodyError
    # Details of an error
    attr_accessor :message

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'message' => :'Message'
      }
    end

    # Returns all the JSON keys this model knows about
    def self.acceptable_attributes
      attribute_map.values
    end

    # Attribute type mapping.
    def self.openapi_types
      {
        :'message' => :'String'
      }
    end

    # List of attributes with nullable: true
    def self.openapi_nullable
      Set.new([
      ])
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      if (!attributes.is_a?(Hash))
        fail ArgumentError, "The input argument (attributes) must be a hash in `OryClient::ContainerWaitOKBodyError` initialize method"
      end
      # check to see if the attribute exists and convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h|
        if (!self.class.attribute_map.key?(k.to_sym))
          fail ArgumentError, "`#{k}` is not a valid attribute in `OryClient::ContainerWaitOKBodyError`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
        end
        h[k.to_sym] = v
      }
      if attributes.key?(:'message')
        self.message = attributes[:'message']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          message == o.message
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    def hash
      [message].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def self.build_from_hash(attributes)
      new.build_from_hash(attributes)
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.openapi_types.each_pair do |key, type|
        if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
          self.send("#{key}=", nil)
        elsif type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end
      end
      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :Time
        Time.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :Boolean
        # accepted truthy spellings: true/t/yes/y/1 (case-insensitive)
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        # models (e.g. Pet) or oneOf
        klass = OryClient.const_get(type)
        klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # Nil attributes are skipped unless nullable and explicitly assigned.
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        if value.nil?
          is_nullable = self.class.openapi_nullable.include?(attr)
          next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
        end
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 30.144796 | 213 | 0.630892 |
# (Garbled dataset metadata removed from the require line.)
require 'active_support/concern'

module Versacommerce
  class ThemeAPIClient
    module Resources
      # Shared behaviour for file-like resources: a validated, normalized
      # path attribute plus name/equality helpers derived from it.
      module FileBehaviour
        extend ActiveSupport::Concern

        included do
          define_attribute_method :path
          attr_reader :path

          validates :path, presence: true
          validates :name, length: {in: 3..64}, if: proc { name.present? }
        end

        # Assigns the path, stripping leading slashes; marks the attribute
        # dirty only when the normalized value actually changes.
        def path=(value)
          path = Pathname.new(value.sub(/\A\/*/, ''))
          unless path == @path
            path_will_change!
            @path = path
          end
        end

        # Basename of the path, used as the resource's display name.
        def name
          path.basename.to_s
        end

        # Two resources are equal when they share class and path.
        def ==(other)
          other.kind_of?(self.class) && path == other.path
        end
      end
    end
  end
end
| 20.459459 | 74 | 0.541612 |
01f40a7a465dc0bed59b6196ab6002ab688f5cca | 1,689 | # frozen_string_literal: true
require 'ffaker'
FactoryBot.define do
factory :alert_management_alert, class: 'AlertManagement::Alert' do
triggered
project
title { FFaker::Lorem.sentence }
started_at { Time.current }
trait :with_issue do
issue
end
trait :with_fingerprint do
fingerprint { SecureRandom.hex }
end
trait :with_service do
service { FFaker::Product.product_name }
end
trait :with_monitoring_tool do
monitoring_tool { FFaker::AWS.product_description }
end
trait :with_description do
description { FFaker::Lorem.sentence }
end
trait :with_host do
hosts { [FFaker::Internet.ip_v4_address] }
end
trait :with_ended_at do
ended_at { Time.current }
end
trait :without_ended_at do
ended_at { nil }
end
trait :triggered do
status { AlertManagement::Alert::STATUSES[:triggered] }
without_ended_at
end
trait :acknowledged do
status { AlertManagement::Alert::STATUSES[:acknowledged] }
without_ended_at
end
trait :resolved do
status { AlertManagement::Alert::STATUSES[:resolved] }
with_ended_at
end
trait :ignored do
status { AlertManagement::Alert::STATUSES[:ignored] }
without_ended_at
end
trait :low_severity do
severity { 'low' }
end
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus] }
end
trait :all_fields do
with_issue
with_fingerprint
with_service
with_monitoring_tool
with_host
with_description
low_severity
end
end
end
| 20.597561 | 93 | 0.662522 |
# Homebrew-Cask (v1 DSL) definition for the Zooom window-management app.
# (Garbled dataset metadata removed from the cask declaration line.)
cask :v1 => 'zooom' do
  version :latest
  sha256 :no_check

  url 'http://software.coderage-software.com/zooom/Zooom_Latest.dmg'
  homepage 'http://coderage-software.com/zooom'
  license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder

  pkg 'Zooom2.pkg'

  uninstall :pkgutil => 'com.coderage.pkg.Zooom2'

  depends_on :macos => '>= :mavericks'

  caveats do
    "There are known issues with installing this package, so if installation fails you may need to run the installer at #{staged_path}/Zooom2.pkg manually."
  end
end
| 30.842105 | 156 | 0.728669 |
# Shows a feed of recent Change records to authorized users.
# (Garbled dataset metadata removed from the class declaration line.)
class WatchingController < ApplicationController
  before_action :authorize

  # The 20 most recent changes, newest first.
  def feed
    @changes = Change.order('created_at DESC').first(20)
  end
end
| 19.375 | 56 | 0.76129 |
# (Garbled dataset metadata removed from the module declaration line.)
module PaxfulEngine
  # Background job that delegates to the SyncOrderBook callable operation.
  class SyncOrderBookJob < ApplicationJob
    def perform
      SyncOrderBook.()
    end
  end
end
| 12.1 | 41 | 0.702479 |
6af086f958ad8c7d550382a9cbcca621049bb8e9 | 6,920 | require_relative 'data_generation/json_to_seed'
def run
remove_old_seeds
data = run_json_to_seed
countries = seed_countries(data)
positions = seed_positions(data)
competitions = seed_competitions(data)
stadia = seed_stadia(data)
clubs = seed_clubs(data)
players = seed_players(data)
player_stats = seed_player_stats(data)
club_stats = seed_club_stats(data)
matches = seed_matches(data)
seed_users
end
# Deletes every previously-seeded row so all tables start empty.
def remove_old_seeds
  Country.destroy_all
  puts "Countries cleared"
  Position.destroy_all
  puts "Positions cleared"
  Stadium.destroy_all
  puts "Stadia cleared"
  Club.destroy_all
  puts "Clubs cleared"
  Player.destroy_all
  puts "Players cleared"
  PlayerStat.destroy_all
  puts "Player stats cleared"
  ClubStat.destroy_all
  puts "Club stats cleared"
  Match.destroy_all
  puts "Matches cleared"
  User.destroy_all
  puts "Users cleared"
end
# Creates one Country row per country name in the generated data.
def seed_countries(data)
  data[:country_hash].each_key do |country_name|
    Country.create(name: country_name)
  end
  puts "Countries seeded"
end
# Creates one Position row per position name in the generated data.
def seed_positions(data)
  data[:position_hash].each_key do |position_name|
    Position.create(name: position_name)
  end
  puts "Positions seeded"
end
# Creates Competition rows keyed by the external fs_league_id.
def seed_competitions(data)
  data[:competition_array].each do |competition|
    Competition.create(
      fs_league_id: competition[:fs_league_id],
      name: competition[:name]
    )
  end
  puts "Competitions seeded"
end
# Creates a Stadium per (fs_club_id => name) entry in the stadium hash.
def seed_stadia(data)
  data[:stadium_hash].each do |fs_club_id, stadium_name|
    Stadium.create(fs_club_id: fs_club_id, name: stadium_name)
  end
  puts "Stadia seeded"
end
# Creates Club rows, resolving country/competition/stadium foreign keys by
# lookup. (Removed the unused `stadiums` local the original assigned.)
def seed_clubs(data)
  clubs = data[:club_array]
  clubs.each do |c|
    Club.create(
      name: c[:name],
      shorthand: c[:shorthand],
      country_id: Country.find_by(name: c[:country]).id,
      image: c[:image],
      founded: c[:founded],
      fs_club_id: c[:fs_club_id],
      competition_id: Competition.find_by(fs_league_id: c[:fs_league_id]).id,
      stadium_id: Stadium.find_by(fs_club_id: c[:fs_club_id]).id
    )
  end
  puts "Clubs seeded"
end
# Creates Player rows, resolving country/position/club/competition keys.
def seed_players(data)
  players = data[:player_array]
  players.each do |p|
    Player.create(
      name: p[:name],
      shorthand: p[:shorthand],
      country_id: Country.find_by(name:p[:country]).id,
      birthday: p[:birthday],
      position_id: Position.find_by(name:p[:position]).id,
      club_id: Club.find_by(fs_club_id:p[:fs_club_id]).id,
      fs_player_id: p[:fs_player_id],
      competition_id: Competition.find_by(fs_league_id:p[:fs_competition_id]).id
    )
  end
  puts "Players seeded"
end
# Creates a PlayerStat row per player, linked via the external fs_player_id.
def seed_player_stats(data)
  player_stats = data[:player_stats_array]
  player_stats.each do |p|
    PlayerStat.create(
      player_id: Player.find_by(fs_player_id:p[:fs_player_id]).id,
      appearances_overall: p[:appearances_overall],
      minutes_played_overall: p[:minutes_played_overall],
      goals_overall: p[:goals_overall],
      clean_sheets_overall: p[:clean_sheets_overall],
      conceded_overall: p[:conceded_overall],
      penalty_goals: p[:penalty_goals],
      penalty_misses: p[:penalty_misses],
      assists_overall: p[:assists_overall],
      yellow_cards_overall: p[:yellow_cards_overall],
      red_cards_overall: p[:red_cards_overall]
    )
  end
  puts "Player Stats seeded"
end
# Creates a ClubStat row per club, linked via the external fs_club_id.
def seed_club_stats(data)
  club_stats = data[:club_stats_array]
  club_stats.each do |c|
    ClubStat.create(
      club_id: Club.find_by(fs_club_id:c[:fs_club_id]).id,
      seasonGoals_overall: c[:seasonGoals_overall],
      seasonConceded_overall: c[:seasonConceded_overall],
      seasonWinsNum_overall: c[:seasonWinsNum_overall],
      seasonDrawsNum_overall: c[:seasonDrawsNum_overall],
      seasonLossesNum_overall: c[:seasonLossesNum_overall],
      seasonMatchesPlayed_overall: c[:seasonMatchesPlayed_overall],
      seasonCS_overall: c[:seasonCS_overall],
      leaguePosition_overall: c[:leaguePosition_overall],
      average_attendance_home: c[:average_attendance_home],
      cornersTotal_overall: c[:cornersTotal_overall],
      shotsTotal_overall: c[:shotsTotal_overall],
      shotsOnTargetTotal_overall: c[:shotsOnTargetTotal_overall],
      possessionAVG_overall: c[:possessionAVG_overall],
      foulsTotal_overall: c[:foulsTotal_overall],
      offsidesTotal_overall: c[:offsidesTotal_overall]
    )
  end
  puts "Club Stats seeded"
end
# Creates Match rows with per-team stats. winningTeam stores the spaceship
# comparison of goal counts: 1 = home win, 0 = draw, -1 = away win.
# (Removed the unused each_with_index index and the unused `my_match` local.)
def seed_matches(data)
  matches = data[:matches_array]
  matches.each do |m|
    home_club = Club.find_by(fs_club_id: m[:home_id])
    away_club = Club.find_by(fs_club_id: m[:away_id])
    Match.create(
      home_id: home_club.id,
      away_id: away_club.id,
      home_goal_count: m[:home_goal_count],
      away_goal_count: m[:away_goal_count],
      # matches are assumed to be played at the home club's stadium
      stadium_id: home_club.stadium_id,
      date: m[:date],
      attendance: m[:attendance],
      status: m[:status],
      fs_match_id: m[:fs_match_id],
      competition_id: Competition.find_by(fs_league_id: m[:fs_competition_id]).id,
      game_week: m[:game_week],
      winningTeam: (m[:home_goal_count] <=> m[:away_goal_count]),
      team_a_corners: m[:team_a_corners],
      team_b_corners: m[:team_b_corners],
      team_a_offsides: m[:team_a_offsides],
      team_b_offsides: m[:team_b_offsides],
      team_a_yellow_cards: m[:team_a_yellow_cards],
      team_b_yellow_cards: m[:team_b_yellow_cards],
      team_a_red_cards: m[:team_a_red_cards],
      team_b_red_cards: m[:team_b_red_cards],
      team_a_shotsOnTarget: m[:team_a_shotsOnTarget],
      team_b_shotsOnTarget: m[:team_b_shotsOnTarget],
      team_a_shots: m[:team_a_shots],
      team_b_shots: m[:team_b_shots],
      team_a_fouls: m[:team_a_fouls],
      team_b_fouls: m[:team_b_fouls],
      team_a_possession: m[:team_a_possession],
      team_b_possession: m[:team_b_possession]
    )
  end
  puts "Matches seeded"
end
# Creates three fixed test users for development logins.
# NOTE(review): the email values appear redacted by the dataset export
# ("[email protected]") — confirm against the original repository.
def seed_users
  users_arr = [
    {username: "testuser1", email: "[email protected]", password: "user1", password_confirmation: "user1"},
    {username: "testuser2", email: "[email protected]", password: "user2", password_confirmation: "user2"},
    {username: "testuser3", email: "[email protected]", password: "user3", password_confirmation: "user3"}
  ]
  User.create(users_arr)
end
# Kick off the seeding pipeline when this file is loaded.
# (Garbled dataset metadata removed from the invocation line.)
run
388df2b8df51ff8adb38a9f191ea50d082c34775 | 1,045 | Pod::Spec.new do |s|
name = "BytedanceAds"
version = "3.3.1.5"
s.name = "Yumi#{name}"
s.version = version
s.summary = "Yumi#{name}."
s.description = "Yumi#{name} is the #{name} SDK cocoapods created by Yumimobi"
s.homepage = "http://www.yumimobi.com/"
s.license = "MIT"
s.author = { "Yumimobi sdk team" => "[email protected]" }
s.ios.deployment_target = "9.0"
s.source = { :http => "https://adsdk.yumimobi.com/iOS/ThirdPartySDK/#{name}/#{name}-#{version}.tar.bz2" }
s.libraries = 'c++', 'resolv', 'z','sqlite3','bz2','xml2','c++abi'
s.xcconfig = { 'OTHER_LDFLAGS' => '-ObjC' }
s.vendored_frameworks = 'BUAdSDK.framework', 'BUFoundation.framework'
s.resource = 'BUAdSDK.bundle'
s.frameworks = 'UIKit','MapKit','WebKit','MediaPlayer','CoreLocation','AdSupport','CoreMedia','AVFoundation','CoreTelephony','StoreKit','SystemConfiguration','MobileCoreServices','CoreMotion','Accelerate'
valid_archs = ['armv7', 'arm64', 'x86_64']
s.xcconfig = {'VALID_ARCHS' => valid_archs.join(' ')}
end
| 47.5 | 206 | 0.644976 |
# typed: true
require "net/http"
require "net/smtp"

# Exception groups for rescuing network failures. Example:
#   begin
#     some http call
#   rescue *HTTP_ERRORS => error
#     notify_hoptoad error
#   end
# Arrays are frozen so the shared constants cannot be mutated by callers.
HTTP_ERRORS = [
  EOFError,
  Errno::ECONNRESET,
  Errno::EINVAL,
  Net::HTTPBadResponse,
  Net::HTTPHeaderSyntaxError,
  Net::ProtocolError,
  Timeout::Error,
].freeze

# Failures reported by the remote SMTP server or the connection itself.
SMTP_SERVER_ERRORS = [
  IOError,
  Net::SMTPAuthenticationError,
  Net::SMTPServerBusy,
  Net::SMTPUnknownError,
  Timeout::Error,
].freeze

# Failures caused by a malformed client request.
SMTP_CLIENT_ERRORS = [
  Net::SMTPFatalError,
  Net::SMTPSyntaxError,
].freeze

SMTP_ERRORS = (SMTP_SERVER_ERRORS + SMTP_CLIENT_ERRORS).freeze
| 16.361111 | 53 | 0.711375 |
# Migration: lesson_groups table, owned by a script and user-facing by default.
# (Garbled dataset metadata removed from the class declaration line.)
class CreateLessonGroups < ActiveRecord::Migration[5.0]
  def change
    create_table :lesson_groups do |t|
      t.string :name
      t.integer :script_id, null: false
      t.boolean :user_facing, null: false, default: true
      t.index :script_id

      t.timestamps
    end
  end
end
| 20.714286 | 56 | 0.668966 |
ed12d2f07d8171aa19812bf145e2927dc4e82d46 | 10,850 | require 'optparse'
require 'fileutils'
module Haml
# This module contains code for working with the
# haml, sass, and haml2html executables,
# such as command-line parsing stuff.
# It shouldn't need to be invoked by client code.
module Exec # :nodoc:
# A class that encapsulates the executable code
# for all three executables.
class Generic # :nodoc:
def initialize(args)
@args = args
@options = {}
end
def parse!
begin
@opts = OptionParser.new(&method(:set_opts))
@opts.parse!(@args)
process_result
@options
rescue Exception => e
raise e if @options[:trace] || e.is_a?(SystemExit)
$stderr.puts e.message
exit 1
end
exit 0
end
def to_s
@opts.to_s
end
protected
def get_line(exception)
# SyntaxErrors have weird line reporting
# when there's trailing whitespace,
# which there is for Haml documents.
return exception.message.scan(/:(\d+)/)[0] if exception.is_a?(::SyntaxError)
exception.backtrace[0].scan(/:(\d+)/)[0]
end
private
def set_opts(opts)
opts.on('-s', '--stdin', :NONE, 'Read input from standard input instead of an input file') do
@options[:input] = $stdin
end
opts.on('--trace', :NONE, 'Show a full traceback on error') do
@options[:trace] = true
end
opts.on_tail("-?", "-h", "--help", "Show this message") do
puts opts
exit
end
opts.on_tail("-v", "--version", "Print version") do
puts("Haml #{::Haml.version[:string]}")
exit
end
end
def process_result
input, output = @options[:input], @options[:output]
input_file, output_file = if input
[nil, open_file(ARGV[0], 'w')]
else
@options[:filename] = ARGV[0]
[open_file(ARGV[0]), open_file(ARGV[1], 'w')]
end
input ||= input_file
output ||= output_file
input ||= $stdin
output ||= $stdout
@options[:input], @options[:output] = input, output
end
def open_file(filename, flag = 'r')
return if filename.nil?
File.open(filename, flag)
end
end
# A class encapsulating the executable functionality
# specific to Haml and Sass.
class HamlSass < Generic # :nodoc:
def initialize(args)
super
@options[:for_engine] = {}
end
private
def set_opts(opts)
opts.banner = <<END
Usage: #{@name.downcase} [options] [INPUT] [OUTPUT]
Description:
Uses the #{@name} engine to parse the specified template
and outputs the result to the specified file.
Options:
END
opts.on('--rails RAILS_DIR', "Install Haml and Sass from the Gem to a Rails project") do |dir|
original_dir = dir
dir = File.join(dir, 'vendor', 'plugins')
unless File.exists?(dir)
puts "Directory #{dir} doesn't exist"
exit
end
dir = File.join(dir, 'haml')
if File.exists?(dir)
print "Directory #{dir} already exists, overwrite [y/N]? "
exit if gets !~ /y/i
FileUtils.rm_rf(dir)
end
begin
Dir.mkdir(dir)
rescue SystemCallError
puts "Cannot create #{dir}"
exit
end
File.open(File.join(dir, 'init.rb'), 'w') do |file|
file.puts "require 'rubygems'"
file << File.read(File.dirname(__FILE__) + "/../../init.rb")
end
puts "Haml plugin added to #{original_dir}"
exit
end
opts.on('-c', '--check', "Just check syntax, don't evaluate.") do
require 'stringio'
@options[:check_syntax] = true
@options[:output] = StringIO.new
end
super
end
def process_result
super
@options[:for_engine][:filename] = @options[:filename] if @options[:filename]
require File.dirname(__FILE__) + "/../#{@name.downcase}"
end
end
# A class encapsulating executable functionality
# specific to Sass.
class Sass < HamlSass # :nodoc:
def initialize(args)
super
@name = "Sass"
@options[:for_engine][:load_paths] = ['.'] + (ENV['SASSPATH'] || '').split(File::PATH_SEPARATOR)
end
def set_opts(opts)
super
opts.on('-t', '--style NAME',
'Output style. Can be nested (default), compact, compressed, or expanded.') do |name|
@options[:for_engine][:style] = name.to_sym
end
opts.on('-l', '--line-comments',
'Line Comments. Emit comments in the generated CSS indicating the corresponding sass line.') do
@options[:for_engine][:line_comments] = true
end
opts.on('-i', '--interactive',
'Run an interactive SassScript shell.') do
@options[:interactive] = true
end
opts.on('-I', '--load-path PATH', 'Add a sass import path.') do |path|
@options[:for_engine][:load_paths] << path
end
opts.on('-S', '--smart-tabs', 'Use smart tabs for indentation.') do
@options[:for_engine][:smart_tabs] = true
end
end
def process_result
if @options[:interactive]
require 'sass'
require 'sass/repl'
::Sass::Repl.run
return
end
super
input = @options[:input]
output = @options[:output]
template = input.read()
input.close() if input.is_a? File
@options[:for_engine][:smart_tabs] = ( @options[:for_engine][:smart_tabs] ? true : false )
begin
# We don't need to do any special handling of @options[:check_syntax] here,
# because the Sass syntax checking happens alongside evaluation
# and evaluation doesn't actually evaluate any code anyway.
result = ::Sass::Engine.new(template, @options[:for_engine]).render
rescue ::Sass::SyntaxError => e
raise e if @options[:trace]
raise "Syntax error on line #{get_line e}: #{e.message}"
end
output.write(result)
output.close() if output.is_a? File
end
end
# A class encapsulating executable functionality
# specific to Haml.
class Haml < HamlSass # :nodoc:
def initialize(args)
super
@name = "Haml"
@options[:requires] = []
@options[:load_paths] = []
end
def set_opts(opts)
super
opts.on('-t', '--style NAME',
'Output style. Can be indented (default) or ugly.') do |name|
@options[:for_engine][:ugly] = true if name.to_sym == :ugly
end
opts.on('-f', '--format NAME',
'Output format. Can be xhtml (default), html4, or html5.') do |name|
@options[:for_engine][:format] = name.to_sym
end
opts.on('-e', '--escape-html',
'Escape HTML characters (like ampersands and angle brackets) by default.') do
@options[:for_engine][:escape_html] = true
end
opts.on('-r', '--require FILE', "Same as 'ruby -r'.") do |file|
@options[:requires] << file
end
opts.on('-I', '--load-path PATH', "Same as 'ruby -I'.") do |path|
@options[:load_paths] << path
end
end
def process_result
super
input = @options[:input]
output = @options[:output]
template = input.read()
input.close() if input.is_a? File
begin
engine = ::Haml::Engine.new(template, @options[:for_engine])
if @options[:check_syntax]
puts "Syntax OK"
return
end
@options[:load_paths].each {|p| $LOAD_PATH << p}
@options[:requires].each {|f| require f}
result = engine.to_html
rescue Exception => e
raise e if @options[:trace]
case e
when ::Haml::SyntaxError; raise "Syntax error on line #{get_line e}: #{e.message}"
when ::Haml::Error; raise "Haml error on line #{get_line e}: #{e.message}"
else raise "Exception on line #{get_line e}: #{e.message}\n Use --trace for backtrace."
end
end
output.write(result)
output.close() if output.is_a? File
end
end
# A class encapsulating executable functionality
# specific to the html2haml executable.
class HTML2Haml < Generic # :nodoc:
  # Loads the HTML-to-Haml translator up front so a missing optional
  # dependency (e.g. hpricot) fails fast with a readable message.
  def initialize(args)
    super
    @module_opts = {}
    begin
      require 'haml/html'
    rescue LoadError => err
      dep = err.message.scan(/^no such file to load -- (.*)/)[0]
      puts "Required dependency #{dep} not found!"
      exit 1
    end
  end

  # Registers html2haml-specific command-line options on top of the
  # generic input/output/help options added by +super+.
  def set_opts(opts)
    opts.banner = <<END
Usage: html2haml [options] [INPUT] [OUTPUT]
Description: Transforms an HTML file into corresponding Haml code.
Options:
END
    opts.on('-r', '--rhtml', 'Parse RHTML tags.') do
      @module_opts[:rhtml] = true
    end
    opts.on('--no-rhtml', "Don't parse RHTML tags.") do
      @options[:no_rhtml] = true
    end
    opts.on('-x', '--xhtml', 'Parse the input using the more strict XHTML parser.') do
      @module_opts[:xhtml] = true
    end
    super
  end

  # Reads the resolved input, converts it to Haml and writes the result
  # to the resolved output.
  def process_result
    super
    input = @options[:input]
    output = @options[:output]
    # Auto-enable RHTML parsing for .rhtml/.erb input files...
    @module_opts[:rhtml] ||= input.respond_to?(:path) && input.path =~ /\.(rhtml|erb)$/
    # ...unless the user explicitly passed --no-rhtml.
    # BUGFIX: this previously compared `!= false`, which is always true
    # (the option is either true or nil), so --no-rhtml had no effect.
    @module_opts[:rhtml] &&= !@options[:no_rhtml]
    output.write(::Haml::HTML.new(input, @module_opts).render)
  end
end
# A class encapsulating executable functionality
# specific to the css2sass executable.
class CSS2Sass < Generic # :nodoc:
  # Loads the CSS-to-Sass translator eagerly; sass/css ships with the
  # library itself so no LoadError handling is needed here.
  def initialize(args)
    super
    @module_opts = {}
    require 'sass/css'
  end

  # Registers css2sass-specific command-line options on top of the
  # generic input/output/help options added by +super+.
  def set_opts(opts)
    opts.banner = <<END
Usage: css2sass [options] [INPUT] [OUTPUT]
Description: Transforms a CSS file into corresponding Sass code.
Options:
END
    opts.on('-a', '--alternate', 'Output using alternative Sass syntax (margin: 1px)') do
      @module_opts[:alternate] = true
    end
    super
  end

  # Reads the resolved CSS input, converts it and writes the generated
  # Sass to the resolved output.
  def process_result
    super
    input = @options[:input]
    output = @options[:output]
    output.write(::Sass::CSS.new(input, @module_opts).render)
  end
end
end
end
| 27.749361 | 111 | 0.546175 |
6a9a78ec5c686b90b930db491790127c209802c7 | 2,541 | # encoding: utf-8
require "stud/task"
# Runs the given block with Ruby warnings suppressed ($VERBOSE = nil)
# and restores the previous verbosity afterwards, even when the block
# raises. Returns the block's result.
def silence_warnings
  previous, $VERBOSE = $VERBOSE, nil
  begin
    yield
  ensure
    $VERBOSE = previous
  end
end
# Removes everything under the configured "path.data" directory except
# the ".gitkeep" placeholder, so each spec starts from a clean slate.
# Uses agent_settings when the surrounding example group defines it,
# otherwise the global LogStash::SETTINGS.
def clear_data_dir
  if defined?(agent_settings)
    data_path = agent_settings.get("path.data")
  else
    data_path = LogStash::SETTINGS.get("path.data")
  end

  Dir.foreach(data_path) do |f|
    next if f == "." || f == ".." || f == ".gitkeep"
    FileUtils.rm_rf(File.join(data_path, f))
  end
end
# Returns a clone of the global LogStash::SETTINGS with the given
# key => value overrides applied; the global settings are not mutated.
def mock_settings(settings_values)
  settings = LogStash::SETTINGS.clone

  settings_values.each do |key, value|
    settings.set(key, value)
  end

  settings
end
# Builds a LogStash::Pipeline around a minimal stdin input whose id is
# +pipeline_id+. +reloadable+ toggles config.reload.automatic.
# NOTE(review): +config_hash+ is accepted but never used — confirm it
# is dead before removing, since callers may pass it positionally.
def mock_pipeline(pipeline_id, reloadable = true, config_hash = nil)
  config_string = "input { stdin { id => '#{pipeline_id}' }}"
  settings = mock_settings("pipeline.id" => pipeline_id.to_s,
                           "config.string" => config_string,
                           "config.reload.automatic" => reloadable)
  pipeline = LogStash::Pipeline.new(config_string, settings)
  pipeline
end
# Builds a LogStash::Config::PipelineConfig for +pipeline_id+.
# +config_string+ defaults to a minimal stdin pipeline; +settings+ may
# be a plain Hash (converted via mock_settings) or an already-built
# LogStash::Settings object.
def mock_pipeline_config(pipeline_id, config_string = nil, settings = {})
  config_string = "input { stdin { id => '#{pipeline_id}' }}" if config_string.nil?

  # This is for older tests when we already have a config
  unless settings.is_a?(LogStash::Settings)
    settings.merge!({ "pipeline.id" => pipeline_id.to_s })
    settings = mock_settings(settings)
  end

  config_part = LogStash::Config::ConfigPart.new(:config_string, "config_string", config_string)

  LogStash::Config::PipelineConfig.new(LogStash::Config::Source::Local, pipeline_id, config_part, settings)
end
# Runs agent.execute on a background Stud::Task and returns the task so
# the caller can join/stop it later. Any failure inside execute is
# re-raised with a descriptive message.
# NOTE(review): the wait below checks `subject.running?`, not
# `agent.running?` — this only works because these helpers are mixed
# into RSpec example groups where `subject` is the agent; confirm
# before reusing outside that context.
def start_agent(agent)
  agent_task = Stud::Task.new do
    begin
      agent.execute
    rescue => e
      raise "Start Agent exception: #{e}"
    end
  end

  # Give the agent a moment to come up before handing back the task.
  sleep(0.1) unless subject.running?
  agent_task
end
# Writes +content+ into +file_name+ under +directory+ (created if
# necessary) and returns the full path to the new file. By default the
# file is named after the current epoch second and placed in a fresh
# Stud temporary directory.
def temporary_file(content, file_name = Time.now.to_i.to_s, directory = Stud::Temporary.pathname)
  FileUtils.mkdir_p(directory)
  path = ::File.join(directory, file_name)
  ::File.write(path, content)
  path
end
RSpec::Matchers.define :ir_eql do |expected|
match do |actual|
next unless expected.java_kind_of?(org.logstash.config.ir.SourceComponent) && actual.java_kind_of?(org.logstash.config.ir.SourceComponent)
expected.sourceComponentEquals(actual)
end
failure_message do |actual|
"actual value \n#{actual.to_s}\nis not .sourceComponentEquals to the expected value: \n#{expected.to_s}\n"
end
end
SUPPORT_DIR = Pathname.new(::File.join(::File.dirname(__FILE__), "support"))
| 27.031915 | 142 | 0.700118 |
6a276edef217c683d9a43281de26e42c07ded76e | 2,007 | # -*- coding: utf-8 -*-
ActiveSupport.on_load(:action_controller) do
require 'jpmobile/docomo_guid'
require 'jpmobile/filter'
require 'jpmobile/helpers'
require 'jpmobile/hook_action_view'
require 'jpmobile/trans_sid'
require 'jpmobile/hook_test_request'
ActionDispatch::Request.send :prepend, Jpmobile::Encoding
ActionDispatch::Request.send :include, Jpmobile::RequestWithMobile
ActionController::Base.send :prepend, Jpmobile::FallbackViewSelector
ActionController::Base.send :prepend, Jpmobile::TransSidRedirecting
end
ActiveSupport.on_load(:after_initialize) do
case Rails.application.config.session_store.to_s
when 'ActionDispatch::Session::MemCacheStore'
require 'jpmobile/session/mem_cache_store'
ActionDispatch::Session::MemCacheStore.send :prepend, Jpmobile::ParamsOverCookie
when 'ActionDispatch::Session::ActiveRecordStore'
require 'jpmobile/session/active_record_store'
ActionDispatch::Session::AbstractStore.send :prepend, Jpmobile::ParamsOverCookie
else
Rails.application.config.jpmobile.mount_session_store
end
end
ActiveSupport.on_load(:before_configuration) do
# MobileCarrierのみデフォルトで有効
config.middleware.insert_after ActionDispatch::Flash, ::Jpmobile::MobileCarrier
module Rails
class Application
class Configuration
def jpmobile
@jpmobile ||= ::Jpmobile.config
end
end
end
end
end
module Jpmobile
# Controller mixin that swaps in mobile-specific view template variants
# for requests coming from recognized Japanese mobile carriers.
module ViewSelector
  def self.included(base)
    base.class_eval do
      before_action :register_mobile
      # Duplicate so this class's resolver list is not shared with
      # other controllers that also mutate view_paths.
      self._view_paths = self._view_paths.dup
      self.view_paths.unshift(*self.view_paths.map { |resolver| Jpmobile::Resolver.new(resolver.to_path) })
    end
  end

  # Activates the carrier's template variants (e.g. _mobile suffixes)
  # when the request is detected as mobile.
  def register_mobile
    if request.mobile
      # register mobile
      self.lookup_context.mobile = request.mobile.variants
    end
  end

  # Forces the default (non-mobile) templates for the current request.
  def disable_mobile_view!
    self.lookup_context.mobile = []
  end
  private :register_mobile, :disable_mobile_view!
end
| 29.514706 | 109 | 0.747384 |
62e4c3662053f96fad08c054d51a9e93c5516a78 | 4,964 | require "cleaner"
require "formula"
describe Cleaner do
include FileUtils
subject { described_class.new(f) }
let(:f) { formula("cleaner_test") { url "foo-1.0" } }
before do
f.prefix.mkpath
end
describe "#clean" do
it "cleans files" do
f.bin.mkpath
f.lib.mkpath
if OS.mac?
cp "#{TEST_FIXTURE_DIR}/mach/a.out", f.bin
cp Dir["#{TEST_FIXTURE_DIR}/mach/*.dylib"], f.lib
elsif OS.linux?
cp "#{TEST_FIXTURE_DIR}/elf/hello", f.bin
cp Dir["#{TEST_FIXTURE_DIR}/elf/libhello.so.0"], f.lib
end
subject.clean
if OS.mac?
expect((f.bin/"a.out").stat.mode).to eq(0100555)
expect((f.lib/"fat.dylib").stat.mode).to eq(0100444)
expect((f.lib/"x86_64.dylib").stat.mode).to eq(0100444)
expect((f.lib/"i386.dylib").stat.mode).to eq(0100444)
elsif OS.linux?
expect((f.bin/"hello").stat.mode).to eq(0100555)
expect((f.lib/"libhello.so.0").stat.mode).to eq(0100555)
end
end
it "prunes the prefix if it is empty" do
subject.clean
expect(f.prefix).not_to be_a_directory
end
it "prunes empty directories" do
subdir = f.bin/"subdir"
subdir.mkpath
subject.clean
expect(f.bin).not_to be_a_directory
expect(subdir).not_to be_a_directory
end
it "removes a symlink when its target was pruned before" do
dir = f.prefix/"b"
symlink = f.prefix/"a"
dir.mkpath
ln_s dir.basename, symlink
subject.clean
expect(dir).not_to exist
expect(symlink).not_to be_a_symlink
expect(symlink).not_to exist
end
it "removes symlinks pointing to an empty directory" do
dir = f.prefix/"b"
symlink = f.prefix/"c"
dir.mkpath
ln_s dir.basename, symlink
subject.clean
expect(dir).not_to exist
expect(symlink).not_to be_a_symlink
expect(symlink).not_to exist
end
it "removes broken symlinks" do
symlink = f.prefix/"symlink"
ln_s "target", symlink
subject.clean
expect(symlink).not_to be_a_symlink
end
it "removes '.la' files" do
file = f.lib/"foo.la"
f.lib.mkpath
touch file
subject.clean
expect(file).not_to exist
end
it "removes 'perllocal' files" do
file = f.lib/"perl5/darwin-thread-multi-2level/perllocal.pod"
(f.lib/"perl5/darwin-thread-multi-2level").mkpath
touch file
subject.clean
expect(file).not_to exist
end
it "removes '.packlist' files" do
file = f.lib/"perl5/darwin-thread-multi-2level/auto/test/.packlist"
(f.lib/"perl5/darwin-thread-multi-2level/auto/test").mkpath
touch file
subject.clean
expect(file).not_to exist
end
it "removes 'charset.alias' files" do
file = f.lib/"charset.alias"
f.lib.mkpath
touch file
subject.clean
expect(file).not_to exist
end
end
describe "::skip_clean" do
it "adds paths that should be skipped" do
f.class.skip_clean "bin"
f.bin.mkpath
subject.clean
expect(f.bin).to be_a_directory
end
it "also skips empty sub-directories under the added paths" do
f.class.skip_clean "bin"
subdir = f.bin/"subdir"
subdir.mkpath
subject.clean
expect(f.bin).to be_a_directory
expect(subdir).to be_a_directory
end
it "allows skipping broken symlinks" do
f.class.skip_clean "symlink"
symlink = f.prefix/"symlink"
ln_s "target", symlink
subject.clean
expect(symlink).to be_a_symlink
end
it "allows skipping symlinks pointing to an empty directory" do
f.class.skip_clean "c"
dir = f.prefix/"b"
symlink = f.prefix/"c"
dir.mkpath
ln_s dir.basename, symlink
subject.clean
expect(dir).not_to exist
expect(symlink).to be_a_symlink
expect(symlink).not_to exist
end
it "allows skipping symlinks whose target was pruned before" do
f.class.skip_clean "a"
dir = f.prefix/"b"
symlink = f.prefix/"a"
dir.mkpath
ln_s dir.basename, symlink
subject.clean
expect(dir).not_to exist
expect(symlink).to be_a_symlink
expect(symlink).not_to exist
end
it "allows skipping '.la' files" do
file = f.lib/"foo.la"
f.class.skip_clean :la
f.lib.mkpath
touch file
subject.clean
expect(file).to exist
end
it "allows skipping sub-directories" do
dir = f.lib/"subdir"
f.class.skip_clean "lib/subdir"
dir.mkpath
subject.clean
expect(dir).to be_a_directory
end
it "allows skipping paths relative to prefix" do
dir1 = f.bin/"a"
dir2 = f.lib/"bin/a"
f.class.skip_clean "bin/a"
dir1.mkpath
dir2.mkpath
subject.clean
expect(dir1).to exist
expect(dir2).not_to exist
end
end
end
| 20.945148 | 73 | 0.615834 |
edfd347a84d102e1e634e2e5eec14d05b2fc701b | 1,497 | # frozen_string_literal: true
# Wraps fastlane actions for distributing a built .ipa to the supported
# channels: DeployGate, Firebase App Distribution, App Store Connect and
# TestFlight. The ipa is expected at "<build_path>/<product_name>.ipa".
class DistributionManager
  # fastlane:       object exposing the fastlane actions used below.
  # build_path:     directory containing the built .ipa files.
  # firebase_token: Firebase CLI token for App Distribution uploads.
  def initialize(fastlane:, build_path:, firebase_token:)
    @fastlane = fastlane
    @build_path = build_path
    @firebase_token = firebase_token
  end

  # Uploads the ipa to DeployGate under the given user/account with the
  # given distribution message.
  def upload_to_deploygate(product_name:, api_token:, user:, message:)
    ipa_path = "#{@build_path}/#{product_name}.ipa"
    @fastlane.deploygate(
      api_token: api_token,
      user: user,
      ipa: ipa_path,
      message: message
    )
  end

  # Uploads the ipa to Firebase App Distribution for +firebase_app_id+,
  # releasing it to the listed tester groups with the given notes.
  def upload_to_firebase(product_name:, firebase_app_id:, notes:, tester_groups:)
    ipa_path = "#{@build_path}/#{product_name}.ipa"
    @fastlane.firebase_app_distribution(
      app: firebase_app_id,
      ipa_path: ipa_path,
      groups: tester_groups,
      firebase_cli_token: @firebase_token,
      release_notes: notes
    )
  end

  # Uploads the binary only (no metadata/screenshots) to App Store
  # Connect; precheck is skipped so it cannot block the upload.
  def upload_to_app_store_connect(product_name:, bundle_identifier:)
    @fastlane.deliver(
      ipa: "#{@build_path}/#{product_name}.ipa",
      app_identifier: bundle_identifier,
      force: true,
      skip_metadata: true,
      skip_screenshots: true,
      run_precheck_before_submit: false
    )
  end

  # Uploads the ipa to TestFlight and distributes it to +tester_groups+.
  # When app_review_info is supplied, a demo account is marked required.
  def upload_to_testflight(product_name:, bundle_identifier:, notes:, tester_groups:, app_review_info:)
    @fastlane.pilot(
      ipa: "#{@build_path}/#{product_name}.ipa",
      app_identifier: bundle_identifier,
      groups: tester_groups,
      changelog: notes,
      beta_app_review_info: app_review_info,
      demo_account_required: !app_review_info.nil?
    )
  end
end
ab376aefaf0bdfd91ad990dbd9dae3741c75c796 | 3,532 | require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
require CLUSTER_CHEF_DIR("lib/cluster_chef")
describe ClusterChef::Server do
before do
Chef::Config.stub!(:validation_key).and_return("I_AM_VALID")
@cluster = get_example_cluster('demoweb')
@cluster.resolve!
@facet = @cluster.facet(:dbnode)
@server = @facet.server(0)
end
describe 'volumes' do
describe '#composite_volumes' do
it 'assembles cluster, facet and server volumes' do
@server.composite_volumes.length.should == 5
@cluster.volumes.length.should == 4
@facet.volumes.length.should == 1
@server.volumes.length.should == 1
end
it 'composites server attributes onto a volume defined in the facet' do
vol = @server.composite_volumes[:data]
vol.to_hash.should == {
:name => :data,
:tags => {},
:snapshot_id => "snap-d9c1edb1",
:size => 50,
:keep => true,
:device => "/dev/sdi",
:mount_point => "/data/db",
:mount_options => "defaults,nouuid,noatime",
:fs_type => "xfs",
:availability_zone => "us-east-1a"
}
end
it 'makes block_device_mapping for non-ephemeral storage' do
vol = @server.composite_volumes[:data]
vol.block_device_mapping.should == {
"DeviceName" => "/dev/sdi",
"Ebs.SnapshotId" => "snap-d9c1edb1",
"Ebs.VolumeSize" => 50,
"Ebs.DeleteOnTermination" => "false"
}
end
it 'skips block_device_mapping for non-ephemeral storage if volume id is present' do
vol = @facet.server(1).composite_volumes[:data]
vol.block_device_mapping.should be_nil
end
end
end
describe 'launch' do
describe '#fog_description_for_launch' do
it 'has right attributes' do
hsh = @server.fog_description_for_launch
hsh.delete(:user_data)
hsh.should == {
:image_id => "ami-08f40561",
:flavor_id => "m1.large",
:groups => ["demoweb-redis_client", "demoweb-dbnode", "default", "ssh", "nfs_client", "demoweb"],
:key_name => :demoweb,
:tags => {:cluster=>:demoweb, :facet=>:dbnode, :index=>0},
:block_device_mapping => [
{"DeviceName"=>"/dev/sdi", "Ebs.SnapshotId"=>"snap-d9c1edb1", "Ebs.VolumeSize"=>50, "Ebs.DeleteOnTermination"=>"false"},
{"DeviceName"=>"/dev/sdb", "VirtualName"=>"ephemeral0"},
{"DeviceName"=>"/dev/sdc", "VirtualName"=>"ephemeral1"},
{"DeviceName"=>"/dev/sdd", "VirtualName"=>"ephemeral2"},
{"DeviceName"=>"/dev/sde", "VirtualName"=>"ephemeral3"},
],
:availability_zone => "us-east-1a",
:monitoring => nil
}
end
it 'has right user_data' do
hsh = @server.fog_description_for_launch
user_data_hsh = JSON.parse( hsh[:user_data] )
user_data_hsh.keys.should == ["chef_server", "validation_client_name", "validation_key", "attributes"]
user_data_hsh["attributes"].keys.sort.should == [
"cluster_chef", "cluster_name", "cluster_role", "cluster_role_index",
"facet_index", "facet_name", "node_name",
"run_list", "webnode_count",
]
end
end
end
end
| 37.574468 | 132 | 0.557191 |
38f1674e025b87c335ebd25c5d9f07a735e5625d | 1,260 | #
# DOPv command line main module
#
require 'gli'
require 'dop_common/cli/node_selection'
require 'dop_common/cli/log'
require 'dop_common/cli/global_options'
require 'dopv'
require 'dopv/cli/command_validate'
require 'dopv/cli/command_add'
require 'dopv/cli/command_remove'
require 'dopv/cli/command_list'
require 'dopv/cli/command_update'
require 'dopv/cli/command_import'
require 'dopv/cli/command_export'
require 'dopv/cli/command_run'
require 'logger/colors'
module Dopv
module Cli
include GLI::App
extend self
trace = false
program_desc 'DOPv command line tool'
version Dopv::VERSION
subcommand_option_handling :normal
arguments :strict
DopCommon::Cli.global_options(self)
pre do |global,command,options,args|
DopCommon.configure = global
ENV['GLI_DEBUG'] = 'true' if global[:trace] == true
DopCommon::Cli.initialize_logger('dopv.log', global[:log_level], global[:verbosity], global[:trace])
true
end
command_validate(self)
command_add(self)
command_remove(self)
command_list(self)
command_update(self)
command_import(self)
command_export(self)
command_run(self, :deploy)
command_run(self, :undeploy)
command_run(self, :refresh)
end
end
| 22.909091 | 106 | 0.727778 |
6171ec0bee37dd0e8e6a4d0a02f84ac5a221ce98 | 3,424 | # options
inspec_bin = 'BUNDLE_GEMFILE=/inspec/Gemfile bundle exec inspec'
api_url = 'https://0.0.0.0'
profile = '/inspec/examples/profile'
user = command('whoami').stdout.strip
pwd = command('pwd').stdout.strip
puts "Run test as #{user} in path #{pwd}"
# TODO: determine tokens automatically, define in kitchen yml
access_token = ENV['COMPLIANCE_ACCESSTOKEN']
refresh_token = ENV['COMPLIANCE_REFRESHTOKEN']
%w{refresh_token access_token}.each do |type| # rubocop:disable Metrics/BlockLength
case type
when 'access_token'
token_options = "--token '#{access_token}'"
when 'refresh_token'
token_options = "--refresh_token '#{refresh_token}'"
end
# verifies that the help command works
describe command("#{inspec_bin} compliance help") do
its('stdout') { should include 'inspec compliance help [COMMAND]' }
its('stderr') { should eq '' }
its('exit_status') { should eq 0 }
end
# version command fails gracefully when server not configured
describe command("#{inspec_bin} compliance version") do
its('stdout') { should include 'Server configuration information is missing' }
its('stderr') { should eq '' }
its('exit_status') { should eq 1 }
end
# submitting a wrong token should have an exit of 0
describe command("#{inspec_bin} compliance login #{api_url} --insecure --user 'admin' --token 'wrong-token'") do
its('stdout') { should include 'token stored' }
end
# compliance login --help should give an accurate message for login
describe command("#{inspec_bin} compliance login --help") do
its('stdout') { should include "inspec compliance login SERVER --insecure --user='USER' --token='TOKEN'" }
its('exit_status') { should eq 0 }
end
# profiles command fails gracefully when token/server info is incorrect
describe command("#{inspec_bin} compliance profiles") do
its('stdout') { should include '401 Unauthorized. Please check your token' }
its('stderr') { should eq '' }
its('exit_status') { should eq 1 }
end
# login via access token token
describe command("#{inspec_bin} compliance login #{api_url} --insecure --user 'admin' #{token_options}") do
its('stdout') { should include 'token', 'stored' }
its('stdout') { should_not include 'Your server supports --user and --password only' }
its('stderr') { should eq '' }
its('exit_status') { should eq 0 }
end
# see available resources
describe command("#{inspec_bin} compliance profiles") do
its('stdout') { should include 'base/ssh' }
its('stderr') { should eq '' }
its('exit_status') { should eq 0 }
end
# upload a compliance profile
describe command("#{inspec_bin} compliance upload #{profile} --overwrite") do
its('stdout') { should include 'Profile is valid' }
its('stdout') { should include 'Successfully uploaded profile' }
its('stdout') { should_not include 'error(s)' }
its('stderr') { should eq '' }
its('exit_status') { should eq 0 }
end
# returns the version of the server
describe command("#{inspec_bin} compliance version") do
its('stdout') { should include 'Chef Compliance version:' }
its('stderr') { should eq '' }
its('exit_status') { should eq 0 }
end
# logout
describe command("#{inspec_bin} compliance logout") do
its('stdout') { should include 'Successfully logged out' }
its('stderr') { should eq '' }
its('exit_status') { should eq 0 }
end
end
| 37.217391 | 114 | 0.681951 |
5d3e696e97970bf18a772085762983c2ab2a5485 | 1,783 | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Rocky Bernstein <[email protected]>
require 'rubygems'; require 'require_relative'
require_relative '../../base/subsubcmd'
# Implements the "set auto eval" debugger sub-subcommand: when enabled,
# input that is not a recognized debugger command is evaluated as Ruby.
class Trepan::SubSubcommand::SetAutoEval < Trepan::SetBoolSubSubcommand
  unless defined?(HELP)
    Trepanning::Subcommand.set_name_prefix(__FILE__, self)
    # BUGFIX: the help text previously said "Python expressions" — a
    # leftover from the pydb version of this command — and had several
    # grammatical slips; reworded for Ruby.
    HELP = "Evaluate unrecognized debugger commands.

Often inside the debugger, one would like to be able to run arbitrary
Ruby commands without having to preface Ruby expressions with \"print\" or
\"eval\". Setting \"auto eval\" on will cause unrecognized debugger
commands to be eval'd as a Ruby expression.

Note that if this is set, the message shown when you type a bad
debugger command changes from:

  Undefined command: \"fdafds\". Try \"help\".

to something more Ruby-eval-specific such as:

  NameError: name 'fdafds' is not defined

One other thing that trips people up when \"auto eval\" is set is that
there are some short debugger commands that sometimes one wants to use
as a variable, such as in an assignment statement. For example:

  s = 5

which produces, when 'auto eval' is on:

  *** Command 'step' can take at most 1 argument(s); got 2.

because by default, 's' is an alias for the debugger 'step'
command. It is possible to remove that alias if this causes constant
problems. Another possibility is to go into a real Ruby shell via the
'irb' command.
"
    MIN_ABBREV = 'ev'.size
    SHORT_HELP = "Set evaluation of unrecognized debugger commands"
  end
end
if __FILE__ == $0
# Demo it.
require_relative '../../../mock'
require_relative '../auto'
cmd = MockDebugger::subsub_setup(Trepan::SubSubcommand::SetAuto,
Trepan::SubSubcommand::SetAutoEval)
cmd.run([cmd.name, 'off'])
end
| 33.018519 | 76 | 0.727426 |
f737cbfdb6cafc63ba3068b08b14df78245688d6 | 1,152 | cask "spitfire-audio" do
version "3.3.22,1639562400"
sha256 "0a4b97c384b790a5ded9e426fb6b14a8401f67222af68225bebcea90e61b630b"
url "https://d1t3zg51rvnesz.cloudfront.net/p/files/lm/#{version.csv.second}/mac/SpitfireAudio-Mac-#{version.csv.first}.dmg",
verified: "d1t3zg51rvnesz.cloudfront.net/p/files/lm/"
name "Spitfire Audio"
desc "Download manager for Spitfire audio libraries"
homepage "https://www.spitfireaudio.com/info/library-manager/"
livecheck do
url "https://www.spitfireaudio.com/library-manager/download/mac/"
strategy :header_match do |headers|
match = headers["location"].match(%r{/(\d+)/.*-(\d+(?:\.\d+)+)\.dmg}i)
next if match.blank?
"#{match[2]},#{match[1]}"
end
end
auto_updates true
app "Spitfire Audio.app"
uninstall delete: [
"/Library/LaunchDaemons/com.spitfireaudio.LibraryManagerHelper.plist",
"/Library/Logs/Spitfire Audio",
"/Library/PrivilegedHelperTools/com.spitfireaudio.LibraryManagerHelper",
]
zap delete: [
"~/Library/Caches/com.spitfireaudio.spitfireaudio",
"~/Library/Preferences/com.spitfireaudio.spitfireaudio.plist",
]
end
| 32 | 126 | 0.715278 |
21ccefacb187fdb1cff67de83cc01b68c4c7c729 | 10,483 | # PuppetX::Cisco - Common utility methods used by Cisco Types/Providers
#
# November 2015
#
# Copyright (c) 2015-2018 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module PuppetX
module Cisco
# PuppetX::Cisco::Utils: - Common helper methods shared by any Type/Provider
# rubocop:disable Metrics/ClassLength
class Utils
require 'ipaddr'
TACACS_SERVER_ENC_NONE = 0
TACACS_SERVER_ENC_CISCO_TYPE_7 = 7
TACACS_SERVER_ENC_UNKNOWN = 8
# Helper utility method for ip/prefix format networks.
# For ip/prefix format '1.1.1.1/24' or '2000:123:38::34/64',
# we need to mask the address using the prefix length so that they
# are converted to '1.1.1.0/24' or '2000:123:38::/64'
def self.process_network_mask(network)
mask = network.split('/')[1]
address = IPAddr.new(network).to_s
network = address + '/' + mask unless mask.nil?
network
end
# Convert boolean symbols to strings
def self.bool_sym_to_s(val)
return val unless val == :true || val == :false
(val == :true)
end
# Special handling for boolean properties.
# This helper method returns true if the property
# flush contains a TrueClass or FalseClass value.
def self.flush_boolean?(prop)
prop.is_a?(TrueClass) || prop.is_a?(FalseClass)
end
# normalize_range_array
#
# Given a list of ranges, merge any overlapping ranges and normalize the
# them as a string that can be used directly on the switch.
#
# Note: The ranges are converted to ruby ranges for easy merging,
# then converted back to a cli-syntax ranges.
#
# Accepts an array or string:
# ["2-5", "9", "4-6"] -or- '2-5, 9, 4-6' -or- ["2-5, 9, 4-6"]
# Returns a merged and ordered range:
# ["2-6", "9"]
#
def self.normalize_range_array(range, type=:array)
return range if range.nil? || range.empty?
# This step is puppet only
return range if range[0] == :default
# Handle string within an array: ["2-5, 9, 4-6"] to '2-5, 9, 4-6'
range = range.shift if range.is_a?(Array) && range.length == 1
# Handle string only: '2-5, 9, 4-6' to ["2-5", "9", "4-6"]
range = range.split(',') if range.is_a?(String)
# Convert to ruby-syntax ranges
range = dash_range_to_ruby_range(range)
# Sort & Merge
merged = merge_range(range)
# Convert back to cli dash-syntax
ruby_range_to_dash_range(merged, type)
end
def self.normalize_range_string(range)
range = range.to_s
return normalize_range_array(range, :string) if range[/[-,]/]
range
end
# Convert a cli-dash-syntax range to ruby-range. This is useful for
# preparing inputs to merge_range().
#
# Inputs an array or string of dash-syntax ranges -> returns an array
# of ruby ranges.
#
# Accepts an array or string: ["2-5", "9", "4-6"] or '2-5, 9, 4-6'
# Returns an array of ranges: [2..5, 9..9, 4..6]
#
def self.dash_range_to_ruby_range(range)
range = range.split(',') if range.is_a?(String)
# [["45", "7-8"], ["46", "9,10"]]
range.map! do |rng|
if rng[/-/]
# '2-5' -> 2..5
rng.split('-').inject { |a, e| a.to_i..e.to_i }
else
# '9' -> 9..9
rng.to_i..rng.to_i
end
end
range
end
# Convert a ruby-range to cli-dash-syntax.
#
# Inputs an array of ruby ranges -> returns an array or string of
# dash-syntax ranges.
#
# when (:array) [2..6, 9..9] -> ['2-6', '9']
#
# when (:string) [2..6, 9..9] -> '2-6, 9'
#
def self.ruby_range_to_dash_range(range, type=:array)
range.map! do |r|
if r.first == r.last
# 9..9 -> '9'
r.first.to_s
else
# 2..6 -> '2-6'
r.first.to_s + '-' + r.last.to_s
end
end
return range.join(',') if type == :string
range
end
# Merge overlapping ranges.
#
# Inputs an array of ruby ranges: [2..5, 9..9, 4..6]
# Returns an array of merged ruby ranges: [2..6, 9..9]
#
def self.merge_range(range)
# sort to lowest range 'first' values:
# [2..5, 9..9, 4..6] -> [2..5, 4..6, 9..9]
range = range.sort_by(&:first)
*merged = range.shift
range.each do |r|
lastr = merged[-1]
if lastr.last >= r.first - 1
merged[-1] = lastr.first..[r.last, lastr.last].max
else
merged.push(r)
end
end
merged
end # merge_range
# TBD: Investigate replacing fail_array_overlap() and range_summarize()
# with above methods.
# Helper utility for checking if arrays are overlapping in a
# give list.
# For ex: if the list has '2-10,32,42,44-89' and '11-33'
# then this will fail as they overlap
def self.fail_array_overlap(list)
array = []
list.each do |range, _val|
larray = range.split(',')
larray.each do |elem|
if elem.include?('-')
elema = elem.split('-').map { |d| Integer(d) }
ele = elema[0]..elema[1]
if (array & ele.to_a).empty?
array << ele.to_a
array = array.flatten
else
fail 'overlapping arrays not allowed'
end
else
elema = []
elema << elem.to_i
if (array & elema).empty?
array << elema
array = array.flatten
else
fail 'overlapping arrays not allowed'
end
end
end
end
end
# Helper utility method for range summarization of VLAN and BD ranges
# Input is a range string. For example: '10-20, 30, 14, 100-105, 21'
# Output should be: '10-21,30,100-105'
def self.range_summarize(range_str, sort=true)
ranges = []
range_str.split(/,/).each do |elem|
if elem =~ /\d+\s*\-\s*\d+/
range_limits = elem.split(/\-/).map { |d| Integer(d) }
ranges << (range_limits[0]..range_limits[1])
else
ranges << Integer(elem)
end
end
# nrange array below will expand the ranges and get a single list
nrange = []
ranges.each do |item|
# OR operations below will get rid of duplicates
if item.class == Range
nrange |= item.to_a
else
nrange |= [item]
end
end
nrange.sort! if sort
ranges = []
left = nrange.first
right = nil
nrange.each do |obj|
if right && obj != right.succ
# obj cannot be included in the current range, end this range
if left != right
ranges << Range.new(left, right)
else
ranges << left
end
left = obj # start of new range
end
right = obj # move right to point to obj
end
if left != right
ranges << Range.new(left, right)
else
ranges << left
end
ranges.join(',').gsub('..', '-')
end
# fretta check
def self.check_slot_pid(inv)
inv.each do |_x, slot|
return true if slot['pid'][/-R/]
end
false
end
def self.product_tag
data = Facter.value('cisco')
case data['inventory']['chassis']['pid']
when /N3/
tag = check_slot_pid(data['inventory']) ? 'n3k-f' : 'n3k'
when /N5/
tag = 'n5k'
when /N6/
tag = 'n6k'
when /N7/
tag = 'n7k'
when /N9/
tag = check_slot_pid(data['inventory']) ? 'n9k-f' : 'n9k'
else
fail "Unrecognized product_id: #{data['inventory']['chassis']['pid']}"
end
tag
end
# Convert encryption type to symbol
# Map a TACACS server encryption-type constant to its symbolic form.
# Falls through (returns nil) for any value outside the three known constants.
def self.enc_type_to_sym(type)
case type
when TACACS_SERVER_ENC_UNKNOWN
:none
when TACACS_SERVER_ENC_NONE
:clear
when TACACS_SERVER_ENC_CISCO_TYPE_7
:encrypted
end
end
# Convert encryption symbol to type
# Inverse of enc_type_to_sym; :default is treated like :clear.
# Falls through (returns nil) for any unrecognized symbol.
def self.enc_sym_to_type(sym)
case sym
when :none
TACACS_SERVER_ENC_UNKNOWN
when :clear, :default
TACACS_SERVER_ENC_NONE
when :encrypted
TACACS_SERVER_ENC_CISCO_TYPE_7
end
end
end # class Utils
# rubocop:enable Metrics/ClassLength
# PuppetX::Cisco::BgpUtil - Common BGP methods used by BGP Types/Providers
class BgpUtils
  # Validate and normalize a BGP autonomous-system number.
  # Accepts an Integer, an ASPLAIN string ('55'), or an ASDOT string ('1.5')
  # and returns the ASPLAIN value as an Integer.
  # Raises ArgumentError for any other type or malformed string.
  def self.process_asnum(asnum)
    err_msg = "BGP asnum must be either a 'String' or an" \
              " 'Integer' object"
    fail ArgumentError, err_msg unless asnum.is_a?(Integer) ||
                                       asnum.is_a?(String)
    if asnum.is_a? String
      # Match ASDOT '1.5' or ASPLAIN '55' strings.
      # BUG FIX: use \A/\z to anchor the WHOLE string. The previous ^/$
      # anchors matched per-line, so inputs with embedded newlines
      # (e.g. "junk\n55") slipped through validation.
      fail ArgumentError unless /\A(\d+|\d+\.\d+)\z/.match(asnum)
      asnum = dot_to_big(asnum) if /\d+\.\d+/.match(asnum)
    end
    asnum.to_i
  end

  # Convert a BGP ASN from ASDOT+ ('high.low') to its ASPLAIN integer:
  # (high & 0xFFFF) << 16 | (low & 0xFFFF).
  # Strings without a dotted part are returned unchanged.
  # Raises ArgumentError when the argument is not a String.
  def self.dot_to_big(dot_str)
    fail ArgumentError unless dot_str.is_a? String
    return dot_str unless /\d+\.\d+/.match(dot_str)
    mask = 0b1111111111111111
    high = dot_str.to_i
    low = 0
    low_match = dot_str.match(/\.(\d+)/)
    low = low_match[1].to_i if low_match
    high_bits = (mask & high) << 16
    low_bits = mask & low
    high_bits + low_bits
  end
end
end
end
| 31.766667 | 80 | 0.543833 |
219e8b4fc3f14550bbaf84a5719b6299a37d4671 | 566 | class AddEnergyTracking < ActiveRecord::Migration
# Creates the energy_consumptions table used for per-project energy
# tracking (AMEE profile items plus a cached carbon-output figure),
# indexed for the common project and recency lookups.
def self.up
  create_table :energy_consumptions, :force => true do |t|
    t.integer :project_id
    t.string  :name
    t.string  :amee_profile_item_id
    t.string  :energy_consumption_type
    t.float   :carbon_output_cache
    t.timestamps
  end
  add_index :energy_consumptions, :project_id
  add_index :energy_consumptions, :created_at
end
# Reverses the migration by dropping the table (its indexes go with it).
def self.down
drop_table :energy_consumptions
end
end | 31.444444 | 60 | 0.662544 |
7a917b1ae8ddef646d2a245607a3a636bc981974 | 215 | class CreateResponses < ActiveRecord::Migration[5.2]
# Creates the responses table (a response belongs to a statement).
# NOTE(review): there is both a string column named :timestamp and the
# created_at/updated_at pair from t.timestamps -- confirm the :timestamp
# string column is intentional and not redundant.
def change
create_table :responses do |t|
t.integer :statement_id
t.string :text
t.string :timestamp
t.timestamps
end
end
end
| 19.545455 | 52 | 0.665116 |
26d79932f9e034277ccaef5f410cc05d7a1bde41 | 864 | require 'spec_helper_min'
require_dependency 'carto/oauth_provider/scopes/scopes'
require_relative '../../../../factories/organizations_contexts'
# Verifies that granting the dataservices scope adds both the SQL API grant
# and the matching dataservices entry, without duplicating 'sql'.
describe Carto::OauthProvider::Scopes::DataservicesScope do
include_context 'organization with users helper'
describe '#add_to_api_key_grants' do
# 'geocoding' is the granted service; 'GC' is the scope's display label.
let(:scope) { Carto::OauthProvider::Scopes::DataservicesScope.new('geocoding', 'GC') }
it 'adds SQL api and dataservice' do
grants = [{ type: 'apis', apis: [] }]
scope.add_to_api_key_grants(grants, nil)
expect(grants).to(eq([{ type: 'apis', apis: ['sql'] }, { type: 'dataservices', services: ['geocoding'] }]))
end
it 'does not add duplicate SQL api' do
grants = [{ type: 'apis', apis: ['sql'] }]
scope.add_to_api_key_grants(grants, nil)
expect(grants).to(include(type: 'apis', apis: ['sql']))
end
end
end
| 36 | 113 | 0.674769 |
33f3649cb8c524a6cef8ca562dfddab883d745ec | 617 | require "erb"
module Terraforming
module GKE
class GoogleContainerCluster
  # Convenience wrapper: build an instance and render the Terraform output.
  # BUG FIX: the previous version called the instance method #tf with no
  # arguments even though it requires (project, zone), so this always raised
  # ArgumentError. The new parameters default to nil so existing
  # single-argument callers keep working.
  def self.tf(client = nil, project = nil, zone = nil)
    self.new(client).tf(project, zone)
  end

  # client: a GKE API client responding to #list_clusters(project, zone).
  def initialize(client)
    @client = client
  end

  # Render the google_container_cluster Terraform resource(s) for every
  # cluster in the given project/zone. `clusters` is consumed by the ERB
  # template via `binding`.
  def tf(project, zone)
    clusters = @client.list_clusters(project, zone).clusters
    # File.read instead of Kernel#open: avoids the '|command' pipe behavior
    # of Kernel#open and does not leak an IO handle.
    template = File.read(File.join(File.expand_path(File.dirname(__FILE__)), "template", "google_container_cluster.tf.erb"))
    ERB.new(template, nil, "-").result(binding)
  end

  private

  # Terraform resource names cannot contain '-'; map them to '_'.
  def resource_name_of(cluster)
    cluster.name.gsub("-", "_")
  end
end
end
end
| 22.035714 | 128 | 0.623987 |
031edb90527f8f4e2b608cdccf088637267ed04a | 528 | # frozen_string_literal: true
require "rails_helper"
# Feature spec: a work typed with LoC resourceTypes/txt must be reachable
# through the "Format" facet as "Text" on the catalog search page.
describe "Browsing by Type of Resource" do
before do
# The VCR cassette stubs the external type-of-resource lookups made
# while creating the fixture work.
VCR.use_cassette("browse_by_type_of_resource") do
create(
:public_etd,
work_type: [
RDF::URI("http://id.loc.gov/vocabulary/resourceTypes/txt"),
]
)
end
end
specify do
visit root_path
click_on "Format"
expect(page).to(
have_link(
"Text",
href: search_catalog_path(f: { work_type_label_sim: ["Text"] })
)
)
end
end
| 18.857143 | 71 | 0.604167 |
eda9c464b2374f730d8b2aa5e7e6a206ca6f0fcf | 1,869 | module Tatami
module Parsers
module Csv
class HttpRequestParser
  include Tatami::Constants::HeaderNames

  # Parse one CSV row (columns header.from..header.to) into a
  # Tatami::Models::HttpRequest named +name+.
  # Returns nil when every cell in the row's column range is blank.
  # Raises ArgumentError when header/row/name are blank, and
  # Tatami::WrongFileFormatError when the mandatory <BaseUri> is missing.
  def self.parse(header, row, name)
    raise ArgumentError, 'header must not be null.' if header.to_s.strip == ''
    raise ArgumentError, 'row must not be null.' if row.to_s.strip == ''
    # BUG FIX: error message read 'must no be null.'
    raise ArgumentError, 'name must not be null.' if name.to_s.strip == ''
    # Skip rows whose cells are all blank within this header's range.
    is_empty = (header.from..header.to).none? { |i| row[i].to_s.strip != '' }
    return nil if is_empty
    validate(header, row, name)
    method = Tatami::Models::Csv::Header.get_string(header, METHOD, row)
    Tatami::Models::HttpRequest.new(
      :name => name,
      :base_uri => Tatami::Models::Csv::Header.get_string(header, BASE_URI, row),
      :method => method,
      :user_agent => Tatami::Models::Csv::Header.get_string(header, USER_AGENT, row),
      :headers => Tatami::Models::Csv::Header.get_hash(header, HEADERS, row),
      :cookies => Tatami::Models::Csv::Header.get_hash(header, COOKIES, row),
      :path_infos => Tatami::Models::Csv::Header.get_string_list(header, PATH_INFOS, row),
      :query_strings => Tatami::Models::Csv::Header.get_hash(header, QUERY_STRINGS, row),
      :fragment => Tatami::Models::Csv::Header.get_string(header, FRAGMENT, row),
      :content => Tatami::Models::Csv::Header.get_string(header, CONTENT, row))
  end

  # Ensure the mandatory <BaseUri> column is present for this row.
  def self.validate(header, row, name)
    base_uri = Tatami::Models::Csv::Header.get_string(header, BASE_URI, row)
    raise Tatami::WrongFileFormatError, '<BaseUri> should be not null. name=%s' % [name] if base_uri.nil?
  end
end
end
end
end | 42.477273 | 111 | 0.5939 |
d50821c677b0a8b82df3c54ad31fbb5896ee26a3 | 16,942 | # require "pry"
# require "pry-rescue"
require "json"
Puppet::Type.type(:azure_bandwidth_schedule).provide(:arm) do
mk_resource_methods
# value: initial property hash from Puppet's resource discovery.
# @property_flush accumulates pending attribute changes for #flush;
# @is_create/@is_delete let #flush skip work already performed by
# #create/#delete in the same run.
def initialize(value = {})
super(value)
@property_flush = {}
@is_create = false
@is_delete = false
end
# Puppet property setters: each records the requested value in
# @property_flush so all changes are applied in a single #flush call.
def id=(value)
Puppet.info("id setter called to change to #{value}")
@property_flush[:id] = value
end
# (see #id=)
def name=(value)
Puppet.info("name setter called to change to #{value}")
@property_flush[:name] = value
end
# (see #id=)
def properties=(value)
Puppet.info("properties setter called to change to #{value}")
@property_flush[:properties] = value
end
# (see #id=)
def type=(value)
Puppet.info("type setter called to change to #{value}")
@property_flush[:type] = value
end
# Create the bandwidth schedule via an ARM PUT. Marks the resource
# :present on success; raises Puppet::Error on any non-2xx response.
# Sets @is_create so the subsequent #flush becomes a no-op.
def create
@is_create = true
Puppet.info("Entered create for resource #{name} of type BandwidthSchedule")
hash = build_hash
response = self.class.invoke_create(resource, hash)
if response.is_a? Net::HTTPSuccess
@property_hash[:ensure] = :present
Puppet.info("Added :ensure to property hash")
else
raise Puppet::Error, "Create failed. Response is #{response} and body is #{response.body}"
end
rescue Exception => ex
# Logged and re-raised so Puppet reports the failure; resource state is unknown.
Puppet.alert("Exception during create. The state of the resource is unknown. ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
# Apply accumulated property changes with an ARM PUT (upsert).
# Skipped entirely when #create or #delete already ran in this transaction.
# Raises Puppet::Error on any non-2xx response.
def flush
Puppet.info("Entered flush for resource #{name} of type BandwidthSchedule - creating ? #{@is_create}, deleting ? #{@is_delete}")
if @is_create || @is_delete
return # we've already done the create or delete
end
hash = build_hash
response = self.class.invoke_update(resource, hash)
if response.is_a? Net::HTTPSuccess
@property_hash[:ensure] = :present
Puppet.info("Added :ensure to property hash")
else
raise Puppet::Error, "Flush failed. The state of the resource is unknown. Response is #{response} and body is #{response.body}"
end
rescue Exception => ex
# Logged and re-raised so Puppet reports the failure; resource state is unknown.
Puppet.alert("Exception during flush. ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
# Build the ARM request body from the resource's non-nil attributes,
# keyed by the wire (string) field names.
def build_hash
  %w[id name properties type].each_with_object({}) do |field, body|
    value = resource[field.to_sym]
    body[field] = value unless value.nil?
  end
end
# Static query-string defaults shared by every ARM call in this provider.
def self.build_key_values
  { "api-version" => "2018-07-01" }
end
# Puppet entry point for removal; delegates to #delete with the resource.
def destroy
  delete(resource)
end

# Delete the bandwidth schedule via an ARM DELETE.
# Raises Puppet::Error on any non-2xx response.
def delete(hash)
  Puppet.info("Entered delete for resource #{hash[:name]} of type <no value>")
  @is_delete = true
  response = self.class.invoke_delete(hash)
  if response.is_a? Net::HTTPSuccess
    # BUG FIX: a successfully deleted resource must be recorded as :absent.
    # The previous code set :present here while logging ":absent".
    @property_hash[:ensure] = :absent
    Puppet.info "Added :absent to property_hash"
  else
    raise Puppet::Error, "Delete failed. The state of the resource is unknown. Response is #{response} and body is #{response.body}"
  end
rescue Exception => ex
  # Logged and re-raised so Puppet reports the failure.
  Puppet.alert("Exception during destroy. ex is #{ex} and backtrace is #{ex.backtrace}")
  raise
end
# Resolve each op_param descriptor against (in increasing precedence) the
# static key/value defaults, the azure_* environment variables, and the
# Puppet resource itself. Query params are keyed by wire name; path params
# by snake_case symbol for %-interpolation into the operation path.
# Descriptors marked "body" are collected with the path params (and never
# interpolated), preserving the original generated behavior.
# Returns [path_params, query_params].
def self.build_op_params(resource, key_values, op_params)
  path_params = {}
  query_params = {}
  op_params.each do |i|
    inquery = i[:inquery]
    name = i[:name]
    paramalias = i[:paramalias]
    name_snake = i[:namesnake]
    if inquery == "query"
      query_params[name] = key_values[name] unless key_values[name].nil?
      query_params[name] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
      query_params[name] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
    else
      path_params[name_snake.to_sym] = key_values[name] unless key_values[name].nil?
      path_params[name_snake.to_sym] = ENV["azure_#{name_snake}"] unless ENV["azure_#{name_snake}"].nil?
      path_params[name_snake.to_sym] = resource[paramalias.to_sym] unless resource.nil? || resource[paramalias.to_sym].nil?
    end
  end
  [path_params, query_params]
end

# PUT the schedule (BandwidthSchedules_CreateOrUpdate).
def self.invoke_create(resource = nil, body_params = nil)
  Puppet.info("Calling operation BandwidthSchedules_CreateOrUpdate")
  header_params = { "User-Agent" => "puppetlabs-azure_arm/0.2.1" }
  op_params = [
    self.op_param("api-version", "query", "api_version", "api_version"),
    self.op_param("deviceName", "path", "device_name", "device_name"),
    self.op_param("id", "body", "id", "id"),
    self.op_param("name", "path", "name", "name"),
    self.op_param("parameters", "body", "parameters", "parameters"),
    self.op_param("properties", "body", "properties", "properties"),
    self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
    self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
    self.op_param("type", "body", "type", "type"),
  ]
  path_params, query_params = build_op_params(resource, self.build_key_values, op_params)
  self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/%{device_name}/bandwidthSchedules/%{name}", "Put", "[application/json]")
end

# ARM PUT is an upsert, so update is byte-for-byte the same operation
# (the generated code duplicated the whole method body).
def self.invoke_update(resource = nil, body_params = nil)
  invoke_create(resource, body_params)
end

# DELETE the schedule (BandwidthSchedules_Delete).
def self.invoke_delete(resource = nil, body_params = nil)
  Puppet.info("Calling operation BandwidthSchedules_Delete")
  header_params = { "User-Agent" => "puppetlabs-azure_arm/0.2.1" }
  op_params = [
    self.op_param("api-version", "query", "api_version", "api_version"),
    self.op_param("deviceName", "path", "device_name", "device_name"),
    self.op_param("id", "body", "id", "id"),
    self.op_param("name", "path", "name", "name"),
    self.op_param("properties", "body", "properties", "properties"),
    self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
    self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
    self.op_param("type", "body", "type", "type"),
  ]
  path_params, query_params = build_op_params(resource, self.build_key_values, op_params)
  self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/%{device_name}/bandwidthSchedules/%{name}", "Delete", "[application/json]")
end

# GET all schedules on a device (BandwidthSchedules_ListByDataBoxEdgeDevice).
def self.invoke_list_with_params(resource = nil, body_params = nil)
  Puppet.info("Calling operation BandwidthSchedules_ListByDataBoxEdgeDevice")
  header_params = { "User-Agent" => "puppetlabs-azure_arm/0.2.1" }
  op_params = [
    self.op_param("api-version", "query", "api_version", "api_version"),
    self.op_param("deviceName", "path", "device_name", "device_name"),
    self.op_param("id", "body", "id", "id"),
    self.op_param("name", "body", "name", "name"),
    self.op_param("properties", "body", "properties", "properties"),
    self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
    self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
    self.op_param("type", "body", "type", "type"),
  ]
  path_params, query_params = build_op_params(resource, self.build_key_values, op_params)
  self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/%{device_name}/bandwidthSchedules", "Get", "[application/json]")
end

# GET a single schedule by name (BandwidthSchedules_Get).
def self.invoke_get_one(resource = nil, body_params = nil)
  Puppet.info("Calling operation BandwidthSchedules_Get")
  header_params = { "User-Agent" => "puppetlabs-azure_arm/0.2.1" }
  op_params = [
    self.op_param("api-version", "query", "api_version", "api_version"),
    self.op_param("deviceName", "path", "device_name", "device_name"),
    self.op_param("id", "body", "id", "id"),
    self.op_param("name", "path", "name", "name"),
    self.op_param("properties", "body", "properties", "properties"),
    self.op_param("resourceGroupName", "path", "resource_group_name", "resource_group_name"),
    self.op_param("subscriptionId", "path", "subscription_id", "subscription_id"),
    self.op_param("type", "body", "type", "type"),
  ]
  path_params, query_params = build_op_params(resource, self.build_key_values, op_params)
  self.call_op(path_params, query_params, header_params, body_params, "management.azure.com", "/subscriptions/%{subscription_id}/resourceGroups/%{resource_group_name}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/%{device_name}/bandwidthSchedules/%{name}", "Get", "[application/json]")
end
# Attach a Bearer token to header_params. Returns true on success, false
# when no token could be obtained. The other *_params arguments are unused
# but kept for call-site compatibility.
def self.authenticate(path_params, query_params, header_params, body_params)
  token = fetch_oauth2_token
  return false unless token
  header_params["Authorization"] = "Bearer #{token}"
  true
end
# Obtain an OAuth2 access token from Azure AD using the client-credentials
# grant. Credentials come from the azure_client_id / azure_client_secret /
# azure_tenant_id environment variables.
# Returns the access token string; raises Puppet::Error on any non-2xx reply.
# NOTE(review): the credentials are stored in ivars on the provider class
# (self is the class here) -- presumably just for debugging; confirm.
def self.fetch_oauth2_token
Puppet.info("Getting oauth2 token")
@client_id = ENV["azure_client_id"]
@client_secret = ENV["azure_client_secret"]
@tenant_id = ENV["azure_tenant_id"]
uri = URI("https://login.microsoftonline.com/#{@tenant_id}/oauth2/token")
response = Net::HTTP.post_form(uri,
"grant_type" => "client_credentials",
"client_id" => @client_id,
"client_secret" => @client_secret,
"resource" => "https://management.azure.com/")
Puppet.debug("get oauth2 token response code is #{response.code} and body is #{response.body}")
success = response.is_a? Net::HTTPSuccess
if success
return JSON[response.body]["access_token"]
else
raise Puppet::Error, "Unable to get oauth2 token - response is #{response} and body is #{response.body}"
end
end
# Puppet existence check: true when the ARM GET for this resource succeeds.
def exists?
Puppet.info("exists_one for resource #{name} of type <no value>")
return exists_one(resource)
end
# Issue the GET and map 2xx -> true, anything else -> false.
# Network/parsing exceptions are logged and re-raised.
def exists_one(resource)
response = self.class.invoke_get_one(resource)
if response.is_a? Net::HTTPSuccess
return true
else
return false
end
rescue Exception => ex
Puppet.alert("Exception during exists_one. ex is #{ex} and backtrace is #{ex.backtrace}")
raise
end
# Copy every key/value pair onto the request object (e.g. HTTP headers
# onto a Net::HTTP request); no-op when hash is nil.
def self.add_keys_to_request(request, hash)
  return unless hash
  hash.each { |key, value| request[key] = value }
end
# Render a params hash as 'k1=v1&k2=v2'; returns '' for nil or empty input.
# NOTE: keys and values are interpolated verbatim (no URL-encoding),
# matching the original behavior of this provider.
def self.to_query(hash)
  return "" if hash.nil?
  hash.map { |key, value| "#{key}=#{value}" }.join("&")
end
# Bundle one operation-parameter descriptor (wire name, location, resource
# alias, snake_case name) into the hash shape consumed by invoke_*.
def self.op_param(name, inquery, paramalias, namesnake)
  { name: name, inquery: inquery, paramalias: paramalias, namesnake: namesnake }
end
# Perform one authenticated ARM HTTP call.
# path_params are %-interpolated into operation_path; query_params are
# appended (unencoded -- see to_query); the Bearer token is added by
# authenticate. Returns the Net::HTTPResponse, or nil when authentication
# fails (the body is silently skipped in that case).
# NOTE(review): `req` stays nil for any verb other than Get/Put/Delete and
# the subsequent calls would raise -- only those three verbs are used here.
def self.call_op(path_params, query_params, header_params, body_params, parent_host, operation_path, operation_verb, parent_consumes)
uri_string = "https://#{parent_host}#{operation_path}" % path_params
uri_string = uri_string + "?" + to_query(query_params)
header_params["Content-Type"] = "application/json" # first of #{parent_consumes}
if authenticate(path_params, query_params, header_params, body_params)
Puppet.info("Authentication succeeded")
uri = URI(uri_string)
Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == "https") do |http|
if operation_verb == "Get"
req = Net::HTTP::Get.new(uri)
elsif operation_verb == "Put"
req = Net::HTTP::Put.new(uri)
elsif operation_verb == "Delete"
req = Net::HTTP::Delete.new(uri)
end
add_keys_to_request(req, header_params)
if body_params
req.body = body_params.to_json
end
Puppet.debug("URI is (#{operation_verb}) #{uri}, body is #{body_params}, query params are #{query_params}, headers are #{header_params}")
response = http.request req # Net::HTTPResponse object
Puppet.debug("response code is #{response.code} and body is #{response.body}")
success = response.is_a? Net::HTTPSuccess
Puppet.info("Called (#{operation_verb}) endpoint at #{uri}, success was #{success}")
return response
end
end
end
end
# this is the end of the ruby class
| 43.552699 | 295 | 0.671585 |
ab68420935b7f8d3ff2a9e1c4500962a30a7cd2b | 36 | module Yarf
VERSION = "0.1.0"
end
| 9 | 19 | 0.638889 |
38699f6caefbf2040071b52ec9aeee0c587bd86d | 774 | class NotificationsTakeTwo < ActiveRecord::Migration
def change
drop_table :feeds
create_table :notifications do |t|
t.integer :notified_user_id
t.integer :originating_user_id
t.integer :event_id
t.integer :subscription_id
t.boolean :seen, default: false
t.integer :type
t.text :description
t.timestamps
end
add_foreign_key :notifications, :users, column: :notified_user_id
add_foreign_key :notifications, :users, column: :originating_user_id
add_foreign_key :notifications, :events
add_foreign_key :notifications, :subscriptions
add_index :notifications, :notified_user_id
add_index :notifications, :created_at
add_column :events, :unlinkable, :boolean, default: false
end
end
| 28.666667 | 72 | 0.72739 |
ab7b5a57b3ab72801705cf486c375e5992ffcabb | 12,248 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/json_rpc.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:costandusagereportservice)
module Aws::CostandUsageReportService
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :costandusagereportservice
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::JsonRpc)
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentails` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is search for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test endpoints. This should be avalid HTTP(S) URI.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors and auth
# errors from expired credentials.
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :simple_json (false)
# Disables request parameter conversion, validation, and formatting.
# Also disable response data type conversions. This option is useful
# when you want to ensure the highest level of performance by
# avoiding overhead of walking request parameters and response data
# structures.
#
# When `:simple_json` is enabled, the request parameters hash must
# be formatted exactly as the DynamoDB API expects.
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# GENERATED CODE -- all option handling is performed by the plugin chain
# configured above; see the option documentation preceding this method.
def initialize(*args)
super
end
# @!group API Operations
# Delete a specified report definition
#
# @option params [String] :report_name
# Preferred name for a report, it has to be unique. Must starts with a
# number/letter, case sensitive. Limited to 256 characters.
#
# @return [Types::DeleteReportDefinitionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteReportDefinitionResponse#response_message #response_message} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_report_definition({
# report_name: "ReportName",
# })
#
# @example Response structure
#
# resp.response_message #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/cur-2017-01-06/DeleteReportDefinition AWS API Documentation
#
# @overload delete_report_definition(params = {})
# @param [Hash] params ({})
# GENERATED CODE -- fix issues in the SDK generator, not by hand here.
def delete_report_definition(params = {}, options = {})
req = build_request(:delete_report_definition, params)
req.send_request(options)
end
# Describe a list of report definitions owned by the account
#
# @option params [Integer] :max_results
# The max number of results returned by the operation.
#
# @option params [String] :next_token
# A generic string.
#
# @return [Types::DescribeReportDefinitionsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeReportDefinitionsResponse#report_definitions #report_definitions} => Array<Types::ReportDefinition>
# * {Types::DescribeReportDefinitionsResponse#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_report_definitions({
# max_results: 1,
# next_token: "GenericString",
# })
#
# @example Response structure
#
# resp.report_definitions #=> Array
# resp.report_definitions[0].report_name #=> String
# resp.report_definitions[0].time_unit #=> String, one of "HOURLY", "DAILY"
# resp.report_definitions[0].format #=> String, one of "textORcsv"
# resp.report_definitions[0].compression #=> String, one of "ZIP", "GZIP"
# resp.report_definitions[0].additional_schema_elements #=> Array
# resp.report_definitions[0].additional_schema_elements[0] #=> String, one of "RESOURCES"
# resp.report_definitions[0].s3_bucket #=> String
# resp.report_definitions[0].s3_prefix #=> String
# resp.report_definitions[0].s3_region #=> String, one of "us-east-1", "us-west-1", "us-west-2", "eu-central-1", "eu-west-1", "ap-southeast-1", "ap-southeast-2", "ap-northeast-1"
# resp.report_definitions[0].additional_artifacts #=> Array
# resp.report_definitions[0].additional_artifacts[0] #=> String, one of "REDSHIFT", "QUICKSIGHT"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/cur-2017-01-06/DescribeReportDefinitions AWS API Documentation
#
# @overload describe_report_definitions(params = {})
# @param [Hash] params ({})
# GENERATED CODE -- fix issues in the SDK generator, not by hand here.
def describe_report_definitions(params = {}, options = {})
req = build_request(:describe_report_definitions, params)
req.send_request(options)
end
# Create a new report definition
#
# @option params [required, Types::ReportDefinition] :report_definition
# The definition of AWS Cost and Usage Report. Customer can specify the
# report name, time unit, report format, compression format, S3 bucket
# and additional artifacts and schema elements in the definition.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.put_report_definition({
# report_definition: { # required
# report_name: "ReportName", # required
# time_unit: "HOURLY", # required, accepts HOURLY, DAILY
# format: "textORcsv", # required, accepts textORcsv
# compression: "ZIP", # required, accepts ZIP, GZIP
# additional_schema_elements: ["RESOURCES"], # required, accepts RESOURCES
# s3_bucket: "S3Bucket", # required
# s3_prefix: "S3Prefix", # required
# s3_region: "us-east-1", # required, accepts us-east-1, us-west-1, us-west-2, eu-central-1, eu-west-1, ap-southeast-1, ap-southeast-2, ap-northeast-1
# additional_artifacts: ["REDSHIFT"], # accepts REDSHIFT, QUICKSIGHT
# },
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/cur-2017-01-06/PutReportDefinition AWS API Documentation
#
# @overload put_report_definition(params = {})
# @param [Hash] params ({})
# GENERATED CODE -- fix issues in the SDK generator, not by hand here.
def put_report_definition(params = {}, options = {})
req = build_request(:put_report_definition, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-costandusagereportservice'
context[:gem_version] = '1.0.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 40.556291 | 184 | 0.67611 |
1a28faca9ad618fea8dff4bb7dc664a60dcb73ef | 597 | # frozen_string_literal: true
module Mercy
  class Report
    # Shared behavior for reports that aggregate several sub-reports, one per
    # id in +reports_ids+ (which, along with +status+, +json+, +key+ and
    # +sub_report_class+, is expected from the including class).
    module Multiple
      # Serializes the overall status plus one entry per sub-report id.
      def as_json
        {
          status: status
        }.merge(sub_reports_hash)
      end
      # True when any sub-report reports an error.
      def error?
        sub_reports.any?(&:error?)
      end
      private
      # Maps each sub-report's JSON onto its string id.
      # NOTE(review): Array#as_hash is not core Ruby — presumably a project
      # extension pairing values with the given keys; confirm its semantics.
      def sub_reports_hash
        sub_reports.map(&:as_json).as_hash(reports_ids.map(&:to_s))
      end
      # Lazily builds and memoizes one sub-report per configured id.
      def sub_reports
        @sub_reports ||= reports_ids.map do |id|
          build_sub_report(id)
        end
      end
      # Builds one sub-report, merging its id into the base json under +key+.
      def build_sub_report(id)
        sub_report_class.new(json.merge(key => id))
      end
    end
  end
end
| 17.558824 | 67 | 0.577889 |
333bef9c3089620cfbb0b874006ddd6a4b877274 | 1,995 | require 'homebus'
require 'homebus_app'
require 'mqtt'
require 'json'
require 'dotenv'
require 'net/http'
require 'base64'
require 'timeout'
# HomeBus app that polls an ESP32-CAM web server and publishes JPEG frames
# to the org.homebus.experimental.image DDC roughly once a minute.
class ESP32CamHomeBusApp < HomeBusApp
  # DDC (device data class) under which captured frames are published.
  DDC = 'org.homebus.experimental.image'
  def initialize(options)
    @options = options
    Dotenv.load('.env')
    # Command-line options take precedence over environment variables.
    @url = @options['camera-url'] || ENV['CAMERA_URL']
    @resolution = @options['camera-resolution'] || ENV['CAMERA_RESOLUTION']
    super
  end
  def setup!
  end
  # Pushes the configured frame size to the camera's /control endpoint.
  # Returns true on HTTP 200 (or trivially when no resolution is set).
  # NOTE(review): the bare rescue catches any StandardError (DNS, refused
  # connection, ...) but logs it as "timeout" — verify that's intended.
  def _set_resolution
    return true unless @resolution
    begin
      response = Timeout::timeout(30) do
        uri = URI("#{@url}/control?var=framesize&val=#{@resolution}")
        response = Net::HTTP.get_response(uri)
      end
      if response.code == "200"
        return true
      else
        return false
      end
    rescue
      puts "timeout"
      return false
    end
  end
  # Fetches one frame from /capture. Returns {mime_type:, data:} with the
  # JPEG body base64-encoded, or nil on any failure (same rescue caveat as
  # _set_resolution).
  def _get_image
    begin
      response = Timeout::timeout(30) do
        uri = URI(@url + '/capture')
        response = Net::HTTP.get_response(uri)
      end
      if response.code == "200"
        return {
          mime_type: 'image/jpeg',
          data: Base64.encode64(response.body)
        }
      else
        nil
      end
    rescue
      puts "timeout"
      nil
    end
  end
  # One poll cycle: apply resolution (if configured), wait for the camera to
  # settle, capture a frame and publish it, then sleep until the next cycle.
  def work!
    if !@resolution || _set_resolution
      if @resolution
        sleep(5)
      end
      image = _get_image
      if image
        publish! DDC, image
      else
        puts "no image"
      end
    end
    sleep 60
  end
  def manufacturer
    'HomeBus'
  end
  def model
    ''
  end
  def friendly_name
    'ESP32Cam'
  end
  def friendly_location
    ''
  end
  # The camera URL doubles as the serial number for device identity.
  def serial_number
    @url
  end
  def pin
    ''
  end
  # Device descriptor advertised to HomeBus (write-only on the image DDC).
  def devices
    [
      { friendly_name: 'ESP32CAM',
        friendly_location: 'PDX Hackerspace',
        update_frequency: 60,
        index: 0,
        accuracy: 0,
        precision: 0,
        wo_topics: [ DDC ],
        ro_topics: [],
        rw_topics: []
      }
    ]
  end
end
| 15.833333 | 75 | 0.566917 |
e8a497e83ff00a06fe30254d172d5286daa609c3 | 58 | module Fog
module Core
VERSION = "1.45.0"
end
end
| 9.666667 | 22 | 0.62069 |
e294c34f0d92de863ae700799d3cd8e1adae23cb | 10,010 | RSpec.describe MiqRegion do
let(:region) { FactoryBot.create(:miq_region, :region => ApplicationRecord.my_region_number) }
# the first id from a region other than ours
let(:external_region_id) do
remote_region_number = ApplicationRecord.my_region_number + 1
ApplicationRecord.region_to_range(remote_region_number).first
end
context "after seeding" do
before do
MiqRegion.seed
end
it "should increment naming sequence number after each call" do
expect(MiqRegion.my_region.next_naming_sequence("namingtest$n{3}", "naming")).to eq(1)
expect(MiqRegion.my_region.next_naming_sequence("namingtest$n{3}", "naming")).to eq(2)
expect(MiqRegion.my_region.next_naming_sequence("anothertest$n{3}", "naming")).to eq(1)
expect(MiqRegion.my_region.next_naming_sequence("anothertest$n{3}", "naming")).to eq(2)
end
context "with cloud and infra EMSes" do
before do
_, _, zone = EvmSpecHelper.create_guid_miq_server_zone
ems_vmware = FactoryBot.create(:ems_vmware, :zone => zone)
ems_openstack = FactoryBot.create(:ems_openstack, :zone => zone)
ems_redhat = FactoryBot.create(:ems_redhat, :zone => zone)
@ems_clouds = [ems_openstack]
@ems_infras = [ems_redhat, ems_vmware]
@region = MiqRegion.my_region
end
it "should be able to return the list of ems_clouds" do
expect(@region.ems_clouds).to include(*@ems_clouds)
expect(@region.ems_clouds).not_to include(*@ems_infras)
end
it "should be able to return the list of ems_infras" do
expect(@region.ems_infras).to include(*@ems_infras)
expect(@region.ems_infras).not_to include(*@ems_clouds)
end
end
end
context ".seed" do
before do
@region_number = 99
allow(MiqRegion).to receive_messages(:my_region_number => @region_number)
MiqRegion.seed
end
include_examples ".seed called multiple times"
it "should have the expected region number" do
expect(MiqRegion.first.region).to eq(@region_number)
end
it "replaces deleted current region" do
MiqRegion.where(:region => @region_number).destroy_all
expect(MiqRegion.count).to eq(0)
MiqRegion.seed
expect(MiqRegion.first.region).to eq(@region_number)
end
it "raises Exception if db region_id doesn't match my_region_number" do
@db = FactoryBot.create(:miq_database)
allow(MiqRegion).to receive_messages(:my_region_number => @region_number + 1)
expect { MiqRegion.seed }.to raise_error(Exception)
end
it "sets the migrations_ran column" do
expect(MiqRegion.first.migrations_ran).to match_array(ActiveRecord::SchemaMigration.normalized_versions)
end
end
describe ".replication_type" do
it "returns :global when configured as a pglogical subscriber" do
pgl = double(:provider? => false, :subscriber? => true, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(described_class.replication_type).to eq(:global)
end
it "returns :remote when configured as a pglogical provider" do
pgl = double(:provider? => true, :subscriber? => false, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(described_class.replication_type).to eq(:remote)
end
it "returns :none if pglogical is not configured" do
pgl = double(:provider? => false, :subscriber? => false, :node? => false)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(described_class.replication_type).to eq(:none)
end
end
describe ".replication_type=" do
it "returns the replication_type, even when unchanged" do
pgl = double(:provider? => true, :subscriber? => false, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(described_class.replication_type = :remote).to eq :remote
end
it "destroys the provider when transition is :remote -> :none" do
pgl = double(:provider? => true, :subscriber? => false, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(pgl).to receive(:destroy_provider)
expect(described_class.replication_type = :none).to eq :none
end
it "deletes all subscriptions when transition is :global -> :none" do
pgl = double(:provider? => false, :subscriber? => true, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(PglogicalSubscription).to receive(:delete_all)
expect(described_class.replication_type = :none).to eq :none
end
it "creates a new provider when transition is :none -> :remote" do
pgl = double(:provider? => false, :subscriber? => false, :node? => false)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(pgl).to receive(:configure_provider)
expect(described_class.replication_type = :remote).to eq :remote
end
it "deletes all subscriptions and creates a new provider when transition is :global -> :remote" do
pgl = double(:provider? => false, :subscriber? => true, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(PglogicalSubscription).to receive(:delete_all)
expect(pgl).to receive(:configure_provider)
expect(described_class.replication_type = :remote).to eq :remote
end
it "destroys the provider when transition is :remote -> :global" do
pgl = double(:provider? => true, :subscriber? => false, :node? => true)
allow(MiqPglogical).to receive(:new).and_return(pgl)
expect(pgl).to receive(:destroy_provider)
expect(described_class.replication_type = :global).to eq :global
end
end
describe "#api_system_auth_token" do
it "generates the token correctly" do
user = "admin"
server = FactoryBot.create(:miq_server, :has_active_webservices => true)
token = region.api_system_auth_token(user)
token_hash = YAML.load(ManageIQ::Password.decrypt(token))
expect(token_hash[:server_guid]).to eq(server.guid)
expect(token_hash[:userid]).to eq(user)
expect(token_hash[:timestamp]).to be > 5.minutes.ago.utc
end
end
describe "#vms" do
it "brings them back" do
FactoryBot.create(:vm_vmware, :id => external_region_id)
vm = FactoryBot.create(:vm_vmware)
FactoryBot.create(:template_vmware)
expect(region.vms).to eq([vm])
end
end
describe "#miq_templates" do
it "brings them back" do
FactoryBot.create(:vm_vmware, :id => external_region_id)
FactoryBot.create(:vm_vmware)
t = FactoryBot.create(:template_vmware)
expect(region.miq_templates).to eq([t])
end
end
describe "#vms_and_templates" do
it "brings them back" do
FactoryBot.create(:vm_vmware, :id => external_region_id)
vm = FactoryBot.create(:vm_vmware)
t = FactoryBot.create(:template_vmware)
expect(region.vms_and_templates).to match_array [vm, t]
end
end
describe "#remote_ws_url" do
let(:hostname) { "www.manageiq.org" }
context "with a recently active server" do
let(:ip) { "1.1.1.94" }
let(:url) { "https://www.manageiq.org" }
let!(:web_server) do
FactoryBot.create(:miq_server, :has_active_webservices => true,
:hostname => hostname,
:ipaddress => ip)
end
it "fetches the url from server" do
expect(region.remote_ws_url).to eq("https://#{ip}")
end
it "fetches the url from the setting" do
Vmdb::Settings.save!(web_server, :webservices => {:url => url})
expect(region.remote_ws_url).to eq(url)
end
end
it "with no recently active servers" do
FactoryBot.create(:miq_server, :has_active_webservices => true, :hostname => hostname, :last_heartbeat => 11.minutes.ago.utc)
expect(region.remote_ws_url).to be_nil
end
end
describe "#remote_ui_url" do
let(:hostname) { "www.manageiq.org" }
context "with a recently active server" do
let(:ip) { "1.1.1.94" }
let(:url) { "http://localhost:3000" }
let!(:ui_server) do
FactoryBot.create(:miq_server, :has_active_userinterface => true,
:hostname => hostname,
:ipaddress => ip)
end
it "fetches the url from server" do
expect(region.remote_ui_url).to eq("https://#{hostname}")
end
it "fetches the url from the setting" do
Vmdb::Settings.save!(ui_server, :ui => {:url => url})
expect(region.remote_ui_url).to eq(url)
end
end
it "with no recently active servers" do
FactoryBot.create(:miq_server, :has_active_userinterface => true, :hostname => hostname, :last_heartbeat => 11.minutes.ago.utc)
expect(region.remote_ws_url).to be_nil
end
end
describe "#remote_ui_miq_server" do
it "with no recently active servers" do
server = FactoryBot.create(:miq_server, :has_active_userinterface => true, :hostname => "example.com")
expect(region.remote_ui_miq_server).to eq(server)
end
it "with no recently active servers" do
FactoryBot.create(:miq_server, :has_active_userinterface => true, :hostname => "example.com", :last_heartbeat => 1.month.ago.utc)
expect(region.remote_ui_miq_server).to be_nil
end
end
describe "#remote_ws_miq_server" do
it "with no recently active servers" do
server = FactoryBot.create(:miq_server, :has_active_webservices => true, :hostname => "example.com")
expect(region.remote_ws_miq_server).to eq(server)
end
it "with no recently active servers" do
FactoryBot.create(:miq_server, :has_active_webservices => true, :hostname => "example.com", :last_heartbeat => 1.month.ago.utc)
expect(region.remote_ws_miq_server).to be_nil
end
end
end
| 35.371025 | 135 | 0.664236 |
910acbf89e9397aa94cda52dfac1df815c61106c | 5,079 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
#config.active_storage.service = :local
# アップロードされたファイルをAWSに保存する
config.active_storage.service = :amazon
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "sample_app_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.165217 | 114 | 0.762945 |
91aa71f3dbbbeb6904ee0f4a1dedebbf3137f0fe | 370 | class Edge
attr_accessor :from, :weight, :to
def initialize(from, weight, to)
@from = from
@weight = weight
@to = to
@from.edges << self
@to.edges << self
end
def nodes
return Set.new([@from, @to])
end
def other_node(node)
return node == @to ? @to : @from
end
def to_s
return "(#{@from},#{@weight},#{@to})"
end
end
| 14.8 | 41 | 0.562162 |
ed49dfccef0783b4422d8cea8aa6d8e86a9d798d | 155 | module SensuPluginsCpuUsage
module Version
MAJOR = 0
MINOR = 0
PATCH = 6
VER_STRING = [MAJOR, MINOR, PATCH].compact.join('.')
end
end
| 15.5 | 56 | 0.63871 |
0883a7cfa031b2e30da38ce175cd33e1a3b528b2 | 1,189 | class Instead < Formula
desc "Interpreter of simple text adventures"
homepage "https://instead.syscall.ru/"
url "https://github.com/instead-hub/instead/archive/3.1.2.tar.gz"
sha256 "622c04a58fd780d8efdf0706c03596ab68672b31e97865dad6a1fc1540619754"
head "https://github.com/instead-hub/instead.git"
bottle do
sha256 "c32e167811c88818b649817d4818b8d11b90b1645c2fe5617edef47e5ae0e0f1" => :high_sierra
sha256 "3bb245499347467119715fc9c8def74ef8f6f23775845ae5a37b266bf25f8951" => :sierra
sha256 "2ca1a0a758d0e7a404fb62082e8058e915dbd6922c3c0db62937899f3e99fdd8" => :el_capitan
sha256 "da97fc64cb2c10bc4aa7271d99cefedb3b6fdad308fcfaa3b16628ba1c9a9283" => :yosemite
sha256 "1561d0e35f092f641ca261a36aafb29c5295a10daf2fc29a856e51fe8c69afdf" => :x86_64_linux # glibc 2.19
end
depends_on "cmake" => :build
depends_on "lua"
depends_on "sdl"
depends_on "sdl_image"
depends_on "sdl_mixer"
depends_on "sdl_ttf"
def install
mkdir "build" do
system "cmake", "..", "-DWITH_GTK2=OFF", *std_cmake_args
system "make", "install"
end
end
test do
assert_match /INSTEAD #{version} /, shell_output("#{bin}/instead -h 2>&1")
end
end
| 34.970588 | 107 | 0.761144 |
1869fceee496a895ef67a292a528a9d49890f1cd | 634 | require 'spec_helper'
# API smoke tests for AsanaAPI::Team; HTTP is replayed via VCR cassettes.
describe AsanaAPI::Team do
  let(:subject) { AsanaAPI::Team.new }
  let(:workspaces) { AsanaAPI::Workspace.index! }
  it 'returns 200 for index', vcr: true do
    expect(subject.index(organization: workspaces.first['id']).code).to eq(200)
  end
  it 'returns 200 for show', vcr: true do
    subject.index(organization: workspaces.first['id'])
    expect(subject.show(id: subject.parsed.first['id']).code).to eq(200)
  end
  it 'returns 200 for users', vcr: true do
    subject.index(organization: workspaces.first['id'])
    expect(subject.users(id: subject.parsed.first['id']).code).to eq(200)
  end
end
| 30.190476 | 79 | 0.692429 |
ab18b595e65ba5cfe00d6026e6a5a10f1b304164 | 6,637 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ContainerInstance::Mgmt::V2018_02_01_preview
#
# StartContainer
#
class StartContainer
include MsRestAzure
#
# Creates and initializes a new instance of the StartContainer class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [ContainerInstanceManagementClient] reference to the ContainerInstanceManagementClient
attr_reader :client
#
# Starts the exec command for a specific container instance.
#
# Starts the exec command for a specified container instance in a specified
# resource group and container group.
#
# @param resource_group_name [String] The name of the resource group.
# @param container_group_name [String] The name of the container group.
# @param container_name [String] The name of the container instance.
# @param container_exec_request [ContainerExecRequest] The request for the exec
# command.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ContainerExecResponse] operation results.
#
def launch_exec(resource_group_name, container_group_name, container_name, container_exec_request, custom_headers:nil)
response = launch_exec_async(resource_group_name, container_group_name, container_name, container_exec_request, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Starts the exec command for a specific container instance.
#
# Starts the exec command for a specified container instance in a specified
# resource group and container group.
#
# @param resource_group_name [String] The name of the resource group.
# @param container_group_name [String] The name of the container group.
# @param container_name [String] The name of the container instance.
# @param container_exec_request [ContainerExecRequest] The request for the exec
# command.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def launch_exec_with_http_info(resource_group_name, container_group_name, container_name, container_exec_request, custom_headers:nil)
launch_exec_async(resource_group_name, container_group_name, container_name, container_exec_request, custom_headers:custom_headers).value!
end
#
# Starts the exec command for a specific container instance.
#
# Starts the exec command for a specified container instance in a specified
# resource group and container group.
#
# @param resource_group_name [String] The name of the resource group.
# @param container_group_name [String] The name of the container group.
# @param container_name [String] The name of the container instance.
# @param container_exec_request [ContainerExecRequest] The request for the exec
# command.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def launch_exec_async(resource_group_name, container_group_name, container_name, container_exec_request, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'container_group_name is nil' if container_group_name.nil?
fail ArgumentError, 'container_name is nil' if container_name.nil?
fail ArgumentError, 'container_exec_request is nil' if container_exec_request.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::ContainerInstance::Mgmt::V2018_02_01_preview::Models::ContainerExecRequest.mapper()
request_content = @client.serialize(request_mapper, container_exec_request)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerInstance/containerGroups/{containerGroupName}/containers/{containerName}/exec'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'containerGroupName' => container_group_name,'containerName' => container_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:post, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ContainerInstance::Mgmt::V2018_02_01_preview::Models::ContainerExecResponse.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
end
end
| 45.772414 | 196 | 0.72593 |
1c0bb23947d64773b7d384462d0563a526a4be87 | 2,266 | require_relative 'base_generator'
module Fae
  # Rails generator for a Fae-nested scaffold: model, GraphQL type,
  # controller, views and a route for a resource nested under a parent.
  class NestedScaffoldGenerator < Fae::BaseGenerator
    source_root ::File.expand_path('../templates', __FILE__)
    class_option :parent_model, type: :string, desc: 'Sets the parent model this scaffold belongs_to.'
    # Generator entry point: runs each generation step in order.
    def go
      generate_nested_model_file
      generate_graphql_type
      generate_nested_controller_file
      generate_view_files
      add_route
    end
    private
    # Generates the model, then injects Fae concerns/attachments/scopes.
    # NOTE(review): @@attributes_flat / @@attachments appear to be class
    # variables populated by Fae::BaseGenerator — confirm there.
    def generate_nested_model_file
      generate "model #{file_name} #{@@attributes_flat}"
      inject_concern
      inject_display_field_to_model
      inject_model_attachments
      inject_position_scope
      inject_parent_info if options.parent_model.present?
      inject_polymorphic_info if options.polymorphic
    end
    # Renders the nested-scaffold controller template into the namespace.
    def generate_nested_controller_file
      @attachments = @@attachments
      @polymorphic_name = polymorphic_name
      template "controllers/nested_scaffold_controller.rb", "app/controllers/#{options.namespace}/#{file_name.pluralize}_controller.rb"
    end
    # Renders the table/_form/new/edit views for the configured template engine.
    def generate_view_files
      @form_attrs = set_form_attrs
      @attachments = @@attachments
      template "views/table_nested.html.#{options.template}", "app/views/#{options.namespace}/#{plural_file_name}/table.html.#{options.template}"
      template "views/_form_nested.html.#{options.template}", "app/views/#{options.namespace}/#{plural_file_name}/_form.html.#{options.template}"
      template "views/new_nested.html.#{options.template}", "app/views/#{options.namespace}/#{plural_file_name}/new.html.#{options.template}"
      template "views/edit_nested.html.#{options.template}", "app/views/#{options.namespace}/#{plural_file_name}/edit.html.#{options.template}"
    end
    # Injects belongs_to + fae_nested_parent into the generated model.
    def inject_parent_info
      inject_into_file "app/models/#{file_name}.rb", after: "BaseModelConcern\n" do <<-RUBY
      \n  belongs_to :#{options.parent_model.underscore}, touch: true
      def fae_nested_parent
        :#{options.parent_model.underscore}
      end
      RUBY
      end
    end
    # Injects fae_nested_parent for the polymorphic association variant.
    def inject_polymorphic_info
      inject_into_file "app/models/#{file_name}.rb", after: "BaseModelConcern\n" do <<-RUBY
      def fae_nested_parent
        :#{polymorphic_name}
      end
      RUBY
      end
    end
  end
end
| 34.333333 | 147 | 0.70962 |
ab3b633e8461fd48a78b882d061c1f5ac9812167 | 173 | require "middleman-core"
require "middleman-ogp/version"
# Register the OGP extension with Middleman; the implementation file is
# required lazily, only when the :ogp extension is activated.
::Middleman::Extensions.register(:ogp) do
  require "middleman-ogp/extension"
  ::Middleman::OGP::OGPExtension
end
| 21.625 | 41 | 0.768786 |
392d141ea13e683dcba795d2df8ae939a3f1b48c | 339 | # frozen_string_literal: true
module Rails
  # Version components for the currently loaded Rails release.
  module VERSION
    MAJOR = 7
    MINOR = 0
    TINY  = 0
    PRE   = "alpha"
    STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
  end
  # Returns the version of the currently loaded Rails as a <tt>Gem::Version</tt>,
  # suitable for version comparisons.
  def self.gem_version
    Gem::Version.new(VERSION::STRING)
  end
end
| 18.833333 | 80 | 0.648968 |
91ddb9bde8a8d0f55771fe050d9a37129823a167 | 222 | module CarrierWave
module Support
module UriFilename
def self.filename(url)
path = url.split('?').first
URI.decode_www_form_component(path).gsub(/.*\/(.*?$)/, '\1')
end
end
end
end
| 18.5 | 68 | 0.59009 |
38d512a86cac8e2883155cda75b76c6affe43a0f | 7,700 | # AXKit
# Be sure to run `pod lib lint AXKit.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = 'AXKit'
  s.version          = '2.4.9'
  s.summary          = 'AXKit is kind'
  s.description      = <<-DESC
TODO: Add long description of the pod here.
                       DESC
  s.homepage         = 'https://github.com/aptx5788/AXKit'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'aptx5788' => '[email protected]' }
  s.source           = { :git => 'https://github.com/aptx5788/AXKit.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/xuandian'
  s.ios.deployment_target = '8.0'
  s.source_files = 'AXKit/*.{h,m}'
  # Most components share the same layout: sources under the component
  # directory with ARC enabled. Declaring them in a loop (in the original
  # order) removes ~20 copies of the same three-line stanza; components
  # with extra settings (AXTheme, AXRouter, AXURL) stay explicit below.
  declare_uniform = lambda do |parent, dir, names|
    names.each do |component|
      parent.subspec component do |sub|
        sub.source_files = "AXKit/#{dir}/#{component}/*.{h,m}"
        sub.requires_arc = true
      end
    end
  end
  s.subspec 'AXUIKit' do |ss|
    ss.source_files = 'AXKit/AXUIKit/*.{h,m}'
    ss.requires_arc = true
    declare_uniform.call(ss, 'AXUIKit', %w[
      AXButton AXCollectionView AXView AXTableView AXScrollView AXSlideCover
      AXLabel AXPhotoBrowserView AXHud AXCircleProgressView AXChart
      AXWaterWaveView AXWaveLineView AXLockView
    ])
  end
  s.subspec 'AXObject' do |ss|
    ss.source_files = 'AXKit/AXObject/*.{h,m}'
    ss.requires_arc = true
    declare_uniform.call(ss, 'AXObject', %w[
      AXDefine AXSingleton AXNewFeatureTool AXCalendar AXShockFeedback AXWeather
    ])
    # AXTheme ships non-source resources, so it globs its whole directory.
    ss.subspec 'AXTheme' do |sss|
      sss.source_files = 'AXKit/AXObject/AXTheme/**/*'
      sss.requires_arc = true
    end
    declare_uniform.call(ss, 'AXObject', %w[
      AXGetController AXGetImage AXDeviceInfo AXPluginManager AXModel AXString
    ])
    # AXRouter bundles asset resources alongside its sources.
    ss.subspec 'AXRouter' do |sss|
      sss.source_files = 'AXKit/AXObject/AXRouter/*.{h,m}'
      sss.requires_arc = true
      sss.resource_bundles = {
        'AXRouter' => ['AXKit/AXObject/AXRouter/Assets/*']
      }
    end
    # AXURL links a vendored static library and therefore needs -ObjC and
    # static-framework packaging.
    ss.subspec 'AXURL' do |sss|
      sss.source_files = 'AXKit/AXObject/AXURL/*.{h,m}'
      sss.requires_arc = true
      sss.vendored_libraries = 'AXKit/AXObject/AXURL/*.a'
      sss.xcconfig = { 'OTHER_LDFLAGS' => ' -ObjC' }
      sss.static_framework = true
    end
    declare_uniform.call(ss, 'AXObject', %w[
      AXHeartRateManager AXColor AXShareManager AXRatingManager
      AXPurchasesManager AXMailManager AXTransitionViewController
      AXAutoLayout AXAuthorization
    ])
  end
end
| 32.627119 | 100 | 0.548831 |
18c3b4173bcc88b2c4cda00889f7cd0d3bce3a33 | 1,872 | # coding: utf-8
module ONIX
  # ONIX message <Header> record: identifies sender and addressee, carries
  # message bookkeeping (number, repeat count, sent date) and message-wide
  # defaults (language, price type, currency, units).
  #
  # Mappings are declared with ROXML. Declaration order determines element
  # order when serialising back to XML, so do not reorder the accessors.
  class Header
    include ROXML

    xml_name "Header"

    # --- sender identity ---
    xml_accessor :from_ean_number, :from => "FromEANNumber"
    xml_accessor :from_san, :from => "FromSAN"
    xml_accessor :sender_identifiers, :from => "SenderIdentifier", :as => [ONIX::SenderIdentifier]
    xml_accessor :from_company, :from => "FromCompany"
    xml_accessor :from_person, :from => "FromPerson"
    xml_accessor :from_email, :from => "FromEmail"

    # --- addressee identity ---
    xml_accessor :to_ean_number, :from => "ToEANNumber"
    xml_accessor :to_san, :from => "ToSAN"
    xml_accessor :addressee_identifiers, :from => "AddresseeIdentifier", :as => [ONIX::AddresseeIdentifier]
    xml_accessor :to_company, :from => "ToCompany"
    xml_accessor :to_person, :from => "ToPerson"

    # --- message bookkeeping ---
    # NOTE(review): Fixnum was removed in Ruby 3.2, so `:as => Fixnum` raises
    # NameError on modern rubies; Integer is the drop-in replacement — confirm
    # against the ROXML version in use before changing.
    xml_accessor :message_number, :from => "MessageNumber"
    xml_accessor :message_repeat, :from => "MessageRepeat", :as => Fixnum
    # SentDate is written out as yyyymmdd; unparsable input reads as nil
    # rather than raising.
    xml_accessor(:sent_date, :from => "SentDate", :to_xml => ONIX::Formatters.yyyymmdd) do |val|
      begin
        Date.parse(val)
      rescue
        nil
      end
    end
    xml_accessor :message_note, :from => "MessageNote"

    # --- message-wide defaults ---
    xml_accessor :default_language_of_text, :from => "DefaultLanguageOfText"
    xml_accessor :default_price_type_code, :from => "DefaultPriceTypeCode", :as => Fixnum, :to_xml => ONIX::Formatters.two_digit
    xml_accessor :default_currency_code, :from => "DefaultCurrencyCode"
    xml_reader :default_linear_unit, :from => "DefaultLinearUnit" # deprecated in ONIX spec
    xml_reader :default_weight_unit, :from => "DefaultWeightUnit" # deprecated in ONIX spec
    xml_accessor :default_class_of_trade, :from => "DefaultClassOfTrade"

    # Start with empty identifier collections so callers can append without
    # nil checks.
    def initialize
      self.sender_identifiers = []
      self.addressee_identifiers = []
    end
  end
end
| 40.695652 | 129 | 0.667201 |
26fe80b8b0be34546ee2bb9f51807fc8036f33cf | 1,100 | cask '[email protected]' do
version '5.6.0f3,497a0f351392'
sha256 :no_check
url "https://download.unity3d.com/download_unity/497a0f351392/MacEditorTargetInstaller/UnitySetup-Samsung-TV-Support-for-Editor-5.6.0f3.pkg"
name 'SamsungTV Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-Samsung-TV-Support-for-Editor-5.6.0f3.pkg'
depends_on cask: '[email protected]'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-5.6.0f3"
FileUtils.move "/Applications/Unity-5.6.0f3", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-5.6.0f3"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-5.6.0f3/PlaybackEngines/STVPlayer'
end
| 30.555556 | 142 | 0.709091 |
874dd275edd73d9cdf4b965b2d36cbc79321b408 | 249 | module ActiveVlc
end
require 'active_support'
require 'activevlc/version'
require 'activevlc/parameters'
require 'activevlc/pipeline'
require 'activevlc/dsl'
require 'activevlc/libvlc'
require 'activevlc/runner'
require 'activevlc/syntactic_sugar'
| 20.75 | 35 | 0.827309 |
bfa5161484261c861249ae6331a3b7a84e1b509c | 2,192 | require 'test_helper'
class ModelTest < Minitest::Test
  # Minimal concrete model used to exercise the date-normalisation behaviour
  # inherited from ChangeHealth::Models::Model.
  class FakeModel < ChangeHealth::Models::Model
    property :dateObject, default: Date.new(2020, 5, 30)
    property :someDateString, default: '2020-04-30'
    property :someNotDateString, default: 'cat'
    property :booleanWithDateInName, default: true
    property :daate, default: '2020-06-30'
    property :daateObject, default: Date.new(2020, 7, 30)
  end

  describe 'model' do
    let(:fake) { FakeModel.new }

    describe '#DATE_FORMATTER' do
      it 'formats dates to Change Health format' do
        # Date objects collapse to the compact yyyymmdd wire format.
        formatted = ChangeHealth::Models::DATE_FORMATTER.call(fake.dateObject)
        assert_equal('20200530', formatted)
      end

      it 'formats date strings to Change Health format' do
        # ISO-8601 strings are recognised and reformatted too.
        formatted = ChangeHealth::Models::DATE_FORMATTER.call(fake.someDateString)
        assert_equal('20200430', formatted)
      end

      it 'leaves strings alone that are not dates' do
        passthrough = ChangeHealth::Models::DATE_FORMATTER.call(fake.someNotDateString)
        assert_equal(fake.someNotDateString, passthrough)
      end

      it 'leaves booleans alone' do
        assert_equal(true, ChangeHealth::Models::DATE_FORMATTER.call(fake.booleanWithDateInName))
      end
    end

    describe '#to_h' do
      let(:hashed) { fake.to_h }

      it 'translates any property with date in the name that is a dateish object' do
        assert_equal('20200530', hashed[:dateObject])
        assert_equal('20200430', hashed[:someDateString])
      end

      it 'leaves alone proprites named date that are not dates' do
        assert_equal('cat', hashed[:someNotDateString])
        assert_equal(true, hashed[:booleanWithDateInName])
      end

      it 'leave other properties completely alone' do
        # Only date-named properties are touched; everything else is verbatim.
        assert_equal('2020-06-30', hashed[:daate])
        assert_equal(Date.new(2020, 7, 30), hashed[:daateObject])
      end
    end

    describe '#as_json' do
      it 'equals the #to_h' do
        assert_equal(fake.to_h, fake.as_json)
      end

      it 'can take an arg' do
        # as_json must tolerate (and ignore) serializer options.
        assert_equal(fake.to_h, fake.as_json(bob: 10))
      end
    end

    describe '#to_json' do
      it 'equals the #to_h to json' do
        assert_equal(fake.to_h.to_json, fake.to_json)
      end
    end
  end
end
| 31.314286 | 113 | 0.673814 |
d56d245ca066993a62437536c6cf9ee196839f27 | 1,042 | class WireguardTools < Formula
desc "Tools for the WireGuard secure network tunnel"
homepage "https://www.wireguard.io/"
# Please only update version when the tools have been modified/updated,
# since the Linux module aspect isn't of utility for us.
url "https://git.zx2c4.com/WireGuard/snapshot/WireGuard-0.0.20170918.tar.xz"
sha256 "e083f18596574fb7050167090bfb4db4df09a1a99f3c1adc77f820c166368881"
head "https://git.zx2c4.com/WireGuard", :using => :git
bottle do
cellar :any_skip_relocation
sha256 "4126072919bbba83864deadbe2c3fab5ae02c7af73b7dc3771db54d98900547a" => :high_sierra
sha256 "b8293b2fa77eb22bd3c1c452b16c5526ee994814f4dbe1c47ae69162a5283709" => :sierra
sha256 "7b349e77994b70e10ee48f3da41bfd490708a14b13f44988cd798adb261dedd9" => :el_capitan
end
def install
system "make", "BASHCOMPDIR=#{bash_completion}", "WITH_BASHCOMPLETION=yes", "WITH_WGQUICK=no", "WITH_SYSTEMDUNITS=no", "PREFIX=#{prefix}", "-C", "src/tools", "install"
end
test do
system "#{bin}/wg", "help"
end
end
| 41.68 | 171 | 0.763916 |
ab3368750b61b45bd7f7699d85b15cd8caa5d639 | 1,354 | class Chakra < Formula
desc "The core part of the JavaScript engine that powers Microsoft Edge"
homepage "https://github.com/Microsoft/ChakraCore"
url "https://github.com/Microsoft/ChakraCore/archive/v1.11.16.tar.gz"
sha256 "81429055e51a786079002d33d3eae58771f8b7c383b3f47991d63e5be84a7f4d"
bottle do
cellar :any
sha256 "2ec63f90a86c7f733ad523dcaa7bf66d3e73d18bd12499f0585455b9527c275d" => :catalina
sha256 "1183bf174fc3413e7812c7448206136d35c803a4da1095ce9bcc1134adac8487" => :mojave
sha256 "6fb75a7c373ba765b95025f4de359ac6ca8111f7d363685cb3f9df4b518aec17" => :high_sierra
end
depends_on "cmake" => :build
depends_on "icu4c"
def install
args = [
"--lto-thin",
"--icu=#{Formula["icu4c"].opt_include}",
"--extra-defines=U_USING_ICU_NAMESPACE=1", # icu4c 61.1 compatability
"-j=#{ENV.make_jobs}",
"-y",
]
# Build dynamically for the shared library
system "./build.sh", *args
# Then statically to get a usable binary
system "./build.sh", "--static", *args
bin.install "out/Release/ch" => "chakra"
include.install Dir["out/Release/include/*"]
lib.install "out/Release/libChakraCore.dylib"
end
test do
(testpath/"test.js").write("print('Hello world!');\n")
assert_equal "Hello world!", shell_output("#{bin}/chakra test.js").chomp
end
end
| 33.02439 | 93 | 0.708272 |
6a3d006a06158bd1fc058b656d82a9cbe443cead | 1,683 | # frozen_string_literal: true
require 'ffi'
# Win32 SDK type aliases so FFI signatures can be written with the same type
# names Microsoft's API documentation uses.
FFI.typedef :uint16, :word
FFI.typedef :uint32, :dword
FFI.typedef :uintptr_t, :handle
FFI.typedef :buffer_inout, :lpwstr
FFI.typedef :pointer, :lpcvoid
FFI.typedef :pointer, :lpvoid
FFI.typedef :pointer, :lpdword
FFI.typedef :pointer, :pdword
FFI.typedef :pointer, :phandle
FFI.typedef :pointer, :pbool
FFI.typedef :pointer, :ulong_ptr
FFI.typedef :uint32, :win32_ulong
FFI.typedef :int32, :win32_long
# BOOL in Win32 is a 32-bit signed integer, not a one-byte bool.
FFI.typedef :int32, :win32_bool
FFI.typedef :uint16, :wchar
FFI.typedef :uintptr_t, :hwnd

# winerror.h code: the buffer was too small and more data is available.
ERROR_MORE_DATA = 234
# 32,767 wide characters — the extended NT path limit, deliberately larger
# than the classic 260-character MAX_PATH.
MAX_PATH = 32_767
# Convenience helpers layered onto the FFI gem's Pointer and Struct classes
# for reading Win32 wide strings, BOOLs and NULL-terminated struct lists.
module FFI
  WIN32FALSE = 0

  END_OF_WCHAR_STRING = "\0\0".encode('UTF-16LE')

  class Pointer
    # Read +char_length+ UTF-16 code units (2 bytes each — NOT bytes, and
    # typically excluding terminators) and return them as a UTF-8 string.
    def read_wide_string_with_length(char_length)
      utf16 = get_bytes(0, char_length * 2).force_encoding('UTF-16LE')
      utf16.encode('UTF-8')
    end

    # Read a wide string of unknown length two bytes at a time, stopping at
    # the first UTF-16 NUL; returns the collected text as UTF-8.
    def read_wide_string_without_length
      chunks = []
      offset = 0
      loop do
        pair = get_bytes(offset, 2)
        break if pair.encode('UTF-16LE') == END_OF_WCHAR_STRING

        chunks << pair
        offset += 2
      end
      chunks.join.force_encoding('UTF-16LE').encode('UTF-8')
    end

    # BOOL is always a 32-bit integer in Win32, and APIs disagree on whether
    # TRUE is 1 or merely non-zero — so treat anything but 0 as true.
    def read_win32_bool
      read_int32 != WIN32FALSE
    end
  end

  class Struct
    # Walk a NULL-terminated singly-linked list of structs (each node must
    # expose a :Next pointer member), yielding every node in order.
    def self.read_list(first_address)
      node = new(first_address)
      until node.to_ptr == Pointer::NULL
        yield(node)
        node = new(node[:Next])
      end
    end
  end
end
| 25.5 | 84 | 0.686869 |
f8fbb483c2744dfbf77f43dde3af7dbe29bca8ce | 1,313 | # This controller handles the login/logout function of the site.
require 'erb' # stdlib: ERB::Util.html_escape, used in note_failed_signin

class SessionsController < ApplicationController
  # Be sure to include AuthenticationSystem in Application Controller instead

  # Renders the login form (new.rhtml).
  def new
  end

  # Authenticates the submitted login/password pair. On success the user is
  # signed in (optionally with a persistent remember-me cookie) and redirected
  # to the admin home page; on failure the form is re-rendered with the
  # entered login preserved.
  def create
    logout_keeping_session!
    user = User.authenticate(params[:login], params[:password])
    if user
      # Protects against session fixation attacks, causes request forgery
      # protection if user resubmits an earlier form using back
      # button. Uncomment if you understand the tradeoffs.
      # reset_session
      self.current_user = user
      new_cookie_flag = (params[:remember_me] == "1")
      handle_remember_cookie! new_cookie_flag
      redirect_to admin_home_path
      flash[:notice] = "Logged in successfully"
    else
      note_failed_signin
      @login = params[:login]
      @remember_me = params[:remember_me]
      render :action => 'new'
    end
  end

  # Signs the current user out, killing the session and remember cookie.
  def destroy
    logout_killing_session!
    flash[:notice] = "You have been logged out."
    redirect_back_or_default('/')
  end

protected
  # Track failed login attempts
  def note_failed_signin
    # SECURITY FIX: HTML-escape the user-supplied login before interpolating
    # it into the flash message. Flash contents are often rendered unescaped,
    # so raw params[:login] here is a reflected-XSS vector (this mirrors the
    # upstream restful_authentication advisory fix). The log line keeps the
    # raw value, which is safe for plain-text logs.
    flash[:error] = "Couldn't log you in as '#{ERB::Util.html_escape(params[:login])}'"
    logger.warn "Failed login for '#{params[:login]}' from #{request.remote_ip} at #{Time.now.utc}"
  end
end
| 30.534884 | 99 | 0.696116 |
62fa5b271c8fd6087d9de1ddb6fb8b8f48ac5242 | 5,931 | # frozen_string_literal: true
require 'rails_helper'
# Controller spec covering the full CRUD surface of MetricsController plus
# its custom member actions (values, new_value, set_values, set).
RSpec.describe MetricsController, type: :controller do
  let(:user) { FactoryBot.create(:user) }
  let(:bproce) { FactoryBot.create(:bproce, user_id: user.id) }
  let(:valid_attributes) { { name: 'Metric name', description: 'description1', bproce_id: bproce.id, depth: 1 } }
  # NOTE(review): invalid_attributes is identical to valid_attributes — the
  # "invalid params" examples below work only because they stub Metric#save
  # to return false, not because these attributes actually fail validation.
  let(:invalid_attributes) { { name: 'Metric name', description: 'description1', bproce_id: bproce.id, depth: 1 } }
  let(:valid_session) { {} }
  # let! forces creation before each example so index has records to list.
  let!(:metric) { FactoryBot.create :metric, bproce_id: bproce.id }
  let!(:metric1) { FactoryBot.create :metric, bproce_id: bproce.id }
  let(:input) { Metric.all }
  let(:params) { {} }

  describe 'GET index' do
    it 'assigns all metrics as @metrics' do
      get :index, params: {}
      expect(response).to be_successful
      expect(response).to render_template('metrics/index')
    end

    it 'loads all of the metrics into @metricss' do
      get :index
      expect(assigns(:metrics)).to match_array([metric, metric1])
    end
  end

  describe 'GET show' do
    # NOTE(review): example body is fully commented out (SQLite time-zone
    # limitation), so this currently passes vacuously.
    it 'assigns the requested metric as @metric' do
      # get :show, params: { id: metric.to_param, depth: 3 }
      # !!! error - Time zones not supported for SQLite
      # expect(response).to be_successful
      # expect(assigns(:metric)).to eq(@metric)
    end
  end

  describe 'GET new' do
    it 'assigns a new metric as @metric' do
      get :new, params: {}
      expect(assigns(:metric)).to be_a_new(Metric)
    end
  end

  describe 'GET edit' do
    it 'assigns the requested metric as @metric' do
      # local `metric` shadows the let!(:metric) defined above
      metric = Metric.create! valid_attributes
      get :edit, params: { id: metric.to_param }
      expect(assigns(:metric)).to eq(metric)
    end
  end

  describe 'POST create' do
    describe 'with valid params' do
      it 'creates a new Metric' do
        expect do
          post :create, params: { metric: valid_attributes }
        end.to change(Metric, :count).by(1)
      end

      it 'assigns a newly created metric as @metric' do
        post :create, params: { metric: valid_attributes }
        expect(assigns(:metric)).to be_a(Metric)
        expect(assigns(:metric)).to be_persisted
      end

      it 'redirects to the created metric' do
        post :create, params: { metric: valid_attributes }
        expect(response).to redirect_to(Metric.last)
      end
    end

    describe 'with invalid params' do
      # failure is simulated by stubbing #save, not by bad attributes
      it 'assigns a newly created but unsaved metric as @metric' do
        expect_any_instance_of(Metric).to receive(:save).and_return(false)
        post :create, params: { metric: invalid_attributes }
        expect(assigns(:metric)).to be_a_new(Metric)
      end

      it "re-renders the 'new' template" do
        expect_any_instance_of(Metric).to receive(:save).and_return(false)
        post :create, params: { metric: { 'bproce' => 'invalid value' } }
        expect(response).to render_template('new')
      end
    end
  end

  describe 'PUT update' do
    describe 'with valid params' do
      it 'updates the requested metric' do
        metric = Metric.create! valid_attributes
        expect_any_instance_of(Metric).to receive(:save).at_least(:once)
        put :update, params: { id: metric.to_param, metric: { name: 'agent name' } }
      end

      it 'assigns the requested metric as @metric' do
        metric = Metric.create! valid_attributes
        put :update, params: { id: metric.to_param, metric: valid_attributes }
        expect(assigns(:metric)).to eq(metric)
      end

      it 'redirects to the metric' do
        metric = Metric.create! valid_attributes
        put :update, params: { id: metric.to_param, metric: valid_attributes }
        expect(response).to redirect_to(metric)
      end
    end

    describe 'with invalid params' do
      it 'assigns the metric as @metric' do
        metric = Metric.create! valid_attributes
        expect_any_instance_of(Metric).to receive(:save).and_return(false)
        put :update, params: { id: metric.to_param, metric: { 'bproce' => 'invalid value' } }
        expect(assigns(:metric)).to eq(metric)
      end

      it "re-renders the 'edit' template" do
        metric = Metric.create! valid_attributes
        expect_any_instance_of(Metric).to receive(:save).and_return(false)
        put :update, params: { id: metric.to_param, metric: { 'bproce' => 'invalid value' } }
        expect(response).to render_template('edit')
      end
    end
  end

  describe 'DELETE destroy' do
    it 'destroys the requested metric' do
      metric = Metric.create! valid_attributes
      expect do
        delete :destroy, params: { id: metric.to_param }
      end.to change(Metric, :count).by(-1)
    end

    it 'redirects to the metrics list' do
      metric = Metric.create! valid_attributes
      delete :destroy, params: { id: metric.to_param }
      expect(response).to redirect_to(metrics_url)
    end
  end

  # --- custom member actions ---
  it 'render values' do
    get :values, params: { id: metric.to_param }
    expect(response).to render_template :values
  end

  it 'render new' do
    get :new_value, params: { id: metric.to_param }
    expect(response).to render_template :new
  end

  it 'set_values redirect to show' do
    metric = create :metric, bproce_id: bproce.id, mtype: 'PGSQL'
    get :set_values, params: { id: metric.to_param }
    expect(response).to redirect_to(action: :show)
  end

  # NOTE(review): this example currently asserts nothing — the request and
  # expectation are commented out, so it only creates a record.
  it 'set render' do
    metric = create :metric, bproce_id: bproce.id, mhash: '123'
    # get :set, params: { id: metric.to_param, v: '1', h: '123' }
    # expect(response.status).to eq(404)
  end

  # subject { described_class.call(input, params) }

  # specify "no matching params" do
  #   expect(subject).to eq input
  # end

  # specify "searching" do
  #   params[:search] = "sms"
  #   # p input.search('www')
  #   # expect(subject).to eq input.search('sms')
  # end

  # specify 'depth' do
  #   params[:depth] = 1
  #   expect(subject).to eq input.depth('sms')
  # end
end
| 33.134078 | 115 | 0.647277 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.