hexsha (stringlengths 40 to 40) | size (int64 2 to 1.01M) | content (stringlengths 2 to 1.01M) | avg_line_length (float64 1.5 to 100) | max_line_length (int64 2 to 1k) | alphanum_fraction (float64 0.25 to 1) |
---|---|---|---|---|---|
1125635716f5c48845c616830e834692a8c69a3c | 164 | class AddAdminToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :admin, :boolean, :default => false
add_index :users, :admin
end
end
| 23.428571 | 58 | 0.707317 |
bfb0f7435ab921494eebf035d1cbff974f74874c | 283 | class ChangeDefaultValueMarkingSchemeType < ActiveRecord::Migration[4.2]
def self.up
change_column :assignments, :marking_scheme_type, :string, :default => 'rubric'
end
def self.down
change_column :assignments, :marking_scheme_type, :string, :default => nil
end
end
| 28.3 | 83 | 0.75265 |
e92099a016dd9883fe7bee2b98572e346ab603ac | 2,166 | class Libsoup < Formula
desc "HTTP client/server library for GNOME"
homepage "https://wiki.gnome.org/Projects/libsoup"
url "https://download.gnome.org/sources/libsoup/2.68/libsoup-2.68.4.tar.xz"
sha256 "2d50b12922cc516ab6a7c35844d42f9c8a331668bbdf139232743d82582b3294"
bottle do
sha256 "70b8fe5ee13ee2bb2c2b428f6620190b9cd38fe00bf9aca21d195f58535b7c36" => :catalina
sha256 "c542ab518dac51f074fb6fa32d72180852ea26f5afd9a2fb590cf6340a88fdef" => :mojave
sha256 "1bbbf474fe32edbfe877026778ea19942ae4c3d23208a0933d225b14b38a28de" => :high_sierra
end
depends_on :macos # Due to Python 2
depends_on "gobject-introspection" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "glib-networking"
depends_on "gnutls"
depends_on "libpsl"
depends_on "vala"
unless OS.mac?
depends_on "krb5"
depends_on "python@2" => :build
end
uses_from_macos "libxml2"
def install
mkdir "build" do
system "meson", "--prefix=#{prefix}", ".."
system "ninja", "-v"
system "ninja", "install", "-v"
end
end
test do
# If this test starts failing, the problem might very well be in glib-networking instead of libsoup
(testpath/"test.c").write <<~EOS
#include <libsoup/soup.h>
int main(int argc, char *argv[]) {
SoupMessage *msg = soup_message_new("GET", "https://brew.sh");
SoupSession *session = soup_session_new();
soup_session_send_message(session, msg); // blocks
g_assert_true(SOUP_STATUS_IS_SUCCESSFUL(msg->status_code));
g_object_unref(msg);
g_object_unref(session);
return 0;
}
EOS
ENV.libxml2
gettext = Formula["gettext"]
glib = Formula["glib"]
flags = %W[
-I#{gettext.opt_include}
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{include}/libsoup-2.4
-D_REENTRANT
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{lib}
-lgio-2.0
-lglib-2.0
-lgobject-2.0
-lsoup-2.4
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
| 29.671233 | 102 | 0.661127 |
1aa026f4347abd638a2ae6dc17f019bdad5742bc | 1,272 | #--
# ===============================================================================
# Copyright (c) 2005,2006,2007,2008 Christopher Kleckner
# All rights reserved
#
# This file is part of the Rio library for ruby.
#
# Rio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Rio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rio; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ===============================================================================
#++
#
# To create the documentation for Rio run the command
# ruby build_doc.rb
# from the distribution directory.
#
# Suggested Reading
# * RIO::Doc::SYNOPSIS
# * RIO::Doc::INTRO
# * RIO::Doc::HOWTO
# * RIO::Doc::EXAMPLES
# * RIO::Rio
#
module RIO
module FS
class Base
end
end
end
| 30.285714 | 82 | 0.633648 |
214f04b862233e04821c69f82497434c775ebfd9 | 2,181 | module ScoobySnacks
class BlacklightConfiguration
def self.add_all_fields(config)
self.add_show_fields(config)
self.add_search_fields(config)
self.add_facet_fields(config)
self.add_sort_fields(config)
self.add_search_result_display_fields(config)
end
def self.add_show_fields(config)
self.schema.display_fields.each do |field|
begin
config.add_show_field field.solr_name, label: field.label
rescue
Rails.logger.error "error adding field: #{field.solr_name} for property #{field.label}. Redundant definition?"
end
end
end
def self.add_search_fields(config)
self.schema.searchable_fields.each do |field|
config.add_search_field(field.name, label: field.label, include_in_advanced_search: true) do |new_field|
new_field.solr_parameters = {
qf: field.solr_search_name.to_s,
pf: field.solr_search_name
}
end
end
end
def self.add_facet_fields(config)
self.schema.facet_fields.each do |field|
config.add_facet_field field.solr_facet_name, {label: field.label, limit: field.facet_limit}
end
end
def self.add_sort_fields(config)
self.schema.sortable_fields.each do |field|
config.add_sort_field "#{field.solr_sort_name} desc", label: "#{field.label} \u25BC"
config.add_sort_field "#{field.solr_sort_name} asc", label: "#{field.label} \u25B2"
end
end
def self.add_search_result_display_fields(config)
self.schema.search_result_display_fields.each do |field|
config.add_index_field(field.solr_name, self.get_index_options(field))
end
end
def self.get_index_options field
options = {}
options[:label] = field.label || field.name
options[:index_itemprop] = field.itemprop if field.itemprop
options[:helper_method] = :date if (field.data_type == "date")
options[:link_to_search] = field.solr_search_name if field.searchable?
return options
end
private
def self.schema
ScoobySnacks::METADATA_SCHEMA
end
end
end
| 31.608696 | 121 | 0.673544 |
1a4192d1825e5ce5123dca65825e3e70987a7f83 | 1,472 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
require 'uri'
require 'cgi'
require 'date'
require 'json'
require 'base64'
require 'erb'
require 'securerandom'
require 'time'
require 'timeliness'
require 'faraday'
require 'faraday-cookie_jar'
require 'concurrent'
require 'ms_rest'
require '2017-08-01/generated/azure_mgmt_security/module_definition'
require 'ms_rest_azure'
module Azure::Security::Mgmt::V2017_08_01
autoload :ComplianceResults, '2017-08-01/generated/azure_mgmt_security/compliance_results.rb'
autoload :SecurityCenter, '2017-08-01/generated/azure_mgmt_security/security_center.rb'
module Models
autoload :ComplianceResultList, '2017-08-01/generated/azure_mgmt_security/models/compliance_result_list.rb'
autoload :Resource, '2017-08-01/generated/azure_mgmt_security/models/resource.rb'
autoload :ComplianceResult, '2017-08-01/generated/azure_mgmt_security/models/compliance_result.rb'
autoload :AscLocation, '2017-08-01/generated/azure_mgmt_security/models/asc_location.rb'
autoload :ResourceStatus, '2017-08-01/generated/azure_mgmt_security/models/resource_status.rb'
end
end
| 44.606061 | 141 | 0.669158 |
6a14589ad014f911c27885ef992bb2d7d49ba59d | 808 | Pod::Spec.new do |s|
s.name = "socket.IO"
s.version = "0.4.1"
s.summary = "socket.io v0.7.2+ for iOS devices."
s.description = <<-DESC
Interface to communicate between Objective C and Socket.IO with the help of websockets. It's based on fpotter's socketio-cocoa and uses other libraries/classes like SocketRocket, json-framework (optional) and jsonkit (optional).
DESC
s.homepage = "https://github.com/pkyeck/socket.IO-objc"
s.license = 'MIT'
s.author = { "Philipp Kyeck" => "[email protected]" }
s.source = { :git => "https://github.com/pkyeck/socket.IO-objc.git", :tag => '0.4.1' }
s.source_files = '*.{h,m}'
s.ios.deployment_target = '5.0'
s.requires_arc = true
s.dependency 'SocketRocket', '~> 0.2'
end
| 38.47619 | 232 | 0.626238 |
6ad5f78df855de00b178bccb718d0176ec1ab913 | 4,205 | class ManageIQ::Providers::Redhat::InfraManager::Vm < ManageIQ::Providers::InfraManager::Vm
include_concern 'Operations'
include_concern 'RemoteConsole'
include_concern 'Reconfigure'
include_concern 'ManageIQ::Providers::Redhat::InfraManager::VmOrTemplateShared'
supports :migrate do
if blank? || orphaned? || archived?
unsupported_reason_add(:migrate, "Migrate operation is not supported.")
elsif !ext_management_system.supports_migrate?
unsupported_reason_add(:migrate, 'RHV API version does not support migrate')
end
end
supports :reconfigure_disks do
if storage.blank?
unsupported_reason_add(:reconfigure_disks, _('storage is missing'))
elsif ext_management_system.blank?
unsupported_reason_add(:reconfigure_disks, _('The virtual machine is not associated with a provider'))
elsif !ext_management_system.supports_reconfigure_disks?
unsupported_reason_add(:reconfigure_disks, _('The provider does not support reconfigure disks'))
end
end
supports_not :reset
supports :publish do
if blank? || orphaned? || archived?
unsupported_reason_add(:publish, _('Publish operation is not supported'))
elsif ext_management_system.blank?
unsupported_reason_add(:publish, _('The virtual machine is not associated with a provider'))
elsif !ext_management_system.supports_publish?
unsupported_reason_add(:publish, _('This feature is not supported by the api version of the provider'))
elsif power_state != "off"
unsupported_reason_add(:publish, _('The virtual machine must be down'))
end
end
supports :conversion_host
supports :reconfigure_network_adapters
# supports :reconfigure_disksize
supports :reconfigure_disksize do
unsupported_reason_add(:reconfigure_disksize, 'Cannot resize disks of a VM with snapshots') if snapshots.count > 1
end
POWER_STATES = {
'up' => 'on',
'powering_up' => 'on',
'down' => 'off',
'suspended' => 'suspended',
}.freeze
def provider_object(connection = nil)
ManageIQ::Providers::Redhat::InfraManager::OvirtServices::V4.new(:ems => ext_management_system).get_vm_proxy(self, connection)
end
def scan_via_ems?
true
end
def parent_cluster
rp = parent_resource_pool
rp && rp.detect_ancestor(:of_type => "EmsCluster").first
end
alias owning_cluster parent_cluster
alias ems_cluster parent_cluster
def disconnect_storage(_s = nil)
return unless active?
vm_storages = ([storage] + storages).compact.uniq
return if vm_storages.empty?
vm_disks = collect_disks
storage = vm_disks.blank? ? nil : vm_storages.select { |store| !vm_disks.include?(store.ems_ref) }
super(storage)
end
def collect_disks
return [] if hardware.nil?
disks = hardware.disks.map do |disk|
unless disk.storage.nil?
"#{disk.storage.ems_ref}/disks/#{disk.filename}"
end
end
ext_management_system.ovirt_services.collect_disks_by_hrefs(disks.compact)
end
def exists_on_provider?
return false unless ext_management_system
ext_management_system.ovirt_services.vm_exists_on_provider?(self)
end
def params_for_create_snapshot
{
:fields => [
{
:component => 'textarea',
:name => 'description',
:id => 'description',
:label => _('Description'),
:isRequired => true,
:validate => [{:type => 'required'}],
},
{
:component => 'switch',
:name => 'memory',
:id => 'memory',
:label => _('Snapshot VM memory'),
:onText => _('Yes'),
:offText => _('No'),
:isDisabled => current_state != 'on',
:helperText => _('Snapshotting the memory is only available if the VM is powered on.'),
},
],
}
end
#
# UI Button Validation Methods
#
def has_required_host?
true
end
def self.calculate_power_state(raw_power_state)
POWER_STATES[raw_power_state] || super
end
def self.display_name(number = 1)
n_('Virtual Machine (Red Hat)', 'Virtual Machines (Red Hat)', number)
end
end
| 30.693431 | 130 | 0.673246 |
6a9ae4ac517a15b950d605cf7642e3fed7dca4ec | 421 | cask 'dropzone' do
version '3.6.4'
sha256 '41fcdb7eb2f8bbcaa1a860e8aa63a9fdf66932168bcb693485fdbdb26d422a93'
url "https://aptonic.com/dropzone3/sparkle/Dropzone-#{version}.zip"
appcast 'https://aptonic.com/sparkle/updates.xml',
checkpoint: 'c31c4a066ab90115e80741f800a518c0d3546f581b9fa9d7d0fb08fc5ff07be8'
name 'Dropzone'
homepage 'https://aptonic.com/'
app "Dropzone #{version.major}.app"
end
| 32.384615 | 88 | 0.767221 |
d5dc9f7f544836b7191913965ca7cc138a4c1ad4 | 23 | module SebusHelper
end
| 7.666667 | 18 | 0.869565 |
1870b8850a811ff3ee2467d0cd51102db1e1b6db | 1,506 | # -*- encoding: utf-8 -*-
# stub: nio4r 2.3.1 ruby lib
# stub: ext/nio4r/extconf.rb
Gem::Specification.new do |s|
s.name = "nio4r".freeze
s.version = "2.3.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Tony Arcieri".freeze]
s.date = "2018-05-02"
s.description = "Cross-platform asynchronous I/O primitives for scalable network clients and servers. Inspired by the Java NIO API, but simplified for ease-of-use.".freeze
s.email = ["[email protected]".freeze]
s.extensions = ["ext/nio4r/extconf.rb".freeze]
s.files = ["ext/nio4r/extconf.rb".freeze]
s.homepage = "https://github.com/socketry/nio4r".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.2.2".freeze)
s.rubygems_version = "3.0.3".freeze
s.summary = "New IO for Ruby".freeze
s.installed_by_version = "3.0.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>.freeze, [">= 0"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
else
s.add_dependency(%q<bundler>.freeze, [">= 0"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
end
else
s.add_dependency(%q<bundler>.freeze, [">= 0"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
end
end
| 37.65 | 173 | 0.672643 |
ac88805529b5a56ceb96025db73c6ad055a9396d | 975 | require 'spec_helper'
describe Taobao do
describe 'set the public API key' do
it 'should have rw access' do
Taobao.public_key = :test
Taobao.public_key.should == :test
end
end
describe 'set the private API key' do
it 'should be write-only' do
Taobao.private_key = :test
expect { Taobao.private_key }
.to raise_error NoMethodError
end
end
describe 'API request' do
it 'should always return a Hash object' do
Net::HTTP.stub(:post_form).and_return 'category'.to_response
result = Taobao.api_request(method: 'taobao.itemcats.get',
fields: 'cid,parent_cid,name,is_parent', cids: 0)
result.should be_a_kind_of(Hash)
end
end
describe 'failed API request' do
it 'should throws an exception' do
Net::HTTP.stub(:post_form).and_return 'error'.to_response
expect { Taobao.api_request({}) }
.to raise_error(Taobao::ApiError, 'Invalid arguments:cid')
end
end
end | 30.46875 | 66 | 0.674872 |
0351cec1ad5260efd50d7d4df6dae1f19e66f137 | 68 | module Jsl
module Identity
VERSION = '1.1.6'.freeze
end
end
| 11.333333 | 28 | 0.661765 |
7a268ac0bc68929e282b8fe5db9e8ad021834bfb | 5,328 | class Qemu < Formula
desc "Emulator for x86 and PowerPC"
homepage "https://www.qemu.org/"
url "https://download.qemu.org/qemu-6.1.0.tar.xz"
sha256 "eebc089db3414bbeedf1e464beda0a7515aad30f73261abc246c9b27503a3c96"
license "GPL-2.0-only"
revision 1
head "https://git.qemu.org/git/qemu.git", branch: "master"
bottle do
sha256 arm64_monterey: "da4ef0870a91f46aff0c7ad70e55b7a50f42bc2c3987964bd0bc309d2045b9df"
sha256 arm64_big_sur: "94b094a62401c3384e57c572f1009545bd94765426ba39a7b0878cb883d0220a"
sha256 monterey: "77d4932355c38028915d640bc74936f5a7b4c2bb731177914d0275239b996d22"
sha256 big_sur: "5213e72d5dc5641593b415f5e37618cbd3d1e291d25c4e9478c86b5b8a9c8f08"
sha256 catalina: "561fa5f3d141ae025fe5e611957af4b33ff9b5df614e9a307fecce1645fb3170"
sha256 mojave: "5d938b8949e5d2cf4d41cca27ce4bfd5cfc17dc27f0ec45b6e8b27ab99cc2e87"
sha256 x86_64_linux: "a1447609f66aeaf33aefb8f9bbe3119b58a24374e3c4a102ec173128229c4f09"
end
depends_on "libtool" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "gnutls"
depends_on "jpeg-turbo"
depends_on "libpng"
depends_on "libslirp"
depends_on "libssh"
depends_on "libusb"
depends_on "lzo"
depends_on "ncurses"
depends_on "nettle"
depends_on "pixman"
depends_on "snappy"
depends_on "vde"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
# 820KB floppy disk image file of FreeDOS 1.2, used to test QEMU
resource "test-image" do
url "https://www.ibiblio.org/pub/micro/pc-stuff/freedos/files/distributions/1.2/FD12FLOPPY.zip"
sha256 "81237c7b42dc0ffc8b32a2f5734e3480a3f9a470c50c14a9c4576a2561a35807"
end
if Hardware::CPU.arm?
patch do
url "https://patchwork.kernel.org/series/548227/mbox/"
sha256 "5b9c9779374839ce6ade1b60d1377c3fc118bc43e8482d0d3efa64383e11b6d3"
end
end
def install
ENV["LIBTOOL"] = "glibtool"
args = %W[
--prefix=#{prefix}
--cc=#{ENV.cc}
--host-cc=#{ENV.cc}
--disable-bsd-user
--disable-guest-agent
--enable-curses
--enable-libssh
--enable-slirp=system
--enable-vde
--extra-cflags=-DNCURSES_WIDECHAR=1
--disable-sdl
--disable-gtk
]
# Sharing Samba directories in QEMU requires the samba.org smbd which is
# incompatible with the macOS-provided version. This will lead to
# silent runtime failures, so we set it to a Homebrew path in order to
# obtain sensible runtime errors. This will also be compatible with
# Samba installations from external taps.
args << "--smbd=#{HOMEBREW_PREFIX}/sbin/samba-dot-org-smbd"
args << "--enable-cocoa" if OS.mac?
system "./configure", *args
system "make", "V=1", "install"
end
test do
expected = build.stable? ? version.to_s : "QEMU Project"
assert_match expected, shell_output("#{bin}/qemu-system-aarch64 --version")
assert_match expected, shell_output("#{bin}/qemu-system-alpha --version")
assert_match expected, shell_output("#{bin}/qemu-system-arm --version")
assert_match expected, shell_output("#{bin}/qemu-system-cris --version")
assert_match expected, shell_output("#{bin}/qemu-system-hppa --version")
assert_match expected, shell_output("#{bin}/qemu-system-i386 --version")
assert_match expected, shell_output("#{bin}/qemu-system-m68k --version")
assert_match expected, shell_output("#{bin}/qemu-system-microblaze --version")
assert_match expected, shell_output("#{bin}/qemu-system-microblazeel --version")
assert_match expected, shell_output("#{bin}/qemu-system-mips --version")
assert_match expected, shell_output("#{bin}/qemu-system-mips64 --version")
assert_match expected, shell_output("#{bin}/qemu-system-mips64el --version")
assert_match expected, shell_output("#{bin}/qemu-system-mipsel --version")
assert_match expected, shell_output("#{bin}/qemu-system-nios2 --version")
assert_match expected, shell_output("#{bin}/qemu-system-or1k --version")
assert_match expected, shell_output("#{bin}/qemu-system-ppc --version")
assert_match expected, shell_output("#{bin}/qemu-system-ppc64 --version")
assert_match expected, shell_output("#{bin}/qemu-system-riscv32 --version")
assert_match expected, shell_output("#{bin}/qemu-system-riscv64 --version")
assert_match expected, shell_output("#{bin}/qemu-system-rx --version")
assert_match expected, shell_output("#{bin}/qemu-system-s390x --version")
assert_match expected, shell_output("#{bin}/qemu-system-sh4 --version")
assert_match expected, shell_output("#{bin}/qemu-system-sh4eb --version")
assert_match expected, shell_output("#{bin}/qemu-system-sparc --version")
assert_match expected, shell_output("#{bin}/qemu-system-sparc64 --version")
assert_match expected, shell_output("#{bin}/qemu-system-tricore --version")
assert_match expected, shell_output("#{bin}/qemu-system-x86_64 --version")
assert_match expected, shell_output("#{bin}/qemu-system-xtensa --version")
assert_match expected, shell_output("#{bin}/qemu-system-xtensaeb --version")
resource("test-image").stage testpath
assert_match "file format: raw", shell_output("#{bin}/qemu-img info FLOPPY.img")
end
end
| 43.317073 | 99 | 0.725038 |
d5509e971b322e9c218c24232fd7598aefc2e413 | 746 | name 'zabbix'
maintainer 'Nacer Laradji'
maintainer_email '[email protected]'
license 'Apache 2.0'
description 'Installs/Configures Zabbix Agent/Server'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.8.0'
supports 'ubuntu', '>= 10.04'
supports 'debian', '>= 6.0'
supports 'redhat', '>= 5.0'
supports 'centos', '>= 5.0'
supports 'oracle', '>= 5.0'
supports 'windows'
depends 'apache2', '>= 1.0.8'
depends 'database', '>= 1.3.0'
depends 'mysql', '>= 1.3.0'
depends 'ufw', '>= 0.6.1'
depends 'postgresql'
depends 'php-fpm'
depends 'nginx', '>= 1.0.0'
depends 'ark', '>= 0.7.2'
depends 'chocolatey'
depends 'apt'
depends 'yum'
depends 'java'
depends 'oracle-instantclient'
depends 'php'
depends 'yum-epel'
| 25.724138 | 72 | 0.686327 |
e9925916ad652746b24dc50b257988712913397d | 2,531 | # frozen_string_literal: true
require "rails_helper"
require "generator_spec/test_case"
require File.expand_path("../../../lib/generators/active_job_reporter/install_generator", __FILE__)
RSpec.describe ActiveJobReporter::InstallGenerator, type: :generator do
include GeneratorSpec::TestCase
destination File.expand_path("../tmp", __FILE__)
after(:all) { prepare_destination } # cleanup the tmp directory
describe "no options" do
before(:all) do
prepare_destination
run_generator
end
it "generates a migration for creating the 'jobs' table" do
expect(destination_root).to(
have_structure do
directory("db") do
directory("migrate") do
migration("create_jobs") do
contains "class CreateJobs"
contains "def change"
contains "create_table :jobs"
contains "create_table :job_objects"
contains "create_table :job_messages"
end
end
end
end
)
end
it "generates an initializer" do
expect(destination_root).to(
have_structure do
directory("config") do
directory("initializers") do
file("active_job_reporter.rb") do
contains "config.jobs_table_name = \"jobs\""
end
end
end
end
)
end
end
describe "`--jobs-table-name` option changed" do
before(:all) do
prepare_destination
run_generator %w(--jobs-table-name=active_jobs)
end
it "generates a migration for creating the 'active_jobs' table" do
expect(destination_root).to(
have_structure do
directory("db") do
directory("migrate") do
migration("create_jobs") do
contains "class CreateJobs"
contains "def change"
contains "create_table :active_jobs"
contains "create_table :active_job_objects"
contains "create_table :active_job_messages"
end
end
end
end
)
end
it "generates an initializer" do
expect(destination_root).to(
have_structure do
directory("config") do
directory("initializers") do
file("active_job_reporter.rb") do
contains "config.jobs_table_name = \"active_jobs\""
end
end
end
end
)
end
end
end
| 27.813187 | 99 | 0.580008 |
38fd90f344aa663d4408a3d6d856dc317bd8c098 | 29 | module UserQuizzesHelper
end
| 9.666667 | 24 | 0.896552 |
f882d1c612345eb1dd1186f6bdd35e477e67f0f1 | 1,389 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
class AddStickedOnFieldToMessages < ActiveRecord::Migration[5.1]
def change
add_column :messages, :sticked_on, :datetime, default: nil, null: true
end
end
| 39.685714 | 91 | 0.758819 |
b95659cdc5d307cf935f10892345916b4167ff91 | 595 | class Topic < ActiveRecord::Base
has_many :replies, :dependent => :destroy, :order => 'replies.created_at DESC'
belongs_to :project
# pretend find and count were extended and accept an extra option
# if there is a :foo option, prepend its value to collection
def self.find(*args)
more = []
more << args.last.delete(:foo) if args.last.is_a?(Hash) and args.last[:foo]
res = super
more.empty?? res : more + res
end
# if there is a :foo option, always return 100
def self.count(*args)
return 100 if args.last.is_a?(Hash) and args.last[:foo]
super
end
end
| 29.75 | 80 | 0.677311 |
f7930534e13215abe6218d29d39a48071ccdb233 | 81 | require 'sinatra'
get '/' do
File.read(File.join('public', "index.html"))
end
| 11.571429 | 45 | 0.654321 |
5dc8cd446f020b523115834a8e619249faf8bc13 | 2,109 | # frozen_string_literal: true
module Files
class UserCipherUse
attr_reader :options, :attributes
def initialize(attributes = {}, options = {})
@attributes = attributes || {}
@options = options || {}
end
# int64 - UserCipherUse ID
def id
@attributes[:id]
end
# string - The protocol and cipher employed
def protocol_cipher
@attributes[:protocol_cipher]
end
# date-time - The earliest recorded use of this combination of interface and protocol and cipher (for this user)
def created_at
@attributes[:created_at]
end
# string - The interface accessed
def interface
@attributes[:interface]
end
# date-time - The most recent use of this combination of interface and protocol and cipher (for this user)
def updated_at
@attributes[:updated_at]
end
# int64 - ID of the user who performed this access
def user_id
@attributes[:user_id]
end
# Parameters:
# user_id - int64 - User ID. Provide a value of `0` to operate the current session's user.
# cursor - string - Used for pagination. Send a cursor value to resume an existing list from the point at which you left off. Get a cursor from an existing list via the X-Files-Cursor-Next header.
# per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
def self.list(params = {}, options = {})
raise InvalidParameterError.new("Bad parameter: user_id must be an Integer") if params.dig(:user_id) and !params.dig(:user_id).is_a?(Integer)
raise InvalidParameterError.new("Bad parameter: cursor must be an String") if params.dig(:cursor) and !params.dig(:cursor).is_a?(String)
raise InvalidParameterError.new("Bad parameter: per_page must be an Integer") if params.dig(:per_page) and !params.dig(:per_page).is_a?(Integer)
List.new(UserCipherUse, params) do
Api.send_request("/user_cipher_uses", :get, params, options)
end
end
def self.all(params = {}, options = {})
list(params, options)
end
end
end
| 34.57377 | 204 | 0.675676 |
bfd73496199b7829668723e39532e591a4082ee7 | 7,232 | # frozen_string_literal: true
require "dry/initializer"
require "dry/ability/t"
module Dry
module Ability
module Controller
# @private
class Resource
include Initializer[undefined: false].define -> do
param :mediator, T.Instance(ResourceMediator)
param :controller, T.Instance(Controller::Mixin)
option :action_name, T['params.symbol'], default: proc { @controller.action_name.to_sym }
option :controller_name, T['params.symbol'], default: proc { @controller.controller_name.to_sym }
option :is_member, T['bool'], default: proc { @mediator.member_action?(action_name, params) }
option :is_collection, T['bool'], default: proc { @mediator.collection_action?(action_name) }
end
alias_method :member_action?, :is_member
alias_method :collection_action?, :is_collection
delegate :params, to: :controller
delegate :name, to: :mediator
delegate_missing_to :mediator
def call
@controller.instance_variable_set(:@_ability_resource, self)
retval = nil
@mediator.sequence.each do |sym|
retval = public_send(sym)
end
retval
end
def load_and_authorize_resource
load_resource
authorize_resource
end
def load_resource
return if skip?(:load)
if load_instance?
self.resource_instance ||= load_resource_instance
elsif collection_action?
self.collection_instance ||= load_collection
end
end
def authorize_resource
return if skip?(:authorize)
@controller.authorize!(authorization_action, resource_instance || resource_class_with_parent)
end
def parent?
@mediator.parent.nil? ? @mediator.collection_name != controller_name.to_sym : @mediator.parent?
end
def skip?(behavior)
options = @controller.class.cancan_skipper.dig(behavior, name)
return false if options.nil?
options.blank? ||
options[:except] && !action_exists_in?(options[:except]) ||
action_exists_in?(options[:only])
end
def load_resource_instance
raise NotImplementedError
end
def resource_base
raise NotImplementedError
end
def load_instance?
parent? || member_action?
end
# def load_collection?
# collection_action?
# # current_ability.has_scope?(authorization_action, resource_class) || resource_base.respond_to?(:accessible_by)
# end
def load_collection
current_ability.scope_for(authorization_action, resource_class) do
resource_base.accessible_by(current_ability, authorization_action)
end
end
def assign_attributes(resource)
resource.send(:"#{parent_name}=", parent_resource) if singleton? && parent_resource
initial_attributes.each do |attr_name, value|
resource.send(:"#{attr_name}=", value)
end
resource
end
def initial_attributes
current_ability.attributes_for(@action_name, resource_class).delete_if do |key, value|
(resource_params && resource_params.include?(key)) || value.is_a?(Hash)
end
end
def authorization_action
parent? ? @mediator.parent_action : @action_name
end
def id_param
params[@mediator.id_param_key] if params.key?(@mediator.id_param_key)
end
# Returns the class used for this resource. This can be overriden by the :class option.
# If +false+ is passed in it will use the resource name as a symbol in which case it should
# only be used for authorization, not loading since there's no class to load through.
def resource_class
case class_name
when false then
name.to_sym
when String then
class_name.constantize
else
raise ArgumentError, "unexpected class_name: #{class_name}"
end
end
def resource_class_with_parent
parent_resource ? { parent_resource => resource_class } : resource_class
end
def resource_instance=(instance)
@controller.instance_variable_set(:"@#{instance_name}", instance)
end
def resource_instance
@controller.instance_variable_get(:"@#{instance_name}") if load_instance?
end
def collection_instance=(instance)
@controller.instance_variable_set(:"@#{collection_name}", instance)
end
def collection_instance
@controller.instance_variable_get(:"@#{collection_name}")
end
def parent_name
return @parent_name if defined?(@parent_name)
@parent_name = @mediator.through unless parent_resource.nil?
end
# The object to load this resource through.
def parent_resource
return @parent_resource if defined?(@parent_resource)
@parent_resource = if @mediator.through
if @controller.instance_variable_defined? :"@#{@mediator.through}"
@controller.instance_variable_get(:"@#{@mediator.through}")
elsif @controller.respond_to?(@mediator.through, true)
@controller.send(@mediator.through)
end
end
end
def current_ability
@controller.send(:current_ability)
end
def resource_params
if parameters_require_sanitizing? && params_method.present?
case params_method
when Symbol then
@controller.send(params_method)
when String then
@controller.instance_eval(params_method)
when Proc then
params_method.call(@controller)
end
else
resource_params_by_namespaced_name
end
end
def parameters_require_sanitizing?
@mediator.save_actions.include?(@action_name) || resource_params_by_namespaced_name.present?
end
def resource_params_by_namespaced_name
return @resource_params_by_namespaced_name if defined?(@resource_params_by_namespaced_name)
@resource_params_by_namespaced_name =
if params.key?(@mediator.instance_name)
params[@mediator.instance_name]
elsif params.key?(key = extract_key(@mediator.class_name))
params[key]
else
params[name]
end
end
def params_method
@params_method ||= @mediator.params_method || begin
[:"#{@action_name}_params", :"#{name}_params", :resource_params].
detect { |method| @controller.respond_to?(method, true) }
end
end
private
def action_exists_in?(options)
Array.wrap(options).include?(@controller.action_name.to_sym)
end
def extract_key(value)
value.to_s.underscore.tr(?/, ?_)
end
end
end
end
end
| 33.022831 | 123 | 0.616427 |
b9269cc6733fc57b06c84604f5594ce0a2a165a1 | 456 | module Gelauto
class ArrayType < GenericType
def self.introspect(obj)
new.tap do |var|
obj.each { |elem| var[:elem] << Gelauto.introspect(elem) }
end
end
def initialize
super(::Array, [:elem])
end
def to_sig
if self[:elem].empty?
'T::Array'
else
"T::Array[#{self[:elem].to_sig}]"
end
end
def merge!(other)
self[:elem].merge!(other[:elem])
end
end
end
| 17.538462 | 66 | 0.54386 |
1d5356f6b8c6a508d26a5413d14ecfdd531bd0d6 | 1,890 | RailsAdmin.config do |config|
config.parent_controller = '::ApplicationController'
config.authenticate_with do
if current_user.nil?
redirect_to main_app.login_path
elsif !current_user.is_staff
redirect_to main_app.root_path
end
end
config.current_user_method do
current_user
end
RailsAdmin.config {|c| c.label_methods << :rails_admin_label}
### Popular gems integration
## == Devise ==
# config.authenticate_with do
# warden.authenticate! scope: :user
# end
# config.current_user_method(&:current_user)
## == CancanCan ==
# config.authorize_with :cancancan
## == Pundit ==
# config.authorize_with :pundit
## == PaperTrail ==
# config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0
### More at https://github.com/sferik/rails_admin/wiki/Base-configuration
## == Gravatar integration ==
## To disable Gravatar integration in Navigation Bar set to false
# config.show_gravatar = true
config.actions do
dashboard # mandatory
index # mandatory
new
export
bulk_delete
show
edit
delete
show_in_app
## With an audit adapter, you can add:
# history_index
# history_show
end
config.model 'Answer' do
list do
include_all_fields
exclude_fields :best_answer_question, :comments, :votes
end
end
config.model 'Question' do
list do
include_all_fields
exclude_fields :user_views, :comments, :votes, :answers, :slug
end
end
config.model 'User' do
list do
include_all_fields
exclude_fields :question_views, :questions, :answers, :slug
end
end
config.included_models = [
"ActsAsTaggableOn::Tag",
"Delayed::Job",
"Answer",
"Comment",
"Question",
"UserQuestionView",
"User",
"Vote",
]
end
| 21.477273 | 87 | 0.654497 |
1cda37ed3a7f9a26ccf7ddff4c925319a6a8aafc | 6,106 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# An error encountered while executing a work request.
class Devops::Models::WorkRequestError
# **[Required]** A machine-usable code for the error that occured. Error codes are listed in [API Errors](https://docs.cloud.oracle.com/Content/API/References/apierrors.htm).
# @return [String]
attr_accessor :code
# **[Required]** A human readable description of the issue encountered.
# @return [String]
attr_accessor :message
# **[Required]** Time the error occured. Format defined by [RFC3339](https://datatracker.ietf.org/doc/html/rfc3339).
# @return [DateTime]
attr_accessor :timestamp
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'code': :'code',
'message': :'message',
'timestamp': :'timestamp'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'code': :'String',
'message': :'String',
'timestamp': :'DateTime'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :code The value to assign to the {#code} property
# @option attributes [String] :message The value to assign to the {#message} property
# @option attributes [DateTime] :timestamp The value to assign to the {#timestamp} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.code = attributes[:'code'] if attributes[:'code']
self.message = attributes[:'message'] if attributes[:'message']
self.timestamp = attributes[:'timestamp'] if attributes[:'timestamp']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
code == other.code &&
message == other.message &&
timestamp == other.timestamp
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[code, message, timestamp].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 35.5 | 245 | 0.666557 |
5ddae9fa5da314009091ed1124d77e110877fc02 | 1,174 | # This code was automatically generated using xdrgen
# DO NOT EDIT or your changes may be overwritten
require 'xdr'
# === xdr source ============================================================
#
# struct LiquidityPoolEntry
# {
# PoolID liquidityPoolID;
#
# union switch (LiquidityPoolType type)
# {
# case LIQUIDITY_POOL_CONSTANT_PRODUCT:
# struct
# {
# LiquidityPoolConstantProductParameters params;
#
# int64 reserveA; // amount of A in the pool
# int64 reserveB; // amount of B in the pool
# int64 totalPoolShares; // total number of pool shares issued
# int64 poolSharesTrustLineCount; // number of trust lines for the
# // associated pool shares
# } constantProduct;
# }
# body;
# };
#
# ===========================================================================
module Stellar
class LiquidityPoolEntry < XDR::Struct
include XDR::Namespace
autoload :Body
attribute :liquidity_pool_id, PoolID
attribute :body, Body
end
end
| 29.35 | 80 | 0.508518 |
b9fb78731c5df94a97f5532ea2fd13a64495108e | 399 | require "shopify_api"
module ShopifyUtil
module Product
# **USE WITH CAUTION** Permanently remove all products from Shopify.
#
# @return [void]
def self.clean!()
raise "You can't run this in production" if Rails.env.production?()
raise "You can't run this in development" if Rails.env.development?()
all = self.list()
all.each(&:destroy)
end
end
end
| 24.9375 | 75 | 0.654135 |
1afdc4b4c2b0c625884b9d61c2299ab8c3bcacde | 10,329 | # frozen_string_literal: true
require 'rubygems'
require 'rubygems/dependency_list'
require 'rubygems/package'
require 'rubygems/installer'
require 'rubygems/spec_fetcher'
require 'rubygems/user_interaction'
require 'rubygems/available_set'
require 'rubygems/deprecate'
##
# Installs a gem along with all its dependencies from local and remote gems.
class Gem::DependencyInstaller
include Gem::UserInteraction
extend Gem::Deprecate
DEFAULT_OPTIONS = { # :nodoc:
:env_shebang => false,
:document => %w[ri],
:domain => :both, # HACK dup
:force => false,
:format_executable => false, # HACK dup
:ignore_dependencies => false,
:prerelease => false,
:security_policy => nil, # HACK NoSecurity requires OpenSSL. AlmostNo? Low?
:wrappers => true,
:build_args => nil,
:build_docs_in_background => false,
:install_as_default => false
}.freeze
##
# Documentation types. For use by the Gem.done_installing hook
attr_reader :document
##
# Errors from SpecFetcher while searching for remote specifications
attr_reader :errors
##
# List of gems installed by #install in alphabetic order
attr_reader :installed_gems
##
# Creates a new installer instance.
#
# Options are:
# :cache_dir:: Alternate repository path to store .gem files in.
# :domain:: :local, :remote, or :both. :local only searches gems in the
# current directory. :remote searches only gems in Gem::sources.
# :both searches both.
# :env_shebang:: See Gem::Installer::new.
# :force:: See Gem::Installer#install.
# :format_executable:: See Gem::Installer#initialize.
# :ignore_dependencies:: Don't install any dependencies.
# :install_dir:: See Gem::Installer#install.
# :prerelease:: Allow prerelease versions. See #install.
# :security_policy:: See Gem::Installer::new and Gem::Security.
# :user_install:: See Gem::Installer.new
# :wrappers:: See Gem::Installer::new
# :build_args:: See Gem::Installer::new
def initialize(options = {})
@only_install_dir = !!options[:install_dir]
@install_dir = options[:install_dir] || Gem.dir
@build_root = options[:build_root]
options = DEFAULT_OPTIONS.merge options
@bin_dir = options[:bin_dir]
@dev_shallow = options[:dev_shallow]
@development = options[:development]
@document = options[:document]
@domain = options[:domain]
@env_shebang = options[:env_shebang]
@force = options[:force]
@format_executable = options[:format_executable]
@ignore_dependencies = options[:ignore_dependencies]
@prerelease = options[:prerelease]
@security_policy = options[:security_policy]
@user_install = options[:user_install]
@wrappers = options[:wrappers]
@build_args = options[:build_args]
@build_docs_in_background = options[:build_docs_in_background]
@install_as_default = options[:install_as_default]
@dir_mode = options[:dir_mode]
@data_mode = options[:data_mode]
@prog_mode = options[:prog_mode]
# Indicates that we should not try to update any deps unless
# we absolutely must.
@minimal_deps = options[:minimal_deps]
@available = nil
@installed_gems = []
@toplevel_specs = nil
@cache_dir = options[:cache_dir] || @install_dir
@errors = []
end
##
# Indicated, based on the requested domain, if local
# gems should be considered.
def consider_local?
@domain == :both or @domain == :local
end
##
# Indicated, based on the requested domain, if remote
# gems should be considered.
def consider_remote?
@domain == :both or @domain == :remote
end
##
# Returns a list of pairs of gemspecs and source_uris that match
# Gem::Dependency +dep+ from both local (Dir.pwd) and remote (Gem.sources)
# sources. Gems are sorted with newer gems preferred over older gems, and
# local gems preferred over remote gems.
def find_gems_with_sources(dep, best_only=false) # :nodoc:
set = Gem::AvailableSet.new
if consider_local?
sl = Gem::Source::Local.new
if spec = sl.find_gem(dep.name)
if dep.matches_spec? spec
set.add spec, sl
end
end
end
if consider_remote?
begin
# This is pulled from #spec_for_dependency to allow
# us to filter tuples before fetching specs.
tuples, errors = Gem::SpecFetcher.fetcher.search_for_dependency dep
if best_only && !tuples.empty?
tuples.sort! do |a,b|
if b[0].version == a[0].version
if b[0].platform != Gem::Platform::RUBY
1
else
-1
end
else
b[0].version <=> a[0].version
end
end
tuples = [tuples.first]
end
specs = []
tuples.each do |tup, source|
begin
spec = source.fetch_spec(tup)
rescue Gem::RemoteFetcher::FetchError => e
errors << Gem::SourceFetchProblem.new(source, e)
else
specs << [spec, source]
end
end
if @errors
@errors += errors
else
@errors = errors
end
set << specs
rescue Gem::RemoteFetcher::FetchError => e
# FIX if there is a problem talking to the network, we either need to always tell
# the user (no really_verbose) or fail hard, not silently tell them that we just
# couldn't find their requested gem.
verbose do
"Error fetching remote data:\t\t#{e.message}\n" \
"Falling back to local-only install"
end
@domain = :local
end
end
set
end
rubygems_deprecate :find_gems_with_sources
def in_background(what) # :nodoc:
fork_happened = false
if @build_docs_in_background and Process.respond_to?(:fork)
begin
Process.fork do
yield
end
fork_happened = true
say "#{what} in a background process."
rescue NotImplementedError
end
end
yield unless fork_happened
end
##
# Installs the gem +dep_or_name+ and all its dependencies. Returns an Array
# of installed gem specifications.
#
# If the +:prerelease+ option is set and there is a prerelease for
# +dep_or_name+ the prerelease version will be installed.
#
# Unless explicitly specified as a prerelease dependency, prerelease gems
# that +dep_or_name+ depend on will not be installed.
#
# If c-1.a depends on b-1 and a-1.a and there is a gem b-1.a available then
# c-1.a, b-1 and a-1.a will be installed. b-1.a will need to be installed
# separately.
def install(dep_or_name, version = Gem::Requirement.default)
request_set = resolve_dependencies dep_or_name, version
@installed_gems = []
options = {
:bin_dir => @bin_dir,
:build_args => @build_args,
:document => @document,
:env_shebang => @env_shebang,
:force => @force,
:format_executable => @format_executable,
:ignore_dependencies => @ignore_dependencies,
:prerelease => @prerelease,
:security_policy => @security_policy,
:user_install => @user_install,
:wrappers => @wrappers,
:build_root => @build_root,
:install_as_default => @install_as_default,
:dir_mode => @dir_mode,
:data_mode => @data_mode,
:prog_mode => @prog_mode,
}
options[:install_dir] = @install_dir if @only_install_dir
request_set.install options do |_, installer|
@installed_gems << installer.spec if installer
end
@installed_gems.sort!
# Since this is currently only called for docs, we can be lazy and just say
# it's documentation. Ideally the hook adder could decide whether to be in
# the background or not, and what to call it.
in_background "Installing documentation" do
Gem.done_installing_hooks.each do |hook|
hook.call self, @installed_gems
end
end unless Gem.done_installing_hooks.empty?
@installed_gems
end
def install_development_deps # :nodoc:
if @development and @dev_shallow
:shallow
elsif @development
:all
else
:none
end
end
def resolve_dependencies(dep_or_name, version) # :nodoc:
request_set = Gem::RequestSet.new
request_set.development = @development
request_set.development_shallow = @dev_shallow
request_set.soft_missing = @force
request_set.prerelease = @prerelease
request_set.remote = false unless consider_remote?
installer_set = Gem::Resolver::InstallerSet.new @domain
installer_set.ignore_installed = @only_install_dir
if consider_local?
if dep_or_name =~ /\.gem$/ and File.file? dep_or_name
src = Gem::Source::SpecificFile.new dep_or_name
installer_set.add_local dep_or_name, src.spec, src
version = src.spec.version if version == Gem::Requirement.default
elsif dep_or_name =~ /\.gem$/
Dir[dep_or_name].each do |name|
begin
src = Gem::Source::SpecificFile.new name
installer_set.add_local dep_or_name, src.spec, src
rescue Gem::Package::FormatError
end
end
# else This is a dependency. InstallerSet handles this case
end
end
dependency =
if spec = installer_set.local?(dep_or_name)
Gem::Dependency.new spec.name, version
elsif String === dep_or_name
Gem::Dependency.new dep_or_name, version
else
dep_or_name
end
dependency.prerelease = @prerelease
request_set.import [dependency]
installer_set.add_always_install dependency
request_set.always_install = installer_set.always_install
if @ignore_dependencies
installer_set.ignore_dependencies = true
request_set.ignore_dependencies = true
request_set.soft_missing = true
end
request_set.resolve installer_set
@errors.concat request_set.errors
request_set
end
end
| 30.559172 | 89 | 0.636557 |
ff3ccded934b56b03c551a16ec24bbf286fa6f91 | 43 | module WizardsTeam
VERSION = "0.1.0"
end
| 10.75 | 19 | 0.697674 |
1a225fe04d55d29cce96b79af44b66a4fbfdc49b | 3,456 | #!/usr/bin/env ruby
require "erb"
require "fileutils"
require "nokogiri"
require "open3"
require "shellwords"
# {{{
class String
def blank?
self.match(/^\s+$/)
end
end
class MarkdownDocument
def initialize(contents)
@contents = contents
end
def self.from_filename(filename)
self.new(File.read(filename))
end
def to_html()
@html ||= cmark("--to", "html")
@html
end
def to_xml()
@xml ||= Nokogiri::XML(cmark("--to", "xml"))
@xml
end
# Returns the first h1-level heading
def title()
el = to_xml.at_css("heading[level=1] > text")
raise "No level-1 heading in document." if el.nil? || el.text.blank?
el.text
end
def cmark(*args)
cmd = [
"cmark-gfm",
"--unsafe",
"--extension", "table",
"--extension", "autolink",
*args
]
output, err, status = Open3.capture3(*cmd, stdin_data: @contents)
unless status.success?
$stderr.puts(output)
$stderr.puts(err)
raise "Error running #{cmd.shelljoin}..."
end
output
end
end
class SitePage
def self.support_location=(value)
@@support_location = value
end
def initialize(markdown_document, output_name)
@markdown_document = markdown_document
@output_name = output_name
end
# Use in `<base href="" />`.
def document_base()
File.dirname(@output_name).sub(/^\.$/, "").sub(%r{[^/]+}, "..")
end
# Use in `<title>` or anywhere else relevant.
def title()
@markdown_document.title()
end
# Use where the contents should be displayed.
def contents()
@markdown_document.to_html()
.gsub(%r{href="([^"]+)\.md"}, %q{href="\\1.html"})
end
# Writes the HTML document to the given filename.
def write(output_name)
template = ERB.new(File.read(File.join(@@support_location, "template.erb")))
file_contents = template.result(self.binding())
File.write(output_name, file_contents)
end
end
def generate_sitemap(sitemap, output_name)
list = sitemap.sort{ |a, b| a.first <=> b.first }.map do |pair|
filename, page = pair
" | `#{filename}` | [#{page.title}](#{filename}) |"
end
# We don't have a hierarchy for subfolders.
# So since this makes the sitemap unwieldy to use, we're using
# a simple table with file path to clearly show what's where.
document = [
"# Site Map",
"",
"| path | page title |",
"| ---- | ---------- |",
list.join("\n"),
"",
].join("\n")
markdown_document = MarkdownDocument.new(document)
page = SitePage.new(markdown_document, "sitemap.md")
page.write(output_name)
end
# }}}
if ARGV.length < 2 then
$stderr.puts "Usage: main.rb <source dir> <output dir>"
exit 1
end
$source = ARGV.shift
$output = ARGV.shift
SitePage.support_location = File.join($source, "_support")
files = Dir.glob(File.join($source, "**/*.md"))
# Used to collect all pages
sitemap = []
files.each do |filename|
relative_name = filename.sub(%r{^#{$source}}, "")
relative_output = relative_name.sub(%r{#{".md"}$}, ".html")
output_name = File.join($output, relative_output)
$stderr.puts "\n⇒ processing #{relative_name}"
markdown_document = MarkdownDocument.from_filename(filename)
page = SitePage.new(markdown_document, relative_name)
FileUtils.mkdir_p(File.dirname(output_name))
page.write(output_name)
sitemap << [relative_output, page]
end
generate_sitemap(sitemap, File.join($output, "sitemap.html"))
$stderr.puts("\n\n\nDone!\n\n")
| 22.441558 | 80 | 0.642361 |
4aad40d58e27cb81816382f492a88b7ea209e2ae | 2,684 | require "formula"
class Stunnel < Formula
homepage "https://www.stunnel.org/"
url "ftp://ftp.nluug.nl/pub/networking/stunnel/stunnel-5.05.tar.gz"
mirror "https://www.stunnel.org/downloads/stunnel-5.05.tar.gz"
sha256 "c7e1653345150db7e48d00e1129cf571c7c85de8e7e1aa70b21cf1d76b1e31ef"
bottle do
sha1 "f1b5731e9c6191035797f0d046915a5cf34205bd" => :mavericks
sha1 "4a52a8dd2df0b0e1d0815fc75eca918546f08e86" => :mountain_lion
sha1 "d15b1611c5afd9f308d825ea1f03d0d926124019" => :lion
end
depends_on "openssl"
def install
# This causes a bogus .pem to be created in lieu of interactive cert generation.
stunnel_cnf = Pathname.new("tools/stunnel.cnf")
stunnel_cnf.unlink
stunnel_cnf.write <<-EOS.undent
# OpenSSL configuration file to create a server certificate
# by Michal Trojnara 1998-2013
[ req ]
# the default key length is secure and quite fast - do not change it
default_bits = 2048
# comment out the next line to protect the private key with a passphrase
encrypt_key = no
distinguished_name = req_dn
x509_extensions = cert_type
prompt = no
[ req_dn ]
countryName = PL
stateOrProvinceName = Mazovia Province
localityName = Warsaw
organizationName = Stunnel Developers
organizationalUnitName = Provisional CA
0.commonName = localhost
# To create a certificate for more than one name uncomment:
# 1.commonName = DNS alias of your server
# 2.commonName = DNS alias of your server
# ...
# See http://home.netscape.com/eng/security/ssl_2.0_certificate.html
# to see how Netscape understands commonName.
[ cert_type ]
nsCertType = server
EOS
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--sysconfdir=#{etc}",
"--mandir=#{man}",
"--disable-libwrap",
"--with-ssl=#{Formula["openssl"].opt_prefix}"
system "make", "install"
end
def caveats
<<-EOS.undent
A bogus SSL server certificate has been installed to:
#{etc}/stunnel/stunnel.pem
This certificate will be used by default unless a config file says otherwise!
In your stunnel configuration, specify a SSL certificate with
the "cert =" option for each service.
EOS
end
end
| 36.27027 | 84 | 0.596125 |
abdd0493fa04808b82a766e972e34764065ac888 | 1,673 | class Efl < Formula
desc "Enlightenment Foundation Libraries"
homepage "https://www.enlightenment.org"
url "https://download.enlightenment.org/rel/libs/efl/efl-1.18.4.tar.xz"
sha256 "39ebc07e37437d6ecdeb0f645783484e28a882b38f7e619ad12c2bf9b5548025"
revision 2
bottle do
sha256 "b4966eb8e1147ae7f7b68e5006c702b3e81138fa8cf5416dc8670087d3246947" => :sierra
sha256 "6e696bfa75737c8b4afe14a2f3db78dfcfff841be700a94e108739602dece2bc" => :el_capitan
sha256 "81c36bb7f62bbbf598d1253dd7ebb46a46a65fdd8534ff27f063a73bbccf5093" => :yosemite
end
option "with-docs", "Install development libraries/headers and HTML docs"
depends_on "doxygen" => :build if build.with? "docs"
depends_on "pkg-config" => :build
depends_on "gettext" => :build
depends_on "openssl"
depends_on "freetype"
depends_on "fontconfig"
depends_on "jpeg"
depends_on "libpng"
depends_on "luajit"
depends_on "fribidi"
depends_on "giflib"
depends_on "libtiff"
depends_on "gstreamer"
depends_on "gst-plugins-good"
depends_on "dbus"
depends_on "pulseaudio"
depends_on "bullet"
depends_on "libsndfile"
depends_on "libspectre"
depends_on "libraw"
depends_on "librsvg"
depends_on "poppler"
depends_on "webp" => :optional
depends_on "glib" => :optional
needs :cxx11
def install
ENV.cxx11
args = %W[
--disable-cxx-bindings
--disable-dependency-tracking
--prefix=#{prefix}
]
system "./configure", *args
system "make", "install"
system "make", "install-doc" if build.with? "docs"
end
test do
system bin/"edje_cc", "-V"
system bin/"eolian_gen", "-h"
system bin/"eet", "-V"
end
end
| 26.555556 | 92 | 0.720263 |
39ee96ececda6d6a08872146a2202e922c2a106a | 2,838 | # frozen_string_literal: true
require 'erb_lint/runner_config_resolver'
module ERBLint
class RunnerConfig
class Error < StandardError; end
def initialize(config = nil, file_loader = nil)
@config = (config || {}).dup.deep_stringify_keys
resolver.resolve_inheritance_from_gems(@config, @config.delete('inherit_gem'))
resolver.resolve_inheritance(@config, file_loader) if file_loader
@config.delete("inherit_from")
end
def to_hash
@config.dup
end
def for_linter(klass)
klass_name = if klass.is_a?(String)
klass.to_s
elsif klass.is_a?(Class) && klass <= ERBLint::Linter
klass.simple_name
else
raise ArgumentError, 'expected String or linter class'
end
linter_klass = LinterRegistry.find_by_name(klass_name)
raise Error, "#{klass_name}: linter not found (is it loaded?)" unless linter_klass
linter_klass.config_schema.new(config_hash_for_linter(klass_name))
end
def global_exclude
@config['exclude'] || []
end
def merge(other_config)
self.class.new(@config.deep_merge(other_config.to_hash))
end
def merge!(other_config)
@config.deep_merge!(other_config.to_hash)
self
end
class << self
def default(default_enabled: nil)
default_enabled = default_enabled.nil? ? true : default_enabled
new(
linters: {
AllowedScriptType: { enabled: default_enabled },
ClosingErbTagIndent: { enabled: default_enabled },
ExtraNewline: { enabled: default_enabled },
FinalNewline: { enabled: default_enabled },
NoJavascriptTagHelper: { enabled: default_enabled },
ParserErrors: { enabled: default_enabled },
RightTrim: { enabled: default_enabled },
SelfClosingTag: { enabled: default_enabled },
SpaceAroundErbTag: { enabled: default_enabled },
SpaceIndentation: { enabled: default_enabled },
SpaceInHtmlTag: { enabled: default_enabled },
TrailingWhitespace: { enabled: default_enabled },
RequireInputAutocomplete: { enabled: default_enabled },
},
)
end
def default_for(config)
default_linters_enabled = config.to_hash.dig("EnableDefaultLinters")
default(default_enabled: default_linters_enabled).merge(config)
end
end
private
def linters_config
@config['linters'] || {}
end
def config_hash_for_linter(klass_name)
config_hash = linters_config[klass_name] || {}
config_hash['exclude'] ||= []
config_hash['exclude'].concat(global_exclude) if config_hash['exclude'].is_a?(Array)
config_hash
end
def resolver
@resolver ||= ERBLint::RunnerConfigResolver.new
end
end
end
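
# A minimal sketch of how layered configs resolve. It assumes the gem's normal
# environment (ActiveSupport's deep_merge/deep_stringify_keys and the bundled
# linters already loaded); the hash contents are made up for illustration:
#
#   user_config = ERBLint::RunnerConfig.new(
#     "EnableDefaultLinters" => true,
#     "linters" => { "FinalNewline" => { "enabled" => false } }
#   )
#   resolved = ERBLint::RunnerConfig.default_for(user_config)
#   resolved.to_hash["linters"]["FinalNewline"] # => { "enabled" => false }
#   resolved.global_exclude                     # => []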
| 30.516129 | 90 | 0.654686 |
ff6c5251aa06b1b1358cd621bcd2052fd31034f9 | 188 | class Views::DataPassingSystemSpec::ImplicitVariableReadInner < Fortitude::Widgets::Html5
implicit_shared_variable_access true
def content
p "inner widget foo: #{@foo}"
end
end
| 23.5 | 89 | 0.776596 |
616a24a11b0a47f324c2920224c24602581bfff7 | 906 | # frozen_string_literal: true
# We have this module because it was relying on devise methods in a
# helper, which cannot be tested. Helpers do not extend active controller
# and do not have access to devise variables in tests.
#
# convention is to break out controller level 'helper methods' into a module
# and view 'helper methods' into a helper.
module ApplicationModule
def user_logged_in?
redirect_to root_path, alert: 'You must first login.' if current_user.nil?
end
def team_captain?
!current_user.nil? && current_user.on_a_team? && current_user == current_user.team.team_captain
end
# This blocks admins from doing tasks that could leave the application in an inconsistent state, such as creating a
# team or joining a team
def block_admin_action
redirect_back(fallback_location: root_path, alert: I18n.t('admin.should_not_do_action')) if current_user.admin?
end
end
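
# A minimal usage sketch, assuming a standard Rails controller; TeamsController
# and the action names are hypothetical, chosen only to show the intended
# before_action wiring:
#
#   class TeamsController < ApplicationController
#     include ApplicationModule
#
#     before_action :user_logged_in?
#     before_action :block_admin_action, only: %i[new create join]
#   end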
| 37.75 | 117 | 0.771523 |
b9a5aae2a9be0fab50e2d263ae7e55bf0f977c88 | 485 | # frozen_string_literal: true
class MeType < BaseObject
description "Information about the logged-in user"
field :id, ID, null: false, hash_key: "uid", description: "User identifier."
field :type, String, null: false, description: "Type."
field :name, String, null: false, description: "User name."
field :beta_tester, Boolean, null: false, description: "Beta tester status."
def type
"CurrentUser"
end
def beta_tester
object.beta_tester.present?
end
end
| 25.526316 | 78 | 0.717526 |
794370af0ad4f632682ebea0e2001bfd1de3d1dc | 10,282 | require 'spec_helper'
require 'raven/instance'
RSpec.describe Raven::Instance do
let(:event) { Raven::Event.new(:id => "event_id") }
let(:options) { { :key => "value" } }
let(:context) { nil }
let(:configuration) do
config = Raven::Configuration.new
config.dsn = "dummy://12345:[email protected]:3000/sentry/42"
config.logger = Logger.new(nil)
config
end
subject { described_class.new(context, configuration) }
before do
allow(subject).to receive(:send_event)
allow(Raven::Event).to receive(:from_message) { event }
allow(Raven::Event).to receive(:from_exception) { event }
end
describe '#context' do
it 'is Raven.context by default' do
expect(subject.context).to equal(Raven.context)
end
context 'initialized with a context' do
let(:context) { :explicit }
it 'is not Raven.context' do
expect(subject.context).to_not equal(Raven.context)
end
end
end
describe '#capture_type' do
describe 'as #capture_message' do
before do
expect(Raven::Event).to receive(:from_message).with(message, options)
expect(subject).to receive(:send_event).with(event, :exception => nil, :message => message)
end
let(:message) { "Test message" }
it 'sends the result of Event.capture_message' do
subject.capture_type(message, options)
end
it 'yields the event to a passed block' do
expect { |b| subject.capture_type(message, options, &b) }.to yield_with_args(event)
end
end
describe 'as #capture_message when async' do
let(:message) { "Test message" }
around do |example|
prior_async = subject.configuration.async
subject.configuration.async = proc { :ok }
example.run
subject.configuration.async = prior_async
end
it 'sends the result of Event.capture_type' do
expect(Raven::Event).to receive(:from_message).with(message, options)
expect(subject).not_to receive(:send_event).with(event)
expect(subject.configuration.async).to receive(:call).with(event.to_json_compatible)
subject.capture_message(message, options)
end
it 'returns the generated event' do
returned = subject.capture_message(message, options)
expect(returned).to eq(event)
end
end
describe 'as #capture_exception' do
let(:exception) { build_exception }
it 'sends the result of Event.capture_exception' do
expect(Raven::Event).to receive(:from_exception).with(exception, options)
expect(subject).to receive(:send_event).with(event, :exception => exception, :message => nil)
subject.capture_exception(exception, options)
end
it 'has an alias' do
expect(Raven::Event).to receive(:from_exception).with(exception, options)
expect(subject).to receive(:send_event).with(event, :exception => exception, :message => nil)
subject.capture_exception(exception, options)
end
end
describe 'as #capture_exception when async' do
let(:exception) { build_exception }
context "when async" do
around do |example|
prior_async = subject.configuration.async
subject.configuration.async = proc { :ok }
example.run
subject.configuration.async = prior_async
end
it 'sends the result of Event.capture_exception' do
expect(Raven::Event).to receive(:from_exception).with(exception, options)
expect(subject).not_to receive(:send_event).with(event)
expect(subject.configuration.async).to receive(:call).with(event.to_json_compatible)
subject.capture_exception(exception, options)
end
it 'returns the generated event' do
returned = subject.capture_exception(exception, options)
expect(returned).to eq(event)
end
end
context "when async raises an exception" do
around do |example|
prior_async = subject.configuration.async
subject.configuration.async = proc { raise TypeError }
example.run
subject.configuration.async = prior_async
end
it 'sends the result of Event.capture_exception via fallback' do
expect(Raven::Event).to receive(:from_exception).with(exception, options)
expect(subject.configuration.async).to receive(:call).with(event.to_json_compatible)
subject.capture_exception(exception, options)
end
end
end
describe 'as #capture_exception with a should_capture callback' do
let(:exception) { build_exception }
it 'sends the result of Event.capture_exception according to the result of should_capture' do
expect(subject).not_to receive(:send_event).with(event)
subject.configuration.should_capture = proc { false }
expect(subject.configuration.should_capture).to receive(:call).with(exception)
expect(subject.capture_exception(exception, options)).to be false
end
end
end
describe '#capture' do
context 'given a block' do
it 'yields to the given block' do
expect { |b| subject.capture(&b) }.to yield_with_no_args
end
end
it 'does not install an at_exit hook' do
expect(Kernel).not_to receive(:at_exit)
subject.capture {}
end
end
describe '#annotate_exception' do
let(:exception) { build_exception }
def ivars(object)
object.instance_variables.map(&:to_s)
end
it 'adds an annotation to the exception' do
expect(ivars(exception)).not_to include("@__raven_context")
subject.annotate_exception(exception, {})
expect(ivars(exception)).to include("@__raven_context")
expect(exception.instance_variable_get(:@__raven_context)).to \
be_kind_of Hash
end
context 'when the exception already has context' do
it 'does a deep merge of options' do
subject.annotate_exception(exception, :extra => { :language => "ruby" })
subject.annotate_exception(exception, :extra => { :job_title => "engineer" })
expected_hash = { :extra => { :language => "ruby", :job_title => "engineer" } }
expect(exception.instance_variable_get(:@__raven_context)).to \
eq expected_hash
end
end
end
describe '#report_status' do
let(:ready_message) do
"Raven #{Raven::VERSION} ready to catch errors"
end
let(:not_ready_message) do
"Raven #{Raven::VERSION} configured not to capture errors."
end
it 'logs a ready message when configured' do
subject.configuration.silence_ready = false
expect(subject.logger).to receive(:info).with(ready_message)
subject.report_status
end
it 'logs not ready message if the config does not send in current environment' do
subject.configuration.silence_ready = false
subject.configuration.environments = ["production"]
expect(subject.logger).to receive(:info).with(
"Raven #{Raven::VERSION} configured not to capture errors: Not configured to send/capture in environment 'default'"
)
subject.report_status
end
it 'logs nothing if "silence_ready" configuration is true' do
subject.configuration.silence_ready = true
expect(subject.logger).not_to receive(:info)
subject.report_status
end
end
describe '.last_event_id' do
let(:message) { "Test message" }
it 'sends the result of Event.capture_type' do
expect(subject).to receive(:send_event).with(event, :exception => nil, :message => message)
subject.capture_type("Test message", options)
expect(subject.last_event_id).to eq(event.id)
end
end
describe "#tags_context" do
let(:default) { { :foo => :bar } }
let(:additional) { { :baz => :qux } }
before do
subject.context.tags = default
end
it "returns the tags" do
expect(subject.tags_context).to eq default
end
it "returns the tags" do
expect(subject.tags_context(additional)).to eq default.merge(additional)
end
it "doesn't set anything if the tags is empty" do
subject.tags_context({})
expect(subject.context.tags).to eq default
end
it "adds tags" do
subject.tags_context(additional)
expect(subject.context.tags).to eq default.merge(additional)
end
context 'when block given' do
it "returns the tags" do
tags = subject.tags_context(additional) do
# do nothing
end
expect(tags).to eq default
end
it "adds tags only in the block" do
subject.tags_context(additional) do
expect(subject.context.tags).to eq default.merge(additional)
end
expect(subject.context.tags).to eq default
end
end
end
describe "#extra_context" do
let(:default) { { :foo => :bar } }
let(:additional) { { :baz => :qux } }
before do
subject.context.extra = default
end
it "returns the extra" do
expect(subject.extra_context).to eq default
end
it "returns the extra" do
expect(subject.extra_context(additional)).to eq default.merge(additional)
end
it "doesn't set anything if the extra is empty" do
subject.extra_context({})
expect(subject.context.extra).to eq default
end
it "adds extra" do
subject.extra_context(additional)
expect(subject.context.extra).to eq default.merge(additional)
end
context 'when block given' do
it "returns the extra" do
extra = subject.extra_context(additional) do
# do nothing
end
expect(extra).to eq default
end
it "adds extra only in the block" do
subject.extra_context(additional) do
expect(subject.context.extra).to eq default.merge(additional)
end
expect(subject.context.extra).to eq default
end
end
end
describe "#rack_context" do
it "doesn't set anything if the context is empty" do
subject.rack_context({})
expect(subject.context.rack_env).to be_nil
end
it "sets arbitrary rack context" do
subject.rack_context(:foo => :bar)
expect(subject.context.rack_env[:foo]).to eq(:bar)
end
end
end
| 30.96988 | 123 | 0.659697 |
28ef8feff6595bdb1d2f7e6fb8819e73d256b5c0 | 503 | require 'acrobat'
require 'pry'
app = Acrobat::App.new
#app.show
Acrobat::App.run do |app|
dir = Pathname(__dir__)
form_path = dir + '6030.17.antenna.pdf'
puts "Working on #{form_path}"
doc1 = app.open(form_path)
doc1.show
fields = {'city' => 'Salt Lake City',
'state' => 'Utah',
'lid' => 'SLCB',
'fac' => 'RTR',
cost_center: '1234'
}
doc1.fill_form(fields)
doc1.save_as(name: 'slcb.example.pdf',dir: dir.parent + 'tmp')
end
| 21.869565 | 64 | 0.568588 |
288cf04a5b219bbf9fa5c9abbf17c34205d19377 | 974 | Gem::Specification.new do |s|
s.name = "bugsnag"
s.version = File.read("VERSION").strip
s.authors = ["James Smith"]
s.email = "[email protected]"
s.description = "Ruby notifier for bugsnag.com"
s.summary = "Ruby notifier for bugsnag.com"
s.homepage = "http://github.com/bugsnag/bugsnag-ruby"
s.licenses = ["MIT"]
s.files = `git ls-files`.split("\n")
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.require_paths = ["lib"]
s.add_runtime_dependency 'multi_json', ["~> 1.0"]
if RUBY_VERSION < "1.9"
# Use ruby 1.8 compatible httparty
s.add_runtime_dependency 'httparty', ["< 0.12.0", ">= 0.6"]
s.add_development_dependency "rake", "~> 10.1.1"
else
s.add_runtime_dependency 'httparty', ["< 1.0", ">= 0.6"]
s.add_development_dependency 'rake'
end
s.add_development_dependency 'rspec'
s.add_development_dependency 'rdoc'
s.add_development_dependency 'pry'
s.add_development_dependency 'webmock'
end
| 26.324324 | 63 | 0.667351 |
6a7093a8da3c726308afed9015946d1347ad033f | 11,934 | require "erb"
require "test/unit"
require 'rss-assertions'
require "rss"
module RSS
class TestCase < Test::Unit::TestCase
include ERB::Util
include RSS
include Assertions
XMLDECL_VERSION = "1.0"
XMLDECL_ENCODING = "UTF-8"
XMLDECL_STANDALONE = "no"
RDF_ABOUT = "http://www.xml.com/xml/news.rss"
RDF_RESOURCE = "http://xml.com/universal/images/xml_tiny.gif"
TITLE_VALUE = "XML.com"
LINK_VALUE = "http://xml.com/pub"
URL_VALUE = "http://xml.com/universal/images/xml_tiny.gif"
NAME_VALUE = "hogehoge"
LANGUAGE_VALUE = "ja"
DESCRIPTION_VALUE = "
XML.com features a rich mix of information and services
for the XML community.
"
RESOURCES = [
"http://xml.com/pub/2000/08/09/xslt/xslt.html",
"http://xml.com/pub/2000/08/09/rdfdb/index.html",
]
CLOUD_DOMAIN = "data.ourfavoritesongs.com"
CLOUD_PORT = "80"
CLOUD_PATH = "/RPC2"
CLOUD_REGISTER_PROCEDURE = "ourFavoriteSongs.rssPleaseNotify"
CLOUD_PROTOCOL = "xml-rpc"
ENCLOSURE_URL = "http://www.scripting.com/mp3s/weatherReportSuite.mp3"
ENCLOSURE_LENGTH = "12216320"
ENCLOSURE_TYPE = "audio/mpeg"
CATEGORY_DOMAIN = "http://www.superopendirectory.com/"
FEED_TITLE = "dive into mark"
FEED_UPDATED = "2003-12-13T18:30:02Z"
FEED_AUTHOR_NAME = "John Doe"
FEED_ID = "urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6"
ENTRY_TITLE = "Atom-Powered Robots Run Amok"
ENTRY_LINK = "http://example.org/2003/12/13/atom03"
ENTRY_ID = "urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a"
ENTRY_UPDATED = "2003-12-13T18:30:02Z"
ENTRY_SUMMARY = "Some text."
t = Time.iso8601("2000-01-01T12:00:05+00:00")
class << t
alias_method(:to_s, :iso8601)
end
DC_ELEMENTS = {
:title => "hoge",
:description =>
" XML is placing increasingly heavy loads on
the existing technical infrastructure of the Internet.",
:creator => "Rael Dornfest (mailto:[email protected])",
:subject => "XML",
:publisher => "The O'Reilly Network",
:contributor => "hogehoge",
:type => "fugafuga",
:format => "hohoho",
:identifier => "fufufu",
:source => "barbar",
:language => "ja",
:relation => "cococo",
:rights => "Copyright (c) 2000 O'Reilly & Associates, Inc.",
:date => t,
}
DC_NODES = DC_ELEMENTS.collect do |name, value|
"<#{DC_PREFIX}:#{name}>#{value}</#{DC_PREFIX}:#{name}>"
end.join("\n")
def default_test
# This class isn't tested
end
private
def make_xmldecl(v=XMLDECL_VERSION, e=XMLDECL_ENCODING, s=XMLDECL_STANDALONE)
rv = "<?xml version='#{v}'"
rv << " encoding='#{e}'" if e
rv << " standalone='#{s}'" if s
rv << "?>"
rv
end
def make_RDF(content=nil, xmlns=[])
<<-EORSS
#{make_xmldecl}
<rdf:RDF xmlns="#{URI}" xmlns:rdf="#{RDF::URI}"
#{xmlns.collect {|pre, uri| "xmlns:#{pre}='#{uri}'"}.join(' ')}>
#{block_given? ? yield : content}
</rdf:RDF>
EORSS
end
def make_channel(content=nil)
<<-EOC
<channel rdf:about="#{RDF_ABOUT}">
<title>#{TITLE_VALUE}</title>
<link>#{LINK_VALUE}</link>
<description>#{DESCRIPTION_VALUE}</description>
<image rdf:resource="#{RDF_RESOURCE}" />
<items>
<rdf:Seq>
#{RESOURCES.collect do |res| '<rdf:li resource="' + res + '" />' end.join("\n")}
</rdf:Seq>
</items>
<textinput rdf:resource="#{RDF_RESOURCE}" />
#{block_given? ? yield : content}
</channel>
EOC
end
def make_image(content=nil)
<<-EOI
<image rdf:about="#{RDF_ABOUT}">
<title>#{TITLE_VALUE}</title>
<url>#{URL_VALUE}</url>
<link>#{LINK_VALUE}</link>
#{block_given? ? yield : content}
</image>
EOI
end
def make_item(content=nil)
<<-EOI
<item rdf:about="#{RDF_ABOUT}">
<title>#{TITLE_VALUE}</title>
<link>#{LINK_VALUE}</link>
<description>#{DESCRIPTION_VALUE}</description>
#{block_given? ? yield : content}
</item>
EOI
end
def make_textinput(content=nil)
<<-EOT
<textinput rdf:about="#{RDF_ABOUT}">
<title>#{TITLE_VALUE}</title>
<description>#{DESCRIPTION_VALUE}</description>
<name>#{NAME_VALUE}</name>
<link>#{LINK_VALUE}</link>
#{block_given? ? yield : content}
</textinput>
EOT
end
def make_sample_RDF
make_RDF(<<-EOR)
#{make_channel}
#{make_image}
#{make_item}
#{make_textinput}
EOR
end
def make_rss20(content=nil, xmlns=[])
<<-EORSS
#{make_xmldecl}
<rss version="2.0"
#{xmlns.collect {|pre, uri| "xmlns:#{pre}='#{uri}'"}.join(' ')}>
#{block_given? ? yield : content}
</rss>
EORSS
end
def make_sample_items20
RESOURCES.collect do |res|
elems = ["<link>#{res}</link>"]
elems << "<title>title of #{res}</title>"
elems = elems.join("\n")
item = "<item>\n#{elems}\n</item>"
end.join("\n")
end
def make_channel20(content=nil)
<<-EOC
<channel>
<title>#{TITLE_VALUE}</title>
<link>#{LINK_VALUE}</link>
<description>#{DESCRIPTION_VALUE}</description>
<language>#{LANGUAGE_VALUE}</language>
<image>
<url>#{RDF_RESOURCE}</url>
<title>#{TITLE_VALUE}</title>
<link>#{LINK_VALUE}</link>
</image>
#{make_sample_items20}
<textInput>
<title>#{TITLE_VALUE}</title>
<description>#{DESCRIPTION_VALUE}</description>
<name>#{NAME_VALUE}</name>
<link>#{RDF_RESOURCE}</link>
</textInput>
#{block_given? ? yield : content}
</channel>
EOC
end
def make_item20(content=nil)
<<-EOI
<item>
<title>#{TITLE_VALUE}</title>
<link>#{LINK_VALUE}</link>
<description>#{DESCRIPTION_VALUE}</description>
#{block_given? ? yield : content}
</item>
EOI
end
def make_cloud20
<<-EOC
<cloud
domain="#{CLOUD_DOMAIN}"
port="#{CLOUD_PORT}"
path="#{CLOUD_PATH}"
registerProcedure="#{CLOUD_REGISTER_PROCEDURE}"
protocol="#{CLOUD_PROTOCOL}" />
EOC
end
def make_sample_rss20
make_rss20(<<-EOR)
#{make_channel20}
EOR
end
def make_feed_without_entry(content=nil, xmlns=[])
<<-EOA
<feed xmlns="#{Atom::URI}"
#{xmlns.collect {|pre, uri| "xmlns:#{pre}='#{uri}'"}.join(' ')}>
<id>#{FEED_ID}</id>
<title>#{FEED_TITLE}</title>
<updated>#{FEED_UPDATED}</updated>
<author>
<name>#{FEED_AUTHOR_NAME}</name>
</author>
#{block_given? ? yield : content}
</feed>
EOA
end
def make_entry(content=nil)
<<-EOA
<entry>
<title>#{ENTRY_TITLE}</title>
<id>#{ENTRY_ID}</id>
<updated>#{ENTRY_UPDATED}</updated>
#{block_given? ? yield : content}
</entry>
EOA
end
def make_feed_with_open_entry(content=nil, xmlns=[], &block)
make_feed_without_entry(<<-EOA, xmlns)
#{make_entry(content, &block)}
EOA
end
def make_feed_with_open_entry_source(content=nil, xmlns=[])
make_feed_with_open_entry(<<-EOA, xmlns)
<source>
#{block_given? ? yield : content}
</source>
EOA
end
def make_feed(content=nil, xmlns=[])
make_feed_without_entry(<<-EOA, xmlns)
<entry>
<title>#{ENTRY_TITLE}</title>
<link href="#{ENTRY_LINK}"/>
<id>#{ENTRY_ID}</id>
<updated>#{ENTRY_UPDATED}</updated>
<summary>#{ENTRY_SUMMARY}</summary>
</entry>
#{block_given? ? yield : content}
EOA
end
def make_entry_document(content=nil, xmlns=[])
<<-EOA
<entry xmlns="#{Atom::URI}"
#{xmlns.collect {|pre, uri| "xmlns:#{pre}='#{uri}'"}.join(' ')}>
<id>#{ENTRY_ID}</id>
<title>#{ENTRY_TITLE}</title>
<updated>#{ENTRY_UPDATED}</updated>
<author>
<name>#{FEED_AUTHOR_NAME}</name>
</author>
#{block_given? ? yield : content}
</entry>
EOA
end
def make_entry_document_with_open_source(content=nil, xmlns=[])
make_entry_document(<<-EOA, xmlns)
<source>
#{block_given? ? yield : content}
</source>
EOA
end
def make_element(elem_name, attrs, contents)
attrs_str = attrs.collect do |name, value|
"#{h name}='#{h value}'"
end.join(" ")
attrs_str = " #{attrs_str}" unless attrs_str.empty?
if contents.is_a?(String)
contents_str = h(contents)
else
contents_str = contents.collect do |name, value|
"#{Element::INDENT}<#{h name}>#{h value}</#{h name}>"
end.join("\n")
contents_str = "\n#{contents_str}\n"
end
"<#{h elem_name}#{attrs_str}>#{contents_str}</#{h elem_name}>"
end
def xmlns_container(xmlns_decls, content)
attributes = xmlns_decls.collect do |prefix, uri|
"xmlns:#{h prefix}=\"#{h uri}\""
end.join(" ")
"<dummy #{attributes}>#{content}</dummy>"
end
private
def setup_rss10(rdf)
assert_equal("", rdf.to_s)
channel = RDF::Channel.new
assert_equal("", channel.to_s)
channel.about = "http://example.com/index.rdf"
channel.title = "title"
channel.link = "http://example.com/"
channel.description = "description"
assert_equal("", channel.to_s)
item_title = "item title"
item_link = "http://example.com/item"
channel.items = RDF::Channel::Items.new
channel.items.Seq.lis << RDF::Channel::Items::Seq::Li.new(item_link)
assert_not_equal("", channel.to_s)
rdf.channel = channel
assert_equal("", rdf.to_s)
item = RDF::Item.new
item.title = item_title
item.link = item_link
item.about = item_link
rdf.items << item
assert_not_equal("", rdf.to_s)
end
def setup_rss20(rss)
assert_equal("", rss.to_s)
channel = Rss::Channel.new
assert_equal("", channel.to_s)
channel.title = "title"
channel.link = "http://example.com/"
channel.description = "description"
assert_not_equal("", channel.to_s)
rss.channel = channel
assert_not_equal("", rss.to_s)
end
def setup_dummy_channel(maker)
about = "http://hoge.com"
title = "fugafuga"
link = "http://hoge.com/feed.xml"
description = "fugafugafugafuga"
language = "ja"
maker.channel.about = about
maker.channel.title = title
maker.channel.link = link
maker.channel.description = description
maker.channel.language = language
end
def setup_dummy_channel_atom(maker)
updated = Time.now
author = "Foo"
setup_dummy_channel(maker)
maker.channel.links.first.rel = "self"
maker.channel.links.first.type = "application/atom+xml"
maker.channel.updated = updated
maker.channel.author = author
end
def setup_dummy_image(maker)
title = "fugafuga"
link = "http://hoge.com"
url = "http://hoge.com/hoge.png"
maker.channel.link = link if maker.channel.link.nil?
maker.image.title = title
maker.image.url = url
end
def setup_dummy_textinput(maker)
title = "fugafuga"
description = "text hoge fuga"
name = "hoge"
link = "http://hoge.com/search.cgi"
maker.textinput.title = title
maker.textinput.description = description
maker.textinput.name = name
maker.textinput.link = link
end
def setup_dummy_item(maker)
title = "TITLE"
link = "http://hoge.com/"
item = maker.items.new_item
item.title = title
item.link = link
end
def setup_dummy_item_atom(maker)
setup_dummy_item(maker)
item = maker.items.first
item.id = "http://example.net/xxx"
item.updated = Time.now
end
def setup_taxo_topic(target, topics)
topics.each do |topic|
taxo_topic = target.taxo_topics.new_taxo_topic
topic.each do |name, value|
case name
when :link
taxo_topic.taxo_link = value
when :topics
value.each do |t|
taxo_topic.taxo_topics << t
end
else
dc_elems = taxo_topic.__send__("dc_#{name}s")
dc_elem = dc_elems.__send__("new_#{name}")
dc_elem.value = value
end
end
end
end
end
end
| 24.914405 | 81 | 0.614379 |
218b5e55af492dd4e72b7fb10f7fdf939ea23d75 | 374 | require 'devise'
require 'rmagick'
require 'carrierwave'
require 'activemerchant'
require 'activeadmin'
module AuthForum
class Engine < ::Rails::Engine
isolate_namespace AuthForum
config.to_prepare do
ApplicationController.helper(ActionView::Helpers::ApplicationHelper)
end
initializer :auth_forum do
require 'jquery-ui-rails'
end
end
end | 23.375 | 74 | 0.751337 |
389e159fcdaf026e62162c2f58f24a8f7d4f010b | 276 | if node["platform_family"] != "windows"
openssl_x509_certificate "/tmp/mycert.pem" do
common_name "www.f00bar.com"
org "Foo Bar"
org_unit "Lab"
country "US"
expire 360
end
openssl_rsa_private_key "/tmp/server.key" do
key_length 2048
end
end
| 17.25 | 47 | 0.677536 |
26d9ee911adafd61340eb61c4e6f0c575358ab09 | 1,013 | class ErrorsController < ApplicationController
def not_found
error = CloudController::Errors::NotFound.new_from_details('NotFound')
presenter = ErrorPresenter.new(error, Rails.env.test?, V3ErrorHasher.new(error))
render status: :not_found, json: presenter
end
def internal_error
error = request.env['action_dispatch.exception']
presenter = ErrorPresenter.new(error, Rails.env.test?, V3ErrorHasher.new(error))
logger.error(presenter.log_message)
render status: presenter.response_code, json: presenter
end
def bad_request
error = CloudController::Errors::ApiError.new_from_details('InvalidRequest')
if request.env['action_dispatch.exception'].is_a?(ActionDispatch::ParamsParser::ParseError)
error = CloudController::Errors::ApiError.new_from_details('MessageParseError', 'invalid request body')
end
presenter = ErrorPresenter.new(error, Rails.env.test?, V3ErrorHasher.new(error))
render status: presenter.response_code, json: presenter
end
end
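
# A minimal wiring sketch, assuming the conventional Rails setup for custom
# error pages (this exact configuration is an assumption, not shown in this file):
#
#   # config/application.rb
#   config.exceptions_app = routes
#
#   # config/routes.rb
#   match "/404", to: "errors#not_found",      via: :all
#   match "/400", to: "errors#bad_request",    via: :all
#   match "/500", to: "errors#internal_error", via: :all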
| 38.961538 | 109 | 0.76308 |
797a525a83fd653d98dcd0f9b8b4d264d4e8dd8e | 3,310 | class GitBasedDomainImportService
def queue_import(git_repo_id, branch_or_tag, tenant_id)
git_repo = GitRepository.find_by(:id => git_repo_id)
ref_type = if git_repo.git_branches.any? { |git_branch| git_branch.name == branch_or_tag }
"branch"
else
"tag"
end
import_options = {
"git_repository_id" => git_repo.id,
"ref" => branch_or_tag,
"ref_type" => ref_type,
"tenant_id" => tenant_id,
"overwrite" => true
}
task_options = {
:action => "Import git repository",
:userid => User.current_user.userid
}
queue_options = {
:class_name => "MiqAeDomain",
:method_name => "import_git_repo",
:role => "git_owner",
:args => [import_options]
}
MiqTask.generic_action_with_callback(task_options, queue_options)
end
def queue_refresh(git_repo_id)
task_options = {
:action => "Refresh git repository",
:userid => User.current_user.userid
}
queue_options = {
:class_name => "GitRepository",
:method_name => "refresh",
:instance_id => git_repo_id,
:role => "git_owner",
:args => []
}
MiqTask.generic_action_with_callback(task_options, queue_options)
end
def queue_refresh_and_import(git_url, ref, ref_type, tenant_id)
import_options = {
"git_url" => git_url,
"ref" => ref,
"ref_type" => ref_type,
"tenant_id" => tenant_id,
"overwrite" => true
}
task_options = {
:action => "Refresh and import git repository",
:userid => User.current_user.userid
}
queue_options = {
:class_name => "MiqAeDomain",
:method_name => "import_git_url",
:role => "git_owner",
:args => [import_options]
}
MiqTask.generic_action_with_callback(task_options, queue_options)
end
def queue_destroy_domain(domain_id)
task_options = {
:action => "Destroy domain",
:userid => User.current_user.userid
}
queue_options = {
:class_name => "MiqAeDomain",
:method_name => "destroy",
:instance_id => domain_id,
:role => "git_owner",
:args => []
}
MiqTask.generic_action_with_callback(task_options, queue_options)
end
def import(git_repo_id, branch_or_tag, tenant_id)
task_id = queue_import(git_repo_id, branch_or_tag, tenant_id)
task = MiqTask.wait_for_taskid(task_id)
domain = task.task_results
error_message = _("Selected branch or tag does not contain a valid domain")
raise MiqException::Error, error_message unless domain.kind_of?(MiqAeDomain)
domain.update_attribute(:enabled, true)
end
def refresh(git_repo_id)
task_id = queue_refresh(git_repo_id)
task = MiqTask.wait_for_taskid(task_id)
raise MiqException::Error, task.message unless task.status == "Ok"
task.task_results
end
def destroy_domain(domain_id)
task_id = queue_destroy_domain(domain_id)
task = MiqTask.wait_for_taskid(task_id)
raise MiqException::Error, task.message unless task.status == "Ok"
task.task_results
end
def self.available?
MiqRegion.my_region.role_active?("git_owner")
end
end
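
# A minimal usage sketch; git_repo and tenant are assumed to be existing
# GitRepository / Tenant records, and the flow mirrors how the service is
# meant to be driven (refresh the repository, then import a branch as a domain):
#
#   if GitBasedDomainImportService.available? # the 'git_owner' role must be active
#     service = GitBasedDomainImportService.new
#     service.refresh(git_repo.id)
#     service.import(git_repo.id, "master", tenant.id)
#   end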
| 27.131148 | 94 | 0.626586 |
bf994128cb00a0327a5bb0b451c4ad0518b41674 | 1,372 | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "turmali/version"
Gem::Specification.new do |spec|
spec.name = "turmali"
spec.version = Turmali::VERSION
spec.authors = ["Eiffel Qiu"]
spec.email = ["[email protected]"]
spec.summary = %q{Turmali is a website building language.}
spec.description = %q{Turmali is a website building language.}
spec.homepage = "https://github.com/eiffelqiu/turmali."
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "https://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "bin"
spec.executables = 'tml'
spec.require_paths = ["lib"]
spec.add_dependency "terminal-table", "~> 1.8.0"
spec.add_development_dependency "bundler", "~> 1.15"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 35.179487 | 96 | 0.663265 |
61ba5072df103087614f61f5486118b7a4b8387e | 304 | FactoryBot.define do
factory :user do
sequence(:github_id) {|n| "#{Faker::Lorem.word}#{n}" }
name { Faker::Name.name }
sequence(:token) {|n| "123456#{n}"}
location { Faker::Address.city }
sequence(:latitude) {|n| n + 6.22964}
sequence(:longitude) {|n| n + -75.587482}
end
end
| 27.636364 | 58 | 0.601974 |
1a6f95408d091939c6b1d3a45829394d51c66bde | 512 | # For render
class EventsController < ApplicationController
include LinesHelper
def index
events = Event.where(line: current_line)
.order_by(created_at: :desc)
@events = paginate_results(events)
respond_to do |format|
format.html { render partial: 'events/events' }
format.js { render :layout => false }
end
end
private
def paginate_results(results)
Kaminari.paginate_array(results)
.page(params[:page])
.per(25)
end
end
| 21.333333 | 53 | 0.644531 |
18020ce3f9d64953ffdf53b3a441e5ea803e203e | 2,249 | module Nexpose
module JsonSerializer
@@namespace = 'Nexpose'
def deserialize(data)
data.each do |key, value|
if respond_to?(key)
property = value
if value.respond_to? :each
obj = resolve_type(key)
unless obj.nil?
if value.is_a?(Array)
property = value.map { |dv| ((dv.respond_to? :each) ? create_object(obj, dv).deserialize(dv): dv) }
else
property = create_object(obj, value).deserialize(value)
end
end
elsif value.is_a?(String) && value.match(/^\d{8}T\d{6}\.\d{3}/)
property = ISO8601.to_time(value)
end
instance_variable_set("@#{key}", property)
end
end
self
end
def serialize()
hash = to_hash(Hash.new)
unless hash.nil?
JSON.generate(hash)
end
end
def to_hash(hash)
self.instance_variables.each do |m|
value = self.instance_variable_get(m)
hash[m.to_s.delete('@')] = do_hash(value)
end
hash
end
private
def do_hash(obj)
if obj.is_a?(Array)
obj = obj.map do |el|
do_hash(el)
end
elsif obj.class.included_modules.include? JsonSerializer
obj = obj.to_hash(Hash.new)
end
obj
end
def create_object(obj, data)
if obj.respond_to?(:json_initializer)
obj.method(:json_initializer).call(data)
else
obj.method(:new).call
end
end
def resolve_type(field)
class_name = normalize_field(field)
type_attribute = "#{field}_type"
if self.respond_to?(type_attribute)
clazz = self.public_send(type_attribute)
elsif Object.const_get(@@namespace).const_defined?(class_name)
resolved = Object.const_get(@@namespace).const_get(class_name)
clazz = resolved if resolved.included_modules.include? JsonSerializer
end
clazz
end
def normalize_field(field)
class_name = field.to_s.split('_').map(&:capitalize!).join
class_name = 'Vulnerability' if class_name == 'Vulnerabilities'
class_name.chop! if class_name.end_with?('s')
class_name
end
end
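
  # A minimal round-trip demo of the module above. DemoAsset is a hypothetical
  # class (not part of the gem); only the stdlib 'json' library is assumed, and
  # the guard keeps the demo from running when the gem loads this file.
  if $PROGRAM_NAME == __FILE__
    require 'json'

    class DemoAsset
      include JsonSerializer
      attr_accessor :ip, :risk_score
    end

    asset = DemoAsset.new
    # hash keys that match accessors become instance variables
    asset.deserialize('ip' => '10.0.0.5', 'risk_score' => 812.4)
    puts asset.serialize # => {"ip":"10.0.0.5","risk_score":812.4}
  end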
end | 24.445652 | 116 | 0.587817 |
18dce6e8004f56c38661c316cb2f457a4a8cdb3c | 1,681 | # -*- encoding: utf-8 -*-
# stub: digest-crc 0.4.1 ruby lib
Gem::Specification.new do |s|
s.name = "digest-crc".freeze
s.version = "0.4.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Postmodern".freeze]
s.date = "2014-04-17"
s.description = "Adds support for calculating Cyclic Redundancy Check (CRC) to the Digest module.".freeze
s.email = "[email protected]".freeze
s.extra_rdoc_files = ["ChangeLog.md".freeze, "LICENSE.txt".freeze, "README.md".freeze]
s.files = ["ChangeLog.md".freeze, "LICENSE.txt".freeze, "README.md".freeze]
s.homepage = "https://github.com/postmodern/digest-crc#readme".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.0.3".freeze
s.summary = "A Cyclic Redundancy Check (CRC) library for Ruby.".freeze
s.installed_by_version = "3.0.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rubygems-tasks>.freeze, ["~> 0.2"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 2.4"])
s.add_development_dependency(%q<yard>.freeze, ["~> 0.8"])
else
s.add_dependency(%q<rubygems-tasks>.freeze, ["~> 0.2"])
s.add_dependency(%q<rspec>.freeze, ["~> 2.4"])
s.add_dependency(%q<yard>.freeze, ["~> 0.8"])
end
else
s.add_dependency(%q<rubygems-tasks>.freeze, ["~> 0.2"])
s.add_dependency(%q<rspec>.freeze, ["~> 2.4"])
s.add_dependency(%q<yard>.freeze, ["~> 0.8"])
end
end
| 41 | 112 | 0.66627 |
f86c381db421f4d85fcd22b64ffe4c07ec426caa | 2,183 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Remote::TNS
include Msf::Auxiliary::Report
include Msf::Auxiliary::Scanner
def initialize(info = {})
super(update_info(info,
'Name' => 'Oracle TNS Listener SID Enumeration',
'Description' => %q{
        This module simply queries the TNS listener for the Oracle SID.
With Oracle 9.2.0.8 and above the listener will be protected and
the SID will have to be bruteforced or guessed.
},
'Author' => [ 'CG', 'MC' ],
'License' => MSF_LICENSE,
'Version' => '$Revision$',
'DisclosureDate' => 'Jan 7 2009'
))
register_options(
[
Opt::RPORT(1521)
], self.class)
deregister_options('RHOST')
end
def run_host(ip)
begin
connect
pkt = tns_packet("(CONNECT_DATA=(COMMAND=STATUS))")
sock.put(pkt)
select(nil,nil,nil,0.5)
data = sock.get_once
if ( data and data =~ /ERROR_STACK/ )
print_error("TNS listener protected for #{ip}...")
else
if(not data)
print_error("#{ip} Connection but no data")
else
sid = data.scan(/INSTANCE_NAME=([^\)]+)/)
sid.uniq.each do |s|
report_note(
:host => ip,
:port => rport,
:type => "oracle_sid",
:data => "PORT=#{rport}, SID=#{s}",
:update => :unique_data
)
print_good("Identified SID for #{ip}:#{rport} #{s}")
end
service_name = data.scan(/SERVICE_NAME=([^\)]+)/)
service_name.uniq.each do |s|
report_note(
:host => ip,
:port => rport,
:type => "oracle_service_name",
:data => "PORT=#{rport}, SERVICE_NAME=#{s}",
:update => :unique_data
)
print_status("Identified SERVICE_NAME for #{ip}:#{rport} #{s}")
end
end
end
disconnect
rescue ::Rex::ConnectionError
rescue ::Errno::EPIPE
end
end
end
| 24.255556 | 71 | 0.585891 |
082d987de7ee472a08da6dc8a4f44913ff8cbd85 | 4,427 | module XeroGateway
class BaseRecord
class UnsupportedAttributeType < StandardError; end
class_attribute :element_name
class_attribute :attribute_definitions
class_attribute :attribute_definitions_readonly
# The source XML record that initialized this instance.
attr_reader :source_xml
class << self
def attributes(hash)
hash.each do |k, v|
attribute k, v
end
end
def attribute(name, value)
self.attribute_definitions ||= {}
self.attribute_definitions[name] = value
case value
when Hash
value.each do |k, v|
attribute("#{name}#{k}", v)
end
else
attr_accessor name.underscore
end
end
# Set list of attributes that should never be included in update/create responses.
def readonly_attributes(*attrs)
self.attribute_definitions_readonly ||= []
self.attribute_definitions_readonly += attrs.flatten
end
def from_xml(base_element, gateway = nil)
args = gateway ? [{ gateway: gateway }] : []
new(*args).from_xml(base_element)
end
def xml_element
element_name || self.name.split('::').last
end
end
def initialize(params = {})
params.each do |k,v|
self.send("#{k}=", v) if respond_to?("#{k}=")
end
end
def ==(other)
to_xml == other.to_xml
end
def to_xml(builder = Builder::XmlMarkup.new)
builder.__send__(self.class.xml_element) do
to_xml_attributes(builder)
end
end
def from_xml(base_element)
@source_xml = base_element
from_xml_attributes(base_element)
self
end
def from_xml_attributes(element, attribute = nil, attr_definition = self.class.attribute_definitions)
if Hash === attr_definition
element.children.each do |child|
next unless child.respond_to?(:name)
child_attribute = child.name
child_attr_definition = attr_definition[child_attribute]
child_attr_name = "#{attribute}#{child_attribute}" # SalesDetails/UnitPrice => SalesDetailsUnitPrice
next unless child_attr_definition
from_xml_attributes(child, child_attr_name, child_attr_definition)
end
return
end
value = case attr_definition
when :boolean then element.text == "true"
when :float then element.text.to_f
when :integer then element.text.to_i
when :currency then BigDecimal(element.text)
when :date then Dates::Helpers.parse_date(element.text)
when :datetime then Dates::Helpers.parse_date_time(element.text)
when :datetime_utc then Dates::Helpers.parse_date_time_utc(element.text)
when Array then array_from_xml(element, attr_definition)
when Class
attr_definition.from_xml(element) if attr_definition.respond_to?(:from_xml)
else element.text
end if element.text.present? || element.children.present?
send("#{attribute.underscore}=", value)
end
def array_from_xml(element, attr_definition)
definition_klass = attr_definition.first
element.children.map { |child_el| definition_klass.from_xml(child_el) }
end
def to_xml_attributes(builder = Builder::XmlMarkup.new, path = nil, attr_definitions = self.class.attribute_definitions)
attr_definitions.each do |attr, value|
next if self.class.attribute_definitions_readonly && self.class.attribute_definitions_readonly.include?(attr)
case value
when Hash
builder.__send__(attr) do
to_xml_attributes(builder, "#{path}#{attr}", value)
end
when Array
raise UnsupportedAttributeType.new("#{value} instances don't respond to #to_xml") unless value.first.method_defined?(:to_xml)
options = value.length > 1 ? value.last : {}
value = send("#{path}#{attr}".underscore)
value ||= [] unless options[:omit_if_empty]
builder.__send__(attr) do |array_wrapper|
value.map do |k|
k.to_xml(array_wrapper)
end
end unless value.nil?
else
value = send("#{path}#{attr}".underscore)
builder.__send__(attr, value) unless value.nil?
end
end
end
end
end
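
# A minimal sketch of the attribute DSL above; DemoPhone is hypothetical and
# assumes the gem's normal environment (activesupport and builder loaded):
#
#   class DemoPhone < XeroGateway::BaseRecord
#     attributes "PhoneType"   => :string,
#                "PhoneNumber" => :string
#   end
#
#   phone = DemoPhone.new(:phone_type => "MOBILE", :phone_number => "555 0100")
#   phone.to_xml
#   # => "<DemoPhone><PhoneType>MOBILE</PhoneType><PhoneNumber>555 0100</PhoneNumber></DemoPhone>"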
| 31.397163 | 135 | 0.634515 |
e81bee08d1fab3702175506fc6cb36a55d61f157 | 2,483 | class StaticArray
def initialize(capacity)
@store = Array.new(capacity)
end
def [](i)
validate!(i)
@store[i]
end
def []=(i, val)
validate!(i)
@store[i] = val
end
def length
@store.length
end
private
def validate!(i)
raise "Overflow error" unless i.between?(0, @store.length - 1)
end
end
class DynamicArray
include Enumerable
attr_reader :count
def initialize(capacity = 8)
@store = StaticArray.new(capacity)
@count = 0
@start_idx = 0
end
def [](i)
if i >= @count
return nil
elsif i < 0
return nil if i < -@count
return self[@count + i]
end
@store[(@start_idx + i) % capacity]
end
def []=(i, val)
if i >= @count
(i - @count).times { push(nil) }
elsif i < 0
return nil if i < -@count
return self[@count + i] = val
end
if i == @count
resize! if capacity == @count
@count += 1
end
@store[(@start_idx + i) % capacity] = val
end
def capacity
@store.length
end
def include?(val)
any? { |el| el == val }
end
def push(val)
resize! if capacity == @count
@store[(@start_idx + @count) % capacity] = val
@count += 1
self
end
def unshift(val)
resize! if capacity == @count
@start_idx = (@start_idx - 1) % capacity
@store[@start_idx] = val
@count += 1
self
end
def pop
return nil if @count == 0
last_item = @store[(@start_idx + @count - 1) % capacity]
@count -= 1
last_item
end
def shift
return nil if @count == 0
@count -= 1
first_item = @store[@start_idx]
@start_idx = (@start_idx + 1) % capacity
first_item
end
def first
return nil if @count == 0
@store[@start_idx]
end
def last
return nil if @count == 0
@store[(@start_idx + @count - 1) % capacity]
end
def each
@count.times { |i| yield self[i] }
self
end
def to_s
"[" + inject([]) { |acc, el| acc << el }.join(", ") + "]"
end
def ==(other)
return false unless [Array, DynamicArray].include?(other.class)
return false unless length == other.length
each_with_index { |el, i| return false unless el == other[i] }
true
end
alias_method :<<, :push
%i(length size).each { |method| alias_method method, :count }
private
def resize!
new_store = StaticArray.new(capacity * 2)
each_with_index { |el, i| new_store[i] = el }
@store = new_store
@start_idx = 0
end
end
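
# A small self-check of the ring buffer behaviour above (only run when this
# file is executed directly); the expected values in the comments follow from
# how @start_idx wraps around and how resize! re-packs elements from index 0.
if $PROGRAM_NAME == __FILE__
  arr = DynamicArray.new(2)
  arr.push(1).push(2)        # fills the initial capacity of 2
  arr.unshift(0)             # triggers resize!, then wraps @start_idx to the end
  arr.push(3)
  puts arr.to_s              # => [0, 1, 2, 3]
  puts arr.shift.inspect     # => 0
  puts arr.last.inspect      # => 3
  puts (arr == [1, 2, 3]).inspect # => true
end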
| 17.485915 | 67 | 0.571889 |
abe0beb1a6da5bade55a2bbedd0a365faf20ca8e | 744 | class SessionsController < ApplicationController
def new
end
def create
@user = User.find_by(email: params[:session][:email].downcase)
if @user && @user.authenticate(params[:session][:password])
if @user.activated?
log_in @user
params[:session][:remember_me] == '1' ? remember(@user) : forget(@user)
redirect_back_or @user
else
message = "Account not activated."
message += "Check your email for the activation link."
flash[:warning] = message
redirect_to root_url
end
else
flash.now[:danger] = "Invalid email/password combination"
render 'new'
end
end
def destroy
log_out if logged_in?
redirect_to root_url
end
end
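
# The helpers used above (log_in, log_out, logged_in?, remember, forget,
# redirect_back_or) are not defined in this file; a sketch of what they are
# assumed to look like, following the usual SessionsHelper pattern:
#
#   module SessionsHelper
#     def log_in(user)
#       session[:user_id] = user.id
#     end
#
#     def logged_in?
#       !current_user.nil?
#     end
#
#     def log_out
#       forget(current_user)
#       session.delete(:user_id)
#       @current_user = nil
#     end
#   end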
| 25.655172 | 79 | 0.634409 |
1aee1190dfe797fb1d38db49a9227c1ae4329efa | 28,397 | module Shoes
#this class represents a whole Shoes App.
class App < Qt::Application #:nodoc: all
def initialize opts = {}, blk #:nodoc:
super ARGV
#set the application icon to the shoes logo
set_window_icon(Qt::Icon.new "#{File.expand_path(File.dirname(__FILE__))}/../../static/blue_shoes.jpg")
#set up some options with defaults
height = opts[:height] || 200
width = opts[:width] || 400
      resizeable = opts[:resizable].nil? ? true : opts[:resizable]
@_main_window = Qt::Widget.new do
self.layout = Shoes::Stack.new
resize height, width
#this is the list of subwidgets we've got
@widgets = []
#we can make the window unresizable by adding a minimum and maximum
#size
unless resizeable
setMaximumSize(height,width)
setMinimumSize(height,width)
end
# in QT, each widget has a paint_event that gets called whenever
# it needs to be repainted.
def self.paint_event event
#we create a painter...
painter = Qt::Painter.new self
#then call it over every widget in our list
@widgets.each{|w| w.draw painter }
#and then end painting
painter.end
end
#a nice convenience function to add our widgets to the array
def add_widget widget
@widgets << widget
end
end
#we want to have a current widget, which starts off as the main window
@_current_widget = @_main_window
#we evaluate the block we were passed. This is pretty much the heart of
#Shoes.
instance_eval &blk
#then we show our window
@_main_window.show
#and call QT's exec to kick things off!
exec
exit
end
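
    # A minimal usage sketch, assuming a top-level Shoes.app helper that
    # constructs this class (as in the original Shoes API, not shown here).
    # The block is what #initialize instance_evals, so bare calls such as
    # para and button resolve to the methods defined below:
    #
    #   Shoes.app :width => 300, :height => 150 do
    #     para "Hello, Blue Shoes"
    #     button("Click me") { alert "clicked!" }
    #   end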
#Create a new button.
def button txt, style={}, &blk
#create the button, don't forget to hook up that signal!
b = Qt::PushButton.new txt do
connect(SIGNAL :clicked) { blk.call } if blk
end
#add it to our widget list
add_widget b
#and we want to return it.
b
end
#a convenience function for adding a widget to the current widget
def add_widget widget
@_current_widget.layout.add_widget widget, 0
end
#these classes should probably be moved to Shoes::Dialog
#create an alert
class Alert < Qt::Dialog
def initialize(message, parent = nil)
super(parent)
Qt::MessageBox::information(self,"Alert!" , message)
end
end
# Pops up a window containing a short message.
def alert(message)
Alert.new message
end
class Ask < Qt::Dialog
attr_accessor :text
def initialize(message, parent = nil)
super(parent)
ok = Qt::Boolean.new
self.text = Qt::InputDialog.getText(self,
"I have a Question?",
message,
Qt::LineEdit::Normal,
Qt::Dir::home().dirName(),
ok)
end
end
# Pops up a window and asks a question.
def ask(message)
ask = Ask.new message
ask.text
end
    # Pops up a color picker window. The program will wait for a color to be picked, then give you back a Color object. See the Color help for some ways you can use this color.
def ask_color(title)
# returns Shoes::Color
throw NotImplementedError
end
# Pops up an "Open file..." window. It's the standard window which shows all of your folders and lets you select a file to open. Hands you back the name of the file.
def ask_open_file
# returns a string
throw NotImplementedError
end
    # Pops up a "Save file..." window, similar to ask_open_file, described previously.
def ask_save_file
# returns a string
throw NotImplementedError
end
# Pops up an "Open folder..." window. It's the standard window which shows all of your folders and lets you select a folder to open. Hands you back the name of the folder.
def ask_open_folder
# returns a string
throw NotImplementedError
end
    # Pops up a "Save folder..." window, similar to ask_open_folder, described previously. On OS X, this method currently behaves like an alias of ask_open_folder.
def ask_save_folder
# returns a string
throw NotImplementedError
end
    # Pops up a yes-or-no question. If the person at the computer clicks yes, you'll get back true. If not, you'll get back false.
def confirm(question)
# returns true or false
throw NotImplementedError
end
# Sends a debug message to the Shoes console. You can bring up the Shoes console by pressing Alt-/ on any Shoes window (or ⌘-/ on OS X.)
def debug(message)
# returns a string
throw NotImplementedError
end
# Sends an error message to the Shoes console. This method should only be used to log errors. Try the debug method for logging messages to yourself.
def error(message)
# returns nil
throw NotImplementedError
end
# Loads a TrueType (or other type of font) from a file. While TrueType is supported by all platforms, your platform may support other types of fonts. Shoes uses each operating system's built-in font system to make this work.
    def font(path)
# returns an array of font family names
throw NotImplementedError
end
# Builds a linear gradient from two colors. For each color, you may pass in a Shoes::Color object or a string describing the color.
def gradient(color1, color2)
# returns Shoes::Pattern
throw NotImplementedError
end
# Create a grayscale color from a level of darkness and, optionally, an alpha level.
def gray(darkness, alpha)
# returns Shoes::Color
throw NotImplementedError
end
# Logs an informational message to the user in the Shoes console. So, where debug messages are designed to help the program figure out what's happening, info messages tell the user extra information about the program.
def info(message)
# returns nil
throw NotImplementedError
end
# Create a color from red, green and blue components. An alpha level (indicating transparency) can also be added, optionally.
# This method may also be called as Shoes.rgb.
def rgb(red, green, blue, alpha)
# returns Shoes::Color
throw NotImplementedError
end
# Logs a warning for the user. A warning is not a catastrophic error (see error for that.) It is just a notice that the program will be changing in the future or that certain parts of the program aren't reliable yet.
def warn(message)
# returns nil
throw NotImplementedError
end
# the system clipboard
attr_accessor :clipboard
# Closes the app window
def close
throw NotImplementedError
end
#starts a download thread
def download url, opts={}, &blk
throw NotImplementedError
end
# Gets a string containing the URL of the current app.
def location
throw NotImplementedError
end
# Identifies the mouse cursor's location, along with which button is being pressed.
def mouse
# an array of numbers: button, left, top
      throw NotImplementedError
end
# Gets the app which launched this app. In most cases, this will be nil. But if this app was launched using the window method, the owner will be the app which called window.
def owner
#returns Shoes::App
throw NotImplementedError
end
# Has the window been fully constructed and displayed? This is useful for threaded code which may try to use the window before it is completely built. (Also see the start event which fires once the window is open.)
def started?
# returns true or false
throw NotImplementedError
end
# Changes the location, in order to view a different Shoes URL.
def visit url
throw NotImplementedError
end
    # Draws an arc shape (a section of an oval) at coordinates (left, top). This method just gives you a bit more control than oval, by offering the :angle1 and :angle2 styles. (In fact, you can mimic the oval method by setting :angle1 to 0 and :angle2 to Shoes::TWO_PI.)
def arc(left, top, width, height, angle1, angle2)
# returns Shoes::Shape
throw NotImplementedError
end
# Draws an arrow at coordinates (left, top) with a pixel width.
def arrow(left, top, width)
# returns Shoes::Shape
throw NotImplementedError
end
    # Sets the line cap, which is the shape at the end of every line you draw. If set to :curve, the end is rounded. The default is :rect, a line which ends abruptly flat. The :project cap is also flat, but sticks out a bit longer.
def cap(how)
# returns self
throw NotImplementedError
end
# Sets the fill bucket to a specific color (or pattern.) Patterns can be colors, gradients or images. So, once the fill bucket is set, you can draw shapes and they will be colored in with the pattern you've chosen.
def fill(pattern)
# returns pattern
      throw NotImplementedError
end
# Blanks the fill color, so that any shapes drawn will not be filled in. Instead, shapes will have only a lining, leaving the middle transparent.
def nofill
# returns self
throw NotImplementedError
end
# Empties the line color. Shapes drawn will have no outer line. If nofill is also set, shapes drawn will not be visible.
def nostroke
# returns self
throw NotImplementedError
end
# Draws a line using the current line color (aka "stroke") starting at coordinates (left, top) and ending at coordinates (x2, y2).
def line(left, top, x2, y2)
# returns Shoes::Shape
throw NotImplementedError
end
# Draws a circular form at pixel coordinates (left, top) with a width and height of radius pixels. The line and fill colors are used to draw the shape. By default, the coordinates are for the oval's leftmost, top corner, but this can be changed by calling the transform method or by using the :center style on the next method below.
def oval(left, top, radius)
# returns Shoes::Shape
throw NotImplementedError
end
# Draw circular form using a style hash.
def oval(styles)
# returns Shoes::Shape
throw NotImplementedError
end
# Draws a rectangle starting from coordinates (top, left) with dimensions of width x height. Optionally, you may give the rectangle rounded corners with a fifth argument: the radius of the corners in pixels.
def rect(top, left, width, height, corners = 0)
# returns Shoes::Shape
throw NotImplementedError
end
# Draw a rectangle using a style hash.
def rect(styles)
# returns Shoes::Shape
throw NotImplementedError
end
# Rotates the pen used for drawing by a certain number of degrees, so that any shapes will be drawn at that angle.
def rotate(degrees)
# returns self
throw NotImplementedError
end
# Describes an arbitrary shape to draw, beginning at coordinates (left, top) and continued by calls to line_to, move_to, curve_to and arc_to inside the block. You can look at it as sketching a shape with a long line that curves and arcs and bends.
def shape(left, top)
# returns Shoes::Shape
throw NotImplementedError
end
# Draws a star using the stroke and fill colors. The star is positioned with its center point at coordinates (left, top) with a certain number of points. The outer width defines the full radius of the star; the inner width specifies the radius of the star's middle, where points stem from.
def star(left, top, points = 10, outer = 100.0, inner = 50.0)
# returns Shoes::Shape
throw NotImplementedError
end
# Set the active line color for this slot. The pattern may be a color, a gradient or an image, all of which are categorized as "patterns." The line color is then used to draw the borders of any subsequent shape.
def stroke(pattern)
# returns pattern
throw NotImplementedError
end
# Sets the line size for all drawing within this slot. Whereas the stroke method alters the line color, the strokewidth method alters the line size in pixels. Calling strokewidth(4) will cause lines to be drawn 4 pixels wide.
def strokewidth(number)
# returns self
throw NotImplementedError
end
# Should transformations (such as skew and rotate) be performed around the center of the shape? Or the corner of the shape? Shoes defaults to :corner.
def transform(where)
# returns self
throw NotImplementedError
end
# Moves the starting point of the drawing pen for this slot. Normally, the pen starts at (0, 0) in the top-left corner, so that all shapes are drawn from that point. With translate, if the starting point is moved to (10, 20) and a shape is drawn at (50, 60), then the shape is actually drawn at (60, 80) on the slot.
def translate(left, top)
# returns self
throw NotImplementedError
end
    # Starts an animation timer, which runs parallel to the rest of the app. The fps is a number, the frames per second. This number dictates how many times per second the attached block will be called.
def animate(fps)
      # returns Shoes::Animation (the attached block is called with |frame|)
throw NotImplementedError
end
# Draws a Background element with a specific color (or pattern.) Patterns can be colors, gradients or images. Colors and images will tile across the background. Gradients stretch to fill the background.
def background(pattern, style, &blk)
background = Shoes::Background.new(pattern, style)
@_main_window.add_widget background
background
end
# Creates a Banner text block. Shoes automatically styles this text to 48 pixels high.
def banner(text)
# returns Shoes::Banner
throw NotImplementedError
end
# Draws a Border element using a specific color (or pattern.) Patterns can be colors, gradients or images. Colors and images will tile across the border. Gradients stretch to fill the border.
    def border(pattern, opts={})
# returns Shoes::Border
throw NotImplementedError
end
# Creates a Caption text block. Shoes styles this text to 14 pixels high.
def caption(text)
# returns Shoes::Caption
throw NotImplementedError
end
# Adds a check box.
def check()
# returns Shoes::Check
throw NotImplementedError
end
# Create a Code text fragment. This text defaults to a monospaced font.
def code(text)
# returns Shoes::Code
throw NotImplementedError
end
# Creates a Del text fragment (short for "deleted") which defaults to text with a single strikethrough in its middle.
def del(text)
# returns Shoes::Del
throw NotImplementedError
end
# Opens a new app window (just like the window method does,) but the window is given a dialog box look.
def dialog(styles)
      # returns Shoes::App
throw NotImplementedError
end
    # Adds a large, multi-line textarea to this slot. The text is optional and should be a string that will start out the box. An optional block can be attached here which is called any time the user changes the text in the box.
def edit_box(text)
# returns Shoes::EditBox
    raise NotImplementedError
end
  # Adds a single-line text box to this slot. The text is optional and should be a string that will start out the box. An optional block can be attached here which is called any time the user changes the text in the box.
def edit_line(text)
# returns Shoes::EditLine
    raise NotImplementedError
end
# Creates an Em text fragment (short for "emphasized") which, by default, is styled with italics.
def em(text)
# returns Shoes::Em
    raise NotImplementedError
end
  # A timer similar to the animation method, but much slower. This timer fires every given number of seconds, running the attached block. So, for example, if you need to check a web site every five minutes, you'd call every(300) with a block containing the code to actually ping the web site.
def every(seconds)
# returns Shoes::Every
    raise NotImplementedError
end
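  # Example (hypothetical sketch; ping_site is an assumed application helper,
  # not part of this API): poll a web site every five minutes.
  #
  #   every(300) do
  #     ping_site
  #   end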
# A flow is an invisible box (or "slot") in which you place Shoes elements. Both flows and stacks are explained in great detail on the main Slots page.
  def flow(style = {}, &blk)
    flow = Shoes::Flow.new(style)
    add_widget flow
    instance_eval(&blk)
    flow
  end
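  # Example (illustrative sketch): elements placed inside a flow line up next to
  # each other until the row runs out of room.
  #
  #   flow :margin => 10 do
  #     para "Name: "
  #     edit_line
  #   end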
# Creates an Image element for displaying a picture. PNG, JPEG and GIF formats are allowed.
def image(path)
# returns Shoes::Image
    raise NotImplementedError
end
# Quickly grab the width and height of an image. The image won't be loaded into the cache or displayed.
def imagesize(path)
# returns [width, height]
    raise NotImplementedError
end
# Creates an Ins text fragment (short for "inserted") which Shoes styles with a single underline.
def ins(text)
# returns Shoes::Ins
    raise NotImplementedError
end
# Creates an Inscription text block. Shoes styles this text at 10 pixels high.
def inscription(text)
# returns Shoes::Inscription
    raise NotImplementedError
end
# Creates a Link text block, which Shoes styles with a single underline and colors with a #06E (blue) colored stroke.
def link(text, opts)
# returns Shoes::Link
    raise NotImplementedError
end
# Adds a drop-down list box containing entries for everything in the items array. An optional block may be attached, which is called if anything in the box becomes selected by the user.
def list_box(opts)
# returns Shoes::ListBox
    raise NotImplementedError
end
# Adds a progress bar.
def progress
# returns Shoes::Progress
    raise NotImplementedError
end
# Create a Para text block (short for "paragraph") which Shoes styles at 12 pixels high.
def para(text)
para = Shoes::Para.new(text)
add_widget para.to_label
para
end
# Adds a radio button. If a group name is given, the radio button is considered part of a group. Among radio buttons in the same group, only one may be checked. (If no group name is given, the radio button is grouped with any other radio buttons in the same slot.)
def radio(group)
# returns Shoes::Radio
    raise NotImplementedError
end
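  # Example (illustrative sketch): two radio buttons sharing the :size group, so
  # checking one unchecks the other.
  #
  #   radio(:size); para "Small"
  #   radio(:size); para "Large"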
# Creates a Span text fragment, unstyled by default.
def span(text)
# returns Shoes::Span
    raise NotImplementedError
end
# Creates a new stack. A stack is a type of slot. (See the main Slots page for a full explanation of both stacks and flows.)
  def stack(style = {}, &blk)
    stack = Shoes::Stack.new
    add_widget stack
    instance_eval(&blk)
    stack
  end
# Creates a Strong text fragment, styled in bold by default.
def strong(text)
strong = Shoes::Strong.new(text)
strong
end
# Creates a Sub text fragment (short for "subscript") which defaults to lowering the text by 10 pixels and styling it in an x-small font.
def sub(text)
# returns Shoes::Sub
    raise NotImplementedError
end
# Creates a Subtitle text block. Shoes styles this text to 26 pixels high.
def subtitle(text)
# returns Shoes::Subtitle
    raise NotImplementedError
end
# Creates a Sup text fragment (short for "superscript") which defaults to raising the text by 10 pixels and styling it in an x-small font.
def sup(text)
# returns Shoes::Sup
    raise NotImplementedError
end
# Creates a Tagline text block. Shoes styles this text to 18 pixels high.
def tagline(text)
# returns Shoes::Tagline
    raise NotImplementedError
end
  # A one-shot timer. If you want to schedule some code to run in a few seconds (or minutes, hours), you can attach the code as a block here.
def timer(seconds)
# returns Shoes::Timer
    raise NotImplementedError
end
# Creates a Title text block. Shoes styles these elements to 34 pixels high.
def title(text)
# returns Shoes::Title
    raise NotImplementedError
end
# Embeds a movie in this slot.
def video(url)
# returns Shoes::Video
    raise NotImplementedError
end
# Opens a new app window. This method is almost identical to the Shoes.app method used to start an app in the first place. The difference is that the window method sets the new window's owner property. (A normal Shoes.app has its owner set to nil.)
def window(styles)
# returns Shoes::App
    raise NotImplementedError
end
# The click block is called when a mouse button is clicked. The button is the number of the mouse button which has been pressed. The left and top are the mouse coordinates at which the click happened.
def click(&blk)
# returns self
    raise NotImplementedError
end
  # When a slot is removed, its finish event occurs. The finish block is immediately handed self, the slot object which has been removed.
def finish(&blk)
# returns self
    raise NotImplementedError
end
# The hover event happens when the mouse enters the slot. The block gets self, meaning the object which was hovered over.
def hover(&blk)
# returns self
    raise NotImplementedError
end
# Whenever a key (or combination of keys) is pressed, the block gets called. The block is sent a key which is a string representing the character (such as the letter or number) on the key. For special keys and key combos, a Ruby symbol is sent, rather than a string.
def keypress(&blk)
# returns self
    raise NotImplementedError
end
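  # Example (hypothetical sketch; the :control_q symbol is an assumed key-combo
  # name): react to printable keys and to a quit shortcut.
  #
  #   keypress do |key|
  #     exit if key == :control_q
  #     para(key) if key.is_a?(String)
  #   end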
# The leave event takes place when the mouse cursor exits a slot. The moment it no longer is inside the slot's edges. When that takes place, the block is called with self, the slot object which is being left.
def leave(&blk)
# returns self
    raise NotImplementedError
end
# The motion block gets called every time the mouse moves around inside the slot. The block is handed the cursor's left and top coordinates.
def motion(&blk)
# returns self
    raise NotImplementedError
end
  # The release block runs whenever the mouse is unclicked (on mouse up), when the finger is lifted. The button is the number of the button that was depressed. The left and top are the coordinates of the mouse at the time the button was released.
def release(&blk)
# returns self
    raise NotImplementedError
end
# The first time the slot is drawn, the start event fires. The block is handed self, the slot object which has just been drawn.
def start(&blk)
# returns self
    raise NotImplementedError
end
# Adds elements to the end of a slot.
def append(&blk)
# returns self
    raise NotImplementedError
end
# Adds elements to a specific place in a slot, just after the element which is a child of the slot.
def after(&blk)
# returns self
    raise NotImplementedError
end
# Adds elements to a specific place in a slot, just before the element which is a child of the slot.
def before(&blk)
# returns self
    raise NotImplementedError
end
  # Empties the slot of any elements, timers and nested slots. This is effectively identical to looping through the contents of the slot and calling each element's remove method. The clear method also takes an optional block; the block will be used to replace the contents of the slot.
  def clear(&blk)
    # returns self
    raise NotImplementedError
  end
# Adds elements to the beginning of a slot.
def prepend(&blk)
# returns self
    raise NotImplementedError
end
# A shortcut method for setting the :displace_left and :displace_top styles. Displacing is a handy way of moving a slot without altering the layout. In fact, the top and left methods will not report displacement at all. So, generally, displacement is only for temporary animations. For example, jiggling a button in place.
def displace(left, top)
# returns self
    raise NotImplementedError
end
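  # Example (hypothetical sketch; @box stands in for a slot created elsewhere):
  # nudge a slot five pixels right, then snap it back a moment later.
  #
  #   @box.displace(5, 0)
  #   timer(0.2) { @box.displace(0, 0) }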
# The size of the scrollbar area. When Shoes needs to show a scrollbar, the scrollbar may end up covering up some elements that touch the edge of the window. The gutter tells you how many pixels to expect the scrollbar to cover.
def gutter
# returns a number
    raise NotImplementedError
end
# The vertical size of the viewable slot in pixels. So, if this is a scrolling slot, you'll need to use scroll_height() to get the full size of the slot.
def height
# returns a number
    raise NotImplementedError
end
# Hides the slot, so that it can't be seen. See also show and toggle.
def hide
    # returns self
    raise NotImplementedError
end
# The left pixel location of the slot. Also known as the x-axis coordinate.
def left
# returns a number
    raise NotImplementedError
end
# Moves the slot to specific coordinates, the (left, top) being the upper left hand corner of the slot.
def move(left, top)
# returns self
    raise NotImplementedError
end
# Removes the slot. It will no longer be displayed and will not be listed in its parent's contents. It's gone.
def remove
# returns self
    raise NotImplementedError
end
# Is this slot allowed to show a scrollbar? True or false. The scrollbar will only appear if the height of the slot is also fixed.
def scroll
# returns true or false
    raise NotImplementedError
end
# The vertical size of the full slot, including any of it which is hidden by scrolling.
def scroll_height
# returns a number
    raise NotImplementedError
end
# The top coordinate which this slot can be scrolled down to. The top coordinate of a scroll bar is always zero. The bottom coordinate is the full height of the slot minus one page of scrolling. This bottom coordinate is what scroll_max returns.
def scroll_max
# returns a number
    raise NotImplementedError
end
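  # Worked example (illustrative): if the full contents of a slot are 1000 pixels
  # tall and the visible height is 300 pixels, scroll_max is 1000 - 300 = 700, so
  # scroll_top may range from 0 to 700.
  #
  #   slot.scroll_top = slot.scroll_max   # jump to the very bottom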
# The top coordinate which this slot is scrolled down to. So, if the slot is scrolled down twenty pixels, this method will return 20.
def scroll_top
# returns a number
    raise NotImplementedError
end
# Scrolls the slot to a certain coordinate. This must be between zero and scroll_max.
def scroll_top=(number)
    raise NotImplementedError
end
# Reveals the slot, if it is hidden. See also hide and toggle.
def show
# returns self
    raise NotImplementedError
end
  # Calling the style method with no arguments returns a hash of the styles presently applied to this slot. Called with a hash of style settings, it alters the slot instead. Any of the methods on this page (aside from this method, of course) can be used as a style setting. So, for example, there is a width method, thus there is also a width style.
  def style(styles = nil)
    # returns styles
    raise NotImplementedError
  end
# Hides the slot, if it is shown. Or shows the slot, if it is hidden.
def toggle
# returns self
    raise NotImplementedError
end
# The top pixel location of the slot. Also known as the y-axis coordinate.
def top
# returns a number
    raise NotImplementedError
end
# The horizontal size of the slot in pixels.
def width
# returns a number
    raise NotImplementedError
end
# Lists all elements in a slot.
def contents
# returns an array of elements
    raise NotImplementedError
end
# Gets the object for this element's container.
def parent
# returns a Shoes::Stack or Shoes::Flow
    raise NotImplementedError
end
end
end
| 36.5 | 336 | 0.685108 |
f7f0cd7f352e8045364e9220d56efa83de42699d | 2,379 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2011, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example runs incorrect query and demonstrates how to handle errors.
require 'dfp_api'
API_VERSION = :v201511
def produce_api_error()
# Get DfpApi instance and load configuration from ~/dfp_api.yml.
dfp = DfpApi::Api.new
# To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
# the configuration file or provide your own logger:
# dfp.logger = Logger.new('dfp_xml.log')
# Get the UserService.
user_service = dfp.service(:UserService, API_VERSION)
# Omitting "id" field here to produce an error.
user = {:preferred_locale => 'en_UK', :name => 'foo_bar'}
# Execute request and get the response, this should raise an exception.
user = user_service.update_user(user)
# Output retrieved data.
puts "User ID: %d, name: %s, email: %s" %
[user[:id], user[:name], user[:email]]
end
if __FILE__ == $0
begin
# This function should produce an exception for demo.
produce_api_error()
  # One of two kinds of exceptions might occur: a general HTTP error (like 403
  # or 404) or a DFP API error defined in the WSDL and described in the
  # documentation.
# Handling HTTP errors.
rescue AdsCommon::Errors::HttpError => e
puts "HTTP Error: %s" % e
# Handling API errors.
rescue DfpApi::Errors::ApiException => e
    # A standard DFP API error includes a message and an array of the errors that occurred.
puts "Message: %s" % e.message
puts 'Errors:'
# Print out each of the errors.
e.errors.each_with_index do |error, index|
puts "\tError [%d]:" % (index + 1)
error.each do |field, value|
puts "\t\t%s: %s" % [field, value]
end
end
end
end
| 32.589041 | 79 | 0.675074 |
26eb010c21464b3164a5869b06003feb5c125856 | 798 | #
# Cookbook Name:: krb5
# Recipe:: kadmin
#
# Copyright © 2014 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'krb5::default'
include_recipe 'krb5::kadmin'
service 'krb5-admin-server' do
service_name node['krb5']['kadmin']['service_name']
action :nothing
end
| 29.555556 | 74 | 0.745614 |
039718379f3fde9fda9628c1fe56ea649349cc0c | 241 | require 'redis'
require 'active_support/all'
require "htmon/icinga/version"
require "htmon/icinga/module"
module Htmon
module Icinga
def self.redis
@redis
end
    def self.redis=(r)
@redis = r
end
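    # Example (illustrative sketch; the REDIS_URL environment variable is an
    # assumption): assign a connection once at boot and reuse it afterwards.
    #
    #   Htmon::Icinga.redis = Redis.new(url: ENV['REDIS_URL'])
    #   Htmon::Icinga.redis.ping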
end
end
| 12.684211 | 30 | 0.647303 |
4a2328744893e264fd15d13d836d0448004405c2 | 4,143 | # encoding: utf-8
require "cases/helper"
module ActiveRecord
module ConnectionAdapters
class MysqlAdapterTest < ActiveRecord::TestCase
def setup
@conn = ActiveRecord::Base.connection
@conn.exec_query('drop table if exists ex')
@conn.exec_query(<<-eosql)
CREATE TABLE `ex` (
`id` int(11) DEFAULT NULL auto_increment PRIMARY KEY,
`number` integer,
`data` varchar(255))
eosql
end
def test_valid_column
column = @conn.columns('ex').find { |col| col.name == 'id' }
assert @conn.valid_type?(column.type)
end
def test_invalid_column
assert_not @conn.valid_type?(:foobar)
end
def test_client_encoding
assert_equal Encoding::UTF_8, @conn.client_encoding
end
def test_exec_insert_number
insert(@conn, 'number' => 10)
result = @conn.exec_query('SELECT number FROM ex WHERE number = 10')
assert_equal 1, result.rows.length
# if there are no bind parameters, it will return a string (due to
# the libmysql api)
assert_equal '10', result.rows.last.last
end
def test_exec_insert_string
str = 'いただきます!'
insert(@conn, 'number' => 10, 'data' => str)
result = @conn.exec_query('SELECT number, data FROM ex WHERE number = 10')
value = result.rows.last.last
# FIXME: this should probably be inside the mysql AR adapter?
value.force_encoding(@conn.client_encoding)
# The strings in this file are utf-8, so transcode to utf-8
value.encode!(Encoding::UTF_8)
assert_equal str, value
end
def test_tables_quoting
@conn.tables(nil, "foo-bar", nil)
flunk
rescue => e
# assertion for *quoted* database properly
assert_match(/database 'foo-bar'/, e.inspect)
end
def test_pk_and_sequence_for
pk, seq = @conn.pk_and_sequence_for('ex')
assert_equal 'id', pk
assert_equal @conn.default_sequence_name('ex', 'id'), seq
end
def test_pk_and_sequence_for_with_non_standard_primary_key
@conn.exec_query('drop table if exists ex_with_non_standard_pk')
@conn.exec_query(<<-eosql)
CREATE TABLE `ex_with_non_standard_pk` (
`code` INT(11) DEFAULT NULL auto_increment,
PRIMARY KEY (`code`))
eosql
pk, seq = @conn.pk_and_sequence_for('ex_with_non_standard_pk')
assert_equal 'code', pk
assert_equal @conn.default_sequence_name('ex_with_non_standard_pk', 'code'), seq
end
def test_pk_and_sequence_for_with_custom_index_type_pk
@conn.exec_query('drop table if exists ex_with_custom_index_type_pk')
@conn.exec_query(<<-eosql)
CREATE TABLE `ex_with_custom_index_type_pk` (
`id` INT(11) DEFAULT NULL auto_increment,
PRIMARY KEY USING BTREE (`id`))
eosql
pk, seq = @conn.pk_and_sequence_for('ex_with_custom_index_type_pk')
assert_equal 'id', pk
assert_equal @conn.default_sequence_name('ex_with_custom_index_type_pk', 'id'), seq
end
def test_tinyint_integer_typecasting
@conn.exec_query('drop table if exists ex_with_non_boolean_tinyint_column')
@conn.exec_query(<<-eosql)
CREATE TABLE `ex_with_non_boolean_tinyint_column` (
`status` TINYINT(4))
eosql
insert(@conn, { 'status' => 2 }, 'ex_with_non_boolean_tinyint_column')
result = @conn.exec_query('SELECT status FROM ex_with_non_boolean_tinyint_column')
assert_equal 2, result.column_types['status'].type_cast(result.last['status'])
end
private
def insert(ctx, data, table='ex')
binds = data.map { |name, value|
[ctx.columns(table).find { |x| x.name == name }, value]
}
columns = binds.map(&:first).map(&:name)
sql = "INSERT INTO #{table} (#{columns.join(", ")})
VALUES (#{(['?'] * columns.length).join(', ')})"
ctx.exec_insert(sql, 'SQL', binds)
end
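      # For example (illustrative), insert(@conn, 'number' => 10, 'data' => 'x')
      # builds "INSERT INTO ex (number, data) VALUES (?, ?)" and executes it with
      # the matching column/value bind pairs.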
end
end
end
| 32.880952 | 91 | 0.625392 |
38be95480fd0480b1968981fa9384684fe23c56a | 1,221 | require 'spec_helper'
describe 'designate::generic_service' do
shared_examples_for 'designate::generic_service' do
let :pre_condition do
'include designate'
end
let :params do
{
:package_name => 'foo',
:service_name => 'food',
:enabled => true,
:manage_service => true,
:ensure_package => 'latest',
}
end
let :title do
'foo'
end
context 'should configure related package and service' do
it { is_expected.to contain_package('designate-foo').with(
:name => 'foo',
:ensure => 'latest',
:tag => ['openstack','designate-package'],
)}
it { is_expected.to contain_service('designate-foo').with(
:name => 'food',
:ensure => 'running',
:enable => true,
:hasstatus => true,
:tag => ['openstack','designate-service'],
)}
end
end
on_supported_os({
:supported_os => OSDefaults.get_supported_os
}).each do |os,facts|
context "on #{os}" do
let (:facts) do
facts.merge!(OSDefaults.get_facts())
end
it_behaves_like 'designate::generic_service'
end
end
end
| 23.037736 | 65 | 0.555283 |
1c4bd1fb25c2d099946ea99817c8d0c0d13a867c | 71 | require 'test_helper'
class Ffprober::ParserTest < Minitest::Test
end
| 14.2 | 43 | 0.788732 |
f87fdc9e8074ac3b845c717808d944ed27f3cc80 | 40,396 | require File.expand_path("../../../spec_helper", __FILE__)
class DesignViewModel < CouchRest::Model::Base
use_database DB
property :name
property :title
design do
view :by_name
view :by_just_name, :map => "function(doc) { emit(doc['name'], null); }"
end
end
describe "Design View" do
describe "(unit tests)" do
before :each do
@mod = DesignViewModel
@klass = CouchRest::Model::Designs::View
end
describe ".new" do
describe "with invalid parent model" do
it "should burn" do
expect { @klass.new(String, nil) }.to raise_error(/View cannot be initialized without a parent Model/)
end
end
describe "with CouchRest Model" do
it "should setup attributes" do
@obj = @klass.new(@mod.design_doc, @mod, {}, 'test_view')
expect(@obj.design_doc).to eql(@mod.design_doc)
expect(@obj.model).to eql(@mod)
expect(@obj.name).to eql('test_view')
expect(@obj.query).to be_empty
end
it "should complain if there is no name" do
expect { @klass.new(@mod.design_doc, @mod, {}, nil) }.to raise_error(/Name must be provided/)
end
end
describe "with previous view instance" do
before :each do
first = @klass.new(@mod.design_doc, @mod, {}, 'test_view')
@obj = @klass.new(@mod.design_doc, first, {:foo => :bar})
end
it "should copy attributes" do
expect(@obj.model).to eql(@mod)
expect(@obj.name).to eql('test_view')
expect(@obj.query).to eql({:foo => :bar})
end
it "should delete query keys if :delete defined" do
@obj2 = @klass.new(@mod.design_doc, @obj, {:delete => [:foo]})
expect(@obj2.query).not_to include(:foo)
end
end
describe "with proxy in query for first initialization" do
it "should set model to proxy object and remove from query" do
proxy = double("Proxy")
@obj = @klass.new(@mod.design_doc, @mod, {:proxy => proxy}, 'test_view')
expect(@obj.model).to eql(proxy)
end
end
describe "with proxy in query for chained instance" do
it "should set the model to proxy object instead of parents model" do
proxy = double("Proxy")
@obj = @klass.new(@mod.design_doc, @mod, {}, 'test_view')
expect(@obj.model).to eql(@mod)
@obj = @obj.proxy(proxy)
expect(@obj.model).to eql(proxy)
end
end
end
describe ".define_and_create" do
before :each do
@design_doc = { }
end
it "should call define and create_model_methods method" do
expect(@klass).to receive(:define).with(@design_doc, 'test', {}).and_return(nil)
expect(@klass).to receive(:create_model_methods).with(@design_doc, 'test', {}).and_return(nil)
@klass.define_and_create(@design_doc, 'test')
end
it "should call define and create_model_methods method with opts" do
expect(@klass).to receive(:define).with(@design_doc, 'test', {:foo => :bar}).and_return(nil)
expect(@klass).to receive(:create_model_methods).with(@design_doc, 'test', {:foo => :bar}).and_return(nil)
@klass.define_and_create(@design_doc, 'test', {:foo => :bar})
end
end
describe ".define" do
describe "under normal circumstances" do
before :each do
@design_doc = { }
allow(@design_doc).to receive(:model).and_return(DesignViewModel)
end
it "should add a basic view" do
@klass.define(@design_doc, 'test_view', :map => 'foo')
expect(@design_doc['views']['test_view']).not_to be_nil
end
it "should not overwrite reduce if set" do
@klass.define(@design_doc, 'by_title', :reduce => true)
expect(@design_doc['views']['by_title']['map']).not_to be_blank
expect(@design_doc['views']['by_title']['reduce']).to eql(true)
end
it "should replace reduce symbol with string name" do
@klass.define(@design_doc, 'by_title', :reduce => :sum)
expect(@design_doc['views']['by_title']['map']).not_to be_blank
expect(@design_doc['views']['by_title']['reduce']).to eql('_sum')
end
it "should replace reduce symbol with string if map function present" do
@klass.define(@design_doc, 'by_title', :map => "function(d) { }", :reduce => :sum)
expect(@design_doc['views']['by_title']['map']).not_to be_blank
expect(@design_doc['views']['by_title']['reduce']).to eql('_sum')
end
it "should auto generate mapping from name" do
expect { @klass.define(@design_doc, 'by_title') }.not_to raise_error
str = @design_doc['views']['by_title']['map']
expect(str).to include("((doc['#{DesignViewModel.model_type_key}'] == 'DesignViewModel') && (doc['title'] != null))")
expect(str).to include("emit(doc['title'], 1);")
str = @design_doc['views']['by_title']['reduce']
expect(str).to include("_sum")
end
it "should auto generate mapping from name with and" do
@klass.define(@design_doc, 'by_title_and_name')
str = @design_doc['views']['by_title_and_name']['map']
expect(str).to include("(doc['title'] != null) && (doc['name'] != null)")
expect(str).to include("emit([doc['title'], doc['name']], 1);")
str = @design_doc['views']['by_title_and_name']['reduce']
expect(str).to include("_sum")
end
it "should allow reduce methods as symbols" do
@klass.define(@design_doc, 'by_title', :reduce => :stats)
expect(@design_doc['views']['by_title']['reduce']).to eql('_stats')
end
it "should allow the emit value to be overridden" do
@klass.define(@design_doc, 'by_title', :emit => :name)
str = @design_doc['views']['by_title']['map']
expect(str).to include("emit(doc['title'], doc['name']);")
end
it "should forward a non-symbol emit value straight into the view" do
@klass.define(@design_doc, 'by_title', :emit => 3)
str = @design_doc['views']['by_title']['map']
expect(str).to include("emit(doc['title'], 3);")
end
it "should support emitting an array" do
@klass.define(@design_doc, 'by_title', :emit => [1, :name])
str = @design_doc['views']['by_title']['map']
expect(str).to include("emit(doc['title'], [1, doc['name']]);")
end
it "should guard against nulls when emitting properties" do
@klass.define(@design_doc, 'by_title', :emit => :name)
str = @design_doc['views']['by_title']['map']
expect(str).to include("doc['name'] != null")
end
it "should guard against nulls when emitting multiple properties" do
@klass.define(@design_doc, 'by_title', :emit => [:name, :another_property])
str = @design_doc['views']['by_title']['map']
expect(str).to include("doc['name'] != null")
expect(str).to include("doc['another_property'] != null")
end
it "should not guard against nulls for non-symbol emits" do
@klass.define(@design_doc, 'by_title', :emit => [:name, 3])
str = @design_doc['views']['by_title']['map']
expect(str).not_to include("( != null)")
end
it "should not provide a default reduce function the emit value is overridden" do
@klass.define(@design_doc, 'by_title', :emit => :name)
str = @design_doc['views']['by_title']['reduce']
expect(str).to be_nil
end
end
describe ".create_model_methods" do
before :each do
@model = DesignViewModel
@design_doc = { }
allow(@design_doc).to receive(:model).and_return(@model)
allow(@design_doc).to receive(:method_name).and_return("design_doc")
allow(@model).to receive('design_doc').and_return(@design_doc)
end
it "should create standard view method" do
@klass.create_model_methods(@design_doc, 'by_name')
expect(@model).to respond_to('by_name')
expect(@design_doc).to receive('view').with('by_name', {})
@model.by_name
end
it "should create find_ view method" do
@klass.create_model_methods(@design_doc, 'by_name')
expect(@model).to respond_to('find_by_name')
view = double("View")
expect(view).to receive('key').with('fred').and_return(view)
expect(view).to receive('first').and_return(nil)
expect(@design_doc).to receive('view').and_return(view)
@model.find_by_name('fred')
end
it "should create find_! view method" do
@klass.create_model_methods(@design_doc, 'by_name')
expect(@model).to respond_to('find_by_name!')
obj = double("SomeKlass")
view = double("View")
expect(view).to receive('key').with('fred').and_return(view)
expect(view).to receive('first').and_return(obj)
expect(@design_doc).to receive('view').and_return(view)
expect(@model.find_by_name!('fred')).to eql(obj)
end
it "should create find_! view method and raise error when nil" do
@klass.create_model_methods(@design_doc, 'by_name')
view = double("View")
expect(view).to receive('key').with('fred').and_return(view)
expect(view).to receive('first').and_return(nil)
expect(@design_doc).to receive('view').and_return(view)
expect { @model.find_by_name!('fred') }.to raise_error(CouchRest::Model::DocumentNotFound)
end
end
end
describe "instance methods" do
before :each do
@obj = @klass.new(@mod.design_doc, @mod, {}, 'test_view')
end
describe "#rows" do
it "should execute query" do
expect(@obj).to receive(:execute).and_return(true)
expect(@obj).to receive(:result).twice.and_return({'rows' => []})
expect(@obj.rows).to be_empty
end
it "should wrap rows in ViewRow class" do
expect(@obj).to receive(:execute).and_return(true)
expect(@obj).to receive(:result).twice.and_return({'rows' => [{:foo => :bar}]})
expect(CouchRest::Model::Designs::ViewRow).to receive(:new).with({:foo => :bar}, @obj.model, DB)
@obj.rows
end
describe "streaming" do
let :sample_data do
[
{"id" => "doc1", "key" => "doc1", "value" => {"rev" => "4324BB"}},
{"id" => "doc2", "key" => "doc2", "value" => {"rev" => "2441HF"}},
{"id" => "doc3", "key" => "doc3", "value" => {"rev" => "74EC24"}}
]
end
it "should support blocks" do
expect(@obj).to receive(:execute) do |&block|
sample_data.each { |r| block.call(r) }
end
rows = []
@obj.rows {|r| rows << r }
expect(rows.length).to eql(3)
expect(rows.first).to be_a(CouchRest::Model::Designs::ViewRow)
expect(rows.first.id).to eql('doc1')
expect(rows.last.value['rev']).to eql('74EC24')
end
end
end
describe "#all" do
it "should ensure docs included and call docs" do
expect(@obj).to receive(:include_docs!)
expect(@obj).to receive(:docs)
@obj.all
end
it "should pass on a block" do
block = lambda { 'ok' }
expect(@obj).to receive(:docs) { block.call() }
expect(@obj.all(&block)).to eql('ok')
end
end
describe "#docs" do
it "should provide docs from rows" do
expect(@obj).to receive(:rows).and_return([])
@obj.docs
end
it "should cache the results" do
expect(@obj).to receive(:rows).once.and_return([])
@obj.docs
@obj.docs
end
describe "streaming" do
let :sample_data do
[
{"id" => "doc1", "key" => "doc1", "doc" => {"_id" => "123", "type" => 'DesignViewModel', 'name' => 'Test1'}},
{"id" => "doc3", "key" => "doc3", "doc" => {"_id" => "234", "type" => 'DesignViewModel', 'name' => 'Test2'}}
]
end
it "should support blocks" do
expect(@obj).to receive(:execute) do |&block|
sample_data.each { |r| block.call(r) }
end
docs = []
@obj.docs {|d| docs << d }
expect(docs.length).to eql(2)
expect(docs.first).to be_a(DesignViewModel)
expect(docs.first.name).to eql('Test1')
expect(docs.last.id).to eql('234')
end
end
end
describe "#first" do
it "should provide the first result of loaded query" do
expect(@obj).to receive(:result).and_return(true)
expect(@obj).to receive(:all).and_return([:foo])
expect(@obj.first).to eql(:foo)
end
it "should perform a query if no results cached" do
view = double('SubView')
expect(@obj).to receive(:result).and_return(nil)
expect(@obj).to receive(:limit).with(1).and_return(view)
expect(view).to receive(:all).and_return([:foo])
expect(@obj.first).to eql(:foo)
end
end
describe "#last" do
it "should provide the last result of loaded query" do
expect(@obj).to receive(:result).and_return(true)
expect(@obj).to receive(:all).and_return([:foo, :bar])
expect(@obj.first).to eql(:foo)
end
it "should perform a query if no results cached" do
view = double('SubView')
expect(@obj).to receive(:result).and_return(nil)
expect(@obj).to receive(:limit).with(1).and_return(view)
expect(view).to receive(:descending).and_return(view)
expect(view).to receive(:all).and_return([:foo, :bar])
expect(@obj.last).to eql(:bar)
end
end
describe "#length" do
it "should provide a length from the docs array" do
expect(@obj).to receive(:docs).and_return([1, 2, 3])
expect(@obj.length).to eql(3)
end
end
describe "#count" do
it "should raise an error if view prepared for group" do
expect(@obj).to receive(:query).and_return({:group => true})
expect { @obj.count }.to raise_error(/group/)
end
it "should return first row value if reduce possible" do
view = double("SubView")
row = double("Row")
expect(@obj).to receive(:can_reduce?).and_return(true)
expect(@obj).to receive(:reduce).and_return(view)
expect(view).to receive(:skip).with(0).and_return(view)
expect(view).to receive(:limit).with(1).and_return(view)
expect(view).to receive(:rows).and_return([row])
expect(row).to receive(:value).and_return(2)
expect(@obj.count).to eql(2)
end
it "should return 0 if no rows and reduce possible" do
view = double("SubView")
expect(@obj).to receive(:can_reduce?).and_return(true)
expect(@obj).to receive(:reduce).and_return(view)
expect(view).to receive(:skip).with(0).and_return(view)
expect(view).to receive(:limit).with(1).and_return(view)
expect(view).to receive(:rows).and_return([])
expect(@obj.count).to eql(0)
end
it "should perform limit request for total_rows" do
view = double("SubView")
expect(@obj).to receive(:limit).with(0).and_return(view)
expect(view).to receive(:total_rows).and_return(4)
expect(@obj).to receive(:can_reduce?).and_return(false)
expect(@obj.count).to eql(4)
end
end
describe "#empty?" do
it "should check the #all method for any results" do
all = double("All")
expect(all).to receive(:empty?).and_return('win')
expect(@obj).to receive(:all).and_return(all)
expect(@obj.empty?).to eql('win')
end
end
describe "#each" do
it "should call each method on all" do
expect(@obj).to receive(:all).and_return([])
@obj.each
end
it "should call each and pass block" do
set = [:foo, :bar]
expect(@obj).to receive(:all).and_return(set)
result = []
@obj.each do |s|
result << s
end
expect(result).to eql(set)
end
end
describe "#offset" do
        it "should execute" do
expect(@obj).to receive(:execute).and_return({'offset' => 3})
expect(@obj.offset).to eql(3)
end
end
describe "#total_rows" do
        it "should execute" do
expect(@obj).to receive(:execute).and_return({'total_rows' => 3})
expect(@obj.total_rows).to eql(3)
end
end
describe "#values" do
it "should request each row and provide value" do
row = double("Row")
expect(row).to receive(:value).twice.and_return('foo')
expect(@obj).to receive(:rows).and_return([row, row])
expect(@obj.values).to eql(['foo', 'foo'])
end
end
describe "#[]" do
it "should execute and provide requested field" do
expect(@obj).to receive(:execute).and_return({'total_rows' => 2})
expect(@obj['total_rows']).to eql(2)
end
end
describe "#info" do
it "should raise error" do
expect { @obj.info }.to raise_error(/Not yet implemented/)
end
end
describe "#database" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:database => 'foo'})
@obj.database('foo')
end
end
describe "#key" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:key => 'foo'})
@obj.key('foo')
end
it "should raise error if startkey set" do
@obj.query[:startkey] = 'bar'
expect { @obj.key('foo') }.to raise_error(/View#key cannot be used/)
end
it "should raise error if endkey set" do
@obj.query[:endkey] = 'bar'
expect { @obj.key('foo') }.to raise_error(/View#key cannot be used/)
end
it "should raise error if both startkey and endkey set" do
@obj.query[:startkey] = 'bar'
@obj.query[:endkey] = 'bar'
expect { @obj.key('foo') }.to raise_error(/View#key cannot be used/)
end
it "should raise error if keys set" do
@obj.query[:keys] = 'bar'
expect { @obj.key('foo') }.to raise_error(/View#key cannot be used/)
end
end
describe "#startkey" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:startkey => 'foo'})
@obj.startkey('foo')
end
        it "should raise an error if key set" do
@obj.query[:key] = 'bar'
expect { @obj.startkey('foo') }.to raise_error(/View#startkey/)
end
        it "should raise an error if keys set" do
@obj.query[:keys] = 'bar'
expect { @obj.startkey('foo') }.to raise_error(/View#startkey/)
end
end
describe "#startkey_doc" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:startkey_docid => 'foo'})
@obj.startkey_doc('foo')
end
it "should update query with object id if available" do
doc = double("Document")
expect(doc).to receive(:id).and_return(44)
expect(@obj).to receive(:update_query).with({:startkey_docid => 44})
@obj.startkey_doc(doc)
end
end
describe "#endkey" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:endkey => 'foo'})
@obj.endkey('foo')
end
        it "should raise an error if key set" do
@obj.query[:key] = 'bar'
expect { @obj.endkey('foo') }.to raise_error(/View#endkey/)
end
        it "should raise an error if keys set" do
@obj.query[:keys] = 'bar'
expect { @obj.endkey('foo') }.to raise_error(/View#endkey/)
end
end
describe "#endkey_doc" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:endkey_docid => 'foo'})
@obj.endkey_doc('foo')
end
it "should update query with object id if available" do
doc = double("Document")
expect(doc).to receive(:id).and_return(44)
expect(@obj).to receive(:update_query).with({:endkey_docid => 44})
@obj.endkey_doc(doc)
end
end
describe "#keys" do
it "should update the query" do
expect(@obj).to receive(:update_query).with({:keys => ['foo', 'bar']})
@obj.keys(['foo', 'bar'])
end
        it "should raise an error if key set" do
@obj.query[:key] = 'bar'
expect { @obj.keys('foo') }.to raise_error(/View#keys/)
end
        it "should raise an error if startkey or endkey set" do
@obj.query[:startkey] = 'bar'
expect { @obj.keys('foo') }.to raise_error(/View#keys/)
@obj.query.delete(:startkey)
@obj.query[:endkey] = 'bar'
expect { @obj.keys('foo') }.to raise_error(/View#keys/)
end
end
describe "#keys (without parameters)" do
it "should request each row and provide key value" do
row = double("Row")
expect(row).to receive(:key).twice.and_return('foo')
expect(@obj).to receive(:rows).and_return([row, row])
expect(@obj.keys).to eql(['foo', 'foo'])
end
end
describe "#descending" do
it "should update query" do
expect(@obj).to receive(:update_query).with({:descending => true})
@obj.descending
end
it "should reverse start and end keys if given" do
@obj = @obj.startkey('a').endkey('z')
@obj = @obj.descending
expect(@obj.query[:endkey]).to eql('a')
expect(@obj.query[:startkey]).to eql('z')
end
it "should reverse even if start or end nil" do
@obj = @obj.startkey('a')
@obj = @obj.descending
expect(@obj.query[:endkey]).to eql('a')
expect(@obj.query[:startkey]).to be_nil
end
it "should reverse start_doc and end_doc keys if given" do
@obj = @obj.startkey_doc('a').endkey_doc('z')
@obj = @obj.descending
expect(@obj.query[:endkey_docid]).to eql('a')
expect(@obj.query[:startkey_docid]).to eql('z')
end
end
describe "#limit" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:limit => 3})
@obj.limit(3)
end
end
describe "#skip" do
it "should update query with value" do
expect(@obj).to receive(:update_query).with({:skip => 3})
@obj.skip(3)
end
it "should update query with default value" do
expect(@obj).to receive(:update_query).with({:skip => 0})
@obj.skip
end
end
describe "#reduce" do
it "should update query" do
expect(@obj).to receive(:can_reduce?).and_return(true)
expect(@obj).to receive(:update_query).with({:reduce => true, :delete => [:include_docs]})
@obj.reduce
end
it "should raise error if query cannot be reduced" do
expect(@obj).to receive(:can_reduce?).and_return(false)
expect { @obj.reduce }.to raise_error(/Cannot reduce a view without a reduce method/)
end
end
describe "#group" do
it "should update query" do
expect(@obj).to receive(:query).and_return({:reduce => true})
expect(@obj).to receive(:update_query).with({:group => true})
@obj.group
end
it "should raise error if query not prepared for reduce" do
expect(@obj).to receive(:query).and_return({:reduce => false})
expect { @obj.group }.to raise_error(/View#reduce must have been set before grouping is permitted/)
end
end
describe "#group_level" do
it "should update query" do
expect(@obj).to receive(:group).and_return(@obj)
expect(@obj).to receive(:update_query).with({:group_level => 3})
@obj.group_level(3)
end
end
describe "#stale" do
it "should update query with ok" do
expect(@obj).to receive(:update_query).with(:stale => 'ok')
@obj.stale('ok')
end
it "should update query with update_after" do
expect(@obj).to receive(:update_query).with(:stale => 'update_after')
@obj.stale('update_after')
end
it "should fail if anything else is provided" do
expect { @obj.stale('yes') }.to raise_error(/can only be set with/)
end
end
describe "#include_docs" do
it "should call include_docs! on new view" do
expect(@obj).to receive(:update_query).and_return(@obj)
expect(@obj).to receive(:include_docs!)
@obj.include_docs
end
end
describe "#reset!" do
it "should empty all cached data" do
expect(@obj).to receive(:result=).with(nil)
@obj.instance_exec { @rows = 'foo'; @docs = 'foo' }
@obj.reset!
expect(@obj.instance_exec { @rows }).to be_nil
expect(@obj.instance_exec { @docs }).to be_nil
end
end
#### PROTECTED METHODS
describe "#include_docs!" do
it "should set query value" do
expect(@obj).to receive(:result).and_return(false)
expect(@obj).not_to receive(:reset!)
@obj.send(:include_docs!)
expect(@obj.query[:include_docs]).to be_truthy
end
it "should reset if result and no docs" do
expect(@obj).to receive(:result).and_return(true)
expect(@obj).to receive(:include_docs?).and_return(false)
expect(@obj).to receive(:reset!)
@obj.send(:include_docs!)
expect(@obj.query[:include_docs]).to be_truthy
end
it "should raise an error if view is reduced" do
@obj.query[:reduce] = true
expect { @obj.send(:include_docs!) }.to raise_error(/Cannot include documents in view that has been reduced/)
end
end
describe "#include_docs?" do
it "should return true if set" do
expect(@obj).to receive(:query).and_return({:include_docs => true})
expect(@obj.send(:include_docs?)).to be_truthy
end
it "should return false if not set" do
expect(@obj).to receive(:query).and_return({})
expect(@obj.send(:include_docs?)).to be_falsey
expect(@obj).to receive(:query).and_return({:include_docs => false})
expect(@obj.send(:include_docs?)).to be_falsey
end
end
describe "#update_query" do
it "returns a new instance of view" do
expect(@obj.send(:update_query).object_id).not_to eql(@obj.object_id)
end
it "returns a new instance of view with extra parameters" do
new_obj = @obj.send(:update_query, {:foo => :bar})
expect(new_obj.query[:foo]).to eql(:bar)
end
end
describe "#can_reduce?" do
it "should check and prove true" do
expect(@obj).to receive(:name).and_return('test_view')
expect(@obj).to receive(:design_doc).and_return({'views' => {'test_view' => {'reduce' => 'foo'}}})
expect(@obj.send(:can_reduce?)).to be_truthy
end
it "should check and prove false" do
expect(@obj).to receive(:name).and_return('test_view')
expect(@obj).to receive(:design_doc).and_return({'views' => {'test_view' => {'reduce' => nil}}})
expect(@obj.send(:can_reduce?)).to be_falsey
end
end
describe "#execute" do
before :each do
# disable real execution!
@design_doc = double("DesignDoc")
allow(@design_doc).to receive(:view_on)
allow(@design_doc).to receive(:sync)
allow(@obj).to receive(:design_doc).and_return(@design_doc)
end
it "should return previous result if set" do
@obj.result = "foo"
expect(@obj.send(:execute)).to eql('foo')
end
it "should raise issue if no database" do
expect(@obj).to receive(:query).and_return({:database => nil})
model = double("SomeModel")
expect(model).to receive(:database).and_return(nil)
expect(@obj).to receive(:model).and_return(model)
expect { @obj.send(:execute) }.to raise_error(CouchRest::Model::DatabaseNotDefined)
end
it "should delete the reduce option if not going to be used" do
expect(@obj).to receive(:can_reduce?).and_return(false)
expect(@obj.query).to receive(:delete).with(:reduce)
@obj.send(:execute)
end
it "should call to save the design document" do
expect(@obj).to receive(:can_reduce?).and_return(false)
expect(@design_doc).to receive(:sync).with(DB)
@obj.send(:execute)
end
it "should populate the results" do
expect(@obj).to receive(:can_reduce?).and_return(true)
expect(@design_doc).to receive(:view_on).and_return('foos')
@obj.send(:execute)
expect(@obj.result).to eql('foos')
end
it "should not remove nil values from query" do
expect(@obj).to receive(:can_reduce?).and_return(true)
allow(@obj).to receive(:use_database).and_return(@mod.database)
@obj.query = {:reduce => true, :limit => nil, :skip => nil}
expect(@design_doc).to receive(:view_on).with(@mod.database, 'test_view', {:reduce => true, :limit => nil, :skip => nil})
@obj.send(:execute)
end
it "should accept a block and pass to view_on" do
row = {'id' => '1234'}
expect(@design_doc).to receive(:view_on) { |db,n,q,&block| block.call(row) }
expect(@obj).to receive(:can_reduce?).and_return(true)
@obj.send(:execute) do |r|
expect(r).to eql(row)
end
end
end
describe "pagination methods" do
describe "#page" do
it "should call limit and skip" do
expect(@obj).to receive(:limit).with(25).and_return(@obj)
expect(@obj).to receive(:skip).with(25).and_return(@obj)
@obj.page(2)
end
end
describe "#per" do
it "should raise an error if page not called before hand" do
expect { @obj.per(12) }.to raise_error(/View#page must be called before #per/)
end
          it "should not do anything if number is less than or equal to 0" do
view = @obj.page(1)
expect(view.per(0)).to eql(view)
end
it "should set limit and update skip" do
view = @obj.page(2).per(10)
expect(view.query[:skip]).to eql(10)
expect(view.query[:limit]).to eql(10)
end
end
describe "#total_count" do
          it "should set limit and skip to nil and perform count" do
expect(@obj).to receive(:limit).with(nil).and_return(@obj)
expect(@obj).to receive(:skip).with(nil).and_return(@obj)
expect(@obj).to receive(:count).and_return(5)
expect(@obj.total_count).to eql(5)
expect(@obj.total_count).to eql(5) # Second to test caching
end
end
describe "#total_pages" do
it "should use total_count and limit_value" do
expect(@obj).to receive(:total_count).and_return(200)
expect(@obj).to receive(:limit_value).and_return(25)
expect(@obj.total_pages).to eql(8)
end
end
        # `num_pages` aliases to `total_pages` for compatibility with Kaminari '< 0.14'
describe "#num_pages" do
it "should use total_count and limit_value" do
expect(@obj).to receive(:total_count).and_return(200)
expect(@obj).to receive(:limit_value).and_return(25)
expect(@obj.num_pages).to eql(8)
end
end
describe "#current_page" do
it "should use offset and limit" do
expect(@obj).to receive(:offset_value).and_return(25)
expect(@obj).to receive(:limit_value).and_return(25)
expect(@obj.current_page).to eql(2)
end
end
end
describe "ActiveRecord compatibility methods" do
describe "#model_name" do
it "should use the #model class" do
expect(@obj.model_name.to_s).to eql DesignViewModel.to_s
end
end
end
end
end
describe "ViewRow" do
before :all do
@klass = CouchRest::Model::Designs::ViewRow
end
let :model do
m = double()
allow(m).to receive(:database).and_return(DB)
m
end
describe "intialize" do
it "should store reference to model" do
obj = @klass.new({}, model)
expect(obj.model).to eql(model)
end
it "should copy details from hash" do
obj = @klass.new({:foo => :bar, :test => :example}, model)
expect(obj[:foo]).to eql(:bar)
expect(obj[:test]).to eql(:example)
end
end
describe "running" do
before :each do
end
it "should provide id" do
obj = @klass.new({'id' => '123456'}, model)
expect(obj.id).to eql('123456')
end
it "may be instantiated with a database" do
obj = @klass.new({'id' => '123456'}, model, 'foo')
expect(obj.db).to eql('foo')
end
it "may use model's database" do
obj = @klass.new({'id' => '123456'}, model)
expect(obj.db).to eql(DB)
end
it "should provide key" do
obj = @klass.new({'key' => 'thekey'}, model)
expect(obj.key).to eql('thekey')
end
it "should provide the value" do
obj = @klass.new({'value' => 'thevalue'}, model)
expect(obj.value).to eql('thevalue')
end
it "should provide the raw document" do
obj = @klass.new({'doc' => 'thedoc'}, model)
expect(obj.raw_doc).to eql('thedoc')
end
it "should instantiate a new document" do
hash = {'doc' => {'_id' => '12345', 'name' => 'sam'}}
obj = @klass.new(hash, DesignViewModel)
doc = double('DesignViewDoc')
allow(doc).to receive(:database).and_return(DB)
expect(obj.model).to receive(:build_from_database).with(hash['doc']).and_return(doc)
expect(obj.doc).to eql(doc)
end
it "should try to load from id if no document" do
hash = {'id' => '12345', 'value' => 5}
obj = @klass.new(hash, DesignViewModel)
doc = double('DesignViewModel')
allow(doc).to receive(:database).and_return(DB)
expect(obj.model).to receive(:get).with('12345', DB).and_return(doc)
expect(obj.doc).to eql(doc)
end
it "should try to load linked document if available" do
hash = {'id' => '12345', 'value' => {'_id' => '54321'}}
obj = @klass.new(hash, DesignViewModel)
doc = double('DesignViewModel')
allow(doc).to receive(:database).and_return(DB)
expect(obj.model).to receive(:get).with('54321', DB).and_return(doc)
expect(obj.doc).to eql(doc)
end
it "should try to return nil for document if none available" do
hash = {'value' => 23} # simulate reduce
obj = @klass.new(hash, DesignViewModel)
doc = double('DesignViewModel')
expect(obj.model).not_to receive(:get)
expect(obj.doc).to be_nil
end
end
end
describe "scenarios" do
before :all do
@objs = [
{:name => "Judith"},
{:name => "Lorena"},
{:name => "Peter"},
{:name => "Sam"},
{:name => "Vilma"}
].map{|h| DesignViewModel.create(h)}
end
describe "loading documents" do
it "should return first" do
expect(DesignViewModel.by_name.first.name).to eql("Judith")
end
it "should return last" do
expect(DesignViewModel.by_name.last.name).to eql("Vilma")
end
it "should allow multiple results" do
view = DesignViewModel.by_name.limit(3)
expect(view.total_rows).to eql(5)
expect(view.last.name).to eql("Peter")
expect(view.all.length).to eql(3)
end
it "should not return document if nil key provided" do
expect(DesignViewModel.by_name.key(nil).first).to be_nil
end
end
describe "index information" do
it "should provide total_rows" do
expect(DesignViewModel.by_name.total_rows).to eql(5)
end
it "should provide an offset" do
expect(DesignViewModel.by_name.offset).to eql(0)
end
it "should provide a set of keys" do
expect(DesignViewModel.by_name.limit(2).keys).to eql(["Judith", "Lorena"])
end
end
describe "viewing" do
it "should load views with no reduce method" do
docs = DesignViewModel.by_just_name.all
expect(docs.length).to eql(5)
end
it "should load documents by specific keys" do
docs = DesignViewModel.by_name.keys(["Judith", "Peter"]).all
expect(docs[0].name).to eql("Judith")
expect(docs[1].name).to eql("Peter")
end
it "should provide count even if limit or skip set" do
docs = DesignViewModel.by_name.limit(20).skip(2)
expect(docs.count).to eql(5)
end
end
describe "pagination" do
before :all do
DesignViewModel.paginates_per 3
end
before :each do
@view = DesignViewModel.by_name.page(1)
end
it "should calculate number of pages" do
expect(@view.total_pages).to eql(2)
end
it "should return results from first page" do
expect(@view.all.first.name).to eql('Judith')
expect(@view.all.last.name).to eql('Peter')
end
it "should return results from second page" do
expect(@view.page(2).all.first.name).to eql('Sam')
expect(@view.page(2).all.last.name).to eql('Vilma')
end
it "should allow overriding per page count" do
@view = @view.per(10)
expect(@view.total_pages).to eql(1)
expect(@view.all.last.name).to eql('Vilma')
end
end
describe "concurrent view accesses" do
# NOTE: must use `DesignViewModel2` instead of `DesignViewModel` to mimic
# a "cold" start of a multi-threaded application (as the checksum is
# stored at the class level)
class DesignViewModel2 < CouchRest::Model::Base
use_database DB
property :name
design do
view :by_name
end
end
it "should not conflict" do
expect {
threads = 2.times.map {
Thread.new {
DesignViewModel2.by_name.page(1).to_a
}
}
threads.each(&:join)
}.to_not raise_error
end
end
end
end
| 36.100089 | 131 | 0.570205 |
38480ba1a24b5ff10619c1e9234184abf88eba05 | 5,636 | # frozen_string_literal: true
require 'colorize'
require 'twisty_puzzles/utils/array_helper'
module TwistyPuzzles
# Module to print and display cube and Skewb states.
module CubePrintHelper
include Utils::ArrayHelper
def color_symbol(color)
case color
when :orange then :light_red
when :unknown then :light_black
else color
end
end
COLOR_MODES = %i[color nocolor].freeze
ColorInfo = Struct.new(:reverse_lines_mode, :reverse_columns_mode, :skewb_corner_permutation)
FACE_SYMBOL_INFOS = {
U: ColorInfo.new(:reverse, :reverse, [2, 3, 0, 1]),
L: ColorInfo.new(:keep, :reverse, [2, 0, 3, 1]),
F: ColorInfo.new(:keep, :reverse, [2, 0, 3, 1]),
R: ColorInfo.new(:keep, :keep, [1, 0, 3, 2]),
B: ColorInfo.new(:keep, :keep, [1, 0, 3, 2]),
D: ColorInfo.new(:keep, :reverse, [2, 0, 3, 1])
}.freeze
def color_character(color, color_mode)
unless COLOR_MODES.include?(color_mode)
raise ArgumentError, "Invalid color mode #{color_mode}"
end
char = color.to_s[0].upcase
if color_mode == :color
char.colorize(background: color_symbol(color))
else
char
end
end
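    # For example (illustrative): color_character(:orange, :nocolor) returns "O",
    # while color_character(:orange, :color) returns the same letter with a
    # light red background applied by colorize.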
def maybe_reverse(reverse_mode, stuff)
case reverse_mode
when :reverse
stuff.reverse
when :keep
stuff
else
raise
end
end
def face_lines(cube_state, face_symbol, row_multiplicity = 1, column_multiplicity = 1)
face = Face.for_face_symbol(face_symbol)
face_symbol_info = FACE_SYMBOL_INFOS[face_symbol]
stickers = cube_state.sticker_array(face)
lines =
stickers.collect_concat do |sticker_line|
line = sticker_line.map { |c| yield(c) * column_multiplicity }
[maybe_reverse(face_symbol_info.reverse_columns_mode, line).join] * row_multiplicity
end
maybe_reverse(face_symbol_info.reverse_lines_mode, lines)
end
def simple_face_lines(cube_state, face_symbol, color_mode)
face_lines(cube_state, face_symbol) { |c| color_character(c, color_mode) }
end
SKEWB_FACE_SIZE = 5
def skewb_ascii_art_line(first_color, middle_color, last_color, num_first_color)
raise if num_first_color > SKEWB_FACE_SIZE / 2
first_color * num_first_color +
middle_color * (SKEWB_FACE_SIZE - 2 * num_first_color) +
last_color * num_first_color
end
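    # For example (illustrative): with SKEWB_FACE_SIZE == 5,
    # skewb_ascii_art_line('r', 'g', 'w', 2) returns 'rr' + 'g' + 'ww' => "rrgww".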
def skewb_ascii_art(center_color, corner_colors)
raise unless corner_colors.length == 4
first_part =
(1..SKEWB_FACE_SIZE / 2).to_a.reverse.map do |i|
skewb_ascii_art_line(corner_colors[0], center_color, corner_colors[1], i)
end
middle_part = SKEWB_FACE_SIZE.odd? ? [center_color * SKEWB_FACE_SIZE] : []
last_part =
(1..SKEWB_FACE_SIZE / 2).map do |i|
skewb_ascii_art_line(corner_colors[2], center_color, corner_colors[3], i)
end
first_part + middle_part + last_part
end
# Prints a Skewb face like this:
# rrgww
# rgggw
# ggggg
# ogggb
# oogbb
def skewb_face_lines(cube_state, face_symbol, color_mode)
face = Face.for_face_symbol(face_symbol)
face_symbol_info = FACE_SYMBOL_INFOS[face_symbol]
stickers = cube_state.sticker_array(face)
center_color = color_character(stickers[0], color_mode)
corner_colors = stickers[1..].map { |c| color_character(c, color_mode) }
permuted_corner_colors =
apply_permutation(corner_colors, face_symbol_info.skewb_corner_permutation)
raise unless corner_colors.length == 4
skewb_ascii_art(center_color, permuted_corner_colors)
end
def ll_string(cube_state, color_mode)
top_face = face_lines(cube_state, :U, 2, 3) { |c| color_character(c, color_mode) }
front_face = face_lines(cube_state, :F, 1, 3) { |c| color_character(c, color_mode) }
right_face = face_lines(cube_state, :R, 1, 3) { |c| color_character(c, color_mode) }
pll_line = front_face.first + right_face.first
(top_face + [pll_line] * 3).join("\n")
end
def cube_string(cube_state, color_mode)
top_face = simple_face_lines(cube_state, :U, color_mode)
left_face = simple_face_lines(cube_state, :L, color_mode)
front_face = simple_face_lines(cube_state, :F, color_mode)
right_face = simple_face_lines(cube_state, :R, color_mode)
back_face = simple_face_lines(cube_state, :B, color_mode)
bottom_face = simple_face_lines(cube_state, :D, color_mode)
middle_belt = zip_concat_lines(left_face, front_face, right_face, back_face)
lines = pad_lines(top_face, cube_state.n) + middle_belt +
pad_lines(bottom_face, cube_state.n)
lines.join("\n")
end
def skewb_string(skewb_state, color_mode)
top_face = skewb_face_lines(skewb_state, :U, color_mode)
left_face = skewb_face_lines(skewb_state, :L, color_mode)
front_face = skewb_face_lines(skewb_state, :F, color_mode)
right_face = skewb_face_lines(skewb_state, :R, color_mode)
back_face = skewb_face_lines(skewb_state, :B, color_mode)
bottom_face = skewb_face_lines(skewb_state, :D, color_mode)
middle_belt = zip_concat_lines(left_face, front_face, right_face, back_face)
lines = pad_lines(top_face, SKEWB_FACE_SIZE) + middle_belt +
pad_lines(bottom_face, SKEWB_FACE_SIZE)
lines.join("\n")
end
def empty_name
' '
end
def pad_lines(lines, padding)
lines.map { |line| empty_name * padding + line }
end
def zip_concat_lines(*args)
args.transpose.map(&:join)
end
end
end
| 35.006211 | 97 | 0.677608 |
1ca0df8f0de911df56b7e28cd90c36652f1dc3bb | 486 | cask :v1 => 'font-poppins' do
version '2.000'
sha256 '86f53a3d50baaadca0d7a1aaf4d69e4d8d3a3d8a9fe67bc3d9b0c0db000e0f39'
url 'https://github.com/itfoundry/poppins/releases/download/v2.000/poppins-2_000.zip'
appcast 'https://github.com/itfoundry/poppins/releases.atom'
homepage 'https://github.com/itfoundry/poppins'
license :ofl
font 'Poppins-Bold.otf'
font 'Poppins-Light.otf'
font 'Poppins-Medium.otf'
font 'Poppins-Regular.otf'
font 'Poppins-SemiBold.otf'
end
| 30.375 | 87 | 0.759259 |
6a21d5c703fb57ddb9baca49539a75cc05fc303f | 171 | # frozen_string_literal: true
require 'test_helper'
class CaptionsControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
| 17.1 | 62 | 0.754386 |
1c2e5c69f06647e6ac089afba23f559f65d15ff6 | 55 | class User < ApplicationRecord
has_many :books
end
| 13.75 | 30 | 0.763636 |
ff2c72d41b582322cd76f523992b7faf5a514b8c | 9,241 | module CommunitySteps
def save_name_and_action(community_id, groups)
created_translations = TranslationService::API::Api.translations.create(community_id, groups)
created_translations[:data].map { |translation| translation[:translation_key] }
end
end
World(CommunitySteps)
Given /^there are following communities:$/ do |communities_table|
communities_table.hashes.each do |hash|
ident = hash[:community]
existing_community = Community.where(ident: ident).first
existing_community.destroy if existing_community
@hash_community = FactoryGirl.create(:community, :ident => ident, :settings => {"locales" => ["en", "fi"]})
attributes_to_update = hash.except('community')
@hash_community.update_attributes(attributes_to_update) unless attributes_to_update.empty?
end
end
Given /^the test community has following available locales:$/ do |locale_table|
@locales = []
locale_table.hashes.each do |hash|
@locales << hash['locale']
end
  # It is expected here that the first community is the test community that the subdomain points to by default
community = Community.first
community.update_attributes({:settings => { "locales" => @locales }})
community.locales.each do |locale|
unless community.community_customizations.find_by_locale(locale)
community.community_customizations.create(:locale => locale, :name => "Sharetribe")
end
end
end
Given /^the terms of community "([^"]*)" are changed to "([^"]*)"$/ do |community, terms|
Community.where(ident: community).first.update_attribute(:consent, terms)
end
Then /^Most recently created user should be member of "([^"]*)" community with(?: status "(.*?)" and)? its latest consent accepted(?: with invitation code "([^"]*)")?$/ do |community_ident, status, invitation_code|
# Person.last seemed to return unreliable results for some reason
# (kassi_testperson1 instead of the actual newest person, so changed
# to look for the latest CommunityMembership)
status ||= "accepted"
community = Community.where(ident: community_ident).first
expect(CommunityMembership.last.community).to eq(community)
expect(CommunityMembership.last.consent).to eq(community.consent)
expect(CommunityMembership.last.status).to eq(status)
expect(CommunityMembership.last.invitation.code).to eq(invitation_code) if invitation_code.present?
end
Given /^given name and last name are not required in community "([^"]*)"$/ do |community|
Community.where(ident: community).first.update_attribute(:real_name_required, 0)
end
Given /^community "([^"]*)" requires invite to join$/ do |community|
Community.where(ident: community).first.update_attribute(:join_with_invite_only, true)
end
Given /^community "([^"]*)" does not require invite to join$/ do |community|
Community.where(ident: community).first.update_attribute(:join_with_invite_only, false)
end
Given /^users (can|can not) invite new users to join community "([^"]*)"$/ do |verb, community|
can_invite = verb == "can"
Community.where(ident: community).first.update_attribute(:users_can_invite_new_users, can_invite)
end
Given /^there is an invitation for community "([^"]*)" with code "([^"]*)"(?: with (\d+) usages left)?$/ do |community, code, usages_left|
inv = Invitation.new(:community => Community.where(ident: community).first, :code => code, :inviter_id => @people.first[1].id)
inv.usages_left = usages_left if usages_left.present?
inv.save
end
Then /^Invitation with code "([^"]*)" should have (\d+) usages_left$/ do |code, usages|
expect(Invitation.find_by_code(code).usages_left).to eq(usages.to_i)
end
When /^I move to community "([^"]*)"$/ do |community|
Capybara.default_host = "http://#{community}.lvh.me:9887"
Capybara.app_host = "http://#{community}.lvh.me:9887"
@current_community = Community.where(ident: community).first
end
When /^I arrive to sign up page with the link in the invitation email with code "(.*?)"$/ do |code|
visit "/en/signup?code=#{code}"
end
Given /^community "(.*?)" is private$/ do |community_ident|
Community.where(ident: community_ident).first.update_attributes({:private => true})
end
Given /^this community is private$/ do
@current_community.private = true
@current_community.save!
end
Given /^community "(.*?)" has following category structure:$/ do |community, categories|
current_community = Community.where(ident: community).first
old_category_ids = current_community.categories.collect(&:id)
current_community.categories = categories.hashes.map do |hash|
category = current_community.categories.create!
category.translations.create!(:name => hash['fi'], :locale => 'fi')
category.translations.create!(:name => hash['en'], :locale => 'en')
shape = category.community.shapes.first
CategoryListingShape.create!(category_id: category.id, listing_shape_id: shape[:id])
if hash['category_type'].eql?("main")
@top_level_category = category
else
category.update_attribute(:parent_id, @top_level_category.id)
end
category
end
# Clean old
current_community.categories.select do |category|
old_category_ids.include? category.id
end.each do |category|
category.destroy!
end
end
Given /^community "(.*?)" has following listing shapes enabled:$/ do |community, listing_shapes|
current_community = Community.where(ident: community).first
ListingShape.where(community_id: current_community.id).destroy_all
process_id = TransactionProcess.where(community_id: current_community.id, process: :none).first.id
listing_shapes.hashes.map do |hash|
name_tr_key, action_button_tr_key = save_name_and_action(current_community.id, [
{translations: [ {locale: 'fi', translation: hash['fi']}, {locale: 'en', translation: hash['en']} ]},
{translations: [ {locale: 'fi', translation: (hash['button'] || 'Action')}, {locale: 'en', translation: (hash['button'] || 'Action')} ]}
])
ListingShape.create_with_opts(
community: current_community,
opts: {
price_enabled: true,
shipping_enabled: false,
name_tr_key: name_tr_key,
action_button_tr_key: action_button_tr_key,
transaction_process_id: process_id,
basename: hash['en'],
units: [ {unit_type: 'hour', quantity_selector: 'number', kind: 'time'} ]
}
)
end
current_community.reload
end
Given /^listing publishing date is shown in community "(.*?)"$/ do |community_ident|
Community.where(ident: community_ident).first.update_attributes({:show_listing_publishing_date => true})
end
Given /^current community requires users to be verified to post listings$/ do
@current_community.update_attribute(:require_verification_to_post_listings, true)
end
Given(/^this community has price filter enabled with min value (\d+) and max value (\d+)$/) do |min, max|
@current_community.show_price_filter = true
@current_community.price_filter_min = min.to_i * 100 # Cents
@current_community.price_filter_max = max.to_i * 100 # Cents
@current_community.save!
end
When /^community updates get delivered$/ do
CommunityMailer.deliver_community_updates
end
Given(/^this community does not send automatic newsletters$/) do
@current_community.update_attribute(:automatic_newsletters, false)
end
Given(/^community emails are sent from name "(.*?)" and address "(.*?)"$/) do |name, email|
EmailService::API::Api.addresses.create(
community_id: @current_community.id,
address: {
name: name,
email: email,
verification_status: :verified
}
)
end
Given /^community "(.*?)" has country "(.*?)" and currency "(.*?)"$/ do |community, country, currency|
community = Community.where(ident: community).first
community.country = country
community.currency = currency
community.save
end
Given /^community "(.*?)" has payment method "(.*?)" provisioned$/ do |community, payment_gateway|
community = Community.where(ident: community).first
if payment_gateway
TransactionService::API::Api.settings.provision(
community_id: community.id,
payment_gateway: payment_gateway,
payment_process: :preauthorize,
active: true)
end
if payment_gateway == 'stripe'
FeatureFlagService::API::Api.features.enable(community_id: community.id, features: [:stripe])
end
end
Given /^community "(.*?)" has payment method "(.*?)" enabled by admin$/ do |community, payment_gateway|
community = Community.where(ident: community).first
tx_settings_api = TransactionService::API::Api.settings
if payment_gateway == 'paypal'
FactoryGirl.create(:paypal_account,
community_id: community.id,
order_permission: FactoryGirl.build(:order_permission))
end
data = {
community_id: community.id,
payment_process: :preauthorize,
payment_gateway: payment_gateway
}
tx_settings_api.activate(data)
tx_settings_api.update(data.merge(
commission_from_seller: 10,
minimum_price_cents: 100
))
if payment_gateway == 'stripe'
tx_settings_api.update(data.merge(
api_private_key: 'sk_test_123456789012345678901234',
api_publishable_key: 'pk_test_123456789012345678901234'
))
tx_settings_api.api_verified(data)
end
end
| 38.504167 | 214 | 0.722433 |
38c5ccb7cd95cec055bed17d9aff66c8c2254ef6 | 439 | # Load the Rails application.
require_relative 'application'
# Initialize the Rails application.
Rails.application.initialize!
require_relative './settings'
ActionMailer::Base.smtp_settings = {
user_name: ENV['SENDGRID_USERNAME'],
password: ENV['SENDGRID_PASSWORD'],
domain: 'coc-beacon.org',
host: 'coc-beacon.herokuapp.com',
address: 'smtp.sendgrid.net',
port: 587,
authentication: :plain,
enable_starttls_auto: true
}
| 24.388889 | 38 | 0.751708 |
e9768b218ce060d03579a35589a7d823c046281f | 5,527 | #
# Author:: Bryan W. Berry (<[email protected]>)
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2012 Bryan W. Berry
# Copyright:: Copyright (c) 2012 Daniel DeLeo
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'chef'
require 'chef/application'
require 'chef/client'
require 'chef/config'
require 'chef/log'
require 'fileutils'
require 'tempfile'
require 'chef/providers'
require 'chef/resources'
class Chef::Application::Apply < Chef::Application
banner "Usage: chef-apply [RECIPE_FILE] [-e RECIPE_TEXT] [-s]"
option :execute,
:short => "-e RECIPE_TEXT",
:long => "--execute RECIPE_TEXT",
:description => "Execute resources supplied in a string",
:proc => nil
option :stdin,
:short => "-s",
:long => "--stdin",
:description => "Execute resources read from STDIN",
:boolean => true
option :json_attribs,
:short => "-j JSON_ATTRIBS",
:long => "--json-attributes JSON_ATTRIBS",
:description => "Load attributes from a JSON file or URL",
:proc => nil
option :log_level,
:short => "-l LEVEL",
:long => "--log_level LEVEL",
:description => "Set the log level (debug, info, warn, error, fatal)",
:proc => lambda { |l| l.to_sym }
option :help,
:short => "-h",
:long => "--help",
:description => "Show this message",
:on => :tail,
:boolean => true,
:show_options => true,
:exit => 0
option :version,
:short => "-v",
:long => "--version",
:description => "Show chef version",
:boolean => true,
:proc => lambda {|v| puts "Chef: #{::Chef::VERSION}"},
:exit => 0
option :why_run,
:short => '-W',
:long => '--why-run',
:description => 'Enable whyrun mode',
:boolean => true
option :color,
:long => '--[no-]color',
:boolean => true,
:default => !Chef::Platform.windows?,
:description => "Use colored output, defaults to enabled"
option :minimal_ohai,
:long => "--minimal-ohai",
:description => "Only run the bare minimum ohai plugins chef needs to function",
:boolean => true
attr_reader :json_attribs
def initialize
super
end
def reconfigure
parse_options
Chef::Config.merge!(config)
configure_logging
configure_proxy_environment_variables
parse_json
end
def parse_json
if Chef::Config[:json_attribs]
config_fetcher = Chef::ConfigFetcher.new(Chef::Config[:json_attribs])
@json_attribs = config_fetcher.fetch_json
end
end
def read_recipe_file(file_name)
if file_name.nil?
Chef::Application.fatal!("No recipe file was provided", 1)
else
recipe_path = File.expand_path(file_name)
unless File.exist?(recipe_path)
Chef::Application.fatal!("No file exists at #{recipe_path}", 1)
end
recipe_fh = open(recipe_path)
recipe_text = recipe_fh.read
[recipe_text, recipe_fh]
end
end
def get_recipe_and_run_context
Chef::Config[:solo] = true
@chef_client = Chef::Client.new(@json_attribs)
@chef_client.run_ohai
@chef_client.load_node
@chef_client.build_node
run_context = if @chef_client.events.nil?
Chef::RunContext.new(@chef_client.node, {})
else
Chef::RunContext.new(@chef_client.node, {}, @chef_client.events)
end
recipe = Chef::Recipe.new("(chef-apply cookbook)", "(chef-apply recipe)", run_context)
[recipe, run_context]
end
# write recipe to temp file, so in case of error,
# user gets error w/ context
def temp_recipe_file
@recipe_fh = Tempfile.open('recipe-temporary-file')
@recipe_fh.write(@recipe_text)
@recipe_fh.rewind
@recipe_filename = @recipe_fh.path
end
def run_chef_recipe
if config[:execute]
@recipe_text = config[:execute]
temp_recipe_file
elsif config[:stdin]
@recipe_text = STDIN.read
temp_recipe_file
else
if !ARGV[0]
puts opt_parser
Chef::Application.exit! "No recipe file provided", 1
end
@recipe_filename = ARGV[0]
@recipe_text,@recipe_fh = read_recipe_file @recipe_filename
end
recipe,run_context = get_recipe_and_run_context
recipe.instance_eval(@recipe_text, @recipe_filename, 1)
runner = Chef::Runner.new(run_context)
begin
runner.converge
ensure
@recipe_fh.close
end
end
def run_application
begin
parse_options
run_chef_recipe
Chef::Application.exit! "Exiting", 0
rescue SystemExit => e
raise
rescue Exception => e
Chef::Application.debug_stacktrace(e)
Chef::Application.fatal!("#{e.class}: #{e.message}", 1)
end
end
# Get this party started
def run
reconfigure
run_application
end
end
| 28.055838 | 90 | 0.627103 |
39f2c4b35cc5f7cb5b63a667ec684eacb9238caf | 2,178 | class KentTools < Formula
desc "Utilities for the UCSC Genome Browser"
homepage "http://genome.ucsc.edu/"
url "http://hgdownload.cse.ucsc.edu/admin/exe/userApps.v316.src.tgz"
head "git://genome-source.cse.ucsc.edu/kent.git"
sha256 "1190e52702ff2661ac48fe4f0ef9f966718f44ec09596a6f77c8049c638a59fe"
bottle do
cellar :any
sha256 "9c5e426ee9255fe8a04ac510e52e8e1830ed5657d99ec2a9dae25eb90c1308d8" => :yosemite
sha256 "6f869c09ee9c3a33bf8f69b75dfefaa456346879104268b26e4f9057a1276db5" => :mavericks
sha256 "29cd9e14a04bb59dafe8c87a3d9e96f19cabcef98118eaba5c04ca66bd6dae4b" => :mountain_lion
end
depends_on :mysql
depends_on "libpng"
depends_on "openssl"
def install
libpng = Formula["libpng"]
mysql = Formula["mysql"]
args = ["userApps", "BINDIR=#{bin}", "SCRIPTS=#{bin}"]
args << "MACHTYPE=#{`uname -m`.chomp}"
args << "PNGLIB=-L#{libpng.opt_lib} -lpng"
args << "PNGINCL=-I#{libpng.opt_include}"
# On Linux, depends_on :mysql looks at system MySQL so check if Homebrew
# MySQL already exists. If it does, then link against that. Otherwise, let
# kent-tools link against system MySQL (see kent/src/inc/common.mk)
if mysql.installed?
args << "MYSQLINC=#{mysql.opt_include}/mysql"
args << "MYSQLLIBS=-lmysqlclient -lz"
end
cd build.head? ? "src" : "kent/src" do
system "make", *args
end
cd bin do
blat_bin = %w[blat faToTwoBit gfClient gfServer nibFrag pslPretty
pslReps pslSort twoBitInfo twoBitToFa]
rm blat_bin
mv "calc", "kent-tools-calc"
end
end
def caveats; <<-EOS.undent
The `calc` tool has been renamed to `kent-tools-calc`.
This only installs the standalone tools located at
http://hgdownload.cse.ucsc.edu/admin/exe/
If you need the full UCSC Genome Browser, run:
brew install ucsc-genome-browser
This does not install the BLAT tools. To install BLAT:
brew install blat
EOS
end
test do
(testpath/"test.fa").write <<-EOF.undent
>test
ACTG
EOF
system "#{bin}/faOneRecord test.fa test > out.fa"
compare_file "test.fa", "out.fa"
end
end
| 30.676056 | 95 | 0.685491 |
7950febad6c0fda0ad5cba1585f959b9d47dbefa | 71 | module Bootstrap
module Willpaginate
VERSION = "0.0.8"
end
end
| 11.833333 | 21 | 0.690141 |
f83b8b4d04561be06a8db082672d10775fb5f30f | 451 | cask 'gn-growler-utility' do
version '3.1'
sha256 '5de013f7447e4c7f712951035994e759b9168525ee52b1f2e77f04b664bbf1b4'
url "http://wafflesoftware.net/growlergn/download/GNGrowler-#{version.delete('.')}.zip"
name 'Growler for Google Notifier'
homepage 'http://wafflesoftware.net/growlergn/'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'GN Growler Utility.app'
end
| 37.583333 | 115 | 0.767184 |
1877235e4bfe17e08152c4ef4fbbb4a8385c13b4 | 13,960 | # This class is used to manage tasks using the Task Scheduler V2 API
#
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa383600(v=vs.85).aspx
#
require_relative './error'
require_relative './trigger'
module PuppetX
module PuppetLabs
module ScheduledTask
class Task
# The name of the root folder for tasks
ROOT_FOLDER = '\\'.freeze
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_enum_flags
class TASK_ENUM_FLAGS
TASK_ENUM_HIDDEN = 0x1
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_action_type
class TASK_ACTION_TYPE
TASK_ACTION_EXEC = 0
TASK_ACTION_COM_HANDLER = 5
TASK_ACTION_SEND_EMAIL = 6
TASK_ACTION_SHOW_MESSAGE = 7
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_compatibility
# Win7/2008 R2 = 3
# Win8/Server 2012 R2 or Server 2016 = 4
# Windows 10 = 5 / 6
class TASK_COMPATIBILITY
TASK_COMPATIBILITY_AT = 0
TASK_COMPATIBILITY_V1 = 1
TASK_COMPATIBILITY_V2 = 2
TASK_COMPATIBILITY_V2_1 = 3
TASK_COMPATIBILITY_V2_2 = 4
TASK_COMPATIBILITY_V2_3 = 5
TASK_COMPATIBILITY_V2_4 = 6
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_creation
class TASK_CREATION
TASK_VALIDATE_ONLY = 0x1
TASK_CREATE = 0x2
TASK_UPDATE = 0x4
# ( TASK_CREATE | TASK_UPDATE )
TASK_CREATE_OR_UPDATE = 0x6
TASK_DISABLE = 0x8
TASK_DONT_ADD_PRINCIPAL_ACE = 0x10
TASK_IGNORE_REGISTRATION_TRIGGERS = 0x20
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_logon_type
class TASK_LOGON_TYPE
TASK_LOGON_NONE = 0
TASK_LOGON_PASSWORD = 1
TASK_LOGON_S4U = 2
TASK_LOGON_INTERACTIVE_TOKEN = 3
TASK_LOGON_GROUP = 4
TASK_LOGON_SERVICE_ACCOUNT = 5
TASK_LOGON_INTERACTIVE_TOKEN_OR_PASSWORD = 6
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_run_flags
class TASK_RUN_FLAGS
TASK_RUN_NO_FLAGS = 0
TASK_RUN_AS_SELF = 0x1
TASK_RUN_IGNORE_CONSTRAINTS = 0x2
TASK_RUN_USE_SESSION_ID = 0x4
TASK_RUN_USER_SID = 0x8
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_runlevel
class TASK_RUNLEVEL_TYPE
TASK_RUNLEVEL_LUA = 0
TASK_RUNLEVEL_HIGHEST = 1
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_processtokensid
class TASK_PROCESSTOKENSID_TYPE
TASK_PROCESSTOKENSID_NONE = 0
TASK_PROCESSTOKENSID_UNRESTRICTED = 1
TASK_PROCESSTOKENSID_DEFAULT = 2
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_state
class TASK_STATE
TASK_STATE_UNKNOWN = 0
TASK_STATE_DISABLED = 1
TASK_STATE_QUEUED = 2
TASK_STATE_READY = 3
TASK_STATE_RUNNING = 4
end
# https://docs.microsoft.com/en-us/windows/desktop/api/taskschd/ne-taskschd-_task_instances_policy
class TASK_INSTANCES_POLICY
TASK_INSTANCES_PARALLEL = 0
TASK_INSTANCES_QUEUE = 1
TASK_INSTANCES_IGNORE_NEW = 2
TASK_INSTANCES_STOP_EXISTING = 3
end
public
# Returns a new TaskScheduler object.
# An existing task named task_name will be returned if one exists,
# otherwise a new task is created by that name (but not yet saved to the system).
#
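        # A minimal usage sketch (illustrative only; the task name and command
        # below are hypothetical and not part of this module):
        #
        #   task = PuppetX::PuppetLabs::ScheduledTask::Task.new('Example Task')
        #   task.application_name = 'C:\Windows\System32\cmd.exe'
        #   task.parameters = '/c echo hello'
        #   task.save
        #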
def initialize(task_name, compatibility_level = nil)
raise TypeError unless task_name.is_a?(String)
@full_task_path = ROOT_FOLDER + task_name
# definition populated when task exists, otherwise new
@task, @definition = self.class.task(@full_task_path)
task_userid = @definition.Principal.UserId || ''
if compatibility_level == :v1_compatibility
self.compatibility = TASK_COMPATIBILITY::TASK_COMPATIBILITY_V1
end
set_account_information(task_userid,nil)
end
V1_COMPATIBILITY = [
TASK_COMPATIBILITY::TASK_COMPATIBILITY_AT,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V1
].freeze
V2_COMPATIBILITY = [
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V2_4,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V2_3,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V2_2,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V2_1,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V2,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_AT,
TASK_COMPATIBILITY::TASK_COMPATIBILITY_V1
].freeze
# Returns an array of scheduled task names.
#
def self.tasks(compatibility = V2_COMPATIBILITY)
enum_task_names(ROOT_FOLDER,
include_child_folders: false,
include_compatibility: compatibility).map do |item|
task_name_from_task_path(item)
end
end
RESERVED_FOR_FUTURE_USE = 0
# Returns an array of scheduled task names.
# By default EVERYTHING is enumerated
# option hash
# include_child_folders: recurses into child folders for tasks. Default true
# include_compatibility: Only include tasks which have any of the specified compatibility levels. Default empty array (everything is permitted)
#
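        # e.g. (illustrative; the folder path below is a placeholder):
        #
        #   enum_task_names('\\Microsoft', include_child_folders: false)
        #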
def self.enum_task_names(folder_path = ROOT_FOLDER, enum_options = {})
raise TypeError unless folder_path.is_a?(String)
options = {
:include_child_folders => true,
:include_compatibility => [],
}.merge(enum_options)
array = []
task_folder = task_service.GetFolder(folder_path)
filter_compatibility = !options[:include_compatibility].empty?
task_folder.GetTasks(TASK_ENUM_FLAGS::TASK_ENUM_HIDDEN).each do |task|
next if filter_compatibility && !options[:include_compatibility].include?(task.Definition.Settings.Compatibility)
array << task.Path
end
return array unless options[:include_child_folders]
task_folder.GetFolders(RESERVED_FOR_FUTURE_USE).each do |child_folder|
array += enum_task_names(child_folder.Path, options)
end
array
end
# Returns whether or not the scheduled task exists.
def self.exists?(job_name)
# task name comparison is case insensitive
tasks.any? { |name| name.casecmp(job_name) == 0 }
end
# Delete the specified task name.
#
def self.delete(task_name)
task_path = ROOT_FOLDER + task_name
task_folder = task_service.GetFolder(folder_path_from_task_path(task_path))
task_folder.DeleteTask(task_name_from_task_path(task_path), 0)
end
# Creates or Updates an existing task with the supplied task definition
# Tasks must be saved before they can be activated.
#
# The .job file itself is typically stored in the C:\WINDOWS\Tasks folder.
def save
task_path = @task ? @task.Path : @full_task_path
task_folder = self.class.task_service.GetFolder(self.class.folder_path_from_task_path(task_path))
task_user = nil
task_password = nil
case @definition.Principal.LogonType
when TASK_LOGON_TYPE::TASK_LOGON_PASSWORD,
TASK_LOGON_TYPE::TASK_LOGON_INTERACTIVE_TOKEN_OR_PASSWORD
task_user = @definition.Principal.UserId
task_password = @task_password
end
saved = task_folder.RegisterTaskDefinition(
self.class.task_name_from_task_path(task_path),
@definition,
TASK_CREATION::TASK_CREATE_OR_UPDATE,
task_user, task_password, @definition.Principal.LogonType)
@task ||= saved
end
# Sets the +user+ and +password+ for the given task. If the user and
# password are set properly then true is returned.
#
# In some cases the job may be created, but the account information was
# bad. In this case the task is created but a warning is generated and
# false is returned.
#
# Note that if intending to use SYSTEM, specify an empty user and nil password
#
# This must be done prior to the 1st save() call for the task to be
# properly registered and visible through the MMC snap-in / schtasks.exe
#
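        # Illustrative examples (the account name and password are placeholders):
        #
        #   task.set_account_information('', nil)                    # run as the local SYSTEM account
        #   task.set_account_information('DOMAIN\\user', 'password') # run as a specific user
        #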
def set_account_information(user, password)
@task_password = password
if (user.nil? || user == "")
# Setup for the local system account
@definition.Principal.UserId = 'SYSTEM'
@definition.Principal.LogonType = TASK_LOGON_TYPE::TASK_LOGON_SERVICE_ACCOUNT
@definition.Principal.RunLevel = TASK_RUNLEVEL_TYPE::TASK_RUNLEVEL_HIGHEST
else
@definition.Principal.UserId = user
@definition.Principal.LogonType = TASK_LOGON_TYPE::TASK_LOGON_PASSWORD
@definition.Principal.RunLevel = TASK_RUNLEVEL_TYPE::TASK_RUNLEVEL_HIGHEST
end
true
end
# Returns the user associated with the task or nil if no user has yet
# been associated with the task.
#
def account_information
principal = @definition.Principal
principal.nil? ? nil : principal.UserId
end
# Returns the name of the application associated with the task.
#
def application_name
action = default_action(create_if_missing: false)
action.nil? ? nil : action.Path
end
# Sets the application name associated with the task.
#
def application_name=(app)
action = default_action(create_if_missing: true)
action.Path = app
app
end
# Returns the command line parameters for the task.
#
def parameters
action = default_action(create_if_missing: false)
action.nil? ? nil : action.Arguments
end
# Sets the parameters for the task. These parameters are passed as command
# line arguments to the application the task will run. To clear the command
# line parameters set it to an empty string.
#
def parameters=(param)
action = default_action(create_if_missing: true)
action.Arguments = param
param
end
# Returns the working directory for the task.
#
def working_directory
action = default_action(create_if_missing: false)
action.nil? ? nil : action.WorkingDirectory
end
# Sets the working directory for the task.
#
def working_directory=(dir)
action = default_action(create_if_missing: false)
action.WorkingDirectory = dir
dir
end
def compatibility
@definition.Settings.Compatibility
end
def compatibility=(value)
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381846(v=vs.85).aspx
@definition.Settings.Compatibility = value
end
# Returns a set of trigger hashes with their indexes, for supported trigger
        # types. Returns nil for each unknown trigger type in the collection.
#
def triggers
@definition.Triggers.count.times.map { |i| trigger(i) }
end
# Deletes the trigger at the specified index.
#
def delete_trigger(index)
        # The older V1 API uses a starting index of zero, whereas the V2 API uses one.
# Need to increment by one to maintain the same behavior
index += 1
@definition.Triggers.Remove(index)
index
end
# Deletes all triggers
def clear_triggers
@definition.Triggers.Clear()
end
# Appends a new trigger for the currently active task.
#
def append_trigger(manifest_hash)
Trigger::V2.append_trigger(@definition, manifest_hash)
end
def enabled
@definition.Settings.Enabled
end
def enabled=(value)
@definition.Settings.Enabled = value
end
private
# :stopdoc:
def self.task_service
service = WIN32OLE.new('Schedule.Service')
service.connect()
service
end
def self.task_name_from_task_path(task_path)
task_path.rpartition('\\')[2]
end
def self.folder_path_from_task_path(task_path)
path = task_path.rpartition('\\')[0]
path.empty? ? ROOT_FOLDER : path
end
def self.task(task_path)
raise TypeError unless task_path.is_a?(String)
service = task_service
begin
task_folder = service.GetFolder(folder_path_from_task_path(task_path))
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381363(v=vs.85).aspx
_task = task_folder.GetTask(task_name_from_task_path(task_path))
return _task, _task.Definition
rescue WIN32OLERuntimeError => e
unless Error.is_com_error_type(e, Error::ERROR_FILE_NOT_FOUND)
raise Puppet::Error.new( _("GetTask failed with: %{error}") % { error: e }, e )
end
end
return nil, service.NewTask(0)
end
# Find the first TASK_ACTION_EXEC action
def default_action(create_if_missing: false)
action = nil
          (1..@definition.Actions.count).each do |i|
index_action = action_at(i)
action = index_action if index_action.Type == TASK_ACTION_TYPE::TASK_ACTION_EXEC
break if action
end
if action.nil? && create_if_missing
action = @definition.Actions.Create(TASK_ACTION_TYPE::TASK_ACTION_EXEC)
end
action
end
def action_at(index)
@definition.Actions.Item(index)
rescue WIN32OLERuntimeError => err
raise unless Error.is_com_error_type(err, Error::E_INVALIDARG)
nil
end
# Returns a Win32OLE Trigger Object for the trigger at the given index for the
# supplied definition.
#
# Returns nil if the index does not exist
#
# Note - This is a 1 based array (not zero)
#
def trigger_at(index)
@definition.Triggers.Item(index)
rescue WIN32OLERuntimeError => err
raise unless Error.is_com_error_type(err, Error::E_INVALIDARG)
nil
end
# Returns a hash that describes the trigger at the given index for the
# current task.
#
def trigger(index)
        # The older V1 API uses a starting index of zero, whereas the V2 API uses one.
# Need to increment by one to maintain the same behavior
trigger_object = trigger_at(index + 1)
trigger_object.nil? || Trigger::V2::TYPE_MANIFEST_MAP[trigger_object.Type].nil? ?
# nil trigger definitions are unsupported ITrigger types
nil :
Trigger::V2.to_manifest_hash(trigger_object).merge!({ 'index' => index })
end
end
end
end
end
| 31.441441 | 148 | 0.711246 |
39933dbb985ae48c52364ded3de0db4b744a75a0 | 1,626 | class IntouchCronJobsController < ApplicationController
unloadable
layout 'admin'
before_action :require_admin
accept_api_auth :index
def init
hash = {
'cron_feedback_regular_notification' => {
'class' => 'CronFeedbackRegularNotification',
'cron' => '*/5 * * * *'
},
'cron_overdue_regular_notification' => {
'class' => 'CronOverdueRegularNotification',
'cron' => '0 10 * * *'
},
'cron_unassigned_regular_notification' => {
'class' => 'CronUnassignedRegularNotification',
'cron' => '*/15 * * * *'
},
'cron_working_regular_notification' => {
'class' => 'CronWorkingRegularNotification',
'cron' => '*/5 * * * *'
}
}
Sidekiq::Cron::Job.load_from_hash hash
redirect_to action: 'plugin', id: 'redmine_intouch', controller: 'settings', tab: 'sidekiq_cron_jobs'
end
def index
@sidekiq_cron_jobs = Sidekiq::Cron::Job.all
respond_to do |format|
format.api
format.html { render action: 'index', layout: false if request.xhr? }
end
end
def edit
@sidekiq_cron_job = Sidekiq::Cron::Job.find(params[:id])
end
def update
@sidekiq_cron_job = Sidekiq::Cron::Job.find(params[:id])
@sidekiq_cron_job.cron = params[:sidekiq_cron_job][:cron] if params[:sidekiq_cron_job]
if @sidekiq_cron_job.valid?
@sidekiq_cron_job.save
flash[:notice] = l(:notice_successful_update)
redirect_to action: 'plugin', id: 'redmine_intouch', controller: 'settings', tab: 'sidekiq_cron_jobs'
else
render action: 'edit'
end
end
end
| 26.655738 | 107 | 0.632841 |
e255fbd8425a4dd28d0a699afa5409865983c8a5 | 846 | #
# Copyright 2019 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class HeaderConstraint
include Services
  def matches?(request)
return true unless system_environment.isApiSafeModeEnabled
return false if request.headers['HTTP_CONFIRM'].blank?
'true'.casecmp(request.headers['HTTP_CONFIRM']).zero?
end
end
| 31.333333 | 74 | 0.758865 |
f770bc7dabf8c55fd3d16d53390feb548f108d21 | 1,137 | require 'test_helper'
class UsersLoginTest < ActionDispatch::IntegrationTest
def setup
@user = users(:jdoe)
end
test "login with invalid information" do
get login_path
assert_template 'sessions/new'
post login_path, params: { session: { email: "", password: "" } }
assert_template 'sessions/new'
assert_not flash.empty?
get root_path
assert flash.empty?
end
test "login with valid information followed by logout" do
get login_path
post login_path, params: { session: { email: @user.email,
password: 'password' } }
assert is_logged_in?
assert_redirected_to @user
follow_redirect!
assert_template 'users/show'
assert_select "a[href=?]", signup_path, count: 0
assert_select "a[href=?]", login_path, count: 0
assert_select "a[href=?]", logout_path
delete logout_path
assert_not is_logged_in?
assert_redirected_to root_url
follow_redirect!
assert_select "a[href=?]", signup_path
assert_select "a[href=?]", login_path
assert_select "a[href=?]", logout_path, count: 0
end
end
| 29.153846 | 69 | 0.664028 |
3380cf64580e6011045ec79b29d6590f5166c68e | 815 | cask "cookie" do
version "6.5"
sha256 :no_check
url "https://sweetpproductions.com/products/cookieapp/Cookie.dmg"
name "Cookie"
desc "Protection from tracking and online profiling"
homepage "https://sweetpproductions.com/"
livecheck do
url "https://sweetpproductions.com/products/cookieapp/appcast.xml"
strategy :sparkle
end
depends_on macos: ">= :mojave"
app "Cookie.app"
zap trash: [
"~/Library/Application Scripts/com.sweetpproductions.Cookie5",
"~/Library/Containers/com.sweetpproductions.Cookie5",
"~/Library/Preferences/com.sweetpproductions.Cookie5.plist",
"~/Library/Application Scripts/com.sweetpproductions.CookieApp",
"~/Library/Containers/com.sweetpproductions.CookieApp",
"~/Library/Preferences/com.sweetpproductions.CookieApp.plist",
]
end
| 29.107143 | 70 | 0.73865 |
28031c7f071c56fc848697dee7439ae6dca8a556 | 994 | # -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "authlogic_device_tokens/version"
Gem::Specification.new do |s|
s.name = %q{authlogic_device_tokens}
s.version = AuthlogicDeviceTokens::VERSION
s.authors = ["Jesus Laiz (aka zheileman)"]
s.date = %q{2013-01-17}
s.description = %q{Authlogic extension to support multiple per-device and per-user tokens.}
s.email = %q{[email protected]}
s.extra_rdoc_files = [
"LICENSE",
"README.md"
]
s.files = Dir.glob('**/*') - Dir.glob('authlogic_device_tokens*.gem')
s.homepage = %q{http://github.com/zheileman/authlogic_device_tokens}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.summary = %q{Authlogic extension to support multiple per-device and per-user tokens.}
s.add_runtime_dependency 'authlogic', '~>3.0'
s.add_development_dependency 'rails', '~>3.0'
s.add_development_dependency 'sqlite3'
end
| 35.5 | 98 | 0.657948 |
61447fe78ae02360970f8e89cf9bb75721f02ede | 2,081 | # -*- encoding: utf-8 -*-
# stub: io-like 0.3.0 ruby lib
Gem::Specification.new do |s|
s.name = "io-like".freeze
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Jeremy Bopp".freeze]
s.date = "2009-04-29"
s.description = "The IO::Like module provides the methods of an IO object based upon on a few simple methods provided by the including class: unbuffered_read, unbuffered_write, and unbuffered_seek. These methods provide the underlying read, write, and seek support respectively, and only the method or methods necessary to the correct operation of the IO aspects of the including class need to be provided. Missing functionality will cause the resulting object to appear read-only, write-only, and/or unseekable depending on which underlying methods are absent. Additionally, read and write operations which are buffered in IO are buffered with independently configurable buffer sizes. Duplexed objects (those with separate read and write streams) are also supported.".freeze
s.email = "jeremy at bopp dot net".freeze
s.extra_rdoc_files = ["CONTRIBUTORS".freeze, "HACKING".freeze, "LICENSE".freeze, "LICENSE.rubyspec".freeze, "GPL".freeze, "LEGAL".freeze, "NEWS".freeze, "README".freeze]
s.files = ["CONTRIBUTORS".freeze, "GPL".freeze, "HACKING".freeze, "LEGAL".freeze, "LICENSE".freeze, "LICENSE.rubyspec".freeze, "NEWS".freeze, "README".freeze]
s.homepage = "http://io-like.rubyforge.org".freeze
s.rdoc_options = ["--title".freeze, "IO::Like Documentation".freeze, "--charset".freeze, "utf-8".freeze, "--line-numbers".freeze, "--inline-source".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.8.1".freeze)
s.rubyforge_project = "io-like".freeze
s.rubygems_version = "2.6.8".freeze
s.summary = "A module which provides the functionality of an IO object to any class which provides a couple of simple methods.".freeze
s.installed_by_version = "2.6.8" if s.respond_to? :installed_by_version
end
| 83.24 | 781 | 0.745315 |
1a0dccd0c2c2501b71013facecbe034eac60a295 | 2,549 | module Zuck
# Including this module does three things:
#
# 1. Lets you use `x[:foo]` to access keys of the
# underlying Hash
# 2. Lets you use `x[:foo] = :bar` to set values in
# the underlying Hash
# 3. Lets you define which keys are to be expected in
# the underlying hash. These keys will become methods
#
# Here's an example:
#
# class MyObjectWithHash
#
# include Zuck::HashDelegator
#
# known_keys :foo, :bar
#
# def initialize(initial_data)
# set_data(initial_data)
# end
# end
#
# > x = MyObjectWithHash.new(foo: :foo)
# > x.foo
# => :foo
# > x.bar
# => nil
# > x['bar'] = :everything_is_a_symbol
# > x[:bar]
# => :everything_is_a_symbol
# > x['bar']
# => :everything_is_a_symbol
  #   > x.bar
# => :everything_is_a_symbol
# > x.foo = :you_also_have_setters
# => :you_also_have_setters
#
# As you can see, all string keys become symbols and the
# foo and bar methods were added because they are known keys
#
module HashDelegator
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
      def known_keys(*args)
        # Define list of known keys (defined once, not once per key)
        self.send(:define_method, :known_keys) do
          args || []
        end
        args.each do |key|
          # Define getter
          self.send(:define_method, key) do
            init_hash
            @hash_delegator_hash[key]
          end
          # Define setter
          self.send(:define_method, "#{key}=") do |val|
            init_hash
            @hash_delegator_hash[key] = val
          end
        end
      end
end
def set_data(d)
e = "You can only assign a Hash to #{self.class}, not a #{d.class}"
raise e unless d.is_a? Hash
hash = Hash.new
d.each do |key, value|
hash[(key.to_sym rescue key) || key] = value
end
@hash_delegator_hash = hash
end
def data=(d)
set_data(d)
end
def data
@hash_delegator_hash
end
def [](key)
init_hash
@hash_delegator_hash[key.to_sym]
end
def []=(key, value)
init_hash
@hash_delegator_hash[key.to_sym] = value
end
def to_s
init_hash
vars = @hash_delegator_hash.map do |k, v|
"#{k}: #{v.to_json}"
end.join(", ")
"#<#{self.class} #{vars}>"
end
private
def init_hash
@hash_delegator_hash ||= {}
end
end
end
| 21.974138 | 73 | 0.547666 |
26ab2a7e355511975ec7923232faf7b83c668142 | 1,253 | require File.dirname(__FILE__) + '/../../spec_helper'
include PoolParty::Resources
describe "directory" do
describe "instances" do
before(:each) do
@directory = directory({:name => "/etc/apache2/puppetmaster.conf"})
end
it "should turn the one hash instance into a string" do
@directory.to_string.should =~ /"\/etc\/apache2\/puppetmaster\.conf":/
end
it "should turn the two hash instance into a string" do
@directory = directory do
name "/etc/init.d/puppetmaster"
owner "redsmith"
end
@directory.to_string.should =~ /"\/etc\/init\.d\/puppetmaster":/
end
describe "as included" do
before(:each) do
@directory = directory({:rent => "low"}) do
name "/www/conf/httpd.conf"
end
end
it "should use default values" do
@directory.name.should == "/www/conf/httpd.conf"
end
it "should keep the default values for the directory" do
@directory.mode.should == 644
end
it "should also set options through a hash" do
@directory.rent.should == "low"
end
it "should have ensure set to directory" do
@directory.ensure.should == "directory"
end
end
end
end
| 30.560976 | 76 | 0.610535 |
28e516b9cec286c7752da82f89f2d123c09078bb | 227 | class CreatePredicateParams < ActiveRecord::Migration[5.0]
def change
create_table :predicate_params do |t|
t.text :name
t.text :param_type
t.integer :predicate_id
t.timestamps
end
end
end
| 17.461538 | 58 | 0.674009 |
62ebff3996fbd06f80e278e698ffce31e096a7ea | 2,036 | Pod::Spec.new do |s|
s.name = 'FirebaseStorageSwift'
s.version = '8.2.0-beta'
s.summary = 'Swift Extensions for Google Cloud Storage'
s.description = <<-DESC
Firebase Storage provides robust, secure file uploads and downloads from Firebase SDKs, powered by Google Cloud Storage.
DESC
s.homepage = 'https://developers.google.com/'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/Firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-' + s.version.to_s
}
s.swift_version = '5.0'
ios_deployment_target = '10.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
s.source_files = [
'FirebaseStorageSwift/Sources/*.swift',
]
s.dependency 'FirebaseStorage', '~> 8.0'
s.test_spec 'integration' do |int_tests|
int_tests.scheme = { :code_coverage => true }
int_tests.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
int_tests.source_files = 'FirebaseStorageSwift/Tests/Integration/*.swift'
int_tests.requires_app_host = true
# Resources are shared with FirebaseStorage's integration tests.
int_tests.resources = 'FirebaseStorage/Tests/Integration/Resources/1mb.dat',
'FirebaseStorage/Tests/Integration/Resources/GoogleService-Info.plist',
'FirebaseStorage/Tests/Integration/Resources/HomeImprovement.numbers'
int_tests.dependency 'FirebaseAuth', '~> 8.0'
end
end
| 35.719298 | 120 | 0.644401 |
d5cdf78368ebaf80f1663ce832cc4cea392a97a4 | 519 | # frozen_string_literal: true
require_relative "solution.rb"
module Hangman
class Board
attr_accessor :letters
def initialize(letters = [*("A".."Z")])
@letters = letters
end
    def remove_letter(index)
      letters[index] = " " unless index.nil?
    end
def update_dude(bad_guesses)
hangman = [" ______ ", " | | ", " | - ", " | | | ", " | = ", " | | ", " | -|- ", " | | ", " | / \\"]
      # Show one figure row per bad guess. Array#first avoids the 0..-1 range,
      # which would print the entire figure when there are no bad guesses yet.
      puts hangman.first(bad_guesses)
end
end
end
| 20.76 | 133 | 0.487476 |
bb8ebb00781596f6dc975bd648cd2f9475984ed1 | 3,104 | require 'faraday'
require 'multi_json'
require 'twitter/api'
require 'twitter/configurable'
require 'twitter/error/client_error'
require 'twitter/error/decode_error'
require 'twitter/rate_limit'
require 'simple_oauth'
require 'uri'
module Twitter
# Wrapper for the Twitter REST API
#
# @note All methods have been separated into modules and follow the same grouping used in {http://dev.twitter.com/doc the Twitter API Documentation}.
# @see http://dev.twitter.com/pages/every_developer
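  #
  # @example A minimal sketch (credentials and request path are placeholders)
  #   client = Twitter::Client.new(
  #     :consumer_key => 'CONSUMER_KEY',
  #     :consumer_secret => 'CONSUMER_SECRET',
  #     :oauth_token => 'OAUTH_TOKEN',
  #     :oauth_token_secret => 'OAUTH_TOKEN_SECRET'
  #   )
  #   client.get('/1.1/account/verify_credentials.json')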
class Client
include Twitter::API
include Twitter::Configurable
attr_reader :rate_limit
# Initializes a new Client object
#
# @param options [Hash]
# @return [Twitter::Client]
def initialize(options={})
Twitter::Configurable.keys.each do |key|
instance_variable_set(:"@#{key}", options[key] || Twitter.instance_variable_get(:"@#{key}"))
end
@rate_limit = Twitter::RateLimit.new
end
# Perform an HTTP DELETE request
def delete(path, params={}, options={})
request(:delete, path, params, options)
end
# Perform an HTTP GET request
def get(path, params={}, options={})
request(:get, path, params, options)
end
# Perform an HTTP POST request
def post(path, params={}, options={})
request(:post, path, params, options)
end
# Perform an HTTP UPDATE request
def put(path, params={}, options={})
request(:put, path, params, options)
end
private
# Returns a Faraday::Connection object
#
# @return [Faraday::Connection]
def connection
@connection ||= Faraday.new(@endpoint, @connection_options.merge(:builder => @middleware))
end
# Perform an HTTP request
#
# @raise [Twitter::Error::ClientError, Twitter::Error::DecodeError]
def request(method, path, params={}, options={})
uri = options[:endpoint] || @endpoint
uri = URI(uri) unless uri.respond_to?(:host)
uri += path
request_headers = {}
if credentials?
authorization = auth_header(method, uri, params)
request_headers[:authorization] = authorization.to_s
end
connection.url_prefix = options[:endpoint] || @endpoint
response = connection.run_request(method.to_sym, path, nil, request_headers) do |request|
unless params.empty?
case request.method
when :post, :put
request.body = params
else
request.params.update(params)
end
end
yield request if block_given?
end.env
@rate_limit.update(response[:response_headers])
response
rescue Faraday::Error::ClientError
raise Twitter::Error::ClientError
rescue MultiJson::DecodeError
raise Twitter::Error::DecodeError
end
def auth_header(method, uri, params={})
# When posting a file, don't sign any params
signature_params = [:post, :put].include?(method.to_sym) && params.values.any?{|value| value.respond_to?(:to_io)} ? {} : params
SimpleOAuth::Header.new(method, uri, signature_params, credentials)
end
end
end
| 30.732673 | 151 | 0.658505 |
1ad56bb53d9a9edd2ab0b1b51e7cb20cb924ee95 | 5,993 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe OpenWeather::Endpoints::Stations do
include_context 'API client'
describe '#register_station' do
it 'registers a station', vcr: { cassette_name: 'stations/register_success' } do
data = client.register_station(
external_id: 'SF_TEST001',
name: 'San Francisco Test Station',
latitude: 37.76,
longitude: -122.43,
altitude: 150
)
expect(data).to be_a(OpenWeather::Models::Station)
expect(data).to have_attributes(
id: '5ed21a12cca8ce0001f1aef1',
external_id: 'SF_TEST001',
name: 'San Francisco Test Station',
latitude: 37.76,
longitude: -122.43,
altitude: 150
)
end
end
describe '#list_stations' do
it 'retrieves all stations', vcr: { cassette_name: 'stations/list_stations_success' } do
data = client.list_stations
expect(data).to be_a(Array)
expect(data.size).to eq(2)
expect(data.first).to be_a(OpenWeather::Models::Station)
expect(data.first).to have_attributes(
id: '5ed21311cca8ce0001f1aef0',
external_id: 'SF_TEST001',
name: 'San Francisco Test Station',
latitude: 37.76,
longitude: -122.43,
altitude: 150
)
end
end
describe '#get_station' do
it 'retrieves a station', vcr: { cassette_name: 'stations/get_station_success' } do
data = client.get_station('5ed21311cca8ce0001f1aef0')
expect(data).to be_a(OpenWeather::Models::Station)
expect(data).to have_attributes(
id: '5ed21311cca8ce0001f1aef0',
external_id: 'SF_TEST001',
name: 'San Francisco Test Station',
latitude: 37.76,
longitude: -122.43,
altitude: 150
)
end
context 'with invalid id' do
it 'raises error' do
expect { client.delete_station(nil) }.to raise_error ArgumentError
end
end
end
describe '#update_station' do
it 'updates a station', vcr: { cassette_name: 'stations/update_station_success' } do
update_attributes = {
external_id: 'SF_TEST002',
name: 'San Francisco Test Station 2'
}
data = client.update_station('5ed21311cca8ce0001f1aef0', update_attributes)
expect(data).to be_a(OpenWeather::Models::Station)
expect(data).to have_attributes(update_attributes)
end
context 'with invalid id' do
it 'raises error' do
expect { client.update_station(nil, {}) }.to raise_error ArgumentError
end
end
end
describe '#delete_station' do
it 'deletes a station', vcr: { cassette_name: 'stations/delete_station_success' } do
data = client.delete_station('5ed21311cca8ce0001f1aef0')
expect(data).to be_nil
end
context 'with invalid id' do
it 'raises error' do
expect { client.delete_station(nil) }.to raise_error ArgumentError
end
end
end
describe '#create_measurements' do
it 'creates measurements', vcr: { cassette_name: 'stations/create_measurement_success' } do
create_params = {
"station_id": '5ed21a12cca8ce0001f1aef1',
"dt": 1479817340,
"temperature": 18.7,
"wind_speed": 1.2,
"wind_gust": 3.4,
"pressure": 1021,
"humidity": 87,
"rain_1h": 2,
"clouds": [
{
"condition": 'NSC'
}
]
}
expect(client).to receive(:post)
.with('3.0/measurements', body: [create_params])
.and_call_original
data = client.create_measurements([create_params])
expect(data).to be_nil
end
context 'when station does not exist' do
it 'raises error', vcr: { cassette_name: 'stations/create_measurement_failed_with_invalid_station' } do
create_params = {
"station_id": 'abcde',
"dt": 1479817340,
"temperature": 18.7,
"wind_speed": 1.2,
"wind_gust": 3.4,
"pressure": 1021,
"humidity": 87,
"rain_1h": 2,
"clouds": [
{
"condition": 'NSC'
}
]
}
expect { client.create_measurements([create_params]) }
.to raise_error(OpenWeather::Errors::Fault, /Station id is invalid/)
end
end
end
describe '#get_measurements' do
it 'gets measurements', vcr: { cassette_name: 'stations/get_measurement_success' } do
data = client.get_measurements(
station_id: '5ed21a12cca8ce0001f1aef1',
type: 'd',
limit: 100,
from: 1469817340,
to: 1591620047
)
expect(data.size).to eq(1)
measurement = data.first
expect(measurement).to be_a(OpenWeather::Models::Stations::Measurement)
expect(measurement).to have_attributes(
station_id: '5ed21a12cca8ce0001f1aef1',
type: 'd',
date: 1479859200
)
expect(measurement.temp).to be_a(OpenWeather::Models::Stations::Temp)
expect(measurement.temp).to have_attributes(
max: 18.7,
min: 18.7,
average: 18.7,
weight: 1
)
expect(measurement.humidity).to be_a(OpenWeather::Models::Stations::Humidity)
expect(measurement.humidity).to have_attributes(
average: 87,
weight: 1
)
expect(measurement.pressure).to be_a(OpenWeather::Models::Stations::Pressure)
expect(measurement.pressure).to have_attributes(
min: 1021,
max: 1021,
average: 1021,
weight: 1
)
expect(measurement.precipitation).to be_a(OpenWeather::Models::Stations::Precipitation)
expect(measurement.precipitation).to have_attributes(rain: 2)
expect(measurement.wind).to eq({})
end
context 'without required params' do
it 'raises error' do
expect { client.get_measurements(something: 'something') }.to raise_error(ArgumentError, /station_id, type, limit, from, to/)
end
end
end
end
| 30.733333 | 133 | 0.621058 |
7ab104a06e18bf3e906f9c96178cdcd4a08d5edb | 414 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::RecoveryServicesBackup::Mgmt::V2019_05_13
module Models
#
# Defines values for OverwriteOptions
#
module OverwriteOptions
Invalid = "Invalid"
FailOnConflict = "FailOnConflict"
Overwrite = "Overwrite"
end
end
end
| 24.352941 | 70 | 0.714976 |
088b1d123e2f15e6c69640644fa8884644449eac | 1,551 | # This module is included in your application controller which makes
# several methods available to all controllers and views. Here's a
# common example you might add to your application layout file.
#
# <% if logged_in? %>
# Welcome <%=h current_user.username %>! Not you?
# <%= link_to "Log out", logout_path %>
# <% else %>
# <%= link_to "Sign up", signup_path %> or
# <%= link_to "log in", login_path %>.
# <% end %>
#
# You can also restrict unregistered users from accessing a controller using
# a before filter. For example.
#
# before_filter :login_required, :except => [:index, :show]
module Authentication
def self.included(controller)
controller.send :helper_method, :current_user, :logged_in?, :redirect_to_target_or_default
end
def current_user_session
return @current_user_session if defined?(@current_user_session)
@current_user_session = UserSession.find
end
def current_user
return @current_user if defined?(@current_user)
@current_user = current_user_session && current_user_session.record
end
def logged_in?
current_user
end
def login_required
unless logged_in?
flash[:error] = "You must first log in or sign up before accessing this page."
store_target_location
redirect_to login_url
end
end
def redirect_to_target_or_default(default)
redirect_to(session[:return_to] || default)
session[:return_to] = nil
end
private
def store_target_location
session[:return_to] = request.request_uri
end
end
| 28.2 | 94 | 0.70922 |
f825d2ad7f3c8a848a4d98a8a4412338f3f35fb4 | 28,842 | require File.join(File.dirname(__FILE__), 'spec_helper')
context "DB#create_table" do
before do
@db = SchemaDummyDatabase.new
end
specify "should accept the table name" do
@db.create_table(:cats) {}
@db.sqls.should == ['CREATE TABLE cats ()']
end
specify "should accept the table name in multiple formats" do
@db.create_table(:cats__cats) {}
@db.create_table("cats__cats1") {}
@db.create_table(:cats__cats2.identifier) {}
@db.create_table(:cats.qualify(:cats3)) {}
@db.sqls.should == ['CREATE TABLE cats.cats ()', 'CREATE TABLE cats__cats1 ()', 'CREATE TABLE cats__cats2 ()', 'CREATE TABLE cats3.cats ()']
end
specify "should raise an error if the table name argument is not valid" do
proc{@db.create_table(1) {}}.should raise_error(Sequel::Error)
proc{@db.create_table(:cats.as(:c)) {}}.should raise_error(Sequel::Error)
end
specify "should accept multiple columns" do
@db.create_table(:cats) do
column :id, :integer
column :name, :text
end
@db.sqls.should == ['CREATE TABLE cats (id integer, name text)']
end
specify "should accept method calls as data types" do
@db.create_table(:cats) do
integer :id
text :name
end
@db.sqls.should == ['CREATE TABLE cats (id integer, name text)']
end
specify "should transform types given as ruby classes to database-specific types" do
@db.create_table(:cats) do
String :a
Integer :b
Fixnum :c
Bignum :d
Float :e
BigDecimal :f
Date :g
DateTime :h
Time :i
Numeric :j
File :k
TrueClass :l
FalseClass :m
column :n, Fixnum
primary_key :o, :type=>String
foreign_key :p, :f, :type=>Date
end
@db.sqls.should == ['CREATE TABLE cats (o varchar(255) PRIMARY KEY AUTOINCREMENT, a varchar(255), b integer, c integer, d bigint, e double precision, f numeric, g date, h timestamp, i timestamp, j numeric, k blob, l boolean, m boolean, n integer, p date REFERENCES f)']
end
specify "should allow the use of modifiers with ruby class types" do
@db.create_table(:cats) do
String :a, :size=>50
String :b, :text=>true
String :c, :fixed=>true, :size=>40
Time :d, :only_time=>true
BigDecimal :e, :size=>[11,2]
end
@db.sqls.should == ['CREATE TABLE cats (a varchar(50), b text, c char(40), d time, e numeric(11, 2))']
end
specify "should raise an error if you use a ruby class that isn't handled" do
proc{@db.create_table(:cats){column :a, Class}}.should raise_error(Sequel::Error)
end
specify "should accept primary key definition" do
@db.create_table(:cats) do
primary_key :id
end
@db.sqls.should == ['CREATE TABLE cats (id integer PRIMARY KEY AUTOINCREMENT)']
@db.sqls.clear
@db.create_table(:cats) do
primary_key :id, :serial, :auto_increment => false
end
@db.sqls.should == ['CREATE TABLE cats (id serial PRIMARY KEY)']
@db.sqls.clear
@db.create_table(:cats) do
primary_key :id, :type => :serial, :auto_increment => false
end
@db.sqls.should == ['CREATE TABLE cats (id serial PRIMARY KEY)']
end
specify "should accept and literalize default values" do
@db.create_table(:cats) do
integer :id, :default => 123
text :name, :default => "abc'def"
end
@db.sqls.should == ["CREATE TABLE cats (id integer DEFAULT 123, name text DEFAULT 'abc''def')"]
end
specify "should accept not null definition" do
@db.create_table(:cats) do
integer :id
text :name, :null => false
text :name2, :allow_null => false
end
@db.sqls.should == ["CREATE TABLE cats (id integer, name text NOT NULL, name2 text NOT NULL)"]
end
specify "should accept null definition" do
@db.create_table(:cats) do
integer :id
text :name, :null => true
text :name2, :allow_null => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer, name text NULL, name2 text NULL)"]
end
specify "should accept unique definition" do
@db.create_table(:cats) do
integer :id
text :name, :unique => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer, name text UNIQUE)"]
end
specify "should accept unsigned definition" do
@db.create_table(:cats) do
integer :value, :unsigned => true
end
@db.sqls.should == ["CREATE TABLE cats (value integer UNSIGNED)"]
end
specify "should accept [SET|ENUM](...) types" do
@db.create_table(:cats) do
set :color, :elements => ['black', 'tricolor', 'grey']
end
@db.sqls.should == ["CREATE TABLE cats (color set('black', 'tricolor', 'grey'))"]
end
specify "should accept varchar size" do
@db.create_table(:cats) do
varchar :name
end
@db.sqls.should == ["CREATE TABLE cats (name varchar(255))"]
@db.sqls.clear
@db.create_table(:cats) do
varchar :name, :size => 51
end
@db.sqls.should == ["CREATE TABLE cats (name varchar(51))"]
end
specify "should use double precision for double type" do
@db.create_table(:cats) do
double :name
end
@db.sqls.should == ["CREATE TABLE cats (name double precision)"]
end
specify "should accept foreign keys without options" do
@db.create_table(:cats) do
foreign_key :project_id
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer)"]
end
specify "should accept foreign keys with options" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects)"]
end
specify "should accept foreign keys with separate table argument" do
@db.create_table(:cats) do
foreign_key :project_id, :projects, :default=>3
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer DEFAULT 3 REFERENCES projects)"]
end
specify "should raise an error if the table argument to foreign_key isn't a hash, symbol, or nil" do
proc{@db.create_table(:cats){foreign_key :project_id, Object.new, :default=>3}}.should raise_error(Sequel::Error)
end
specify "should accept foreign keys with arbitrary keys" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :key => :id
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects(id))"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :key => :zzz
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects(zzz))"]
end
specify "should accept foreign keys with ON DELETE clause" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :restrict
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE RESTRICT)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :no_action
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE NO ACTION)"]
    @db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :set_null
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET NULL)"]
    @db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :set_default
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET DEFAULT)"]
@db.sqls.clear
end
specify "should accept foreign keys with ON UPDATE clause" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :restrict
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE RESTRICT)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :cascade
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE CASCADE)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :no_action
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE NO ACTION)"]
    @db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :set_null
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET NULL)"]
    @db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :set_default
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET DEFAULT)"]
@db.sqls.clear
end
specify "should accept inline index definition" do
@db.create_table(:cats) do
integer :id, :index => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"]
end
specify "should accept inline index definition for foreign keys" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade, :index => true
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)",
"CREATE INDEX cats_project_id_index ON cats (project_id)"]
end
specify "should accept index definitions" do
@db.create_table(:cats) do
integer :id
index :id
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"]
end
specify "should accept unique index definitions" do
@db.create_table(:cats) do
text :name
unique :name
end
@db.sqls.should == ["CREATE TABLE cats (name text, UNIQUE (name))"]
end
specify "should raise on full-text index definitions" do
proc {
@db.create_table(:cats) do
text :name
full_text_index :name
end
}.should raise_error(Sequel::Error)
end
specify "should raise on spatial index definitions" do
proc {
@db.create_table(:cats) do
point :geom
spatial_index :geom
end
}.should raise_error(Sequel::Error)
end
specify "should raise on partial index definitions" do
proc {
@db.create_table(:cats) do
text :name
index :name, :where => {:something => true}
end
}.should raise_error(Sequel::Error)
end
specify "should raise index definitions with type" do
proc {
@db.create_table(:cats) do
text :name
index :name, :type => :hash
end
}.should raise_error(Sequel::Error)
end
specify "should ignore errors if the database raises an error on an index creation statement and the :ignore_index_errors option is used" do
@db.meta_def(:execute_ddl){|*a| raise Sequel::DatabaseError if /blah/.match(a.first); super(*a)}
lambda{@db.create_table(:cats){Integer :id; index :blah; index :id}}.should raise_error(Sequel::DatabaseError)
@db.sqls.should == ['CREATE TABLE cats (id integer)']
@db.sqls.clear
lambda{@db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id}}.should_not raise_error(Sequel::DatabaseError)
@db.sqls.should == ['CREATE TABLE cats (id integer)', 'CREATE INDEX cats_id_index ON cats (id)']
end
specify "should accept multiple index definitions" do
@db.create_table(:cats) do
integer :id
index :id
index :name
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)", "CREATE INDEX cats_name_index ON cats (name)"]
end
specify "should accept functional indexes" do
@db.create_table(:cats) do
integer :id
index :lower.sql_function(:name)
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_lower_name__index ON cats (lower(name))"]
end
specify "should accept indexes with identifiers" do
@db.create_table(:cats) do
integer :id
index :lower__name.identifier
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_lower__name_index ON cats (lower__name)"]
end
specify "should accept custom index names" do
@db.create_table(:cats) do
integer :id
index :id, :name => 'abc'
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX abc ON cats (id)"]
end
specify "should accept unique index definitions" do
@db.create_table(:cats) do
integer :id
index :id, :unique => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_index ON cats (id)"]
end
specify "should accept composite index definitions" do
@db.create_table(:cats) do
integer :id
index [:id, :name], :unique => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_name_index ON cats (id, name)"]
end
specify "should accept unnamed constraint definitions with blocks" do
@db.create_table(:cats) do
integer :score
check {(:x.sql_number > 0) & (:y.sql_number < 1)}
end
@db.sqls.should == ["CREATE TABLE cats (score integer, CHECK ((x > 0) AND (y < 1)))"]
end
specify "should accept unnamed constraint definitions" do
@db.create_table(:cats) do
check 'price < ?', 100
end
@db.sqls.should == ["CREATE TABLE cats (CHECK (price < 100))"]
end
specify "should accept hash constraints" do
@db.create_table(:cats) do
check :price=>100
end
@db.sqls.should == ["CREATE TABLE cats (CHECK (price = 100))"]
end
specify "should accept named constraint definitions" do
@db.create_table(:cats) do
integer :score
constraint :valid_score, 'score <= 100'
end
@db.sqls.should == ["CREATE TABLE cats (score integer, CONSTRAINT valid_score CHECK (score <= 100))"]
end
specify "should accept named constraint definitions with block" do
@db.create_table(:cats) do
constraint(:blah_blah) {(:x.sql_number > 0) & (:y.sql_number < 1)}
end
@db.sqls.should == ["CREATE TABLE cats (CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1)))"]
end
specify "should raise an error if an invalid constraint type is used" do
proc{@db.create_table(:cats){unique [:a, :b], :type=>:bb}}.should raise_error(Sequel::Error)
end
specify "should accept composite primary keys" do
@db.create_table(:cats) do
integer :a
integer :b
primary_key [:a, :b]
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, PRIMARY KEY (a, b))"]
end
specify "should accept named composite primary keys" do
@db.create_table(:cats) do
integer :a
integer :b
primary_key [:a, :b], :name => :cpk
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cpk PRIMARY KEY (a, b))"]
end
specify "should accept composite foreign keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc)"]
end
specify "should accept named composite foreign keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :name => :cfk
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cfk FOREIGN KEY (a, b) REFERENCES abc)"]
end
specify "should accept composite foreign keys with arbitrary keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:real_a, :real_b]
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(real_a, real_b))"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:z, :x]
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(z, x))"]
end
specify "should accept composite foreign keys with on delete and on update clauses" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_delete => :cascade
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE CASCADE)"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_update => :no_action
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON UPDATE NO ACTION)"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_delete => :restrict, :on_update => :set_default
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE RESTRICT ON UPDATE SET DEFAULT)"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:x, :y], :on_delete => :set_null, :on_update => :set_null
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(x, y) ON DELETE SET NULL ON UPDATE SET NULL)"]
end
end
context "DB#create_table!" do
before do
@db = SchemaDummyDatabase.new
end
specify "should drop the table and then create it" do
@db.create_table!(:cats) {}
@db.sqls.should == ['DROP TABLE cats', 'CREATE TABLE cats ()']
end
end
context "DB#create_table?" do
before do
@db = SchemaDummyDatabase.new
end
specify "should not create the table if the table already exists" do
@db.meta_def(:table_exists?){|a| true}
@db.create_table?(:cats){|*a|}
@db.sqls.should == nil
end
specify "should create the table if the table doesn't already exist" do
@db.meta_def(:table_exists?){|a| false}
@db.create_table?(:cats){|*a|}
@db.sqls.should == ['CREATE TABLE cats ()']
end
end
context "DB#drop_table" do
before do
@db = SchemaDummyDatabase.new
end
specify "should generate a DROP TABLE statement" do
@db.drop_table :cats
@db.sqls.should == ['DROP TABLE cats']
end
end
context "DB#alter_table" do
before do
@db = SchemaDummyDatabase.new
end
specify "should allow adding not null constraint" do
@db.alter_table(:cats) do
set_column_allow_null :score, false
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score SET NOT NULL"]
end
specify "should allow droping not null constraint" do
@db.alter_table(:cats) do
set_column_allow_null :score, true
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score DROP NOT NULL"]
end
specify "should support add_column" do
@db.alter_table(:cats) do
add_column :score, :integer
end
@db.sqls.should == ["ALTER TABLE cats ADD COLUMN score integer"]
end
specify "should support add_constraint" do
@db.alter_table(:cats) do
add_constraint :valid_score, 'score <= 100'
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)"]
end
specify "should support add_constraint with block" do
@db.alter_table(:cats) do
add_constraint(:blah_blah) {(:x.sql_number > 0) & (:y.sql_number < 1)}
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1))"]
end
specify "should support add_unique_constraint" do
@db.alter_table(:cats) do
add_unique_constraint [:a, :b]
end
@db.sqls.should == ["ALTER TABLE cats ADD UNIQUE (a, b)"]
@db.sqls.clear
@db.alter_table(:cats) do
add_unique_constraint [:a, :b], :name => :ab_uniq
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT ab_uniq UNIQUE (a, b)"]
end
specify "should support add_foreign_key" do
@db.alter_table(:cats) do
add_foreign_key :node_id, :nodes
end
@db.sqls.should == ["ALTER TABLE cats ADD COLUMN node_id integer REFERENCES nodes"]
end
specify "should support add_foreign_key with composite foreign keys" do
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props
end
@db.sqls.should == ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"]
@db.sqls.clear
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :name => :cfk
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT cfk FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"]
@db.sqls.clear
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :key => [:nid, :pid]
end
@db.sqls.should == ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props(nid, pid)"]
@db.sqls.clear
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :on_delete => :restrict, :on_update => :cascade
end
@db.sqls.should == ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props ON DELETE RESTRICT ON UPDATE CASCADE"]
end
specify "should support add_index" do
@db.alter_table(:cats) do
add_index :name
end
@db.sqls.should == ["CREATE INDEX cats_name_index ON cats (name)"]
end
specify "should ignore errors if the database raises an error on an add_index call and the :ignore_errors option is used" do
@db.meta_def(:execute_ddl){|*a| raise Sequel::DatabaseError}
lambda{@db.add_index(:cats, :id)}.should raise_error(Sequel::DatabaseError)
lambda{@db.add_index(:cats, :id, :ignore_errors=>true)}.should_not raise_error(Sequel::DatabaseError)
@db.sqls.should == nil
end
specify "should support add_primary_key" do
@db.alter_table(:cats) do
add_primary_key :id
end
@db.sqls.should == ["ALTER TABLE cats ADD COLUMN id integer PRIMARY KEY AUTOINCREMENT"]
end
specify "should support add_primary_key with composite primary keys" do
@db.alter_table(:cats) do
add_primary_key [:id, :type]
end
@db.sqls.should == ["ALTER TABLE cats ADD PRIMARY KEY (id, type)"]
@db.sqls.clear
@db.alter_table(:cats) do
add_primary_key [:id, :type], :name => :cpk
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT cpk PRIMARY KEY (id, type)"]
end
specify "should support drop_column" do
@db.alter_table(:cats) do
drop_column :score
end
@db.sqls.should == ["ALTER TABLE cats DROP COLUMN score"]
end
specify "should support drop_constraint" do
@db.alter_table(:cats) do
drop_constraint :valid_score
end
@db.sqls.should == ["ALTER TABLE cats DROP CONSTRAINT valid_score"]
end
specify "should support drop_index" do
@db.alter_table(:cats) do
drop_index :name
end
@db.sqls.should == ["DROP INDEX cats_name_index"]
end
specify "should support drop_index with a given name" do
@db.alter_table(:cats) do
drop_index :name, :name=>:blah_blah
end
@db.sqls.should == ["DROP INDEX blah_blah"]
end
specify "should support rename_column" do
@db.alter_table(:cats) do
rename_column :name, :old_name
end
@db.sqls.should == ["ALTER TABLE cats RENAME COLUMN name TO old_name"]
end
specify "should support set_column_default" do
@db.alter_table(:cats) do
set_column_default :score, 3
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score SET DEFAULT 3"]
end
specify "should support set_column_type" do
@db.alter_table(:cats) do
set_column_type :score, :real
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score TYPE real"]
end
specify "should support set_column_type with options" do
@db.alter_table(:cats) do
set_column_type :score, :integer, :unsigned=>true
set_column_type :score, :varchar, :size=>30
set_column_type :score, :enum, :elements=>['a', 'b']
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score TYPE integer UNSIGNED",
"ALTER TABLE cats ALTER COLUMN score TYPE varchar(30)",
"ALTER TABLE cats ALTER COLUMN score TYPE enum('a', 'b')"]
end
end
context "Schema Parser" do
before do
@sqls = []
@db = Sequel::Database.new
end
specify "should raise an error if there are no columns" do
@db.meta_def(:schema_parse_table) do |t, opts|
[]
end
proc{@db.schema(:x)}.should raise_error(Sequel::Error)
end
specify "should parse the schema correctly for a single table" do
sqls = @sqls
proc{@db.schema(:x)}.should raise_error(Sequel::Error)
@db.meta_def(:schema_parse_table) do |t, opts|
sqls << t
[[:a, {:db_type=>t.to_s}]]
end
@db.schema(:x).should == [[:a, {:db_type=>"x", :ruby_default=>nil}]]
@sqls.should == ['x']
@db.schema(:x).should == [[:a, {:db_type=>"x", :ruby_default=>nil}]]
@sqls.should == ['x']
@db.schema(:x, :reload=>true).should == [[:a, {:db_type=>"x", :ruby_default=>nil}]]
@sqls.should == ['x', 'x']
end
specify "should convert various types of table name arguments" do
@db.meta_def(:schema_parse_table) do |t, opts|
[[t, {:db_type=>t}]]
end
s1 = @db.schema(:x)
s1.should == [['x', {:db_type=>'x', :ruby_default=>nil}]]
@db.schema(:x).object_id.should == s1.object_id
@db.schema(:x.identifier).object_id.should == s1.object_id
s2 = @db.schema(:x__y)
s2.should == [['y', {:db_type=>'y', :ruby_default=>nil}]]
@db.schema(:x__y).object_id.should == s2.object_id
@db.schema(:y.qualify(:x)).object_id.should == s2.object_id
end
specify "should correctly parse all supported data types" do
@db.meta_def(:schema_parse_table) do |t, opts|
[[:x, {:type=>schema_column_type(t.to_s)}]]
end
@db.schema(:tinyint).first.last[:type].should == :integer
@db.schema(:interval).first.last[:type].should == :interval
@db.schema(:int).first.last[:type].should == :integer
@db.schema(:integer).first.last[:type].should == :integer
@db.schema(:bigint).first.last[:type].should == :integer
@db.schema(:smallint).first.last[:type].should == :integer
@db.schema(:character).first.last[:type].should == :string
@db.schema(:"character varying").first.last[:type].should == :string
@db.schema(:varchar).first.last[:type].should == :string
@db.schema(:"varchar(255)").first.last[:type].should == :string
@db.schema(:text).first.last[:type].should == :string
@db.schema(:date).first.last[:type].should == :date
@db.schema(:datetime).first.last[:type].should == :datetime
@db.schema(:timestamp).first.last[:type].should == :datetime
@db.schema(:"timestamp with time zone").first.last[:type].should == :datetime
@db.schema(:"timestamp without time zone").first.last[:type].should == :datetime
@db.schema(:time).first.last[:type].should == :time
@db.schema(:"time with time zone").first.last[:type].should == :time
@db.schema(:"time without time zone").first.last[:type].should == :time
@db.schema(:boolean).first.last[:type].should == :boolean
@db.schema(:bit).first.last[:type].should == :boolean
@db.schema(:real).first.last[:type].should == :float
@db.schema(:float).first.last[:type].should == :float
@db.schema(:double).first.last[:type].should == :float
@db.schema(:"double precision").first.last[:type].should == :float
@db.schema(:numeric).first.last[:type].should == :decimal
@db.schema(:decimal).first.last[:type].should == :decimal
@db.schema(:money).first.last[:type].should == :decimal
@db.schema(:bytea).first.last[:type].should == :blob
@db.schema(:blob).first.last[:type].should == :blob
@db.schema(:image).first.last[:type].should == :blob
@db.schema(:nchar).first.last[:type].should == :string
@db.schema(:nvarchar).first.last[:type].should == :string
@db.schema(:ntext).first.last[:type].should == :string
@db.schema(:smalldatetime).first.last[:type].should == :datetime
@db.schema(:smallmoney).first.last[:type].should == :decimal
@db.schema(:binary).first.last[:type].should == :blob
@db.schema(:varbinary).first.last[:type].should == :blob
@db.schema(:enum).first.last[:type].should == :enum
end
end
| 34.707581 | 273 | 0.65526 |
18faa368908d76ca7a86f4eefedb7de949e08196 | 839 | class UsersController < ApplicationController
before_action :logged_out?, only: [:show, :songs]
def new
if logged_in?
redirect_to root_path
end
@user = User.new
end
def create
@user = User.new(user_params)
if @user.save
session[:user_id] = @user.id
redirect_to user_path(@user)
else
render :new
end
end
def show
@user = current_user
end
def add_song
@song = Song.find(params[:id])
if !current_user.songs.include?(@song)
current_user.songs << @song
end
redirect_to user_songs_path(current_user)
end
private
def user_params
params.require(:user).permit(:name, :email, :password)
end
end | 22.078947 | 63 | 0.54112 |
288198f072576c1a2ef3cf1243ea4f4a7a225d28 | 6,589 | require 'active_record'
require 'after_commit'
require 'yaml'
require 'cgi'
require 'thinking_sphinx/core/array'
require 'thinking_sphinx/core/string'
require 'thinking_sphinx/property'
require 'thinking_sphinx/active_record'
require 'thinking_sphinx/association'
require 'thinking_sphinx/attribute'
require 'thinking_sphinx/configuration'
require 'thinking_sphinx/context'
require 'thinking_sphinx/excerpter'
require 'thinking_sphinx/facet'
require 'thinking_sphinx/class_facet'
require 'thinking_sphinx/facet_search'
require 'thinking_sphinx/field'
require 'thinking_sphinx/index'
require 'thinking_sphinx/source'
require 'thinking_sphinx/rails_additions'
require 'thinking_sphinx/search'
require 'thinking_sphinx/search_methods'
require 'thinking_sphinx/deltas'
require 'thinking_sphinx/adapters/abstract_adapter'
require 'thinking_sphinx/adapters/mysql_adapter'
require 'thinking_sphinx/adapters/postgresql_adapter'
ActiveRecord::Base.send(:include, ThinkingSphinx::ActiveRecord)
Merb::Plugins.add_rakefiles(
File.join(File.dirname(__FILE__), "thinking_sphinx", "tasks")
) if defined?(Merb)
module ThinkingSphinx
# A ConnectionError will get thrown when a connection to Sphinx can't be
# made.
class ConnectionError < StandardError
end
# A StaleIdsException is thrown by Collection.instances_from_matches if there
# are records in Sphinx but not in the database, so the search can be retried.
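  #
  # A hypothetical handling sketch (the model name and search call are
  # assumptions for illustration, not part of this library):
  #
  #   tries = 0
  #   begin
  #     results = Article.search params[:query]
  #   rescue ThinkingSphinx::StaleIdsException => err
  #     # err.ids lists the Sphinx document ids with no matching database rows.
  #     retry if (tries += 1) < 2
  #   end
  #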
class StaleIdsException < StandardError
attr_accessor :ids
def initialize(ids)
self.ids = ids
end
end
# The current version of Thinking Sphinx.
#
# @return [String] The version number as a string
#
def self.version
open(File.join(File.dirname(__FILE__), '../VERSION')) { |f|
f.read.strip
}
end
# The collection of indexed models. Keep in mind that Rails lazily loads
# its classes, so this may not actually be populated with _all_ the models
# that have Sphinx indexes.
def self.context
if Thread.current[:thinking_sphinx_context].nil?
Thread.current[:thinking_sphinx_context] = ThinkingSphinx::Context.new
Thread.current[:thinking_sphinx_context].prepare
end
Thread.current[:thinking_sphinx_context]
end
def self.unique_id_expression(offset = nil)
"* #{context.indexed_models.size} + #{offset || 0}"
end
# Check if index definition is disabled.
#
def self.define_indexes?
if Thread.current[:thinking_sphinx_define_indexes].nil?
Thread.current[:thinking_sphinx_define_indexes] = true
end
Thread.current[:thinking_sphinx_define_indexes]
end
# Enable/disable indexes - you may want to do this while migrating data.
#
# ThinkingSphinx.define_indexes = false
#
def self.define_indexes=(value)
Thread.current[:thinking_sphinx_define_indexes] = value
end
# Check if delta indexing is enabled.
#
def self.deltas_enabled?
if Thread.current[:thinking_sphinx_deltas_enabled].nil?
Thread.current[:thinking_sphinx_deltas_enabled] = (
ThinkingSphinx::Configuration.environment != "test"
)
end
Thread.current[:thinking_sphinx_deltas_enabled]
end
# Enable/disable all delta indexing.
#
# ThinkingSphinx.deltas_enabled = false
#
def self.deltas_enabled=(value)
Thread.current[:thinking_sphinx_deltas_enabled] = value
end
# Check if updates are enabled. True by default, unless within the test
# environment.
#
def self.updates_enabled?
if Thread.current[:thinking_sphinx_updates_enabled].nil?
Thread.current[:thinking_sphinx_updates_enabled] = (
ThinkingSphinx::Configuration.environment != "test"
)
end
Thread.current[:thinking_sphinx_updates_enabled]
end
# Enable/disable updates to Sphinx
#
# ThinkingSphinx.updates_enabled = false
#
def self.updates_enabled=(value)
Thread.current[:thinking_sphinx_updates_enabled] = value
end
def self.suppress_delta_output?
Thread.current[:thinking_sphinx_suppress_delta_output] ||= false
end
def self.suppress_delta_output=(value)
Thread.current[:thinking_sphinx_suppress_delta_output] = value
end
# Checks to see if MySQL will allow simplistic GROUP BY statements. If not,
# or if not using MySQL, this will return false.
#
def self.use_group_by_shortcut?
if Thread.current[:thinking_sphinx_use_group_by_shortcut].nil?
Thread.current[:thinking_sphinx_use_group_by_shortcut] = !!(
mysql? && ::ActiveRecord::Base.connection.select_all(
"SELECT @@global.sql_mode, @@session.sql_mode;"
).all? { |key,value| value.nil? || value[/ONLY_FULL_GROUP_BY/].nil? }
)
end
Thread.current[:thinking_sphinx_use_group_by_shortcut]
end
# An indication of whether Sphinx is running on a remote machine instead of
# the same machine.
#
def self.remote_sphinx?
Thread.current[:thinking_sphinx_remote_sphinx] ||= false
end
# Tells Thinking Sphinx that Sphinx is running on a different machine, and
# thus it can't reliably guess whether it is running or not (ie: the
# #sphinx_running? method), and so just assumes it is.
#
# Useful for multi-machine deployments. Set it in your production.rb file.
#
# ThinkingSphinx.remote_sphinx = true
#
def self.remote_sphinx=(value)
Thread.current[:thinking_sphinx_remote_sphinx] = value
end
# Check if Sphinx is running. If remote_sphinx is set to true (indicating
# Sphinx is on a different machine), this will always return true, and you
# will have to handle any connection errors yourself.
#
def self.sphinx_running?
remote_sphinx? || sphinx_running_by_pid?
end
# Check if Sphinx is actually running, provided the pid is on the same
# machine as this code.
#
def self.sphinx_running_by_pid?
!!sphinx_pid && pid_active?(sphinx_pid)
end
def self.sphinx_pid
if File.exists?(ThinkingSphinx::Configuration.instance.pid_file)
File.read(ThinkingSphinx::Configuration.instance.pid_file)[/\d+/]
else
nil
end
end
def self.pid_active?(pid)
!!Process.kill(0, pid.to_i)
rescue Exception => e
false
end
def self.microsoft?
RUBY_PLATFORM =~ /mswin/
end
def self.jruby?
defined?(JRUBY_VERSION)
end
def self.mysql?
::ActiveRecord::Base.connection.class.name.demodulize == "MysqlAdapter" ||
::ActiveRecord::Base.connection.class.name.demodulize == "MysqlplusAdapter" || (
jruby? && ::ActiveRecord::Base.connection.config[:adapter] == "jdbcmysql"
)
end
extend ThinkingSphinx::SearchMethods::ClassMethods
end
| 29.415179 | 84 | 0.735468 |
1c56cdd1868bd4bbdf9ddf0ef8ade46ef344b560 | 270 | # encoding: utf-8
require 'spec_helper'
describe Function::Numeric::Exponentiation, '.call' do
subject { object.call(left, right) }
let(:object) { described_class }
let(:left) { 2 }
let(:right) { 2 }
it { should eql(4) }
end
| 19.285714 | 54 | 0.574074 |
b93a98c48d367962450616503e9cd553dfe6329f | 475 | # Test class to test observer pattern with in-built observer
require 'rspec'
require_relative '../src/employee'
require_relative '../src/payroll'
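# The Employee and Payroll classes live under ../src and are not included in
# this file. A minimal sketch of what the expectations below assume, using
# Ruby's built-in Observable module (the implementation details here are an
# assumption, not the actual source):
#
#   require 'observer'
#
#   class Employee
#     include Observable
#     attr_reader :name, :salary
#
#     def initialize(name, salary)
#       @name, @salary = name, salary
#     end
#
#     def salary=(new_salary)
#       old_salary = @salary
#       @salary = new_salary
#       changed
#       notify_observers(old_salary, new_salary)
#     end
#   end
#
#   class Payroll
#     attr_reader :old_salary, :salary
#
#     # Called by Observable#notify_observers
#     def update(old_salary, new_salary)
#       @old_salary = old_salary
#       @salary = new_salary
#     end
#   end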
describe 'Observable' do
it 'updates observers' do
name = 'Salone Gupta'
salary = 50000
payroll = Payroll.new
employee = Employee.new(name, salary)
employee.add_observer(payroll)
employee.salary = 70000
expect(payroll.old_salary).to eq(50000)
expect(payroll.salary).to eq(70000)
end
end | 20.652174 | 60 | 0.711579 |
284d60c555623ef930368c7d55dac21ab6d4f2ce | 2,423 | require 'registry_record'
require 'source_record'
require 'statistical_abstract'
require './header'
require 'pp'
# Parse enumchrons for Statistical Abstract registry records
deprecate_count = 0
source_count = 0
oclcnums = StatisticalAbstract.oclcs
# Each StatAb RegRec
RegistryRecord.where(oclcnum_t:{"$in":oclcnums}, deprecated_timestamp:{"$exists":0}).no_timeout.each do |reg|
#if we can parse it, then we should replace it. ignore if we can't.
ec = StatisticalAbstract.parse_ec(reg.enumchron_display)
if ec.nil?
next
end
#parsed and exploded replacement ECs.
new_ids = []
StatisticalAbstract.explode(ec).keys.uniq.each do | new_ec |
r = RegistryRecord.new(reg.source_record_ids, new_ec, 'Statistical Abstract enumchron parsing.', [reg.registry_id])
r.series = "Statistical Abstract"
r.save
new_ids << r.registry_id
end
reg.deprecate( 'Improved Statistical Abstract enum/chron parsing.', new_ids)
deprecate_count +=1
end
#lot of duplicates, merge them
merge_count = 0
all_reg_ids = RegistryRecord.where(oclcnum_t:{"$in":oclcnums},
deprecated_timestamp:{"$exists":0}).no_timeout.pluck(:registry_id)
all_reg_ids.each do | reg_id |
reg = RegistryRecord.where(registry_id:reg_id,
deprecated_timestamp:{"$exists":0}).no_timeout.first
#possible we have already merged it
if !reg
next
end
#get all matching
group = RegistryRecord.where(oclcnum_t:{"$in":oclcnums},
deprecated_timestamp:{"$exists":0},
enumchron_display:reg.enumchron_display).no_timeout.pluck(:registry_id)
if group.count > 1
RegistryRecord.merge( group, reg.enumchron_display, "Statistical Abstract enumchron parsing/merging.")
merge_count += group.count
end
end
puts "merge count: #{merge_count}"
# Parse the individual SourceRecord enumchrons
SourceRecord.where(series:"StatisticalAbstract",deprecated_timestamp:{"$exists":0}).no_timeout.each do |src|
src.ec = src.extract_enum_chrons
if src.ec.keys.count > 0
src.enum_chrons = src.ec.collect do |k,fields|
if !fields['canonical'].nil?
fields['canonical']
else
fields['string']
end
end
else
src.enum_chrons = ['']
end
src.save
source_count += 1
end
puts "Deprecated records: #{deprecate_count}"
puts "Source records: #{source_count}"
| 29.54878 | 119 | 0.694593 |
1813b5625ed6ad372819110d60c2cee428156122 | 4,144 | class Devise::CasSessionsController < Devise::SessionsController
include DeviseCasAuthenticatable::SingleSignOut::DestroySession
  unless Rails.version =~ /^4/
unloadable
end
skip_before_action :verify_authenticity_token, only: [:single_sign_out]
def new
if memcache_checker.session_store_memcache? && !memcache_checker.alive?
raise "memcache is down, can't get session data from it"
end
redirect_to(cas_login_url)
end
def service
warden.authenticate!(:scope => resource_name)
if LoginPolicy.new(current_user).boiv?
redirect_to after_sign_in_path_for(current_user)
else
destroy message: t('devise.sessions.new.unauthorized')
end
end
def unregistered
end
def destroy message: nil
# if :cas_create_user is false a CAS session might be open but not signed_in
# in such case we destroy the session here
if signed_in?(resource_name)
sign_out(resource_name)
else
reset_session
end
if message
redirect_to(cas_logout_url, flash: {alert: message})
else
redirect_to(cas_logout_url)
end
end
def single_sign_out
if ::Devise.cas_enable_single_sign_out
session_index = read_session_index
if session_index
logger.debug "Intercepted single-sign-out request for CAS session #{session_index}."
session_id = ::DeviseCasAuthenticatable::SingleSignOut::Strategies.current_strategy.find_session_id_by_index(session_index)
if session_id
logger.debug "Found Session ID #{session_id} with index key #{session_index}"
destroy_cas_session(session_index, session_id)
end
else
logger.warn "Ignoring CAS single-sign-out request as no session index could be parsed from the parameters."
end
else
logger.warn "Ignoring CAS single-sign-out request as feature is not currently enabled."
end
head :ok
end
private
def read_session_index
if request.headers['CONTENT_TYPE'] =~ %r{^multipart/}
false
elsif request.post? && params['logoutRequest'] =~
%r{^<samlp:LogoutRequest.*?<samlp:SessionIndex>(.*)</samlp:SessionIndex>}m
$~[1]
else
false
end
end
def destroy_cas_session(session_index, session_id)
if destroy_session_by_id(session_id)
logger.debug "Destroyed session #{session_id} corresponding to service ticket #{session_index}."
end
::DeviseCasAuthenticatable::SingleSignOut::Strategies.current_strategy.delete_session_index(session_index)
end
def cas_login_url
flash.clear # Fix 2742
::Devise.cas_client.add_service_to_login_url(cas_service_url)
end
helper_method :cas_login_url
def request_url
return @request_url if @request_url
@request_url = request.protocol.dup
@request_url << request.host
@request_url << ":#{request.port.to_s}" unless request.port == 80
@request_url
end
def cas_destination_url
return unless ::Devise.cas_logout_url_param == 'destination'
if !::Devise.cas_destination_url.blank?
url = Devise.cas_destination_url
else
url = request_url.dup
url << after_sign_out_path_for(resource_name)
end
end
def cas_follow_url
return unless ::Devise.cas_logout_url_param == 'follow'
if !::Devise.cas_follow_url.blank?
url = Devise.cas_follow_url
else
url = request_url.dup
url << after_sign_out_path_for(resource_name)
end
end
def cas_service_url
if Rails.application.config.chouette_authentication_settings.try(:[], :cas_service_url)
return Rails.application.config.chouette_authentication_settings[:cas_service_url]
end
::Devise.cas_service_url(request.url.dup, devise_mapping)
end
def cas_logout_url
begin
::Devise.cas_client.logout_url(cas_destination_url, cas_follow_url, cas_service_url)
rescue ArgumentError
# Older rubycas-clients don't accept a service_url
::Devise.cas_client.logout_url(cas_destination_url, cas_follow_url)
end
end
def memcache_checker
@memcache_checker ||= DeviseCasAuthenticatable::MemcacheChecker.new(Rails.configuration)
end
end
| 29.390071 | 131 | 0.724903 |
f8faf2594b261184a9536518f519237cd1572bc8 | 2,876 | # encoding: utf-8
require 'ffaker/name'
module FFaker
# guapolo github.com/guapolo
module NameMX
include FFaker::Name
extend ModuleUtils
extend self
MALE_PREFIXES = %w(Sr. C.).freeze
FEMALE_PREFIXES = %w(Sra. Srita. C.).freeze
PREFIXES = %w(Sr. Sra. Srita. C.).freeze
# Full name according to gender and prefix, possibly with middle_name
def full_name(gender = :any, prefix = false)
if prefix
full_name_prefix(gender)
else
full_name_no_prefix(gender)
end
end
# Full name with prefix according to gender, possibly with middle_name
def full_name_prefix(gender = :any)
case gender
when :any then
case rand(9)
when 0, 3, 6, 8 then "#{female_prefix} #{female_name} #{paternal_last_names}"
else "#{male_prefix} #{male_name} #{paternal_last_names}"
end
when :male then "#{male_prefix} #{male_name} #{paternal_last_names}"
when :female then "#{female_prefix} #{female_name} #{paternal_last_names}"
else raise ArgumentError, 'Invalid gender, must be one of :any, :male, :female'
end
end
# Full name with no prefix according to gender, possibly with middle_name
def full_name_no_prefix(gender = :any)
case gender
when :any then
case rand(9)
when 0, 3, 6, 8 then "#{female_name} #{paternal_last_names}"
else "#{male_name} #{paternal_last_names}"
end
when :male then "#{male_name} #{paternal_last_names}"
when :female then "#{female_name} #{paternal_last_names}"
else raise ArgumentError, 'Invalid gender, must be one of :any, :male, :female'
end
end
# Male first name and possibly middle name
def male_name
case rand(9)
when 0, 5 then "#{first_name(:male)} #{middle_name(:male)}"
else first_name(:male)
end
end
# Female first name and possibly middle name
def female_name
case rand(9)
when 0, 5 then "#{first_name(:female)} #{middle_name(:female)}"
else first_name(:female)
end
end
# A single name according to gender parameter
def name(gender = :any)
case gender
when :any then (rand(2) == 0) ? name(:male) : name(:female)
when :male then fetch_sample(MALE_FIRST_NAMES)
when :female then fetch_sample(FEMALE_FIRST_NAMES)
else raise ArgumentError, 'Invalid gender, must be one of :any, :male, :female'
end
end
alias middle_name name
alias first_name name
# Father's and mother's last name
def paternal_last_names
"#{last_name} #{last_name}"
end
def last_name
fetch_sample(LAST_NAMES)
end
def prefix
fetch_sample(PREFIXES)
end
def male_prefix
fetch_sample(MALE_PREFIXES)
end
def female_prefix
fetch_sample(FEMALE_PREFIXES)
end
end
end
| 27.653846 | 85 | 0.644645 |
5d17aa85b78b745710bd116366d61c98a9eb0d26 | 2,370 | #
# Ops-Pipeline - Templates for automating the production and consumption of images
# and containers.
#
# Copyright 2016 Capital One Services, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'jenkins::master'
jenkins_user node['jenkins-master']['admin_username'] do
password node['jenkins-master']['admin_password']
full_name 'Jenkins admin user'
email node['jenkins-master']['admin_email']
public_keys [node['jenkins-master']['admin_public_key']]
end
node.run_state['jenkins_private_key'] =
node['jenkins-master']['admin_private_key']
jenkins_script 'configure email' do
command <<-EOH.gsub(/^ {4}/, '')
import jenkins.model.*
def jenkinsLocationConfiguration = JenkinsLocationConfiguration.get()
    jenkinsLocationConfiguration.setAdminAddress("#{node['jenkins-master']['admin_email']}")
jenkinsLocationConfiguration.save()
EOH
action :nothing
end
node['jenkins-master']['users'].to_a.each do |user, hash|
jenkins_user user do
full_name hash['full_name']
email hash['email']
password hash['password']
public_keys hash['public_keys']
end
end
node.run_state['jenkins_private_key'] =
node['jenkins-master']['admin_private_key']
node['jenkins-master']['credentials'].to_a.each do |user, hash|
if hash['password']
jenkins_password_credentials user.dup do
action :create
id hash['id']
description hash['description']
password hash['password']
end
end
if hash['private_key']
jenkins_private_key_credentials user.dup do
action :create
id hash['id']
description hash['description']
private_key hash['private_key']
end
end
end
node['jenkins-master']['plugins'].to_a.each do |plugin|
plugin, version = plugin.split('=')
jenkins_plugin plugin do
version version if version
notifies :restart, 'service[jenkins]'
end
end | 29.625 | 89 | 0.727004 |
d5d5499373137ce2e5676095e6d20caa3e0a951e | 154 | if defined?(Capistrano::Configuration.instance)
require 'a9n/capistrano/ver2x.rb'
else
load File.expand_path('../capistrano/tasks.cap', __FILE__)
end
| 25.666667 | 60 | 0.772727 |
2165932d991c997f5ea0149ad65505264c76a974 | 1,119 | describe :dir_pos, shared: true do
before :each do
@dir = Dir.open DirSpecs.mock_dir
end
after :each do
@dir.close rescue nil
end
it "returns an Integer representing the current position in the directory" do
@dir.send(@method).should be_kind_of(Integer)
@dir.send(@method).should be_kind_of(Integer)
@dir.send(@method).should be_kind_of(Integer)
end
it "returns a different Integer if moved from previous position" do
a = @dir.send(@method)
@dir.read
b = @dir.send(@method)
a.should be_kind_of(Integer)
b.should be_kind_of(Integer)
a.should_not == b
end
end
describe :dir_pos_set, shared: true do
before :each do
@dir = Dir.open DirSpecs.mock_dir
end
after :each do
@dir.close
end
# NOTE: #seek/#pos= to a position not returned by #tell/#pos is undefined
# and should not be spec'd.
it "moves the read position to a previously obtained position" do
pos = @dir.pos
a = @dir.read
b = @dir.read
@dir.send @method, pos
c = @dir.read
a.should_not == b
b.should_not == c
c.should == a
end
end
| 21.519231 | 79 | 0.65773 |
1db6ca6b05320db694dac7744370d97466f9b17e | 7,588 | class GitPushService < BaseService
attr_accessor :push_data, :push_commits
include Gitlab::CurrentSettings
include Gitlab::Access
# The N most recent commits to process in a single push payload.
PROCESS_COMMIT_LIMIT = 100
# This method will be called after each git update
# and only if the provided user and project are present in GitLab.
#
# All callbacks for post receive action should be placed here.
#
# Next, this method:
# 1. Creates the push event
# 2. Updates merge requests
# 3. Recognizes cross-references from commit messages
# 4. Executes the project's webhooks
# 5. Executes the project's services
# 6. Checks if the project's main language has changed
#
def execute
@project.repository.after_create if @project.empty_repo?
@project.repository.after_push_commit(branch_name)
if push_remove_branch?
@project.repository.after_remove_branch
@push_commits = []
elsif push_to_new_branch?
@project.repository.after_create_branch
# Re-find the pushed commits.
if default_branch?
# Initial push to the default branch. Take the full history of that branch as "newly pushed".
process_default_branch
else
# Use the pushed commits that aren't reachable by the default branch
# as a heuristic. This may include more commits than are actually pushed, but
# that shouldn't matter because we check for existing cross-references later.
@push_commits = @project.repository.commits_between(@project.default_branch, params[:newrev])
# don't process commits for the initial push to the default branch
process_commit_messages
end
elsif push_to_existing_branch?
# Collect data for this git push
@push_commits = @project.repository.commits_between(params[:oldrev], params[:newrev])
process_commit_messages
# Update the bare repositories info/attributes file using the contents of the default branches
# .gitattributes file
update_gitattributes if default_branch?
end
if current_application_settings.elasticsearch_indexing? && default_branch?
ElasticCommitIndexerWorker.perform_async(@project.id, params[:oldrev], params[:newrev])
end
execute_related_hooks
perform_housekeeping
update_remote_mirrors
update_caches
update_signatures
end
def update_gitattributes
@project.repository.copy_gitattributes(params[:ref])
end
def update_caches
if default_branch?
if push_to_new_branch?
# If this is the initial push into the default branch, the file type caches
# will already be reset as a result of `Project#change_head`.
types = []
else
paths = Set.new
@push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit|
commit.raw_deltas.each do |diff|
paths << diff.new_path
end
end
types = Gitlab::FileDetector.types_in_paths(paths.to_a)
end
else
types = []
end
ProjectCacheWorker.perform_async(@project.id, types, [:commit_count, :repository_size])
end
def update_signatures
commit_shas = @push_commits.last(PROCESS_COMMIT_LIMIT).map(&:sha)
return if commit_shas.empty?
shas_with_cached_signatures = GpgSignature.where(commit_sha: commit_shas).pluck(:commit_sha)
commit_shas -= shas_with_cached_signatures
return if commit_shas.empty?
commit_shas = Gitlab::Git::Commit.shas_with_signatures(project.repository, commit_shas)
commit_shas.each do |sha|
CreateGpgSignatureWorker.perform_async(sha, project.id)
end
end
# Schedules processing of commit messages.
def process_commit_messages
default = default_branch?
@push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit|
if commit.matches_cross_reference_regex?
ProcessCommitWorker
.perform_async(project.id, current_user.id, commit.to_hash, default)
end
end
end
protected
def update_remote_mirrors
return unless @project.has_remote_mirror?
@project.mark_stuck_remote_mirrors_as_failed!
@project.update_remote_mirrors
end
def execute_related_hooks
# Update merge requests that may be affected by this push. A new branch
# could cause the last commit of a merge request to change.
#
UpdateMergeRequestsWorker
.perform_async(@project.id, current_user.id, params[:oldrev], params[:newrev], params[:ref])
mirror_update = @project.mirror? && @project.repository.up_to_date_with_upstream?(branch_name)
EventCreateService.new.push(@project, current_user, build_push_data)
Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute(:push, mirror_update: mirror_update)
SystemHookPushWorker.perform_async(build_push_data.dup, :push_hooks)
@project.execute_hooks(build_push_data.dup, :push_hooks)
@project.execute_services(build_push_data.dup, :push_hooks)
if push_remove_branch?
AfterBranchDeleteService
.new(project, current_user)
.execute(branch_name)
end
end
def perform_housekeeping
housekeeping = Projects::HousekeepingService.new(@project)
housekeeping.increment!
housekeeping.execute if housekeeping.needed?
rescue Projects::HousekeepingService::LeaseTaken
end
def process_default_branch
@push_commits_count = project.repository.commit_count_for_ref(params[:ref])
offset = [@push_commits_count - PROCESS_COMMIT_LIMIT, 0].max
@push_commits = project.repository.commits(params[:newrev], offset: offset, limit: PROCESS_COMMIT_LIMIT)
# Ensure HEAD points to the default branch in case it is not master
project.change_head(branch_name)
# Set protection on the default branch if configured
if current_application_settings.default_branch_protection != PROTECTION_NONE && !ProtectedBranch.protected?(@project, @project.default_branch)
params = {
name: @project.default_branch,
push_access_levels_attributes: [{
access_level: current_application_settings.default_branch_protection == PROTECTION_DEV_CAN_PUSH ? Gitlab::Access::DEVELOPER : Gitlab::Access::MASTER
}],
merge_access_levels_attributes: [{
access_level: current_application_settings.default_branch_protection == PROTECTION_DEV_CAN_MERGE ? Gitlab::Access::DEVELOPER : Gitlab::Access::MASTER
}]
}
ProtectedBranches::CreateService.new(@project, current_user, params).execute
end
end
def build_push_data
@push_data ||= Gitlab::DataBuilder::Push.build(
@project,
current_user,
params[:oldrev],
params[:newrev],
params[:ref],
@push_commits,
commits_count: @push_commits_count)
end
def push_to_existing_branch?
# Return if this is not a push to a branch (e.g. new commits)
Gitlab::Git.branch_ref?(params[:ref]) && !Gitlab::Git.blank_ref?(params[:oldrev])
end
def push_to_new_branch?
Gitlab::Git.branch_ref?(params[:ref]) && Gitlab::Git.blank_ref?(params[:oldrev])
end
def push_remove_branch?
Gitlab::Git.branch_ref?(params[:ref]) && Gitlab::Git.blank_ref?(params[:newrev])
end
def push_to_branch?
Gitlab::Git.branch_ref?(params[:ref])
end
def default_branch?
Gitlab::Git.branch_ref?(params[:ref]) &&
(Gitlab::Git.ref_name(params[:ref]) == project.default_branch || project.default_branch.nil?)
end
def commit_user(commit)
commit.author || current_user
end
def branch_name
@branch_name ||= Gitlab::Git.ref_name(params[:ref])
end
end
| 32.706897 | 159 | 0.72272 |
26257c2b0b2dd9c9709db52621b6c25f9fa7058e | 108 | module KonoUtils
module Object
module Cell
class Shows::Base < Base
end
end
end
end | 12 | 30 | 0.62037 |