hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
edb2b54458a493364ca12eb49bc41fc658363ec8 | 755 | Pod::Spec.new do |s|
s.name = 'MHNotificationHelper'
s.version = '0.9.2'
s.license = 'MIT'
s.homepage = 'https://github.com/mariohahn/MHNotificationHelper'
s.author = {
'Mario Hahn' => '[email protected]'
}
s.summary = 'ViewController to describe the User how to turn on the Notifications.'
s.platform = :ios
s.source = {
:git => 'https://github.com/mariohahn/MHNotificationHelper.git',
:tag => 'v0.9.2'
}
s.dependency "Masonry"
s.resources = "MHNotificationHelper/MHNotificationHelper/MHNotificationHelper/**/*.{png,bundle}"
s.source_files = ['MHNotificationHelper/MHNotificationHelper/MHNotificationHelper/**/*.{h,m}']
s.ios.deployment_target = '6.0'
s.requires_arc = true
end | 32.826087 | 98 | 0.660927 |
615755bd97ba13421eab5d72cd64e9f94d764db7 | 1,659 | # coding: utf-8
# Gemspec for the prismic_rails gem (Rails view helpers for the prismic.io API).
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'prismic_rails/version'
Gem::Specification.new do |spec|
spec.name = "prismic_rails"
spec.version = PrismicRails::VERSION
spec.authors = ["Felix Langenegger"]
spec.email = ["[email protected]"]
spec.summary = %q{PrismicRails provides rails view helpers to query prismic.io.}
spec.description = %q{With PrismicRails it is simple to query the prismic.io API for a defined custom type. By providing rails helpers the integration in your rails view is much easier as before.}
spec.homepage = "https://github.com/fadendaten/prismic_rails"
spec.license = "MIT"
# Package everything tracked by git except test/spec/feature files.
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# NOTE(review): the ">= 1.5" bound is already implied by "~> 1.5" — redundant but harmless.
spec.add_runtime_dependency "prismic.io", "~> 1.5", ">= 1.5"
spec.add_runtime_dependency "rails", ">= 4.2"
spec.add_development_dependency "bundler", "~> 2.1"
spec.add_development_dependency "rake", "~> 13.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "rdoc", "~> 5.1", ">= 5.1"
# NOTE(review): ">= 3.0.3" is likewise implied by "~> 3.0.3".
spec.add_development_dependency "vcr", "~> 3.0.3", ">= 3.0.3"
spec.add_development_dependency "dotenv", "~> 2.2", ">= 2.2.1"
spec.add_development_dependency "webmock", "~> 3.0", ">= 3.0.1"
spec.add_development_dependency 'simplecov', "~> 0.14", ">= 0.14.1"
spec.add_development_dependency 'pry', '~> 0.10.4'
end
| 43.657895 | 200 | 0.664256 |
39db79297e83ea64ad199f138b1403765d4f8677 | 4,837 | require 'timewizard/versioner/worklight'
RSpec.describe 'Timewizard::Versioner::Worklight' do
# Reset the scratch directories and build a fresh versioner against a
# pristine copy of the fixture descriptor before every example.
before(:example) do
  pwd = Dir.pwd # Dir.pwd already returns a String; the extra .to_s was noise
  # Drop leftovers from a previous run so examples stay independent.
  # (The original assigned `pwd = Dir.pwd.to_s` twice in a row — the second
  # assignment was a duplicate and has been removed.)
  FileUtils.rm_rf Dir["#{pwd}/tmp"]
  FileUtils.rm_rf Dir["#{pwd}/pkg"]
  # Re-seed tmp/ from the checked-in fixture resources.
  FileUtils.cp_r("#{pwd}/resources/.", "#{pwd}/tmp")
  @versioner = Timewizard::Versioner::Worklight.new "#{pwd}/tmp/worklight/application-descriptor.xml"
end
context '.new' do
it 'should raise error if arg is nil' do
expect { Timewizard::Versioner::Worklight.new nil }.to raise_error("passed in file cannot be nil")
end
it 'should not raise error if arg is not nil' do
expect { Timewizard::Versioner::Worklight.new '' }.not_to raise_error
end
end
context '#file' do
it 'should be an application-descriptor.xml' do
expect(@versioner.file).to eq("#{Dir.pwd.to_s}/tmp/worklight/application-descriptor.xml")
end
end
context '#old_build_number' do
it 'should be nil' do
expect(@versioner.old_build_number).to be_nil
end
end
context '#old_version_number' do
it 'should be nil' do
expect(@versioner.old_version_number).to be_nil
end
end
context '#new_build_number' do
it 'should be nil' do
expect(@versioner.new_build_number).to be_nil
end
end
context '#new_build_number=' do
it 'should change instance variable' do
@versioner.new_build_number = '1'
expect(@versioner.new_build_number).not_to be_nil
@versioner.new_build_number = nil
expect(@versioner.new_build_number).to be_nil
end
end
context '#new_version_number' do
it 'should be nil' do
expect(@versioner.new_version_number).to be_nil
end
end
context '#new_version_number=' do
it 'should change instance variable' do
@versioner.new_version_number = '1'
expect(@versioner.new_version_number).not_to be_nil
@versioner.new_version_number = nil
expect(@versioner.new_version_number).to be_nil
end
end
context '#read_build_numbers' do
it 'should not raise an error' do
expect { @versioner.read_build_numbers }.to_not raise_error
end
it 'should change instance variables' do
expect(@versioner.old_build_number).to be_nil
expect(@versioner.new_build_number).to be_nil
@versioner.read_build_numbers
expect(@versioner.old_build_number).to eq('0.0.1')
expect(@versioner.new_build_number).to eq('0.0.2')
end
end
context '#read_version_numbers' do
it 'should not raise an error' do
expect { @versioner.read_version_numbers }.to_not raise_error
end
it 'should change instance variables' do
expect(@versioner.old_version_number).to be_nil
expect(@versioner.new_version_number).to be_nil
@versioner.read_version_numbers
expect(@versioner.old_version_number).to eq('0.0.1')
expect(@versioner.new_version_number).to eq('0.0.1')
end
end
context '#write_build_numbers' do
  before(:example) do
    @versioner.read_build_numbers
  end

  it 'should not raise an error' do
    # BUG FIX: this example previously called #write_version_numbers — a
    # copy-paste from the '#write_version_numbers' context below — so
    # #write_build_numbers was never exercised here.
    expect { @versioner.write_build_numbers }.to_not raise_error
  end

  it 'should not change instance variables' do
    expect(@versioner.old_build_number).to eq('0.0.1')
    expect(@versioner.new_build_number).to eq('0.0.2')
    @versioner.write_build_numbers
    expect(@versioner.old_build_number).to eq('0.0.1')
    expect(@versioner.new_build_number).to eq('0.0.2')
  end

  it 'should not change contents of parsed file' do
    expect(@versioner.old_build_number).to eq('0.0.1')
    expect(@versioner.new_build_number).to eq('0.0.2')
    @versioner.write_build_numbers
    @versioner.read_build_numbers
    expect(@versioner.old_build_number).to eq('0.0.1')
    expect(@versioner.new_build_number).to eq('0.0.2')
  end
end
context '#write_version_numbers' do
before(:example) do
@versioner.read_version_numbers
end
it 'should not raise an error' do
expect { @versioner.write_version_numbers }.to_not raise_error
end
it 'should not change instance variables' do
expect(@versioner.old_version_number).to eq('0.0.1')
expect(@versioner.new_version_number).to eq('0.0.1')
@versioner.write_version_numbers
expect(@versioner.old_version_number).to eq('0.0.1')
expect(@versioner.new_version_number).to eq('0.0.1')
end
it 'should change contents of parsed file' do
expect(@versioner.old_version_number).to eq('0.0.1')
expect(@versioner.new_version_number).to eq('0.0.1')
@versioner.new_version_number = '0.0.2'
@versioner.write_version_numbers
@versioner.read_version_numbers
expect(@versioner.old_version_number).to eq('0.0.2')
expect(@versioner.new_version_number).to eq('0.0.2')
end
end
end | 30.613924 | 104 | 0.691958 |
ed555136fc9a170825f746113942850ecc790d48 | 3,905 |
# Find-or-create SportDb club (team) records for a list of club names.
#
#   names  - Array of club name strings; may include alternate spellings
#            that resolve to the same canonical club (e.g. "Raith" and
#            "Raith Rvs" => "Raith Rovers")
#   league - league record; its (optional) country is used to disambiguate
#            when a name matches clubs in more than one country
#   season - reserved for future use (currently ignored)
#
# Returns [recs_uniq, mappings] where recs_uniq is a de-duplicated Array of
# SportDb::Model::Team records and mappings is a Hash of input name => record.
#
# NOTE(review): every unresolved or ambiguous name calls `exit 1`, killing
# the whole process — acceptable for a CLI import script, hostile if this
# is ever reused as a library; confirm intended usage.
def find_or_create_clubs!( names, league:, season: nil)
## note: season is for now optional (and unused) - add/use in the future!!!
recs_uniq = []
mappings = {} ## name to db rec mapping (note: more than one name might map to the same uniq rec)
## note: for now allow multiple names for clubs
## if the name matches the same club already added it will get "dropped" and NOT added again to the database,
## thus, a unique club list gets returned with NO duplicates
##
## e.g. Raith or Raith Rvs => Raith Rovers
duplicates = {} ## check for duplicate matches
## add/find teams
names.each do |name|
m = SportDb::Import.config.clubs.match( name )
if m.nil?
## todo/check: exit if no match - why? why not?
puts "!!! *** ERROR *** no matching club found for >#{name}< - add to clubs setup"
exit 1
else
if m.size == 1
club_data = m[0]
else ## assume more than one (>1) match
## resolve conflict - find best match - how?
if league.country
## try match / filter by country
country_key = league.country.key ## e.g. eng, de, at, br, etc.
m2 = m.select { |c| c.country.key == country_key }
if m2.size == 1
club_data = m2[0]
else
puts "!!! *** ERROR *** no clubs or too many matching clubs found for country >#{country_key}< and >#{name}< - cannot resolve conflict / find best match (automatic):"
pp m
exit 1
end
else
puts "!!! *** ERROR *** too many matching clubs found for >#{name}< - cannot resolve conflict / find best match (automatic)"
pp m
exit 1
end
end
end
## todo/check/fix: use canonical name for duplicates index (instead of object.id) - why? why not?
## A second alias for an already-seen club reuses the first alias's record
## (no new DB row) and just extends the mappings hash.
if duplicates[ club_data.name ] ###
duplicates[ club_data.name ] << name
puts "!!! *** WARN *** duplicate name match for club:"
pp duplicates[ club_data.name ]
pp club_data
## add same rec_uniq from first mapping
duplicate_name = duplicates[ club_data.name ][0]
mappings[ name ] = mappings[ duplicate_name ]
next ### skip all the database work and creating a record etc.
end
duplicates[ club_data.name ] ||= []
duplicates[ club_data.name ] << name
## remove spaces too (e.g. Man City => mancity)
## remove dot (.) too e.g. St. Polten => stpolten
## amp (& too e.g. Brighton & Hove Albion FC = brightonhove...
## numbers 1. FC Kaiserslautern:
## team_key = team_name.downcase.gsub( /[0-9&. ]/, '' )
## fix: reuse ascify from sportdb - why? why not?
## remove all non-ascii a-z chars
## NOTE(review): this strips ALL non a-z chars, so accented letters vanish
## entirely (e.g. "São" => "so") — presumably acceptable for keys; confirm.
club_key = club_data.name.downcase.gsub( /[^a-z]/, '' )
## puts "add club: #{club_key}, #{club_data.name}, #{club_data.country.name} (#{club_data.country.key}):"
puts "add club: #{club_key}, #{club_data.name}"
pp club_data
if name != club_data.name
puts " using mapping from >#{name}< to => >#{club_data.name}<"
end
## Find-or-create by canonical title (lookup is by title, not key).
club = SportDb::Model::Team.find_by( title: club_data.name )
if club.nil?
club = SportDb::Model::Team.create!(
key: club_key,
title: club_data.name,
country_id: SportDb::Importer::Country.find_or_create_builtin!( club_data.country.key ).id,
club: true,
national: false ## check -is default anyway - use - why? why not?
## todo/fix: add city if present - why? why not?
)
end
pp club
recs_uniq << club
mappings[ name ] = club
end
[recs_uniq, mappings] # return activerecord team objects and the mappings of names to db recs
end
| 37.190476 | 182 | 0.565941 |
6a8cf5eed77e4afcb059cc0e76369a84bf2040d4 | 981 | # frozen_string_literal: true
module Dependabot
  module Python
    # Catalogue of Python interpreter versions Dependabot can work with.
    module PythonVersions
      # Interpreters that ship pre-baked in the Dependabot container image.
      PRE_INSTALLED_PYTHON_VERSIONS = %w(3.9.1 2.7.18).freeze

      # An OpenSSL limitation restricts which interpreters can be installed
      # inside the Dependabot container, hence this explicit allow-list.
      SUPPORTED_VERSIONS = %w(
        3.9.1 3.9.0
        3.8.7 3.8.6 3.8.5 3.8.4 3.8.3 3.8.2 3.8.1 3.8.0
        3.7.9 3.7.8 3.7.7 3.7.6 3.7.5 3.7.4 3.7.3 3.7.2 3.7.1 3.7.0
        3.6.12 3.6.11 3.6.10 3.6.9 3.6.8 3.6.7 3.6.6 3.6.5 3.6.4 3.6.3
        3.6.1 3.6.0 3.5.10 3.5.8 3.5.7 3.5.6 3.5.5 3.5.4 3.5.3
        2.7.18 2.7.17 2.7.16 2.7.15 2.7.14 2.7.13
      ).freeze

      # Iteration order matters: callers walk this list front-to-back until a
      # version fits, so the pre-installed Python 3 build(s) come first to
      # avoid a needless install.
      SUPPORTED_VERSIONS_TO_ITERATE = [
        *PRE_INSTALLED_PYTHON_VERSIONS.reject { |version| version.start_with?("2") },
        *SUPPORTED_VERSIONS
      ].freeze
    end
  end
end
| 31.645161 | 77 | 0.58002 |
e8ae02affccef46ae74eda3cbdeccac5d73502a3 | 83 | # frozen_string_literal: true
# Jbuilder view: render @problem through the shared `problems/problem` partial.
json.partial! 'problems/problem', problem: @problem
| 20.75 | 51 | 0.783133 |
284cd67765029bc3c81ece2fbd45c579f27adfe3 | 1,846 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "ivanye/version"
Gem::Specification.new do |spec|
spec.name = "ivanye"
spec.version = Ivanye::VERSION
spec.authors = ["Jacob Slack"]
spec.email = ["[email protected]"]
spec.summary = %q{TODO: Write a short summary, because RubyGems requires one.}
spec.description = %q{TODO: Write a longer description or delete this line.}
spec.homepage = "TODO: Put your gem's website or public repo URL here."
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 42.930233 | 96 | 0.674431 |
797537a284af606d4e12b69e79190e4bcb3235a8 | 3,958 | # Doubly Linked List | Set 1 (Introduction and Insertion)
# A Doubly Linked List (DLL) contains an extra pointer,
# typically called previous pointer, together with next pointer
# and data which are there in singly linked list.
# http://www.geeksforgeeks.org/doubly-linked-list/
# Advantages over singly linked list
# 1) A DLL can be traversed in both forward and backward direction.
# 2) The delete operation in DLL is more efficient if pointer to the node to be deleted is given
# Disadvantages over singly linked list
# 1) Every node of DLL Require extra space for an previous pointer. It is possible to implement DLL with single pointer though (See this and this).
# 2) All operations require an extra pointer previous to be maintained.
# Insertion
# A node can be added in four ways
# 1) At the front of the DLL
# 2) After a given node.
# 3) At the end of the DLL
# 4) Before a given node.
require 'pry'
# A node in a doubly linked list: a payload plus links to both neighbours.
class Node
  attr_accessor :data, :next_node, :prev_node

  # data      - the payload stored in this node
  # next_node - the following node (nil at the tail)
  # prev_node - the preceding node (nil at the head)
  def initialize(data, next_node=nil, prev_node=nil)
    @data = data
    # BUG FIX: these assignments previously targeted @next/@prev, which the
    # :next_node/:prev_node accessors never read — constructor arguments
    # were silently discarded and both links always started out nil.
    @next_node = next_node
    @prev_node = prev_node
  end
end

# Minimal doubly linked list supporting front-insert, append,
# insert-after-node, and forward/backward printing.
class DoublyLinkedList
  attr_accessor :head

  def initialize(head=nil)
    @head = head
  end

  # Insert new_data at the front of the list. O(1).
  def push new_data
    new_node = Node.new(new_data)
    # New node points forward at the old head ...
    new_node.next_node = @head
    # ... and the old head (if any) points back at the new node.
    if @head != nil
      @head.prev_node = new_node
    end
    @head = new_node
  end

  # Insert new_data immediately after prev_node. O(1).
  # Prints a warning and is a no-op when prev_node is nil.
  def insert_after prev_node, new_data
    if prev_node == nil
      puts "the given previous node cannot be NULL"
      # BUG FIX: must bail out here — previously execution fell through and
      # crashed with NoMethodError on `prev_node.next_node` below.
      return
    end
    new_node = Node.new(new_data)
    # Splice new_node in between prev_node and its old successor.
    new_node.next_node = prev_node.next_node
    prev_node.next_node = new_node
    new_node.prev_node = prev_node
    # Fix the back-link of the node that now follows new_node, if any.
    if new_node.next_node != nil
      new_node.next_node.prev_node = new_node
    end
  end

  # Append new_data at the tail of the list. O(n) — walks to the end.
  def append new_data
    new_node = Node.new(new_data)
    new_node.next_node = nil
    if @head == nil
      # Empty list: the new node becomes the head.
      new_node.prev_node = nil
      @head = new_node
    else
      # Walk to the current tail, then link the new node behind it.
      last = @head
      while last.next_node
        last = last.next_node
      end
      last.next_node = new_node
      new_node.prev_node = last
    end
  end

  # Print the list forwards, then backwards (exercises the prev links).
  # Both loops are no-ops for an empty list.
  def print_list(node)
    puts "\nTraversal in forward direction"
    while node != nil
      puts node.data
      last = node
      node = node.next_node
    end
    puts "\nTraversal in reverse direction"
    while last != nil
      puts last.data
      last = last.prev_node
    end
  end
end
# --- Demo driver ----------------------------------------------------------
# Builds the list 1 -> 7 -> 8 -> 6 -> 4 via push/append/insert_after and
# prints it in both directions. (The original comments said "None" — a
# leftover from the Python source this was ported from; Ruby uses nil.)
# Driver program to test above functions
# Start with empty list
llist = DoublyLinkedList.new
# Insert 6. So the list becomes 6->nil
llist.append(6)
# Insert 7 at the beginning.
# So linked list becomes 7->6->nil
llist.push(7)
# Insert 1 at the beginning.
# So linked list becomes 1->7->6->nil
llist.push(1)
# Insert 4 at the end.
# So linked list becomes 1->7->6->4->nil
llist.append(4)
# Insert 8, after 7.
# So linked list becomes 1->7->8->6->4->nil
llist.insert_after(llist.head.next_node, 8)
puts "Created DLL is: "
llist.print_list(llist.head)
| 27.873239 | 147 | 0.686458 |
ff16e93f072fc805908c50662fc6c4eb800c626d | 26 | # typed: true
fun (1) {}
| 6.5 | 13 | 0.5 |
26ea0eab9f551bf8401d66fa44be9da65f9fd517 | 477 | cask "amadeus-pro" do
# Homebrew cask stanzas for Amadeus Pro (HairerSoft audio editor).
version "2.8.4"
sha256 "34d89cb521e7bccef39c02f07ec5dbf25d5255a5e93d563e4b774c79a5a5829e"
# s3.amazonaws.com/AmadeusPro2/ was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/AmadeusPro2/AmadeusPro.zip"
# NOTE(review): `appcast` has been deprecated/removed in newer Homebrew in
# favour of a `livecheck` block — confirm against the current cask DSL.
appcast "https://www.hairersoft.com/pro.html"
name "Amadeus Pro"
desc "Multi-purpose audio recorder, editor and converter"
homepage "https://www.hairersoft.com/pro.html"
app "Amadeus Pro.app"
end
| 34.071429 | 92 | 0.769392 |
03413b63c01ac87a8ff6addb4850dd1c66288082 | 2,107 | # Puma can serve each request in a thread from an internal thread pool.
# The `threads` method setting takes two numbers: a minimum and maximum.
# Any libraries that use thread pools should be configured to match
# the maximum value specified for Puma. Default is set to 5 threads for minimum
# and maximum; this matches the default thread size of Active Record.
#
max_threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }
min_threads_count = ENV.fetch("RAILS_MIN_THREADS") { max_threads_count }
threads min_threads_count, max_threads_count
# Specifies the `worker_timeout` threshold that Puma will use to wait before
# terminating a worker in development environments.
#
worker_timeout 3600 if ENV.fetch("RAILS_ENV", "development") == "development"
# Specifies the `port` that Puma will listen on to receive requests; default is 3000.
# we specify 3001 for http, 3000 for https
port ENV.fetch("PORT") { 3001 }
# Specifies the `environment` that Puma will run in.
#
environment ENV.fetch("RAILS_ENV") { "development" }
# Specifies the `pidfile` that Puma will use.
pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }
# Specifies the number of `workers` to boot in clustered mode.
# Workers are forked web server processes. If using threads and workers together
# the concurrency of the application would be max `threads` * `workers`.
# Workers do not work on JRuby or Windows (both of which do not support
# processes).
#
# workers ENV.fetch("WEB_CONCURRENCY") { 2 }
# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory.
#
# preload_app!
# Allow puma to be restarted by `rails restart` command.
plugin :tmp_restart
# NOTE(review): this file is evaluated by Puma itself, which does not
# necessarily have Rails loaded (e.g. `puma -C config/puma.rb` run directly);
# `Rails.env` would raise NameError there. Consider guarding with
# `defined?(Rails) && Rails.env.development?` — confirm how Puma is launched.
# The ssl_bind below serves HTTPS on 3000 using local self-signed certs,
# alongside plain HTTP on the `port` configured above (3001).
if Rails.env.development?
localhost_key = "#{Dir.pwd}/#{File.join('config', 'certs', 'localhost.key')}"
localhost_cert = "#{Dir.pwd}/#{File.join('config', 'certs', 'localhost.crt')}"
ssl_bind '0.0.0.0', '3000', {
key: localhost_key,
cert: localhost_cert
}
end
| 39.018519 | 85 | 0.747034 |
bf2536acda01f5b2bd1f7790250107445baa5897 | 2,179 | describe :enumerable_take, shared: true do
before :each do
@values = [4,3,2,1,0,-1]
@enum = EnumerableSpecs::Numerous.new(*@values)
end
it "returns the first count elements if given a count" do
@enum.send(@method, 2).should == [4, 3]
@enum.send(@method, 4).should == [4, 3, 2, 1] # See redmine #1686 !
end
it "returns an empty array when passed count on an empty array" do
empty = EnumerableSpecs::Empty.new
empty.send(@method, 0).should == []
empty.send(@method, 1).should == []
empty.send(@method, 2).should == []
end
it "returns an empty array when passed count == 0" do
@enum.send(@method, 0).should == []
end
it "returns an array containing the first element when passed count == 1" do
@enum.send(@method, 1).should == [4]
end
it "raises an ArgumentError when count is negative" do
lambda { @enum.send(@method, -1) }.should raise_error(ArgumentError)
end
it "returns the entire array when count > length" do
@enum.send(@method, 100).should == @values
@enum.send(@method, 8).should == @values # See redmine #1686 !
end
it "tries to convert the passed argument to an Integer using #to_int" do
obj = mock('to_int')
obj.should_receive(:to_int).and_return(3).at_most(:twice) # called twice, no apparent reason. See redmine #1554
@enum.send(@method, obj).should == [4, 3, 2]
end
it "raises a TypeError if the passed argument is not numeric" do
lambda { @enum.send(@method, nil) }.should raise_error(TypeError)
lambda { @enum.send(@method, "a") }.should raise_error(TypeError)
obj = mock("nonnumeric")
lambda { @enum.send(@method, obj) }.should raise_error(TypeError)
end
it "gathers whole arrays as elements when each yields multiple" do
multi = EnumerableSpecs::YieldsMulti.new
multi.send(@method, 1).should == [[1, 2]]
end
it "consumes only what is needed" do
thrower = EnumerableSpecs::ThrowingEach.new
thrower.send(@method, 0).should == []
counter = EnumerableSpecs::EachCounter.new(1,2,3,4)
counter.send(@method, 2).should == [1,2]
counter.times_called.should == 1
counter.times_yielded.should == 2
end
end
| 34.046875 | 115 | 0.663607 |
b9eb0063fbc1721c603212cc63c38230c22ad239 | 8,190 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2022_01_10_164255) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.bigint "record_id", null: false
t.bigint "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.string "service_name", null: false
t.bigint "byte_size", null: false
t.string "checksum"
t.datetime "created_at", null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
create_table "active_storage_variant_records", force: :cascade do |t|
t.bigint "blob_id", null: false
t.string "variation_digest", null: false
t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
end
create_table "business_types", force: :cascade do |t|
t.string "name", null: false
t.bigint "parent_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["parent_id"], name: "index_business_types_on_parent_id"
end
create_table "data_uploads", force: :cascade do |t|
t.bigint "uploaded_by_id"
t.string "uploader", null: false
t.jsonb "details", default: {}
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["uploaded_by_id"], name: "index_data_uploads_on_uploaded_by_id"
end
create_table "development_funds", force: :cascade do |t|
t.string "project_title", null: false
t.text "project_description"
t.string "recipient"
t.string "lead_organization"
t.bigint "region_id", null: false
t.string "location"
t.decimal "latitude", precision: 10, scale: 6
t.decimal "longitude", precision: 10, scale: 6
t.string "categories", default: [], array: true
t.string "scope"
t.string "planning_area"
t.string "second_planning_area"
t.float "total_project_cost"
t.float "key_funding_amount"
t.string "key_funding_source"
t.string "funding_subtype"
t.integer "funding_call_year"
t.string "funding_call_month"
t.string "project_status"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["region_id"], name: "index_development_funds_on_region_id"
end
create_table "indicator_values", force: :cascade do |t|
t.bigint "indicator_id", null: false
t.string "date"
t.string "category_1"
t.string "category_2"
t.float "value", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.bigint "region_id"
t.index ["indicator_id"], name: "index_indicator_values_on_indicator_id"
t.index ["region_id"], name: "index_indicator_values_on_region_id"
end
create_table "indicators", force: :cascade do |t|
t.string "slug", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.boolean "dynamic", default: false, null: false
t.index ["slug"], name: "index_indicators_on_slug", unique: true
end
create_table "organizations", force: :cascade do |t|
t.string "name", null: false
t.bigint "region_id", null: false
t.bigint "business_type_id"
t.bigint "external_company_id"
t.boolean "indigenous_ownership"
t.boolean "biosphere_program_member"
t.text "website_url"
t.decimal "latitude", precision: 10, scale: 6
t.decimal "longitude", precision: 10, scale: 6
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.boolean "accessibility"
t.boolean "show_on_platform", default: true, null: false
t.string "source"
t.index ["business_type_id"], name: "index_organizations_on_business_type_id"
t.index ["region_id"], name: "index_organizations_on_region_id"
end
create_table "region_permissions", force: :cascade do |t|
t.bigint "user_id"
t.bigint "region_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["region_id"], name: "index_region_permissions_on_region_id"
t.index ["user_id"], name: "index_region_permissions_on_user_id"
end
create_table "regions", force: :cascade do |t|
t.string "name", null: false
t.bigint "parent_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "region_type", default: "tourism_region", null: false
t.string "slug", null: false
t.boolean "active", default: true
t.index ["parent_id"], name: "index_regions_on_parent_id"
t.index ["slug"], name: "index_regions_on_slug", unique: true
end
create_table "themes", force: :cascade do |t|
t.string "slug", null: false
t.string "title", null: false
t.text "description"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.string "name"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "account_type", default: "user", null: false
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
create_table "widgets", force: :cascade do |t|
t.bigint "theme_id", null: false
t.string "slug", null: false
t.string "title", null: false
t.string "sub_title"
t.text "description"
t.text "note"
t.jsonb "sources"
t.integer "position"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["theme_id"], name: "index_widgets_on_theme_id"
end
add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
add_foreign_key "business_types", "business_types", column: "parent_id"
add_foreign_key "data_uploads", "users", column: "uploaded_by_id", on_delete: :nullify
add_foreign_key "development_funds", "regions", on_delete: :cascade
add_foreign_key "indicator_values", "indicators", on_delete: :cascade
add_foreign_key "indicator_values", "regions", on_delete: :cascade
add_foreign_key "organizations", "business_types"
add_foreign_key "organizations", "regions", on_delete: :cascade
add_foreign_key "regions", "regions", column: "parent_id", on_delete: :cascade
add_foreign_key "widgets", "themes", on_delete: :cascade
end
| 41.573604 | 126 | 0.714164 |
f8345ba9cd51f09d7235934ef59d9cdd4595df1c | 179 | module RunscopeStatuspage
# BUG FIX: these previously subclassed Exception. In Ruby, application
# errors should derive from StandardError: a bare `rescue` only catches
# StandardError, so Exception subclasses silently escaped ordinary error
# handling and mingled with process-level errors (SignalException,
# SystemExit). Class names are kept for backward compatibility.

# Raised when a Runscope API call fails.
class RunscopeAPIException < StandardError
end

# Raised when a Statuspage API call fails.
class StatuspageAPIException < StandardError
end

# Raised when a required argument is missing.
class MissingArgumentException < StandardError
end
end
| 16.272727 | 44 | 0.804469 |
6a91e4ce859e790a965170adfde24735c0c8d0da | 842 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint openpgp.podspec' to validate before publishing.
#
Pod::Spec.new do |s|
s.name = 'openpgp'
s.version = '0.1.0'
s.summary = 'library for use OpenPGP.'
s.description = <<-DESC
library for use OpenPGP.
DESC
s.homepage = 'https://github.com/jerson/flutter-openpgp'
s.license = { :file => '../LICENSE' }
s.author = { 'Gerson Alexander Pardo Gamez' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.dependency 'FlutterMacOS'
s.vendored_frameworks = 'Openpgp.framework'
s.platform = :osx, '10.11'
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
s.swift_version = '5.0'
end
| 36.608696 | 84 | 0.586698 |
ff50f7bfe580477413a45048be073f8f21ef2e35 | 452 | # encoding: UTF-8
require 'spec_helper'
module Normatron
module Filters
describe SwapcaseFilter do
it { should evaluate("caçador").to("CAÇADOR") }
it { should evaluate("CAÇADOR").to("caçador") }
it { should evaluate("CaÇaDoR").to("cAçAdOr") }
it { should evaluate("cAçAdOr").to("CaÇaDoR") }
it { should evaluate(100 ).to(100 ) }
it { should evaluate(nil ).to(nil ) }
end
end
end | 28.25 | 53 | 0.590708 |
e8550d730bfa24d6ea8500bf514a2f414e782cc3 | 1,772 | class Bmake < Formula
desc "Portable version of NetBSD make(1)"
homepage "https://www.crufty.net/help/sjg/bmake.html"
url "https://www.crufty.net/ftp/pub/sjg/bmake-20211001.tar.gz"
sha256 "cad7ef0fb41138050f8932af3a7ade16f7265b3f37ff6356703e0b1ad6542739"
license "BSD-3-Clause"
livecheck do
url "https://www.crufty.net/ftp/pub/sjg/"
regex(/href=.*?bmake[._-]v?(\d{6,8})\.t/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "038eb9e449fc8e28cd8950f6d7bad111c6758db6458ccc42c6998e0cd004d25f"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "64d434169bc43d70b7c620369abd019e0c2005061e154bb0f8d722e7262267ec"
sha256 monterey: "b6d10880ef38c0b76afb99211c032c013e759ca56b7642fc43f5fe383423df05"
sha256 big_sur: "98ff40de4a63d3dc568a577e59089f9e58a06a0b750a5f5adbe4252307d53ae7"
sha256 catalina: "4c28d9e621ea9b03603ca96d390c9823ca94e3fb4d8049ecf7b32acbe6b1b7c3"
sha256 mojave: "31f6737e5a2920fcc54736b6dcb140748f491967bc020eae635b52df038c2edf"
end
def install
# Don't pre-roff cat pages.
inreplace "mk/man.mk", "MANTARGET?", "MANTARGET"
# -DWITHOUT_PROG_LINK means "don't symlink as bmake-VERSION."
# shell-ksh test segfaults since macOS 11.
args = ["--prefix=#{prefix}", "-DWITHOUT_PROG_LINK", "--install", "BROKEN_TESTS=shell-ksh"]
system "sh", "boot-strap", *args
man1.install "bmake.1"
end
test do
(testpath/"Makefile").write <<~EOS
all: hello
hello:
\t@echo 'Test successful.'
clean:
\trm -rf Makefile
EOS
system bin/"bmake"
system bin/"bmake", "clean"
end
end
| 36.916667 | 123 | 0.673251 |
039c8318858d3493b645a62eeaaba255b87fc5db | 1,825 | require 'date'
require 'syslog/pattern'
module Syslog
class Message
attr_accessor :facility,
:severity,
:version,
:datetime,
:hostname,
:app_name,
:procid,
:msgid,
:structured_data,
:message
def initialize(str)
m = Syslog::Pattern::MESSAGE.match(str)
fail(ArgumentError, 'Not a valid syslog message.') if m.nil?
pri = m[:priority].to_i
@facility = pri / 8
@severity = pri % 8
@version = m[:version].to_i
@datetime = maybe_nil(m[:timestamp]) { |t| DateTime.parse(t) }
@hostname = maybe_nil(m[:hostname])
@app_name = maybe_nil(m[:app_name])
@procid = maybe_nil(m[:procid])
@msgid = maybe_nil(m[:msgid])
@structured_data = maybe_nil(m[:structured_data])
@message = m[:message]
end
def maybe_nil(val)
unless val == '-'
block_given? ? yield(val) : val
end
end
private :maybe_nil
def nil_or(val)
if val.nil?
'-'
else
block_given? ? yield(val) : val
end
end
private :nil_or
def to_s
str = ''
str << "<#{(@facility * 8) + @severity}>#{@version} "
str << "#{nil_or(@datetime) { |d| d.strftime('%Y-%m-%dT%H:%M:%S.%6N%:z') }} "
str << "#{nil_or(@hostname)} "
str << "#{nil_or(@app_name)} "
str << "#{nil_or(@procid)} "
str << "#{nil_or(@msgid)} "
str << "#{nil_or(@structured_data)}"
str << " #{@message}" unless @message.nil?
str
end
def self.parse(str)
begin
Message.new(str)
rescue ArgumentError
nil
end
end
end
end
| 24.662162 | 83 | 0.484384 |
d5e3927efe906441a8db6a58ed81f465cc4ad54b | 4,903 | # -*- encoding: binary -*-
require 'raindrops'
# Raindrops::Middleware is Rack middleware that allows snapshotting
# current activity from an HTTP request. For all operating systems,
# it returns at least the following fields:
#
# * calling - the number of application dispatchers on your machine
# * writing - the number of clients being written to on your machine
#
# Additional fields are available for \Linux users.
#
# It should be loaded at the top of Rack middleware stack before other
# middlewares for maximum accuracy.
#
# === Usage (Rainbows!/Unicorn preload_app=false)
#
# If you're using preload_app=false (the default) in your Rainbows!/Unicorn
# config file, you'll need to create the global Stats object before
# forking.
#
# require 'raindrops'
# $stats ||= Raindrops::Middleware::Stats.new
#
# In your Rack config.ru:
#
# use Raindrops::Middleware, :stats => $stats
#
# === Usage (Rainbows!/Unicorn preload_app=true)
#
# If you're using preload_app=true in your Rainbows!/Unicorn
# config file, just add the middleware to your stack:
#
# In your Rack config.ru:
#
# use Raindrops::Middleware
#
# === Linux-only extras!
#
# To get bound listener statistics under \Linux, you need to specify the
# listener names for your server. You can even include listen sockets for
# *other* servers on the same machine. This can be handy for monitoring
# your nginx proxy as well.
#
# In your Rack config.ru, just pass the :listeners argument as an array of
# strings (along with any other arguments). You can specify any
# combination of TCP or Unix domain socket names:
#
# use Raindrops::Middleware, :listeners => %w(0.0.0.0:80 /tmp/.sock)
#
# If you're running Unicorn 0.98.0 or later, you don't have to pass in
# the :listeners array, Raindrops will automatically detect the listeners
# used by Unicorn master process. This does not detect listeners in
# different processes, of course.
#
# The response body includes the following stats for each listener
# (see also Raindrops::ListenStats):
#
# * active - total number of active clients on that listener
# * queued - total number of queued (pre-accept()) clients on that listener
#
# = Demo Server
#
# There is a server running this middleware (and Watcher) at
# https://yhbt.net/raindrops-demo/_raindrops
#
# Also check out the Watcher demo at https://yhbt.net/raindrops-demo/
#
# The demo server is only limited to 30 users, so be sure not to abuse it
# by using the /tail/ endpoint too much.
#
class Raindrops::Middleware
attr_accessor :app, :stats, :path, :tcp, :unix # :nodoc:
# A Raindrops::Struct used to count the number of :calling and :writing
# clients. This struct is intended to be shared across multiple processes
# and both counters are updated atomically.
#
# This is supported on all operating systems supported by Raindrops
Stats = Raindrops::Struct.new(:calling, :writing)
# :stopdoc:
require "raindrops/middleware/proxy"
# :startdoc:
# +app+ may be any Rack application, this middleware wraps it.
# +opts+ is a hash that understands the following members:
#
# * :stats - Raindrops::Middleware::Stats struct (default: Stats.new)
# * :path - HTTP endpoint used for reading the stats (default: "/_raindrops")
# * :listeners - array of host:port or socket paths (default: from Unicorn)
def initialize(app, opts = {})
@app = app
@stats = opts[:stats] || Stats.new
@path = opts[:path] || "/_raindrops"
tmp = opts[:listeners]
if tmp.nil? && defined?(Unicorn) && Unicorn.respond_to?(:listener_names)
tmp = Unicorn.listener_names
end
@tcp = @unix = nil
if tmp
@tcp = tmp.grep(/\A.+:\d+\z/)
@unix = tmp.grep(%r{\A/})
@tcp = nil if @tcp.empty?
@unix = nil if @unix.empty?
end
end
# standard Rack endpoint
def call(env) # :nodoc:
env['PATH_INFO'] == @path and return stats_response
begin
@stats.incr_calling
status, headers, body = @app.call(env)
rv = [ status, headers, Proxy.new(body, @stats) ]
# the Rack server will start writing headers soon after this method
@stats.incr_writing
rv
ensure
@stats.decr_calling
end
end
def stats_response # :nodoc:
body = "calling: #{@stats.calling}\n" \
"writing: #{@stats.writing}\n"
if defined?(Raindrops::Linux.tcp_listener_stats)
Raindrops::Linux.tcp_listener_stats(@tcp).each do |addr,stats|
body << "#{addr} active: #{stats.active}\n" \
"#{addr} queued: #{stats.queued}\n"
end if @tcp
Raindrops::Linux.unix_listener_stats(@unix).each do |addr,stats|
body << "#{addr} active: #{stats.active}\n" \
"#{addr} queued: #{stats.queued}\n"
end if @unix
end
headers = {
"Content-Type" => "text/plain",
"Content-Length" => body.size.to_s,
}
[ 200, headers, [ body ] ]
end
end
| 32.90604 | 79 | 0.678564 |
1d15ecba0e463db92132a510d342de30083d0297 | 1,208 | # frozen_string_literal: true
# encoding: utf-8
require 'spec_helper'
describe 'Client' do
context 'after client is disconnected' do
let(:client) { authorized_client.with(server_selection_timeout: 1) }
before do
client.close
end
it 'is still usable for operations' do
resp = client.database.command(ismaster: 1)
expect(resp).to be_a(Mongo::Operation::Result)
end
it 'is still usable for operations that can use sessions' do
client['collection'].insert_one(test: 1)
end
context 'after all servers are marked unknown' do
before do
client.cluster.servers.each do |server|
server.unknown!
end
end
context 'operation that never uses sessions' do
it 'fails server selection' do
expect do
client.database.command(ismaster: 1)
end.to raise_error(Mongo::Error::NoServerAvailable)
end
end
context 'operation that can use sessions' do
it 'fails server selection' do
expect do
client['collection'].insert_one(test: 1)
end.to raise_error(Mongo::Error::NoServerAvailable)
end
end
end
end
end
| 25.166667 | 72 | 0.639901 |
bf5ad90761f112dc016c57b3a40fefe4fc03ba25 | 1,718 | require('spec_helper')
describe(Recipe) do
describe('#ingredients') do
it('lists a recipes ingredients') do
test_recipe = Recipe.create({:name => 'Green Bean Casserole', :cooking_time => 35, :preparation_time => 10, :serving_size => 1, :rating => nil, :total_time => 40, :instructions => 'bake'})
test_ingredient = Ingredient.create({:name => 'green beans'})
# test_ingredient2 = Ingredient.create({:name => 'cream of mushroom'})
ingredient_recipe_test = Unit.create({:unit => "2 cans", :recipe_id => test_recipe.id(), :ingredient_id => test_ingredient.id()})
# ingredient_recipe_test2 = Unit.create({:unit => "1 can", :recipe_id => test_recipe.id(), :ingredient_id => test_ingredient2.id()})
expect(test_recipe.units).to(eq([ingredient_recipe_test]))
end
end
# describe('#category') do
# it('lists a recipes category') do
# test_recipe = Recipe.new({:name => 'Green Bean Casserole'})
# test_recipe.save()
# test_category = Category.new({:name => 'Thanksgiving'})
# test_category.save()
# test_category2 = Category.new({:name => 'Comfort Food'})
# test_category2.save()
# test_recipe.categories.push(test_category)
# test_recipe.categories.push(test_category2)
# expect(test_recipe.categories()).to(eq([test_category, test_category2]))
# end
# end
# describe('#split_instructions') do
# it('adds html notation to the instructions page') do
# instructions = "1. a 2. b 3. c"
# name = "Name"
# test_instructions = Recipe.create({name: name, instructions: instructions})
# expect(test_instructions.instructions).to(eq("1. a <br><br> 2. b <br> 3. c"))
# end
# end
end
| 44.051282 | 194 | 0.651339 |
1100e247c2d7440e4b3a5805a4eb6985cc53d049 | 766 | # require_relative './vedabase.rb'
class Vedabase::Scraper
@@url_arrays = ["https://vedabase.io/en/library/bg/introduction/",
"https://vedabase.io/en/library/sb/1/introduction/",
"https://vedabase.io/en/library/cc/adi/introduction/"
]
def self.scrape_title
doc = Nokogiri::HTML(open("https://vedabase.io/en/library/"))
books = doc.css("div.col-6.col-sm-3.col-md-2.col-lg-2.text-center.book-item").slice(0, 3)
books.map.with_index do | book, i |
title = book.css("a.book-title").text.strip
url = @@url_arrays[i]
Vedabase::Vedabase.new(title, url)
end
end
def self.scrape_intro(url)
doc = Nokogiri::HTML(open(url))
intro = doc.css("div#content.row").text.strip
end
end
| 20.702703 | 95 | 0.630548 |
878cccf18f40730336661d4527eb06ef00c5fd2f | 83 | # frozen_string_literal: true
require 'jsonapi_compliable/adapters/active_record'
| 20.75 | 51 | 0.855422 |
61baaf6da0e0852b2456fed4f3843e3916b0b651 | 2,318 | # -*- coding: utf-8 -*-
require 'sinatra'
require 'sinatra/reloader'
require "haml"
require 'backlog_api'
# 何度も読み込まれて舞う
PROJECT_ID = ENV['BACKLOG_PROJECTID2'].to_i
ASSIGENER_ID = ENV['BACKLOG_USERID'].to_i
# CLIENT = BacklogApi::Client.new
# トップレベルには定義できないのでここにビューヘルパーとか追加してね
helpers do
def select_tag(name, selected_id, opt)
options = opt.map do |k,v|
"<option value='#{k}' #{selected_id == k.to_i ? %{selected} : ''} > #{v}</option>"
end.join
<<-"EOS"
<select name='#{name}'>
#{options}
</select>
EOS
end
def status_select_tag(issue)
select_tag("status[#{issue["key"]}]", issue["status"]["id"],
'1' => '未対応',
'2' => '処理中',
'3' => '処理済み',
'4' => '完了',
)
end
end
# アクションみたいなの
get '/' do
# "views/index.erb"をrender。○しかし今回は"@@ index" の部分をrenderする
@issues = BacklogApi::Client.new.find_issue(
projectId: PROJECT_ID,
assignerId: ASSIGENER_ID,
sort: :STATUS, # ステータスでソート
order: true, # 昇順
).tapp(&:count)
erb :index
end
post '/issues' do
BacklogApi::Client.new.create_issue(
projectId: PROJECT_ID,
assignerId: ASSIGENER_ID,
summary: params[:summary],
issueTypeId: 1074130811, # タスク
)
redirect '/'
end
post '/update_issues' do
params["status"].each do |k, v|
begin
BacklogApi::Client.new.switch_status(
key: k,
statusId: v.to_i,
)
puts "update"
rescue
# TODO: 警告とかね
puts "do nothing"
end
end
redirect '/'
end
get '/test' do
"test"
end
# __END__: コレ以降がインラインテンプレートになる
__END__
# @@index: "views/index.erb"と同義っぽい
@@index
<html>
<body>
<h1>課題追加</h1>
<form action='/issues' method='post'>
<input type='text_field' value='' name='summary'>
<input type='submit' value='さぁ、課題を追加するのです!'>
</form>
<h1>課題更新</h1>
<form action='/update_issues' method='post'>
<table>
<tr>
<th>件名</th>
<th>ステータス</th>
<th>リンク</th>
</tr>
<% @issues.each do |issue| %>
<tr>
<td><%= issue["summary"] %></td>
<td><%= status_select_tag issue %></td>
<td><a href='<%= issue["url"] %>'>リンクです!!</a></td>
</tr>
<% end %>
</table>
<input type='submit' value='課題の一括更新です!'>
</form>
<body>
</html>
| 18.544 | 88 | 0.56428 |
39f672a1dfd0553e200cf01ab4677a39a451942d | 173 | require 'rubygems'
require 'pathname'
gem 'dm-core', '=0.9.5'
require 'dm-core'
require Pathname(__FILE__).dirname.expand_path / 'dm-is-versioned' / 'is' / 'versioned.rb'
| 21.625 | 90 | 0.705202 |
91ee16c1d5ccc437584fb9f7156e666ef9fb03de | 149 | class AddRecipeRefToSubstitutions < ActiveRecord::Migration[5.2]
def change
add_reference :substitutions, :recipe, foreign_key: true
end
end
| 24.833333 | 64 | 0.785235 |
f82ecd4449ab9b3c12c95863802ff58cb6f71cdf | 4,411 | # frozen_string_literal: true
require "cases/helper"
require "models/post"
module ActiveRecord
class RelationMutationTest < ActiveRecord::TestCase
(Relation::MULTI_VALUE_METHODS - [:references, :extending, :order, :unscope, :select]).each do |method|
test "##{method}!" do
assert relation.public_send("#{method}!", :foo).equal?(relation)
assert_equal [:foo], relation.public_send("#{method}_values")
end
end
test "#_select!" do
assert relation._select!(:foo).equal?(relation)
assert_equal [:foo], relation.select_values
end
test "#order!" do
assert relation.order!("name ASC").equal?(relation)
assert_equal ["name ASC"], relation.order_values
end
test "#order! with symbol prepends the table name" do
assert relation.order!(:name).equal?(relation)
node = relation.order_values.first
assert_predicate node, :ascending?
assert_equal :name, node.expr.name
assert_equal "posts", node.expr.relation.name
end
test "#order! on non-string does not attempt regexp match for references" do
obj = Object.new
assert_not_called(obj, :=~) do
assert relation.order!(obj)
assert_equal [obj], relation.order_values
end
end
test "#references!" do
assert relation.references!(:foo).equal?(relation)
assert_includes relation.references_values, "foo"
end
test "extending!" do
mod, mod2 = Module.new, Module.new
assert relation.extending!(mod).equal?(relation)
assert_equal [mod], relation.extending_values
assert relation.is_a?(mod)
relation.extending!(mod2)
assert_equal [mod, mod2], relation.extending_values
end
test "extending! with empty args" do
relation.extending!
assert_equal [], relation.extending_values
end
(Relation::SINGLE_VALUE_METHODS - [:lock, :reordering, :reverse_order, :create_with, :skip_query_cache]).each do |method|
test "##{method}!" do
assert relation.public_send("#{method}!", :foo).equal?(relation)
assert_equal :foo, relation.public_send("#{method}_value")
end
end
test "#from!" do
assert relation.from!("foo").equal?(relation)
assert_equal "foo", relation.from_clause.value
end
test "#lock!" do
assert relation.lock!("foo").equal?(relation)
assert_equal "foo", relation.lock_value
end
test "#reorder!" do
@relation = relation.order("foo")
assert relation.reorder!("bar").equal?(relation)
assert_equal ["bar"], relation.order_values
assert relation.reordering_value
end
test "#reorder! with symbol prepends the table name" do
assert relation.reorder!(:name).equal?(relation)
node = relation.order_values.first
assert_predicate node, :ascending?
assert_equal :name, node.expr.name
assert_equal "posts", node.expr.relation.name
end
test "reverse_order!" do
@relation = Post.order("title ASC, comments_count DESC")
relation.reverse_order!
assert_equal "title DESC", relation.order_values.first
assert_equal "comments_count ASC", relation.order_values.last
relation.reverse_order!
assert_equal "title ASC", relation.order_values.first
assert_equal "comments_count DESC", relation.order_values.last
end
test "create_with!" do
assert relation.create_with!(foo: "bar").equal?(relation)
assert_equal({ foo: "bar" }, relation.create_with_value)
end
test "merge!" do
assert relation.merge!(select: :foo).equal?(relation)
assert_equal [:foo], relation.select_values
end
test "merge with a proc" do
assert_equal [:foo], relation.merge(-> { select(:foo) }).select_values
end
test "none!" do
assert relation.none!.equal?(relation)
assert_equal [NullRelation], relation.extending_values
assert relation.is_a?(NullRelation)
end
test "distinct!" do
relation.distinct! :foo
assert_equal :foo, relation.distinct_value
end
test "skip_query_cache!" do
relation.skip_query_cache!
assert relation.skip_query_cache_value
end
test "skip_preloading!" do
relation.skip_preloading!
assert relation.skip_preloading_value
end
private
def relation
@relation ||= Relation.new(FakeKlass)
end
end
end
| 29.211921 | 125 | 0.67037 |
d56bc09adb52f7c9c092159609206347d92c21ca | 18,501 | # frozen_string_literal: true
require 'stringio'
RSpec.describe Pry::Command do
subject do
Class.new(described_class) do
def process; end
end
end
let(:default_options) do
{
argument_required: false,
interpolate: true,
keep_retval: false,
shellwords: true,
takes_block: false,
use_prefix: true,
listing: 'nil'
}
end
describe ".match" do
context "when no argument is given" do
context "and when match was defined previously" do
before { subject.match('old-match') }
it "doesn't overwrite match" do
expect(subject.match).to eq('old-match')
end
end
context "and when match was not defined previously" do
it "sets match to nil" do
subject.match
expect(subject.match).to be_nil
end
end
end
context "when given an argument" do
context "and when match is a string" do
it "sets command options with listing as match" do
subject.match('match') # rubocop:disable Performance/RedundantMatch
expect(subject.command_options).to include(listing: 'match')
end
end
context "and when match is an object" do
let(:object) do
obj = Object.new
def obj.inspect
'inspect'
end
obj
end
it "sets command options with listing as object's inspect" do
subject.match(object)
expect(subject.command_options).to include(listing: 'inspect')
end
end
end
end
describe ".description" do
context "and when description was defined previously" do
before { subject.description('old description') }
it "doesn't overwrite match" do
subject.description
expect(subject.description).to eq('old description')
end
end
context "and when description was not defined previously" do
it "sets description to nil" do
expect(subject.description).to be_nil
end
end
context "when given an argument" do
it "sets description" do
subject.description('description')
expect(subject.description).to eq('description')
end
end
end
describe ".command_options" do
context "when no argument is given" do
context "and when command options were defined previously" do
before { subject.command_options(foo: :bar) }
it "returns memoized command options" do
expect(subject.command_options).to eq(default_options.merge(foo: :bar))
end
end
context "and when command options were not defined previously" do
it "sets command options to default options" do
subject.command_options
expect(subject.command_options).to eq(default_options)
end
end
end
context "when given an argument" do
let(:new_option) { { new_option: 'value' } }
it "merges the argument with command options" do
expect(subject.command_options(new_option))
.to eq(default_options.merge(new_option))
end
end
end
describe ".banner" do
context "when no argument is given" do
context "and when banner was defined previously" do
before { subject.banner('banner') }
it "returns the memoized banner" do
expect(subject.banner).to eq('banner')
end
end
context "and when banner was not defined previously" do
it "return nil" do
subject.banner
expect(subject.banner).to be_nil
end
end
end
context "when given an argument" do
it "merges the argument with command options" do
expect(subject.banner('banner')).to eq('banner')
end
end
end
describe ".block" do
context "when block exists" do
let(:block) { proc {} }
it "returns the block" do
subject.block = block
expect(subject.block).to eql(block)
end
end
context "when block doesn't exist" do
it "uses #process method" do
expect(subject.block.name).to eq(:process)
end
end
end
describe ".source" do
it "returns source code of the method" do
expect(subject.source).to eq("def process; end\n")
end
end
describe ".doc" do
subject do
Class.new(described_class) do
def help
'help'
end
end
end
it "returns help output" do
expect(subject.doc).to eq('help')
end
end
describe ".source_file" do
it "returns source file" do
expect(subject.source_file).to match(__FILE__)
end
end
describe ".source_line" do
it "returns source line" do
expect(subject.source_line).to be_kind_of(Integer)
end
end
describe ".default_options" do
context "when given a String argument" do
it "returns default options with string listing" do
expect(subject.default_options('listing'))
.to eq(default_options.merge(listing: 'listing'))
end
end
context "when given an Object argument" do
let(:object) do
obj = Object.new
def obj.inspect
'inspect'
end
obj
end
it "returns default options with object's inspect as listing" do
expect(subject.default_options(object))
.to eq(default_options.merge(listing: 'inspect'))
end
end
end
describe ".name" do
it "returns the name of the command" do
expect(subject.name).to eq('#<class(Pry::Command nil)>')
end
context "when super command name exists" do
subject do
parent = Class.new(described_class) do
def name
'parent name'
end
end
Class.new(parent)
end
it "returns the name of the parent command" do
expect(subject.name).to eq('#<class(Pry::Command nil)>')
end
end
end
describe ".inspect" do
subject do
Class.new(described_class) do
def self.name
'name'
end
end
end
it "returns command name" do
expect(subject.inspect).to eq('name')
end
end
describe ".command_name" do
before { subject.match('foo') }
it "returns listing" do
expect(subject.command_name).to eq('foo')
end
end
describe ".subclass" do
it "returns a new class" do
klass = subject.subclass('match', 'desc', {}, Module.new)
expect(klass).to be_a(Class)
expect(klass).not_to eql(subject)
end
it "includes helpers to the new class" do
mod = Module.new { def foo; end }
klass = subject.subclass('match', 'desc', {}, mod)
expect(klass.new).to respond_to(:foo)
end
it "sets match on the new class" do
klass = subject.subclass('match', 'desc', {}, Module.new)
expect(klass.match).to eq('match')
end
it "sets description on the new class" do
klass = subject.subclass('match', 'desc', {}, Module.new)
expect(klass.description).to eq('desc')
end
it "sets command options on the new class" do
klass = subject.subclass('match', 'desc', { foo: :bar }, Module.new)
expect(klass.command_options).to include(foo: :bar)
end
it "sets block on the new class" do
block = proc {}
klass = subject.subclass('match', 'desc', { foo: :bar }, Module.new, &block)
expect(klass.block).to eql(block)
end
end
describe ".matches?" do
context "when given value matches command regex" do
before { subject.match('test-command') }
it "returns true" do
expect(subject.matches?('test-command')).to be_truthy
end
end
context "when given value doesn't match command regex" do
it "returns false" do
expect(subject.matches?('test-command')).to be_falsey
end
end
end
describe ".match_score" do
context "when command regex matches given value" do
context "and when the size of last match is more than 1" do
before { subject.match(/\.(.*)/) }
it "returns the length of the first match" do
expect(subject.match_score('.||')).to eq(1)
end
end
context "and when the size of last match is 1 or 0" do
before { subject.match('hi') }
it "returns the length of the last match" do
expect(subject.match_score('hi there')).to eq(2)
end
end
end
context "when command regex doesn't match given value" do
it "returns -1" do
expect(subject.match_score('test')).to eq(-1)
end
end
end
describe ".command_regex" do
before { subject.match('test-command') }
context "when use_prefix is true" do
before { subject.command_options(use_prefix: true) }
it "returns a Regexp without a prefix" do
expect(subject.command_regex).to eq(/\Atest\-command(?!\S)/)
end
end
context "when use_prefix is false" do
before { subject.command_options(use_prefix: false) }
it "returns a Regexp with a prefix" do
expect(subject.command_regex).to eq(/\A(?:)?test\-command(?!\S)/)
end
end
end
describe ".convert_to_regex" do
context "when given object is a String" do
it "escapes the string as a Regexp" do
expect(subject.convert_to_regex('foo.+')).to eq('foo\\.\\+')
end
end
context "when given object is an Object" do
let(:obj) { Object.new }
it "returns the given object" do
expect(subject.convert_to_regex(obj)).to eql(obj)
end
end
end
describe ".group" do
context "when name is given" do
it "sets group to that name" do
expect(subject.group('Test Group')).to eq('Test Group')
end
end
context "when source file matches a pry command" do
before do
expect_any_instance_of(Pry::Method).to receive(:source_file)
.and_return('/pry/test_commands/test_command.rb')
end
it "sets group name to command name" do
expect(subject.group).to eq('Test command')
end
end
context "when source file matches a pry plugin" do
before do
expect_any_instance_of(Pry::Method).to receive(:source_file)
.and_return('pry-test-1.2.3')
end
it "sets group name to plugin name" do
expect(subject.group).to eq('pry-test (v1.2.3)')
end
end
context "when source file matches 'pryrc'" do
before do
expect_any_instance_of(Pry::Method).to receive(:source_file)
.and_return('pryrc')
end
it "sets group name to pryrc" do
expect(subject.group).to eq('pryrc')
end
end
context "when source file doesn't match anything" do
it "returns '(other)'" do
expect(subject.group).to eq('(other)')
end
end
end
describe ".state" do
it "returns a command state" do
expect(described_class.state).to be_an(OpenStruct)
end
end
describe "#run" do
let(:command_set) do
set = Pry::CommandSet.new
set.command('test') {}
set
end
subject do
command = Class.new(described_class)
command.new(command_set: command_set, pry_instance: Pry.new)
end
it "runs a command from another command" do
result = subject.run('test')
expect(result).to be_command
end
end
describe "#commands" do
let(:command_set) do
set = Pry::CommandSet.new
set.command('test') do
def process; end
end
set
end
subject do
command = Class.new(described_class)
command.new(command_set: command_set, pry_instance: Pry.new)
end
it "returns command set as a hash" do
expect(subject.commands).to eq('test' => command_set['test'])
end
end
describe "#void" do
it "returns void value" do
expect(subject.new.void).to eq(Pry::Command::VOID_VALUE)
end
end
describe "#target_self" do
let(:target) { binding }
subject { Class.new(described_class).new(target: target) }
it "returns the value of self inside the target binding" do
expect(subject.target_self).to eq(target.eval('self'))
end
end
describe "#state" do
let(:target) { binding }
subject { Class.new(described_class).new(pry_instance: Pry.new) }
it "returns a state object" do
expect(subject.state).to be_an(OpenStruct)
end
it "remembers the state" do
subject.state.foo = :bar
expect(subject.state.foo).to eq(:bar)
end
end
describe "#interpolate_string" do
context "when given string contains \#{" do
let(:target) do
foo = 'bar'
binding
end
subject { Class.new(described_class).new(target: target) }
it "returns the result of eval within target" do
# rubocop:disable Lint/InterpolationCheck
expect(subject.interpolate_string('#{foo}')).to eq('bar')
# rubocop:enable Lint/InterpolationCheck
end
end
context "when given string doesn't contain \#{" do
it "returns the given string" do
expect(subject.new.interpolate_string('foo')).to eq('foo')
end
end
end
describe "#check_for_command_collision" do
let(:command_set) do
set = Pry::CommandSet.new
set.command('test') do
def process; end
end
set
end
let(:output) { StringIO.new }
subject do
command = Class.new(described_class)
command.new(command_set: command_set, target: target, output: output)
end
context "when a command collides with a local variable" do
let(:target) do
test = 'foo'
binding
end
it "displays a warning" do
subject.check_for_command_collision('test', '')
expect(output.string)
.to match("'test', which conflicts with a local-variable")
end
end
context "when a command collides with a method" do
let(:target) do
def test; end
binding
end
it "displays a warning" do
subject.check_for_command_collision('test', '')
expect(output.string).to match("'test', which conflicts with a method")
end
end
context "when a command doesn't collide" do
let(:target) do
def test; end
binding
end
it "doesn't display a warning" do
subject.check_for_command_collision('nothing', '')
expect(output.string).to be_empty
end
end
end
describe "#tokenize" do
let(:target) { binding }
let(:klass) { Class.new(described_class) }
let(:target) { binding }
subject { klass.new(target: target) }
before { klass.match('test') }
context "when given string uses interpolation" do
let(:target) do
foo = 4
binding
end
before { klass.command_options(interpolate: true) }
it "interpolates the string in the target's context" do
# rubocop:disable Lint/InterpolationCheck
expect(subject.tokenize('test #{1 + 2} #{3 + foo}'))
.to eq(['test', '3 7', [], %w[3 7]])
# rubocop:enable Lint/InterpolationCheck
end
context "and when interpolation is disabled" do
before { klass.command_options(interpolate: false) }
it "doesn't interpolate the string" do
# rubocop:disable Lint/InterpolationCheck
expect(subject.tokenize('test #{3 + foo}'))
.to eq(['test', '#{3 + foo}', [], %w[#{3 + foo}]])
# rubocop:enable Lint/InterpolationCheck
end
end
end
context "when given string doesn't match a command" do
it "raises CommandError" do
expect { subject.tokenize('boom') }
.to raise_error(Pry::CommandError, /command which didn't match/)
end
end
context "when target is not set" do
subject { klass.new }
it "still returns tokens" do
expect(subject.tokenize('test --help'))
.to eq(['test', '--help', [], ['--help']])
end
end
context "when shellwords is enabled" do
before { klass.command_options(shellwords: true) }
it "strips quotes from the arguments" do
expect(subject.tokenize(%(test "foo" 'bar' 1)))
.to eq(['test', %("foo" 'bar' 1), [], %w[foo bar 1]])
end
end
context "when shellwords is disabled" do
before { klass.command_options(shellwords: false) }
it "doesn't split quotes from the arguments" do
# rubocop:disable Lint/PercentStringArray
expect(subject.tokenize(%(test "foo" 'bar' 1)))
.to eq(['test', %("foo" 'bar' 1), [], %w["foo" 'bar' 1]])
# rubocop:enable Lint/PercentStringArray
end
end
context "when command regex has captures" do
before { klass.match(/perfectly (normal)( beast)/i) }
it "returns the captures" do
expect(subject.tokenize('Perfectly Normal Beast (honest!)')).to eq(
[
'Perfectly Normal Beast',
'(honest!)',
['Normal', ' Beast'],
['(honest!)']
]
)
end
end
end
describe "#process_line" do
let(:klass) do
Class.new(described_class) do
def call(*args); end
end
end
let(:target) do
test = 4
binding
end
let(:output) { StringIO.new }
subject { klass.new(target: target, output: output) }
before { klass.match(/test(y)?/) }
it "sets arg_string" do
subject.process_line('test -v')
expect(subject.arg_string).to eq('-v')
end
it "sets captures" do
subject.process_line('testy')
expect(subject.captures).to eq(['y'])
end
describe "collision warnings" do
context "when collision warnings are configured" do
before do
expect(Pry.config).to receive(:collision_warning).and_return(true)
end
it "prints a warning when there's a collision" do
subject.process_line('test')
expect(output.string).to match(/conflicts with a local-variable/)
end
end
context "when collision warnings are not set" do
before do
expect(Pry.config).to receive(:collision_warning).and_return(false)
end
it "prints a warning when there's a collision" do
subject.process_line('test')
expect(output.string).to be_empty
end
end
end
end
describe "#complete" do
it "returns empty array" do
expect(subject.new.complete('')).to eq([])
end
end
end
| 25.731572 | 82 | 0.608616 |
d56595bb683b8c7ea69de8b7fa1a3b07b7e73fd3 | 2,321 | module Findout
# Normalises raw query parameters before matching:
# * drops nil/empty values,
# * rewrites the flat `{col:, ope:, val:}` form into a nested hash,
# * expands alternate key names registered on the base's class,
# * expands shorthand keys via Shorthand.
class Preprocessor
  # base - object whose class supplies `.alternates` and against which
  #        Shorthand lookups are resolved.
  def initialize(base)
    @base = base
  end

  # Entry point: returns the normalised form of +params+.
  def run(params)
    process(params)
  end

  private

  # Dispatch on the container type of the value being processed.
  def process(params)
    case params
    when Array then process_array(params)
    when Hash  then process_hash(params)
    else            process_value(params)
    end
  end

  def process_array(items)
    items.map { |item| process(item) }
  end

  # Cleans the hash in place, then normalises each pair and deep-merges
  # the resulting fragments back together.
  def process_hash(hash)
    remove_empty(hash)
    convert_flat_format(hash)
    fragments = hash.map { |key, val| process_pair(key, val) }
    fragments.reduce(:deep_merge)
  end

  # Normalises a single key/value pair into a hash fragment.
  def process_pair(key, val)
    alternates = find_alternates(key)
    return process_hash(resolve_alternates(alternates, val)) if alternates

    shorthand = find_shorthand(key)
    return process_hash(resolve_shorthand(shorthand, val)) if shorthand

    if val.is_a?(Array) && val.all? { |v| v.is_a?(Hash) }
      { key => process_array(val) }
    elsif val.is_a?(Hash)
      { key => process_hash(val) }
    elsif val.is_a?(Symbol)
      { key => process_value(val) }
    else
      { key => val }
    end
  end

  # Scalars may themselves be alternates or shorthand; otherwise they
  # pass through untouched.
  def process_value(val)
    alternates = find_alternates(val)
    return process(alternates) if alternates

    shorthand = find_shorthand(val)
    return val unless shorthand

    tail = shorthand.pop
    process_hash(resolve_shorthand(shorthand, tail))
  end

  # Removes pairs whose value is nil or empty (mutates the hash).
  def remove_empty(hash)
    hash.delete_if do |_key, val|
      val.nil? || (val.respond_to?(:empty?) && val.empty?)
    end
  end

  # Rewrites `{col: c, ope: o, val: v}` into `{c => {o => v}}` in place.
  def convert_flat_format(hash)
    return unless hash[:col].respond_to?(:to_sym) &&
                  hash[:ope].respond_to?(:to_sym) &&
                  hash[:val]

    column   = hash.delete(:col)
    operator = hash.delete(:ope)
    value    = hash.delete(:val)
    hash[column.to_sym] = { operator.to_sym => value }
  end

  # Looks up registered alternates for the key; falsy when none apply.
  def find_alternates(key)
    return false unless key.respond_to?(:to_sym)
    @base.class.alternates[key.to_sym]
  end

  def resolve_alternates(keys, val)
    Array(keys).map { |k| { k.to_sym => val } }.reduce(:deep_merge)
  end

  # Shorthand applies only to symbol/string keys; nil otherwise.
  def find_shorthand(key)
    return nil unless key.is_a?(Symbol) || key.is_a?(String)
    Shorthand.new(@base).find(key)
  end

  # Nests +val+ under the key path, innermost last:
  # resolve_shorthand([:a, :b], 1) #=> { :a => { :b => 1 } }
  def resolve_shorthand(keys, val)
    Array(keys).reverse_each.inject(val) { |nested, key| { key => nested } }
  end
end
end
| 23.444444 | 89 | 0.567428 |
bbf449216c8508448f9ff1f2ed74812eba6dbc5f | 161 | # frozen_string_literal: true
# Public: Helper to render only the id and name of the model.
# Serialises only a model's identifying attributes (id and name).
class ModelSerializer < BaseSerializer
  attributes :id, :name
end
| 23 | 61 | 0.770186 |
4a8b3d8eeb6657420ae4cf01a3c047136d1dcdec | 93 | # frozen_string_literal: true
module Steam
  # Wrapper gem namespace.
  module Wrapper
    # Gem version string (Semantic Versioning).
    VERSION = "0.1.0"
  end
end
| 11.625 | 29 | 0.698925 |
ab3e897501b5f50b8769fffadf61d14e4c969902 | 1,843 | require 'twitter'
require 'pony'
require 'twilio-rb'
require 'pushover'
module FISC
  # Outbound alerting helpers. Each channel — email (Pony), SMS (Twilio),
  # push (Pushover) and public tweets (Twitter) — fires only when the
  # matching key is present in FISC.config, so unconfigured channels are
  # silently skipped.
  module Alerts
    # for lower priority things that don't need to get texted every 5 mins
    def self.email!(message)
      Pony.mail(FISC.config['email'].merge(body: message)) if FISC.config['email']
    end

    # High-priority admin alert fanned out to email, SMS and push.
    # When short_message is not supplied it defaults to the first 140
    # characters of message (SMS/push friendly).
    def self.admin!(message, short_message = nil)
      # Fixed off-by-one: message[0..140] kept 141 characters despite the
      # `> 140` guard; [0, 140] keeps exactly 140.
      short_message ||= (message.size > 140) ? message[0, 140] : message

      # do in order of importance, in case it blows up in the middle
      Pony.mail(FISC.config['email'].merge(body: message)) if FISC.config['email']
      Twilio::SMS.create(to: FISC.config['twilio']['to'], from: FISC.config['twilio']['from'], body: short_message) if FISC.config['twilio']
      Pushover.notification(title: short_message, message: message) if FISC.config['pushover']
    end

    # Public announcement via Twitter.
    def self.public!(message)
      # can't use blunt 140-char check, Twitter handles URLs specially
      Twitter.update(message) if FISC.config['twitter']
    end

    # Configures each alerting client from FISC.config; safe to call when
    # some (or all) channels are unconfigured.
    def self.config!
      if FISC.config['twitter']
        Twitter.configure do |twitter|
          twitter.consumer_key = FISC.config['twitter']['consumer_key']
          twitter.consumer_secret = FISC.config['twitter']['consumer_secret']
          twitter.oauth_token = FISC.config['twitter']['oauth_token']
          twitter.oauth_token_secret = FISC.config['twitter']['oauth_token_secret']
        end
      end

      if FISC.config['twilio']
        Twilio::Config.setup(
          account_sid: FISC.config['twilio']['account_sid'],
          auth_token: FISC.config['twilio']['auth_token']
        )
      end

      if FISC.config['pushover']
        Pushover.configure do |pushover|
          pushover.user = FISC.config['pushover']['user_key']
          pushover.token = FISC.config['pushover']['app_key']
        end
      end
    end
  end
end
f81869ee9000ec3f3106c65897abc938c7cc2d93 | 289 | # encoding: UTF-8
# EasyType parameter holding the SYS account password for the resource.
newparam(:sys_password) do
  include EasyType

  # Fixed typo in the user-facing description ("ythe" -> "the").
  desc 'The password of the SYS account'

  # Nothing is read back from the catalog: the column lookup is
  # deliberately left commented out, so this always resolves to nil.
  to_translate_to_resource do |raw_resource|
    # raw_resource.column_data('sys_password')
  end

  # SQL fragment emitted when the parameter is applied.
  on_apply do |command_builder|
    "user sys identified by #{value}"
  end
end
1ab13f47ee113da017177a8ee86642aed200f583 | 330 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe BatchLeaderSheet, type: :model do
  context 'check creation' do
    # Built via FactoryBot; assumes a :batch_leader_sheet factory exists.
    let(:batchleadersheet) { create(:batch_leader_sheet) }

    it 'check creation week' do
      # creation_week is expected to equal the start of the current week
      # (Date.current.at_beginning_of_week).
      expect(batchleadersheet.creation_week).to eq(Date.current.at_beginning_of_week)
    end
  end
end
| 23.571429 | 85 | 0.754545 |
bf6265a29555a1c2f4ad540bcdb18bbea7d81f46 | 955 | #! /usr/bin/env ruby -S rspec
require 'spec_helper_acceptance'
# Beaker acceptance spec: applies small Puppet manifests on a test host
# and checks the notify output to verify stdlib's ceiling() function.
describe 'ceiling function', :unless => UNSUPPORTED_PLATFORMS.include?(fact('operatingsystem')) do
  describe 'success' do
    it 'ceilings floats' do
      # ceiling(12.8) must round up to 13.
      pp = <<-EOS
$a = 12.8
$b = 13
$o = ceiling($a)
if $o == $b {
notify { 'output correct': }
}
EOS

      apply_manifest(pp, :catch_failures => true) do |r|
        expect(r.stdout).to match(/Notice: output correct/)
      end
    end

    it 'ceilings integers' do
      # ceiling of an integer is the integer itself.
      pp = <<-EOS
$a = 7
$b = 7
$o = ceiling($a)
if $o == $b {
notify { 'output correct': }
}
EOS

      apply_manifest(pp, :catch_failures => true) do |r|
        expect(r.stdout).to match(/Notice: output correct/)
      end
    end
  end

  describe 'failure' do
    # Pending examples: declared without bodies so RSpec reports them as
    # not yet implemented.
    it 'handles improper argument counts'
    it 'handles non-numbers'
  end
end
| 23.875 | 99 | 0.536126 |
792fce74e353417f166d82aa909f99868fc9f0eb | 547 | # frozen_string_literal: true
# Renames datacentre.re3data to re3data_id (recreating its index) and
# adds client metadata columns (mostly JSON, plus a limited string and
# an integer identifier).
class AddClientFields < ActiveRecord::Migration[5.2]
  def change
    # Drop the old index before renaming, then index the new column name.
    remove_index :datacentre, %i[re3data]
    rename_column :datacentre, :re3data, :re3data_id
    add_index :datacentre, %i[re3data_id]

    add_column :datacentre, :issn, :json
    add_column :datacentre, :certificate, :json
    add_column :datacentre, :repository_type, :json
    # limit: 191 keeps the column indexable under MySQL utf8mb4.
    add_column :datacentre, :alternate_name, :string, limit: 191
    add_column :datacentre, :language, :json
    add_column :datacentre, :opendoar_id, :integer
  end
end
| 32.176471 | 64 | 0.736746 |
ede3eb5580b6bcec3df8a47272a53e5e6c216bdb | 170 | require 'rails_helper'
# Placeholder spec — intentionally skipped until examples are written.
# NOTE(review): "Sujbect" looks misspelled; confirm it matches the
# actual model constant before renaming anything.
describe BiologicalRelationshipType::BiologicalRelationshipSujbectType, type: :model do
  skip "add some examples to (or delete) #{__FILE__}"
end
| 28.333333 | 87 | 0.805882 |
21ef5f29b15c3670430fe3e128930f82c904bac0 | 23,723 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/cloud/bigquery/version"
require "google/cloud/bigquery/convert"
require "google/cloud/errors"
require "google/apis/bigquery_v2"
require "pathname"
require "securerandom"
require "mini_mime"
require "date"
module Google
  module Cloud
    module Bigquery
      ##
      # @private Represents the Bigquery service and API calls.
      class Service
        ##
        # Alias to the Google Client API module
        API = Google::Apis::BigqueryV2

        # @private
        attr_accessor :project

        # @private
        attr_accessor :credentials

        # @private
        attr_reader :retries, :timeout, :host

        ##
        # Creates a new Service instance.
        def initialize project, credentials, retries: nil, timeout: nil, host: nil, quota_project: nil
          @project = project
          @credentials = credentials
          @retries = retries
          @timeout = timeout
          @host = host
          @quota_project = quota_project
        end

        ##
        # Lazily builds and memoizes the underlying API client, wiring up
        # identification headers, timeouts and authorization. Client-level
        # retries are disabled because #execute implements its own backoff.
        def service
          return mocked_service if mocked_service
          @service ||= begin
            service = API::BigqueryService.new
            service.client_options.application_name = "gcloud-ruby"
            service.client_options.application_version = Google::Cloud::Bigquery::VERSION
            service.client_options.open_timeout_sec = timeout
            service.client_options.read_timeout_sec = timeout
            service.client_options.send_timeout_sec = timeout
            service.request_options.retries = 0 # handle retries in #execute
            service.request_options.header ||= {}
            service.request_options.header["x-goog-api-client"] = \
              "gl-ruby/#{RUBY_VERSION} gccl/#{Google::Cloud::Bigquery::VERSION}"
            service.request_options.query ||= {}
            service.request_options.query["prettyPrint"] = false
            service.request_options.quota_project = @quota_project if @quota_project
            service.authorization = @credentials.client
            service.root_url = host if host
            service
          end
        end

        # Test hook: when set, #service returns this object instead of a
        # real API client.
        attr_accessor :mocked_service

        # Fetches the BigQuery service account for the project.
        def project_service_account
          service.get_project_service_account project
        end

        ##
        # Lists all datasets in the specified project to which you have
        # been granted the READER dataset role.
        def list_datasets all: nil, filter: nil, max: nil, token: nil
          # The list operation is considered idempotent
          execute backoff: true do
            service.list_datasets @project, all: all, filter: filter, max_results: max, page_token: token
          end
        end

        ##
        # Returns the dataset specified by datasetID.
        def get_dataset dataset_id
          # The get operation is considered idempotent
          execute backoff: true do
            service.get_dataset @project, dataset_id
          end
        end

        ##
        # Creates a new empty dataset.
        def insert_dataset new_dataset_gapi
          execute { service.insert_dataset @project, new_dataset_gapi }
        end

        ##
        # Updates information in an existing dataset, only replacing
        # fields that are provided in the submitted dataset resource.
        def patch_dataset dataset_id, patched_dataset_gapi
          patch_with_backoff = false
          options = {}
          if patched_dataset_gapi.etag
            options[:header] = { "If-Match" => patched_dataset_gapi.etag }
            # The patch with etag operation is considered idempotent
            patch_with_backoff = true
          end
          execute backoff: patch_with_backoff do
            service.patch_dataset @project, dataset_id, patched_dataset_gapi, options: options
          end
        end

        ##
        # Deletes the dataset specified by the datasetId value.
        # Before you can delete a dataset, you must delete all its tables,
        # either manually or by specifying force: true in options.
        # Immediately after deletion, you can create another dataset with
        # the same name.
        def delete_dataset dataset_id, force = nil
          execute do
            service.delete_dataset @project, dataset_id, delete_contents: force
          end
        end
##
# Lists all tables in the specified dataset.
# Requires the READER dataset role.
def list_tables dataset_id, max: nil, token: nil
  # The list operation is considered idempotent
  execute backoff: true do
    service.list_tables @project, dataset_id, max_results: max, page_token: token
  end
end

##
# Gets the specified table resource by full table reference.
def get_project_table project_id, dataset_id, table_id
  # The get operation is considered idempotent
  execute backoff: true do
    service.get_table project_id, dataset_id, table_id
  end
end

##
# Gets the specified table resource by table ID.
# This method does not return the data in the table,
# it only returns the table resource,
# which describes the structure of this table.
def get_table dataset_id, table_id
  get_project_table @project, dataset_id, table_id
end

##
# Creates a new, empty table in the dataset.
def insert_table dataset_id, new_table_gapi
  execute { service.insert_table @project, dataset_id, new_table_gapi }
end

##
# Updates information in an existing table, replacing fields that
# are provided in the submitted table resource.
def patch_table dataset_id, table_id, patched_table_gapi
  patch_with_backoff = false
  options = {}
  if patched_table_gapi.etag
    options[:header] = { "If-Match" => patched_table_gapi.etag }
    # The patch with etag operation is considered idempotent
    patch_with_backoff = true
  end
  execute backoff: patch_with_backoff do
    service.patch_table @project, dataset_id, table_id, patched_table_gapi, options: options
  end
end

##
# Returns Google::Apis::BigqueryV2::Policy
def get_table_policy dataset_id, table_id
  policy_options = API::GetPolicyOptions.new requested_policy_version: 1
  execute do
    service.get_table_iam_policy table_path(dataset_id, table_id),
                                 API::GetIamPolicyRequest.new(options: policy_options)
  end
end

##
# @param [Google::Apis::BigqueryV2::Policy] new_policy
def set_table_policy dataset_id, table_id, new_policy
  execute do
    service.set_table_iam_policy table_path(dataset_id, table_id),
                                 API::SetIamPolicyRequest.new(policy: new_policy)
  end
end

##
# Returns Google::Apis::BigqueryV2::TestIamPermissionsResponse
def test_table_permissions dataset_id, table_id, permissions
  execute do
    service.test_table_iam_permissions table_path(dataset_id, table_id),
                                       API::TestIamPermissionsRequest.new(permissions: permissions)
  end
end

##
# Deletes the table specified by tableId from the dataset.
# If the table contains data, all the data will be deleted.
def delete_table dataset_id, table_id
  execute { service.delete_table @project, dataset_id, table_id }
end

##
# Retrieves data from the table.
def list_tabledata dataset_id, table_id, max: nil, token: nil, start: nil
  # The list operation is considered idempotent
  execute backoff: true do
    json_txt = service.list_table_data \
      @project, dataset_id, table_id,
      max_results: max,
      page_token: token,
      start_index: start,
      options: { skip_deserialization: true }
    JSON.parse json_txt, symbolize_names: true
  end
end

##
# Streams rows into the table, first converting each row to its JSON
# representation; delegates to #insert_tabledata_json_rows.
def insert_tabledata dataset_id, table_id, rows, insert_ids: nil, ignore_unknown: nil, skip_invalid: nil
  json_rows = Array(rows).map { |row| Convert.to_json_row row }
  insert_tabledata_json_rows dataset_id, table_id, json_rows, insert_ids: insert_ids,
                                                              ignore_unknown: ignore_unknown,
                                                              skip_invalid: skip_invalid
end

##
# Streams pre-converted JSON rows into the table. Each row gets an
# insertId (supplied, or a generated UUID) unless its insert_id is
# :skip, which opts that row out of de-duplication.
def insert_tabledata_json_rows dataset_id, table_id, json_rows, insert_ids: nil, ignore_unknown: nil,
                               skip_invalid: nil
  rows_and_ids = Array(json_rows).zip Array(insert_ids)
  insert_rows = rows_and_ids.map do |json_row, insert_id|
    if insert_id == :skip
      { json: json_row }
    else
      insert_id ||= SecureRandom.uuid
      {
        insertId: insert_id,
        json: json_row
      }
    end
  end

  insert_req = {
    rows: insert_rows,
    ignoreUnknownValues: ignore_unknown,
    skipInvalidRows: skip_invalid
  }.to_json

  # The insertAll with insertId operation is considered idempotent
  execute backoff: true do
    service.insert_all_table_data(
      @project, dataset_id, table_id, insert_req,
      options: { skip_serialization: true }
    )
  end
end
##
# Lists all models in the specified dataset.
# Requires the READER dataset role.
def list_models dataset_id, max: nil, token: nil
  options = { skip_deserialization: true }
  # The list operation is considered idempotent
  execute backoff: true do
    json_txt = service.list_models @project, dataset_id, max_results: max, page_token: token, options: options
    JSON.parse json_txt, symbolize_names: true
  end
end

# Gets the specified model resource by full model reference.
def get_project_model project_id, dataset_id, model_id
  # The get operation is considered idempotent
  execute backoff: true do
    json_txt = service.get_model project_id, dataset_id, model_id, options: { skip_deserialization: true }
    JSON.parse json_txt, symbolize_names: true
  end
end

# Gets the specified model resource by model ID. This method does not return the data in the model, it only
# returns the model resource, which describes the structure of this model.
def get_model dataset_id, model_id
  get_project_model @project, dataset_id, model_id
end

##
# Updates information in an existing model, replacing fields that
# are provided in the submitted model resource.
def patch_model dataset_id, model_id, patched_model_gapi, etag = nil
  patch_with_backoff = false
  options = { skip_deserialization: true }
  if etag
    options[:header] = { "If-Match" => etag }
    # The patch with etag operation is considered idempotent
    patch_with_backoff = true
  end
  execute backoff: patch_with_backoff do
    json_txt = service.patch_model @project, dataset_id, model_id, patched_model_gapi, options: options
    JSON.parse json_txt, symbolize_names: true
  end
end

##
# Deletes the model specified by modelId from the dataset.
# If the model contains data, all the data will be deleted.
def delete_model dataset_id, model_id
  execute { service.delete_model @project, dataset_id, model_id }
end

##
# Creates a new routine in the dataset.
def insert_routine dataset_id, new_routine_gapi
  execute { service.insert_routine @project, dataset_id, new_routine_gapi }
end

##
# Lists all routines in the specified dataset.
# Requires the READER dataset role.
# Unless readMask is set in the request, only the following fields are populated:
# etag, projectId, datasetId, routineId, routineType, creationTime, lastModifiedTime, and language.
def list_routines dataset_id, max: nil, token: nil, filter: nil
  # The list operation is considered idempotent
  execute backoff: true do
    service.list_routines @project, dataset_id, max_results: max,
                                                page_token: token,
                                                filter: filter
  end
end

##
# Gets the specified routine resource by routine ID.
def get_routine dataset_id, routine_id
  # The get operation is considered idempotent
  execute backoff: true do
    service.get_routine @project, dataset_id, routine_id
  end
end

##
# Updates information in an existing routine, replacing the entire routine resource.
def update_routine dataset_id, routine_id, new_routine_gapi
  update_with_backoff = false
  options = {}
  if new_routine_gapi.etag
    options[:header] = { "If-Match" => new_routine_gapi.etag }
    # The update with etag operation is considered idempotent
    update_with_backoff = true
  end
  execute backoff: update_with_backoff do
    service.update_routine @project, dataset_id, routine_id, new_routine_gapi, options: options
  end
end

##
# Deletes the routine specified by routine_id from the dataset.
def delete_routine dataset_id, routine_id
  execute { service.delete_routine @project, dataset_id, routine_id }
end

##
# Lists all jobs in the specified project to which you have
# been granted the READER job role.
def list_jobs all: nil, token: nil, max: nil, filter: nil, min_created_at: nil, max_created_at: nil,
              parent_job_id: nil
  # The list operation is considered idempotent
  min_creation_time = Convert.time_to_millis min_created_at
  max_creation_time = Convert.time_to_millis max_created_at
  execute backoff: true do
    service.list_jobs @project, all_users: all, max_results: max,
                                page_token: token, projection: "full", state_filter: filter,
                                min_creation_time: min_creation_time, max_creation_time: max_creation_time,
                                parent_job_id: parent_job_id
  end
end

##
# Cancel the job specified by jobId.
def cancel_job job_id, location: nil
  # The BigQuery team has told us cancelling is considered idempotent
  execute backoff: true do
    service.cancel_job @project, job_id, location: location
  end
end

##
# Returns the job specified by jobID.
def get_job job_id, location: nil
  # The get operation is considered idempotent
  execute backoff: true do
    service.get_job @project, job_id, location: location
  end
end

##
# Inserts a job with the given configuration, generating a client-side
# job reference (see #job_ref_from) so the insert can be retried safely.
def insert_job config, location: nil
  job_object = API::Job.new job_reference: job_ref_from(nil, nil, location: location), configuration: config
  # Jobs have generated id, so this operation is considered idempotent
  execute backoff: true do
    service.insert_job @project, job_object
  end
end

##
# Starts a query job from a fully-formed job resource.
def query_job query_job_gapi
  execute backoff: true do
    service.insert_job @project, query_job_gapi
  end
end

##
# Returns the query data for the job
def job_query_results job_id, location: nil, max: nil, token: nil, start: nil, timeout: nil
  # The get operation is considered idempotent
  execute backoff: true do
    service.get_job_query_results @project, job_id,
                                  location: location,
                                  max_results: max,
                                  page_token: token,
                                  start_index: start,
                                  timeout_ms: timeout
  end
end

##
# Starts a table copy job.
def copy_table copy_job_gapi
  execute backoff: true do
    service.insert_job @project, copy_job_gapi
  end
end

##
# Starts an extract (export) job.
def extract_table extract_job_gapi
  execute backoff: true do
    service.insert_job @project, extract_job_gapi
  end
end

##
# Starts a load job whose source data lives in Google Cloud Storage.
def load_table_gs_url load_job_gapi
  execute backoff: true do
    service.insert_job @project, load_job_gapi
  end
end

##
# Starts a load job, streaming the given local file as the upload source.
def load_table_file file, load_job_gapi
  execute backoff: true do
    service.insert_job @project, load_job_gapi, upload_source: file, content_type: mime_type_for(file)
  end
end
##
# Builds a TableReference either from an object exposing #table_ref or
# by parsing a string (see .table_ref_from_s).
def self.get_table_ref table, default_ref: nil
  if table.respond_to? :table_ref
    table.table_ref
  else
    table_ref_from_s table, default_ref: default_ref
  end
end

##
# Extracts at least `tbl` group, and possibly `dts` and `prj` groups,
# from strings in the formats: "my_table", "my_dataset.my_table", or
# "my-project:my_dataset.my_table". Then merges project_id and
# dataset_id from the default table ref if they are missing.
#
# The regex matches both Standard SQL
# ("bigquery-public-data.samples.shakespeare") and Legacy SQL
# ("bigquery-public-data:samples.shakespeare").
def self.table_ref_from_s str, default_ref: {}
  str = str.to_s
  m = /\A(((?<prj>\S*)(:|\.))?(?<dts>\S*)\.)?(?<tbl>\S*)\z/.match str
  raise ArgumentError, "unable to identify table from #{str.inspect}" unless m
  str_table_ref_hash = {
    project_id: m["prj"],
    dataset_id: m["dts"],
    table_id: m["tbl"]
  }.delete_if { |_, v| v.nil? }
  str_table_ref_hash = default_ref.to_h.merge str_table_ref_hash
  ref = Google::Apis::BigqueryV2::TableReference.new str_table_ref_hash
  validate_table_ref ref
  ref
end

##
# Raises ArgumentError when any of the three required reference fields
# (project_id, dataset_id, table_id) is missing.
def self.validate_table_ref table_ref
  [:project_id, :dataset_id, :table_id].each do |f|
    raise ArgumentError, "TableReference is missing #{f}" if table_ref.send(f).nil?
  end
end

##
# Lists all projects to which you have been granted any project role.
def list_projects max: nil, token: nil
  execute backoff: true do
    service.list_projects max_results: max, page_token: token
  end
end

# If no job_id or prefix is given, always generate a client-side job ID
# anyway, for idempotent retry in the google-api-client layer.
# See https://cloud.google.com/bigquery/docs/managing-jobs#generate-jobid
def job_ref_from job_id, prefix, location: nil
  prefix ||= "job_"
  job_id ||= "#{prefix}#{generate_id}"
  job_ref = API::JobReference.new project_id: @project, job_id: job_id
  # BigQuery does not allow nil location, but missing is ok.
  job_ref.location = location if location
  job_ref
end

# API object for dataset.
def dataset_ref_from dts, pjt = nil
  return nil if dts.nil?
  if dts.respond_to? :dataset_id
    Google::Apis::BigqueryV2::DatasetReference.new(
      project_id: (pjt || dts.project_id || @project),
      dataset_id: dts.dataset_id
    )
  else
    Google::Apis::BigqueryV2::DatasetReference.new(
      project_id: (pjt || @project),
      dataset_id: dts
    )
  end
end

# Compact representation for logs and consoles.
def inspect
  "#{self.class}(#{@project})"
end

protected

# Creates a formatted table path.
def table_path dataset_id, table_id
  "projects/#{@project}/datasets/#{dataset_id}/tables/#{table_id}"
end

# Generate a random string similar to the BigQuery service job IDs.
def generate_id
  SecureRandom.urlsafe_base64 21
end

# Best-effort MIME type detection for upload sources; returns nil when
# the type is unknown or lookup fails.
def mime_type_for file
  mime_type = MiniMime.lookup_by_filename Pathname(file).to_path
  return nil if mime_type.nil?
  mime_type.content_type
rescue StandardError
  nil
end

# Runs the block, translating Google API errors to Google::Cloud::Error.
# With backoff: true the call is retried via Backoff (idempotent ops only).
def execute backoff: nil
  if backoff
    Backoff.new(retries: retries).execute { yield }
  else
    yield
  end
rescue Google::Apis::Error => e
  raise Google::Cloud::Error.from_error e
end
##
# Retry helper for idempotent API calls: re-runs the block with an
# exponential delay while the API reports only retryable error reasons.
class Backoff
  class << self
    # Class-wide defaults, overridable per instance via #initialize.
    attr_accessor :retries, :reasons, :backoff
  end

  self.retries = 5
  self.reasons = ["rateLimitExceeded", "backendError"]
  self.backoff = lambda do |retries|
    # Max delay is 32 seconds
    # See "Back-off Requirements" here:
    # https://cloud.google.com/bigquery/sla
    retries = 5 if retries > 5
    delay = 2**retries
    sleep delay
  end

  def initialize retries: nil, reasons: nil, backoff: nil
    @retries = (retries || Backoff.retries).to_i
    @reasons = (reasons || Backoff.reasons).to_a
    @backoff = backoff || Backoff.backoff
  end

  # Runs the block, retrying on retryable Google API errors until the
  # retry budget is exhausted; any other error is re-raised immediately.
  def execute
    attempt = 0
    begin
      yield
    rescue Google::Apis::Error => e
      raise e unless retry? e.body, attempt
      @backoff.call attempt
      attempt += 1
      retry
    end
  end

  protected

  # True when another retry is allowed and the error body is retryable.
  def retry? result, current_retries #:nodoc:
    current_retries < @retries && retry_error_reason?(result)
  end

  # Parses the JSON error body; retryable only when it lists at least one
  # error and every reason is in the allow-list. Malformed bodies are
  # never retryable.
  def retry_error_reason? err_body
    errors = Array JSON.parse(err_body).dig("error", "errors")
    !errors.empty? && errors.all? { |err| @reasons.include? err["reason"] }
  rescue StandardError
    false
  end
end
end
end
end
end
| 37.715421 | 118 | 0.596425 |
8728eda39200ec29fecb8ce409d3c875e7d33189 | 15,776 | describe MiqServer do
context ".seed" do
before do
MiqRegion.seed
Zone.seed
end
include_examples ".seed called multiple times"
end
context "#hostname" do
it("with a valid hostname") { expect(MiqServer.new(:hostname => "test").hostname).to eq("test") }
it("with a valid fqdn") { expect(MiqServer.new(:hostname => "test.example.com").hostname).to eq("test.example.com") }
it("with an invalid hostname") { expect(MiqServer.new(:hostname => "test_host").hostname).to be_nil }
it("without a hostname") { expect(MiqServer.new.hostname).to be_nil }
end
context ".my_guid" do
let(:guid_file) { Rails.root.join("GUID") }
it "should return the GUID from the file" do
MiqServer.my_guid_cache = nil
expect(File).to receive(:exist?).with(guid_file).and_return(true)
expect(File).to receive(:read).with(guid_file).and_return("an-existing-guid\n\n")
expect(MiqServer.my_guid).to eq("an-existing-guid")
end
it "should generate a new GUID and write it out when there is no GUID file" do
test_guid = SecureRandom.uuid
expect(SecureRandom).to receive(:uuid).and_return(test_guid)
Tempfile.create do |tempfile|
stub_const("MiqServer::GUID_FILE", tempfile.path)
MiqServer.my_guid_cache = nil
expect(MiqServer.my_guid).to eq(test_guid)
expect(File.read(tempfile)).to eq(test_guid)
end
end
it "should not generate a new GUID file if new_guid blows up" do # Test for case 10942
MiqServer.my_guid_cache = nil
expect(SecureRandom).to receive(:uuid).and_raise(StandardError)
expect(File).to receive(:exist?).with(guid_file).and_return(false)
expect(File).not_to receive(:write)
expect { MiqServer.my_guid }.to raise_error(StandardError)
end
end
context "instance" do
before do
@guid, @miq_server, @zone = EvmSpecHelper.create_guid_miq_server_zone
end
describe "#monitor_myself" do
it "does not exit with nil memory_usage" do
@miq_server.update(:memory_usage => nil)
expect(@miq_server).to receive(:exit).never
@miq_server.monitor_myself
expect(Notification.count).to eq(0)
end
it "creates a notification and exits with memory usage > limit" do
NotificationType.seed
@miq_server.update(:memory_usage => 3.gigabytes)
expect(@miq_server).to receive(:exit).once
@miq_server.monitor_myself
expect(Notification.count).to eq(1)
end
it "does not exit with memory_usage < limit" do
@miq_server.update(:memory_usage => 1.gigabyte)
expect(@miq_server).to receive(:exit).never
@miq_server.monitor_myself
expect(Notification.count).to eq(0)
end
end
describe "#monitor_loop" do
it "calls shutdown_and_exit if SIGTERM is raised" do
expect(@miq_server).to receive(:monitor).and_raise(SignalException, "SIGTERM")
expect(@miq_server).to receive(:shutdown_and_exit)
@miq_server.monitor_loop
end
it "kills the server and exits if SIGINT is raised" do
expect(@miq_server).to receive(:monitor).and_raise(Interrupt)
expect(MiqServer).to receive(:kill)
expect(@miq_server).to receive(:exit).with(1)
@miq_server.monitor_loop
end
end
it "should have proper guid" do
expect(@miq_server.guid).to eq(@guid)
end
it "should have default zone" do
expect(@miq_server.zone.name).to eq(@zone.name)
end
it "cannot assign to maintenance zone" do
MiqRegion.seed
Zone.seed
@miq_server.zone = Zone.maintenance_zone
expect(@miq_server.save).to eq(false)
expect(@miq_server.errors.messages[:zone]).to be_present
end
it "shutdown will raise an event and quiesce" do
expect(MiqEvent).to receive(:raise_evm_event)
expect(@miq_server).to receive(:quiesce)
@miq_server.shutdown
end
it "sync stop will do nothing if stopped" do
@miq_server.update(:status => 'stopped')
expect(@miq_server).to receive(:wait_for_stopped).never
@miq_server.stop(true)
expect(MiqQueue.exists?(:method_name => 'shutdown_and_exit', :queue_name => :miq_server, :server_guid => @miq_server.guid)).not_to be_truthy
end
it "async stop will do nothing if stopped" do
@miq_server.update(:status => 'stopped')
expect(@miq_server).to receive(:wait_for_stopped).never
@miq_server.stop(false)
expect(MiqQueue.exists?(:method_name => 'shutdown_and_exit', :queue_name => :miq_server, :server_guid => @miq_server.guid)).not_to be_truthy
end
it "sync stop will do nothing if killed" do
@miq_server.update(:status => 'killed')
@miq_server.reload
expect(@miq_server).to receive(:wait_for_stopped).never
@miq_server.stop(true)
expect(MiqQueue.exists?(:method_name => 'shutdown_and_exit', :queue_name => :miq_server, :server_guid => @miq_server.guid)).not_to be_truthy
end
it "sync stop will queue shutdown_and_exit and wait_for_stopped" do
@miq_server.update(:status => 'started')
expect(@miq_server).to receive(:wait_for_stopped)
@miq_server.stop(true)
expect(MiqQueue.exists?(:method_name => 'shutdown_and_exit', :queue_name => :miq_server, :server_guid => @miq_server.guid)).to be_truthy
end
it "async stop will queue shutdown_and_exit and return" do
@miq_server.update(:status => 'started')
expect(@miq_server).to receive(:wait_for_stopped).never
@miq_server.stop(false)
expect(MiqQueue.exists?(:method_name => 'shutdown_and_exit', :queue_name => :miq_server, :server_guid => @miq_server.guid)).to be_truthy
end
it "async stop will not update existing exit message and return" do
@miq_server.update(:status => 'started')
expect(@miq_server).to receive(:wait_for_stopped).never
@miq_server.stop(false)
end
context "#is_recently_active?" do
it "should return false when last_heartbeat is nil" do
@miq_server.last_heartbeat = nil
expect(@miq_server.is_recently_active?).to be_falsey
end
it "should return false when last_heartbeat is at least 10.minutes ago" do
@miq_server.last_heartbeat = 10.minutes.ago.utc
expect(@miq_server.is_recently_active?).to be_falsey
end
it "should return true when last_heartbeat is less than 10.minutes ago" do
@miq_server.last_heartbeat = 500.seconds.ago.utc
expect(@miq_server.is_recently_active?).to be_truthy
end
end
context "validate_is_deleteable before destroying" do
it "prevents deleting the current server" do
allow(@miq_server).to receive(:is_local?).and_return(true)
@miq_server.destroy
expect(@miq_server.errors.full_messages.first).to match(/current/)
end
it "prevents deleting recently active server" do
allow(@miq_server).to receive(:is_local?).and_return(false)
@miq_server.last_heartbeat = 2.minutes.ago.utc
@miq_server.destroy
expect(@miq_server.errors.full_messages.first).to match(/recently/)
end
end
context "#ntp_reload_queue" do
let(:queue_cond) { {:method_name => 'ntp_reload', :class_name => 'MiqServer', :instance_id => @miq_server.id, :server_guid => @miq_server.guid, :zone => @miq_server.zone.name} }
let(:message) { MiqQueue.where(queue_cond).first }
before { MiqQueue.destroy_all }
context "when on an appliance" do
before do
allow(MiqEnvironment::Command).to receive(:is_appliance?).and_return(true)
@miq_server.ntp_reload_queue
end
it "will queue up a message with high priority" do
expect(MiqQueue.where(queue_cond)).not_to be_nil
end
end
context "when not on an appliance" do
before do
allow(MiqEnvironment::Command).to receive(:is_appliance?).and_return(false)
@miq_server.ntp_reload_queue
end
it "will not queue up a message" do
expect(message).to be_nil
end
end
end
# MiqServer#ntp_reload: pushes the configured NTP servers into chrony,
# but only when running on an appliance and not inside a container.
context "#ntp_reload" do
  let(:server_ntp) { {:server => ["server.pool.com"]} }
  let(:zone_ntp) { {:server => ["zone.pool.com"]} }
  let(:chrony) { double }

  context "when on an appliance" do
    before do
      allow(MiqEnvironment::Command).to receive(:is_appliance?).and_return(true)
    end

    it "doesn't sync the settings when running in a container" do
      allow(MiqEnvironment::Command).to receive(:is_container?).and_return(true)
      @zone.update_attribute(:settings, :ntp => zone_ntp)
      stub_settings(:ntp => server_ntp)
      # Chrony must never be touched from inside a container.
      expect(LinuxAdmin::Chrony).not_to receive(:new)
      @miq_server.ntp_reload
    end

    it "syncs the settings" do
      expect(LinuxAdmin::Chrony).to receive(:new).and_return(chrony)
      expect(chrony).to receive(:clear_servers)
      # No :ntp settings stubbed here — presumably these are the product
      # defaults; confirm against the shipped settings template.
      expect(chrony).to receive(:add_servers).with("0.pool.ntp.org", "1.pool.ntp.org", "2.pool.ntp.org")
      @miq_server.ntp_reload
    end

    it "only changes the config file if there are changes" do
      # A second reload with identical settings must be a no-op.
      expect(@miq_server).to receive(:apply_ntp_server_settings).once
      @miq_server.ntp_reload
      @miq_server.ntp_reload
    end
  end

  context "when not on an appliance" do
    before do
      allow(MiqEnvironment::Command).to receive(:is_appliance?).and_return(false)
    end

    it "does not apply NTP settings" do
      expect(LinuxAdmin::Chrony).to_not receive(:new)
      expect(chrony).to_not receive(:clear_servers)
      expect(chrony).to_not receive(:add_servers)
      @miq_server.ntp_reload
    end
  end
end
# Quiesce behaviour of MiqServer when it owns a running worker process.
context "with a worker" do
  before do
    MiqWorkerType.seed
    @worker = FactoryBot.create(:miq_generic_worker, :miq_server_id => @miq_server.id, :pid => Process.pid)
    allow(@miq_server).to receive(:validate_worker).and_return(true)
    @miq_server.setup_drb_variables
    @miq_server.worker_add(@worker.pid)
  end

  it "quiesce will update status to quiesce, deactivate_roles, quiesce workers, clean active messages, and set status to stopped" do
    expect(@miq_server).to receive(:status=).with('quiesce')
    expect(@miq_server).to receive(:deactivate_roles)
    expect(@miq_server).to receive(:quiesce_workers_loop)
    expect_any_instance_of(MiqWorker).to receive(:clean_active_messages)
    expect(@miq_server).to receive(:status=).with('stopped')
    @miq_server.quiesce
  end

  it "quiesce_workers_loop will initiate shutdown of workers" do
    expect(@miq_server).to receive(:stop_worker)
    @miq_server.instance_variable_set(:@worker_monitor_settings, :quiesce_loop_timeout => 15.minutes)
    expect(@miq_server).to receive(:workers_quiesced?).and_return(true)
    @miq_server.quiesce_workers_loop
  end

  it "quiesce_workers do mini-monitor_workers loop" do
    expect(@miq_server).to receive(:heartbeat)
    # With the worker already stopped, the timeout check should not be reached.
    expect(@miq_server).to receive(:quiesce_workers_loop_timeout?).never
    @worker.update(:status => MiqWorker::STATUS_STOPPED)
    @miq_server.workers_quiesced?
  end

  it "quiesce_workers_loop_timeout? will return true if timeout reached" do
    @miq_server.instance_variable_set(:@quiesce_started_on, Time.now.utc)
    @miq_server.instance_variable_set(:@quiesce_loop_timeout, 10.minutes)
    expect(@miq_server.quiesce_workers_loop_timeout?).not_to be_truthy

    # Jump past the configured loop timeout; the predicate should now trip.
    Timecop.travel 10.minutes do
      expect(@miq_server.quiesce_workers_loop_timeout?).to be_truthy
    end
  end

  it "quiesce_workers_loop_timeout? will return false if timeout is not reached" do
    @miq_server.instance_variable_set(:@quiesce_started_on, Time.now.utc)
    @miq_server.instance_variable_set(:@quiesce_loop_timeout, 10.minutes)
    expect_any_instance_of(MiqWorker).to receive(:kill).never
    expect(@miq_server.quiesce_workers_loop_timeout?).not_to be_truthy
  end

  context "#server_timezone" do
    it "utc with no system default" do
      stub_settings(:server => {:timezone => nil})
      expect(@miq_server.server_timezone).to eq("UTC")
    end

    it "uses system default" do
      stub_settings(:server => {:timezone => "Eastern Time (US & Canada)"})
      expect(@miq_server.server_timezone).to eq("Eastern Time (US & Canada)")
    end
  end
end
# Role assignment and activation on MiqServer.
context "with server roles" do
  before do
    @server_roles = []
    # Seed three roles with differing max_concurrent limits (0 = unlimited).
    # NOTE(review): the 0/1 semantics are assumed from ServerRole — confirm.
    [
      ['event', 1],
      ['ems_metrics_coordinator', 1],
      ['ems_operations', 0]
    ].each { |r, max| @server_roles << FactoryBot.create(:server_role, :name => r, :max_concurrent => max) }
    @miq_server.role = @server_roles.collect(&:name).join(',')
  end

  it "should have all server roles" do
    expect(@miq_server.server_roles).to match_array(@server_roles)
  end

  context "activating All roles" do
    before do
      @miq_server.activate_all_roles
    end

    it "should have activated All roles" do
      expect(@miq_server.active_roles).to match_array(@server_roles)
    end
  end

  context "activating Event role" do
    before do
      @miq_server.activate_roles("event")
    end

    it "should have activated Event role" do
      expect(@miq_server.active_role_names.include?("event")).to be_truthy
    end
  end
end
# Cleanup of MiqEvent records linked to a server being destroyed.
context "after_destroy callback" do
  let(:remote_server) { EvmSpecHelper.remote_miq_server }

  describe "#destroy_linked_events_queue" do
    it "queue request to destroy events linked to this server" do
      remote_server.destroy_linked_events_queue
      expect(MiqQueue.find_by(:class_name => 'MiqServer').method_name).to eq 'destroy_linked_events'
    end
  end

  describe ".destroy_linked_events" do
    it "destroys all events associated with destroyed server" do
      FactoryBot.create(:miq_event, :event_type => "Local TestEvent", :target => @miq_server)
      FactoryBot.create(:miq_event, :event_type => "Remote TestEvent 1", :target => remote_server)
      FactoryBot.create(:miq_event, :event_type => "Remote TestEvent 1", :target => remote_server)
      expect(MiqEvent.count).to eq 3

      allow(remote_server).to receive(:is_deleteable?).and_return(true)
      described_class.destroy_linked_events(remote_server.id)
      # Only the local server's event should survive.
      expect(MiqEvent.count).to eq 1
    end
  end
end
end
# MiqServer.running? is true when the pidfile contains this process's pid.
it "detects already .running?" do
  Tempfile.open("evmpid") do |file|
    allow(MiqServer).to receive(:pidfile).and_return(file.path)
    File.write(file.path, Process.pid)

    expect(MiqServer.running?).to be_truthy
  end
end
# MiqServer#active? is derived solely from the status column.
describe "#active?" do
  context "Active status returns true" do
    ["starting", "started"].each do |status|
      it status do
        expect(described_class.new(:status => status).active?).to be_truthy
      end
    end
  end

  it "Inactive status returns false" do
    expect(described_class.new(:status => "stopped").active?).to be_falsey
  end
end
# #zone_description is delegated to the server's zone.
describe "#zone_description" do
  it "delegates to zone" do
    _, miq_server, zone = EvmSpecHelper.create_guid_miq_server_zone
    expect(miq_server.zone_description).to eq(zone.description)
  end
end
# #description must be safe on an unsaved record and fall back to #name.
describe "#description" do
  it "doesnt blowup" do
    s = described_class.new(:name => "name")
    expect(s.description).to eq(s.name)
  end
end
end
| 36.35023 | 183 | 0.663286 |
# Homebrew formula installing the Kimberella font from moji-waku.com.
class FontKimberella < Formula
  # Only one desc is allowed per formula; the original called `desc` twice,
  # so the first value was silently discarded. Keep the meaningful one.
  desc "Ancient biology styled font inspired by kimberella"
  homepage "https://moji-waku.com/kimberella/index.html"
  head "https://moji-waku.com/download/kimberella.zip"

  def install
    # Depending on where the archive was unpacked, the font directory may sit
    # in the current directory or its parent (HOMEBREW_TEMP layout).
    parent = File.dirname(Dir.pwd) != (ENV['HOMEBREW_TEMP'] || '/tmp') ? '../' : ''
    (share/"fonts").install "#{parent}kimberella/Kimberella-Regular.otf"
  end

  test do
  end
end
| 32.384615 | 84 | 0.693587 |
# Podspec for MultipleImageView — a Twitter-style multiple image view.
Pod::Spec.new do |spec|
  spec.name                  = "MultipleImageView"
  spec.version               = "0.1.2"
  spec.summary               = "Displaying multiple images like Twitter."
  spec.homepage              = "https://github.com/nnsnodnb/MultipleImageView"
  spec.swift_version         = "5.2"
  spec.license               = { :type => "MIT", :file => "LICENSE" }
  spec.author                = { "nnsnodnb" => "[email protected]" }
  spec.social_media_url      = "https://twitter.com/nnsnodnb"
  # Single platform declaration with its minimum OS version. The original
  # assigned `spec.platform` twice; the bare `= :ios` value was immediately
  # overwritten by this line, so it has been removed.
  spec.platform              = :ios, "9.0"
  spec.ios.deployment_target = "9.0"
  spec.ios.framework         = "UIKit"
  spec.source                = { :git => "https://github.com/nnsnodnb/#{spec.name}.git", :tag => "#{spec.version}" }
  spec.source_files          = "#{spec.name}", "#{spec.name}/*.{h,swift}"
  spec.public_header_files   = "#{spec.name}/#{spec.name}.h"
end
| 50.944444 | 116 | 0.541985 |
# Rack config snippet behind https://raindrops-demo.bogomips.org/
# Intended to be paired with the packaged zbatery.conf.rb:
#
#   zbatery -c zbatery.conf.ru watcher_demo.ru -E none
require "raindrops"

use Raindrops::Middleware

# Addresses (TCP and UNIX-socket) whose listen queues the watcher reports on.
watched_addresses = [
  "0.0.0.0:9418",
  "0.0.0.0:80",
  "/tmp/.r",
]

run Raindrops::Watcher.new(listeners: watched_addresses)
| 24.714286 | 52 | 0.725434 |
# Scaffold-style CRUD controller for Transaccione records, responding to
# both HTML and JSON.
class TransaccionesController < ApplicationController
  before_action :set_transaccione, only: [:show, :edit, :update, :destroy]
  layout "template"

  # GET /transacciones
  # GET /transacciones.json
  def index
    # @transacciones = Transaccione.all
    # JSON is built by TransaccionesDatatable (server-side pagination) —
    # presumably consumed by a DataTables widget in the HTML view; confirm.
    respond_to do |format|
      format.html
      format.json { render json: TransaccionesDatatable.new(view_context) }
    end
  end

  # GET /transacciones/1
  # GET /transacciones/1.json
  def show
  end

  # GET /transacciones/new
  def new
    @transaccione = Transaccione.new
  end

  # GET /transacciones/1/edit
  def edit
  end

  # POST /transacciones
  # POST /transacciones.json
  def create
    @transaccione = Transaccione.new(transaccione_params)

    respond_to do |format|
      if @transaccione.save
        format.html { redirect_to @transaccione, notice: 'Transaccione was successfully created.' }
        format.json { render :show, status: :created, location: @transaccione }
      else
        format.html { render :new }
        format.json { render json: @transaccione.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /transacciones/1
  # PATCH/PUT /transacciones/1.json
  def update
    respond_to do |format|
      if @transaccione.update(transaccione_params)
        format.html { redirect_to @transaccione, notice: 'Transaccione was successfully updated.' }
        format.json { render :show, status: :ok, location: @transaccione }
      else
        format.html { render :edit }
        format.json { render json: @transaccione.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /transacciones/1
  # DELETE /transacciones/1.json
  def destroy
    @transaccione.destroy
    respond_to do |format|
      format.html { redirect_to transacciones_url, notice: 'Transaccione was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_transaccione
    @transaccione = Transaccione.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def transaccione_params
    params.require(:transaccione).permit(:user_id, :categoria_id, :proyecto_id, :monto, :tipo, :nota, :estado, :fecha, :borrado)
  end
end
| 29.05 | 130 | 0.69191 |
require 'git_utils'
require 'r10k_utils'
require 'master_manipulator'
test_name 'CODEMGMT-84 - C59270 - Attempt to Deploy with Missing r10k Configuration File'

#Init
# Locate r10k's config file on the master; a ".bak" sibling stashes the
# original for the duration of the test.
r10k_config_path = get_r10k_config_file_path(master)
r10k_config_bak_path = "#{r10k_config_path}.bak"

#Verification
# r10k is expected to fail with a "no configuration file given" error.
error_message_regex = /Error while running.*R10K\:\:Deployment.*No configuration file given/

#Teardown
teardown do
  step 'Restore Original "r10k" Config'
  on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}")
end

#Setup
# Move the config aside so the deploy runs without one.
step 'Backup Current "r10k" Config'
on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}")

#Tests
step 'Attempt to Deploy via r10k'
# Exit code 1 is the expected failure for a missing config file.
on(master, 'r10k deploy environment -v', :acceptable_exit_codes => 1) do |result|
  assert_match(error_message_regex, result.stderr, 'Expected message not found!')
end
| 29.357143 | 92 | 0.778589 |
5dc85b2baead9dd14f129db1d2796f1fc2b367b7 | 1,965 | require "tempfile"
require "tmpdir"
require "fileutils"
# Wraps a file that was uploaded (or staged on disk) so it can be passed
# around like an IO object. Unknown methods are delegated to the underlying
# File handle, so callers may treat an UploadedFile as the file itself.
class UploadedFile
  # Raised when the given path does not exist or points outside the
  # permitted upload directories.
  InvalidPathError = Class.new(StandardError)

  # The filename, *not* including the path, of the "uploaded" file
  attr_reader :original_filename

  # The tempfile
  attr_reader :tempfile

  # The content type of the "uploaded" file
  attr_accessor :content_type

  # Opaque identifier in remote storage, when supplied
  attr_reader :remote_id

  # SHA256 checksum supplied by the uploader, when present
  attr_reader :sha256

  # @param path [String] path of an existing file on disk
  # @param filename [String, nil] original client filename; defaults to the basename of +path+
  # @param content_type [String] MIME type reported for the file
  # @param sha256 [String, nil] optional checksum of the contents
  # @param remote_id [String, nil] optional identifier in remote storage
  # @raise [InvalidPathError] if +path+ does not exist
  def initialize(path, filename: nil, content_type: "application/octet-stream", sha256: nil, remote_id: nil)
    raise InvalidPathError, "#{path} file does not exist" unless ::File.exist?(path)

    # The original assigned @content_type twice; once is enough.
    @content_type = content_type
    @original_filename = filename || ::File.basename(path)
    @sha256 = sha256
    @remote_id = remote_id
    @tempfile = File.new(path, 'rb')
  end

  # Builds an UploadedFile from multipart-style params for +field+
  # ("<field>.path", "<field>.name", ...). Returns nil when no local path was
  # given and no remote_id is present.
  # @raise [InvalidPathError] if only a remote_id is given, or if the path
  #   escapes +upload_path+ / the system tmpdir (path-traversal guard).
  def self.from_params(params, field, upload_path)
    unless params["#{field}.path"]
      raise InvalidPathError, "file is invalid" if params["#{field}.remote_id"]

      return
    end

    file_path = File.realpath(params["#{field}.path"])

    unless self.allowed_path?(file_path, [upload_path, Dir.tmpdir].compact)
      raise InvalidPathError, "insecure path used '#{file_path}'"
    end

    UploadedFile.new(file_path,
      filename: params["#{field}.name"],
      content_type: params["#{field}.type"] || 'application/octet-stream',
      sha256: params["#{field}.sha256"],
      remote_id: params["#{field}.remote_id"])
  end

  # True when +file_path+ lies under one of the existing directories in
  # +paths+ (compared against each directory's realpath).
  def self.allowed_path?(file_path, paths)
    paths.any? do |path|
      File.exist?(path) && file_path.start_with?(File.realpath(path))
    end
  end

  # Path of the file on local disk.
  def path
    @tempfile.path
  end

  alias_method :local_path, :path

  # Delegate IO-style calls (read, rewind, size, ...) to the underlying file.
  def method_missing(method_name, *args, &block) #:nodoc:
    @tempfile.__send__(method_name, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
  end

  # Idiomatic counterpart to method_missing (the original overrode
  # respond_to? directly); this also makes Object#method work correctly.
  def respond_to_missing?(method_name, include_private = false) #:nodoc:
    @tempfile.respond_to?(method_name, include_private) || super
  end
end
| 27.676056 | 108 | 0.699237 |
1ad697c77cba4bddf3bff6be4949beba451491c8 | 139 | require "test_helper"
# Integration tests for AnswersController. Rails generated this stub; no
# behaviour is exercised yet — the commented example is kept for reference.
class AnswersControllerTest < ActionDispatch::IntegrationTest
  # test "the truth" do
  #   assert true
  # end
end
| 17.375 | 61 | 0.741007 |
# Podspec for LYYDispatch, a chain-style wrapper over GCD.
Pod::Spec.new do |spec|
  spec.name         = "LYYDispatch"
  spec.version      = "1.0.0"
  spec.summary      = "基于GCD的链式封装"
  spec.description  = <<-DESC
  基于系统GCD,采用链式思想、去除重复名称等方式进行的多线程封装
  DESC
  spec.homepage     = "https://github.com/liyaoyao/LYYDispatch"
  spec.license      = "MIT"
  spec.author       = { "liyaoyao" => "[email protected]" }
  spec.platform     = :ios, "8.0"
  spec.ios.deployment_target = "8.0"
  # NOTE(review): `platform` above restricts the pod to iOS, which would make
  # the macOS deployment target below ineffective — confirm whether macOS
  # support is intended (if so, the single-platform declaration should go).
  spec.osx.deployment_target = "10.10"
  spec.source       = { :git => "https://github.com/liyaoyao/LYYDispatch.git", :tag => "#{spec.version}" }
  spec.source_files  = "LYYDispatch/**/*.{h,m}"
  spec.public_header_files = "LYYDispatch/**/*.h"
  spec.static_framework = true
end
| 26.535714 | 109 | 0.602961 |
bf74f48b34905d7e0b8c1d11fd70a1394b20e294 | 1,073 | # coding: utf-8
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jquery/uploadify/rails/version'

# Gem metadata for jquery-uploadify-rails (asset-pipeline packaging of the
# jQuery Uploadify plugin).
Gem::Specification.new do |spec|
  spec.name          = "jquery-uploadify-rails"
  spec.version       = Jquery::Uploadify::Rails::VERSION
  spec.authors       = ["Reyes Yang"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{Package jQuery Uploadify plugin for Rails asset pipeline}
  spec.description   = %q{Package jQuery Uploadify plugin's javascript, stylesheet and images for Rails asset pipeline}
  spec.homepage      = "https://github.com/reyesyang/jquery-uploadify-rails"
  spec.license       = "MIT"

  # File lists come straight from git.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Asset pipeline support requires Rails 3.1+.
  spec.add_dependency 'rails', '>= 3.1.0'

  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
end
| 41.269231 | 119 | 0.671948 |
# Version constant for the prevent_blankification_validator gem.
module PreventBlankificationValidator
  # Frozen to guard against accidental in-place mutation of the shared string.
  VERSION = "0.1.0".freeze
end
| 15.5 | 37 | 0.790323 |
0806f80f1e03c6e0d627645c397b22fff13e88f3 | 1,161 | require 'rails_helper'
# Rendering spec for the comments RSS feed template.
describe "comments/index_rss_feed.rss.builder", type: :view do
  let!(:blog) { build_stubbed(:blog) }

  describe "rendering comments" do
    let(:article) { FactoryGirl.build_stubbed(:article) }
    let(:comment) do
      FactoryGirl.build_stubbed(:comment, article: article, body: "Comment body", guid: '12313123123123123')
    end

    before do
      assign(:items, [comment])
      render
    end

    it "should render a valid rss feed" do
      assert_feedvalidator rendered
      assert_rss20 rendered, 1
    end

    describe "the comment entry" do
      it "should have all the required attributes" do
        # Inspect the first (and only) <item> of the rendered feed.
        item = Nokogiri::XML.parse(rendered).css("item").first

        expect(item.css("title").first.content).to eq(
          "Comment on #{article.title} by #{comment.author}"
        )
        expect(item.css("guid").first.content).to eq("urn:uuid:12313123123123123")
        expect(item.css("description").first.content).to eq("<p>Comment body</p>")
        expect(item.css("link").first.content).to eq("#{article.permalink_url}#comment-#{comment.id}")
      end
    end
  end
end
| 33.171429 | 124 | 0.660637 |
91fc68a5ab60094adbc93ce13b7fd3db12032f6c | 6,607 | # frozen_string_literal: false
module REXMLTests
class AttributesTester < Test::Unit::TestCase
include REXML
def test_accessor
doc = Document.new("<a xmlns:foo='a' xmlns:bar='b' foo:att='1' bar:att='2' att='3'/>")
assert_equal '3', doc.root.attributes['att']
assert_equal '2', doc.root.attributes['bar:att']
doc.root.attributes['att'] = 5
assert_equal '5', doc.root.attributes['att']
end
def test_each_attribute
doc = Document.new('<a x="1" y="2"/>')
doc.root.attributes.each_attribute {|attr|
if attr.expanded_name == 'x'
assert_equal '1', attr.value
elsif attr.expanded_name == 'y'
assert_equal '2', attr.value
else
assert_fail "No such attribute!!"
end
}
end
def test_each
doc = Document.new('<a x="1" y="2"/>')
doc.root.attributes.each {|name, value|
if name == 'x'
assert_equal '1', value
elsif name == 'y'
assert_equal '2', value
else
assert_fail "No such attribute!!"
end
}
end
def test_get_attribute
doc = Document.new('<a xmlns:x="a" x:foo="1" foo="2" bar="3"/>')
assert_equal '2', doc.root.attributes.get_attribute("foo").value
assert_equal '1', doc.root.attributes.get_attribute("x:foo").value
end
def test_size
doc = Document.new("<a xmlns:foo='a' x='1' y='2' foo:x='3'/>")
assert_equal 4, doc.root.attributes.length
end
def test_setter
doc = Document.new("<a xmlns:x='a' x:foo='1' foo='3'/>")
doc.root.attributes['y:foo'] = '2'
assert_equal '2', doc.root.attributes['y:foo']
doc.root.attributes['foo'] = '4'
assert_equal '4', doc.root.attributes['foo']
doc.root.attributes['x:foo'] = nil
assert_equal 3, doc.root.attributes.size
end
def test_delete
doc = Document.new("<a xmlns:y='a' xmlns:x='b' xmlns:z='c' y:foo='0' x:foo='1' foo='3' z:foo='4'/>")
doc.root.attributes.delete 'foo'
assert_equal 6, doc.root.attributes.size
assert_equal '1', doc.root.attributes['x:foo']
doc.root.attributes.delete 'x:foo'
assert_equal 5, doc.root.attributes.size
attr = doc.root.attributes.get_attribute('y:foo')
doc.root.attributes.delete attr
assert_equal 4, doc.root.attributes.size
assert_equal '4', doc.root.attributes['z:foo']
end
def test_prefixes
doc = Document.new("<a xmlns='foo' xmlns:x='bar' xmlns:y='twee' z='glorp' x:k='gru'/>")
prefixes = doc.root.attributes.prefixes
assert_equal 2, prefixes.size
assert_equal 0, (prefixes - ['x', 'y']).size
end
# Contributed by Mike Stok
def test_values_with_apostrophes
doc = Document.new(%q#<tag h1="1'2'" h2='1"2'/>#)
s = doc.to_s
assert(s =~ /h1='1'2''/)
assert(s =~ /h2='1"2'/)
end
# Submitted by Kou
def test_namespace_conflict
assert_raise( ParseException,
"Declaring two attributes with the same namespace should be an error" ) do
REXML::Document.new <<-XML
<x xmlns:n1="http://www.w3.org"
xmlns:n2="http://www.w3.org" >
<bad n1:a="1" n2:a="2" />
</x>
XML
end
REXML::Document.new("<a xmlns:a='a' xmlns:b='a'></a>")
end
# Submitted by Kou
def test_attribute_deletion
e = REXML::Element.new
e.add_namespace("a", "http://a/")
e.add_namespace("b", "http://b/")
e.add_attributes({"c" => "cc", "a:c" => "cC", "b:c" => "CC"})
e.attributes.delete("c")
assert_nil(e.attributes.get_attribute("c"))
before_size = e.attributes.size
e.attributes.delete("c")
assert_nil(e.attributes.get_attribute("c"))
assert_equal(before_size, e.attributes.size)
e.attributes.delete(e.attributes.get_attribute("a:c"))
assert_nil(e.attributes.get_attribute("a:c"))
e.attributes.delete("b:c")
assert_nil(e.attributes.get_attribute("b:c"))
before_size = e.attributes.size
e.attributes.delete(e.attributes.get_attribute("b:c"))
assert_nil(e.attributes.get_attribute("b:c"))
assert_equal(before_size, e.attributes.size)
before_size = e.attributes.size
e.attributes.delete("c")
assert_nil(e.attributes.get_attribute("c"))
assert_equal(before_size, e.attributes.size)
e.add_attribute("c", "cc")
e.attributes.delete(e.attributes.get_attribute("c"))
assert_nil(e.attributes.get_attribute("c"))
end
# Submitted by Kou
def test_element_usage
attr = Attribute.new("name", "value")
elem = Element.new("elem")
a = Attribute.new(attr, elem)
assert_equal(elem, a.element)
end
def attr_test(attr_name,attr_value)
a1 = REXML::Attribute.new(attr_name,attr_value)
s1 = a1.value
s2 = a1.value
#p s1
#p s2
assert_equal(s1,s2)
a2 = REXML::Attribute.new(attr_name,attr_value)
a2.to_s # NB invocation of to_s
s1 = a2.value
s2 = a2.value
#p s1
#p s2
assert_equal(s1,s2)
end
def test_amp_attributes
attr_test('name','value with & ampersand only')
end
def test_amp_and_lf_attributes
attr_test('name','value with LF 
 & ampersand')
end
def test_quoting
d = Document.new(%q{<a x='1' y="2"/>})
assert_equal( %q{<a x='1' y='2'/>}, d.to_s )
d.root.context[:attribute_quote] = :quote
assert_equal( %q{<a x="1" y="2"/>}, d.to_s )
d = Document.new(%q{<a x='1' y="2"><b z='3'/></a>})
assert_equal( %q{<a x='1' y='2'><b z='3'/></a>}, d.to_s )
d.root.context[:attribute_quote] = :quote
assert_equal( %q{<a x="1" y="2"><b z="3"/></a>}, d.to_s )
end
def test_ticket_127
doc = Document.new
doc.add_element 'a', { 'v' => 'x & y' }
assert doc.to_s.index(';')
end
def test_to_a_with_namespaces
document = Document.new(<<-XML)
<root
xmlns:ns1="http://example.org/ns1"
xmlns:ns2="http://example.org/ns2">
<child
ns1:attribute="ns1"
ns2:attribute="ns2"
attribute="no-ns"
other-attribute="other-value"/>
</root>
XML
child = document.root.elements["child"]
assert_equal([
"attribute='no-ns'",
"ns1:attribute='ns1'",
"ns2:attribute='ns2'",
"other-attribute='other-value'",
],
child.attributes.to_a.collect(&:to_string).sort)
end
end
end
| 29.761261 | 106 | 0.584077 |
6177ac6e5930b014fbfb84c219aba82f67bd8539 | 716 | require 'spec_helper'
describe 'vox_selinux::build' do
let(:params) { { module_build_root: '/var/lib/puppet/puppet-selinux' } }
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_file('/var/lib/puppet/puppet-selinux').with_ensure('directory') }
it { is_expected.to contain_file('/var/lib/puppet/puppet-selinux/modules').with_ensure('directory') }
it { is_expected.to contain_file('/var/lib/puppet/puppet-selinux/modules/tmp').with_ensure('directory') }
it { is_expected.to contain_file('/var/lib/puppet/puppet-selinux/bin').with_ensure('directory') }
it { is_expected.to contain_file('/var/lib/puppet/puppet-selinux/bin/selinux_build_module_simple.sh').with_ensure('file') }
end
| 55.076923 | 125 | 0.75419 |
38a369c4af582883970ab3184e74bc4ee068a4f9 | 1,221 | module ApplicationHelper
def agency_item(agency)
"<li class='agency'><h2>#{link_to agency.name, agency_path(agency.oba_id), class: 'btn btn-large btn-info'}</h2></li>"
end
def route_item(route)
"<li class='route'><h3>#{link_to route.name, route_path(route.oba_id), class: 'btn btn-large btn-success'}</h3></li>"
end
def stop_item(stop)
"<li class='stop'><h4>#{link_to stop.name, stop_path(stop.oba_id), class: 'btn btn-large btn-warning'}</h4></li>"
end
def arrival_item(arrival)
"<li class='arrival'><h3>
<span class='btn btn-large btn-success'>#{arrival['routeShortName']}</span>
<span class='details'>
#{arrival['tripHeadsign']}<br/>
#{Time.at(arrival['predictedArrivalTime'] / 1000).strftime('at %r')}
</span>
</li>"
end
def wb_route_path(agency_or_id, code=nil)
str = "/#{agency_or_id}"
str += "/#{code}" unless code.nil?
end
def api_links(item)
"<small class='api links'>
<span class='oba id right'>OBA: <a class='btn' id='oba_id'>#{item.oba_id}</a></span><br/>
<span class='otp id right'>OTP: " +
(item.class() == Agency ? '' : "<a class='btn' id='agency'>#{item.agency_code}</a>") +
"<a class='btn' id='code'>#{item.code}</a>
</span></small>"
end
end
| 32.131579 | 120 | 0.642916 |
21cc400abcc8575ba8801a1e0060316fc444fe76 | 596 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "History" do
before(:each) do
@date_time = mock(DateTime)
@date_time.stub!(:strftime).and_return("01/11/2010 02:48PM")
DateTime.stub!(:now).and_return(@date_time)
History.stub!(:file_path).and_return("#{File.dirname(__FILE__)}/fixtures/history")
end
it "should format a line" do
line = History.format_line("original_name", "remote_path")
line.should == "01/11/2010 02:48PM original_name remote_path"
end
it "should list the history" do
History.should respond_to(:list)
end
end
| 28.380952 | 86 | 0.709732 |
33381cc36462e4e8e3cb1406cfdbdaa02fcf6545 | 489 | # Copyright 2012 Team 254. All Rights Reserved.
# @author [email protected] (Patrick Fairbank)
#
# Script for starting/stopping the parts management server.
require "bundler/setup"
require "daemons"
require "pathological"
require "thin"
pwd = Dir.pwd
Daemons.run_proc("parts_server", :monitor => true) do
Dir.chdir(pwd) # Fix working directory after daemons sets it to /.
require "parts_server"
Thin::Server.start("0.0.0.0", CheesyCommon::Config.port, CheesyParts::Server)
end
| 27.166667 | 79 | 0.748466 |
1ad10290dbd910490f3d7e4157ab9d31f59e61e4 | 541 | # frozen_string_literal: true
class StatusUpdateCommentsController < CommentsController
def create
comment = Comment.new(comment_params)
status_update = comment.commentable
if comment.text.present?
comment.save
else
flash[:alert] = "O no! We can't save an empty comment. Please try again?"
end
redirect_to status_update_path(status_update)
end
private
def comment_params
params.require(:comment).permit(:commentable_id, :commentable_type, :text).merge(user_id: current_user.id)
end
end
| 23.521739 | 110 | 0.737523 |
260531b617081ff41bdd19b6afc21afb0569f02a | 7,593 | # ------------------------------------------------------------------------------------
# <copyright company="Aspose" file="list_format.rb">
# Copyright (c) 2020 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# ------------------------------------------------------------------------------------
require 'date'
module AsposeWordsCloud
  # DTO container with a paragraph list format element.
  # Swagger-codegen generated model: exposes the four list-format attributes
  # plus the standard (de)serialization helpers shared by all models.
  class ListFormat
    # Gets or sets the link to the document.
    attr_accessor :link

    # Gets or sets a value indicating whether the paragraph has bulleted or numbered formatting applied to it.
    attr_accessor :is_list_item

    # Gets or sets the list id of this paragraph.
    attr_accessor :list_id

    # Gets or sets the list level number (0 to 8) for the paragraph.
    attr_accessor :list_level_number

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'link' => :'Link',
        :'is_list_item' => :'IsListItem',
        :'list_id' => :'ListId',
        :'list_level_number' => :'ListLevelNumber'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'link' => :'WordsApiLink',
        :'is_list_item' => :'BOOLEAN',
        :'list_id' => :'Integer',
        :'list_level_number' => :'Integer'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      # Only keys present in the input are assigned; absent ones stay nil.
      if attributes.key?(:'Link')
        self.link = attributes[:'Link']
      end

      if attributes.key?(:'IsListItem')
        self.is_list_item = attributes[:'IsListItem']
      end

      if attributes.key?(:'ListId')
        self.list_id = attributes[:'ListId']
      end

      if attributes.key?(:'ListLevelNumber')
        self.list_level_number = attributes[:'ListLevelNumber']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properies with the reasons
    def list_invalid_properties
      # This model declares no constraints, so the list is always empty.
      invalid_properties = []
      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(other)
      return true if self.equal?(other)
      self.class == other.class &&
          link == other.link &&
          is_list_item == other.is_list_item &&
          list_id == other.list_id &&
          list_level_number == other.list_level_number
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(other)
      self == other
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [link, is_list_item, list_id, list_level_number].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end
        # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        # NOTE(review): /\d/ matches only the FIRST digit of the value, which
        # is then treated as epoch seconds. This mirrors the swagger-codegen
        # template of this vintage, but will not round-trip real date strings;
        # confirm before adding DateTime-typed fields to this model.
        Time.at(/\d/.match(value)[0].to_f).to_datetime
      when :Date
        # NOTE(review): same single-digit quirk as :DateTime above.
        Time.at(/\d/.match(value)[0].to_f).to_date
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else
        # model
        temp_model = AsposeWordsCloud.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 32.728448 | 110 | 0.623996 |
87b0b2f308d18c3859469dacfafc97119c204ef1 | 1,844 | # frozen_string_literal: true
class CollegeGetTogetherSchema < GraphQL::Schema
mutation(Types::MutationType)
query(Types::QueryType)
# For batch-loading (see https://graphql-ruby.org/dataloader/overview.html)
use GraphQL::Dataloader
# GraphQL-Ruby calls this when something goes wrong while running a query:
def self.type_error(err, context)
# if err.is_a?(GraphQL::InvalidNullError)
# # report to your bug tracker here
# return nil
# end
super
end
# Union and Interface Resolution
def self.resolve_type(abstract_type, obj, ctx)
# TODO: Implement this method
# to return the correct GraphQL object type for `obj`
raise(GraphQL::RequiredImplementationMissingError)
end
# Relay-style Object Identification:
# Return a string UUID for `object`
def self.id_from_object(object, type_definition, query_ctx)
# For example, use Rails' GlobalID library (https://github.com/rails/globalid):
object_id = object.to_global_id.to_s
# Remove this redundant prefix to make IDs shorter:
object_id = object_id.sub("gid://#{GlobalID.app}/", "")
encoded_id = Base64.urlsafe_encode64(object_id)
# Remove the "=" padding
encoded_id = encoded_id.sub(/=+/, "")
# Add a type hint
type_hint = type_definition.graphql_name.first
"#{type_hint}_#{encoded_id}"
end
# Given a string UUID, find the object
def self.object_from_id(encoded_id_with_hint, query_ctx)
# For example, use Rails' GlobalID library (https://github.com/rails/globalid):
# Split off the type hint
_type_hint, encoded_id = encoded_id_with_hint.split("_", 2)
# Decode the ID
id = Base64.urlsafe_decode64(encoded_id)
# Rebuild it for Rails then find the object:
full_global_id = "gid://#{GlobalID.app}/#{id}"
GlobalID::Locator.locate(full_global_id)
end
end
| 34.148148 | 83 | 0.714751 |
39ab13a0704eb786d7dda6d1b1039ec2e819136c | 9,829 | # frozen_string_literal: true
require "helper"
module Nokogiri
module HTML
# testing error edge cases of HTML comments from the living WHATWG spec
# as of 2020-08-03
# https://html.spec.whatwg.org/multipage/parsing.html
class TestComment < Nokogiri::TestCase
# Each example is tagged COMPLIANT / NON-COMPLIANT relative to the WHATWG
# spec. Expectations branch on the parser backend: libxml2 (optionally
# carrying Nokogiri's patches) versus the Java backend under JRuby.
# https://html.spec.whatwg.org/multipage/parsing.html#parse-error-abrupt-closing-of-empty-comment
#
# This error occurs if the parser encounters an empty comment
# that is abruptly closed by a U+003E (>) code point (i.e.,
# <!--> or <!--->). The parser behaves as if the comment is
# closed correctly.
describe "abrupt closing of empty comment" do
let(:doc) { Nokogiri::HTML(html) }
let(:subject) { doc.at_css("div#under-test") }
let(:other_div) { doc.at_css("div#also-here") }
describe "two dashes" do
let(:html) { "<html><body><div id=under-test><!--></div><div id=also-here></div></body></html>" }
if Nokogiri.uses_libxml?
if Nokogiri.libxml2_patches.include?("0008-htmlParseComment-handle-abruptly-closed-comments.patch")
it "behaves as if the comment is closed correctly" do # COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
assert_equal "", subject.children.first.content
assert other_div
end
else
it "behaves as if the comment is unterminated and doesn't exist" do # NON-COMPLIANT
assert_equal 0, subject.children.length
assert_equal 1, doc.errors.length
assert_match(/Comment not terminated/, doc.errors.first.to_s)
refute other_div
end
end
end
if Nokogiri.jruby?
it "behaves as if the comment is closed correctly" do # COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
assert_equal "", subject.children.first.content
assert other_div
end
end
end
describe "three dashes" do
let(:html) { "<html><body><div id=under-test><!---></div><div id=also-here></div></body></html>" }
if Nokogiri.uses_libxml?
if Nokogiri.libxml2_patches.include?("0008-htmlParseComment-handle-abruptly-closed-comments.patch")
it "behaves as if the comment is closed correctly" do # COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
assert_equal "", subject.children.first.content
assert other_div
end
else
it "behaves as if the comment is unterminated and doesn't exist" do # NON-COMPLIANT
assert_equal 0, subject.children.length
assert_equal 1, doc.errors.length
assert_match(/Comment not terminated/, doc.errors.first.to_s)
refute other_div
end
end
end
if Nokogiri.jruby?
it "behaves as if the comment is closed correctly" do # COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
assert_equal "-", subject.children.first.content # curious, potentially non-compliant?
assert other_div
end
end
end
describe "four dashes" do
let(:html) { "<html><body><div id=under-test><!----></div><div id=also-here></div></body></html>" }
it "behaves as if the comment is closed correctly" do # COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
assert_equal "", subject.children.first.content
assert other_div
end
end
end
# https://html.spec.whatwg.org/multipage/parsing.html#parse-error-eof-in-comment
#
# This error occurs if the parser encounters the end of the
# input stream in a comment. The parser treats such comments as
# if they are closed immediately before the end of the input
# stream.
describe "eof in comment" do
let(:html) { "<html><body><div id=under-test><!--start of unterminated comment" }
let(:doc) { Nokogiri::HTML(html) }
let(:subject) { doc.at_css("div#under-test") }
if Nokogiri.uses_libxml?
it "behaves as if the comment is unterminated and doesn't exist" do # NON-COMPLIANT
assert_equal 0, subject.children.length
assert_equal 1, doc.errors.length
assert_match(/Comment not terminated/, doc.errors.first.to_s)
end
end
if Nokogiri.jruby?
it "behaves as if the comment is closed immediately before the end of the input stream" do # COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
assert_equal "start of unterminated comment", subject.children.first.content
end
end
end
# https://html.spec.whatwg.org/multipage/parsing.html#parse-error-incorrectly-closed-comment
#
# This error occurs if the parser encounters a comment that is
# closed by the "--!>" code point sequence. The parser treats
# such comments as if they are correctly closed by the "-->"
# code point sequence.
describe "incorrectly closed comment" do
let(:html) { "<html><body><div id=under-test><!--foo--!><div id=do-i-exist></div><!--bar--></div></body></html>" }
let(:doc) { Nokogiri::HTML(html) }
let(:subject) { doc.at_css("div#under-test") }
let(:inner_div) { doc.at_css("div#do-i-exist") }
if Nokogiri::VersionInfo.instance.libxml2_using_packaged? || (Nokogiri::VersionInfo.instance.libxml2_using_system? && Nokogiri.uses_libxml?(">=2.9.11"))
it "behaves as if the comment is normally closed" do # COMPLIANT
assert_equal 3, subject.children.length
assert subject.children[0].comment?
assert_equal "foo", subject.children[0].content
assert inner_div
assert_equal inner_div, subject.children[1]
assert subject.children[2].comment?
assert_equal "bar", subject.children[2].content
assert_equal 1, doc.errors.length
assert_match(/Comment incorrectly closed/, doc.errors.first.to_s)
end
end
if Nokogiri.jruby? || (Nokogiri::VersionInfo.instance.libxml2_using_system? && Nokogiri.uses_libxml?("<2.9.11"))
it "behaves as if the comment encompasses the inner div" do # NON-COMPLIANT
assert_equal 1, subject.children.length
assert subject.children.first.comment?
refute inner_div
assert_match(/id=do-i-exist/, subject.children.first.content)
assert_equal 0, doc.errors.length
end
end
end
# https://html.spec.whatwg.org/multipage/parsing.html#parse-error-incorrectly-opened-comment
#
# This error occurs if the parser encounters the "<!" code point
# sequence that is not immediately followed by two U+002D (-)
# code points and that is not the start of a DOCTYPE or a CDATA
# section. All content that follows the "<!" code point sequence
# up to a U+003E (>) code point (if present) or to the end of
# the input stream is treated as a comment.
describe "incorrectly opened comment" do
let(:html) { "<html><body><div id=under-test><! comment <div id=do-i-exist>inner content</div>-->hello</div></body></html>" }
let(:doc) { Nokogiri::HTML(html) }
let(:body) { doc.at_css("body") }
let(:subject) { doc.at_css("div#under-test") }
if Nokogiri.uses_libxml?
it "ignores up to the next '>'" do # NON-COMPLIANT
assert_equal 2, body.children.length
assert_equal body.children[0], subject
assert_equal 1, subject.children.length
assert subject.children[0].text?
assert_equal "inner content", subject.children[0].content
assert body.children[1].text?
assert_equal "-->hello", body.children[1].content
end
end
if Nokogiri.jruby?
it "ignores up to the next '-->'" do # NON-COMPLIANT
assert_equal 1, subject.children.length
assert subject.children[0].text?
assert_equal "hello", subject.children[0].content
end
end
end
# https://html.spec.whatwg.org/multipage/parsing.html#parse-error-nested-comment
#
# This error occurs if the parser encounters a nested comment
# (e.g., <!-- <!-- nested --> -->). Such a comment will be
# closed by the first occurring "-->" code point sequence and
# everything that follows will be treated as markup.
describe "nested comment" do
let(:html) { "<html><body><div id=under-test><!-- outer <!-- inner --><div id=do-i-exist></div>--></div></body></html>" }
let(:doc) { Nokogiri::HTML(html) }
let(:subject) { doc.at_css("div#under-test") }
let(:inner_div) { doc.at_css("div#do-i-exist") }
it "ignores to the next '-->'" do # COMPLIANT
assert_equal 3, subject.children.length
assert subject.children[0].comment?
assert_equal " outer <!-- inner ", subject.children[0].content
assert inner_div
assert_equal inner_div, subject.children[1]
assert subject.children[2].text?
assert_equal "-->", subject.children[2].content
end
end
end
end
end
| 44.274775 | 160 | 0.607997 |
211f420d571dafb9660274a8acc4db3a3ee9d4ba | 806 | # frozen_string_literal: true
# == Schema Information
#
# Table name: players
#
# id :bigint(8) not null, primary key
# session_token :string
# player_name :string not null
# color :string not null
# world_id :bigint(8)
# created_at :datetime not null
# updated_at :datetime not null
#
class Player < ApplicationRecord
  has_many :cells, dependent: :destroy
  belongs_to :world

  validates :player_name, presence: true
  validates :world, presence: true

  # Generates a session token that no existing Player row is already using.
  # Keeps drawing random tokens until one is free, then returns it.
  def self.create_unique_session_token
    loop do
      candidate = SecureRandom.urlsafe_base64(nil, false).to_s
      break candidate unless Player.where(session_token: candidate).exists?
    end
  end
end
| 25.1875 | 73 | 0.668734 |
18b644182ea49a2c7f13383282ce5223f366e2d6 | 884 | json.path @path.to_s
json.url files_path(@path).to_s
#TODO: support array of shell urls, along with the default shell url which could be above
json.shell_url OodAppkit.shell.url(path: @path.to_s).to_s
json.files @files do |f|
json.id f[:id]
json.type f[:directory] ? 'd' : 'f'
json.name f[:name]
json.url files_path(@path.join(f[:name]).to_s)
json.download_url files_path(@path.join(f[:name]).to_s, download: '1') # FIXME: should change for directory
json.edit_url OodAppkit.editor.edit(path:@path.join(f[:name])).to_s
json.size f[:size]
json.human_size f[:human_size]
json.modified_at f[:date]
json.owner f[:owner]
json.mode f[:mode]
end
json.breadcrumbs_html render partial: 'breadcrumb.html.erb', collection: @path.descend, as: :file, locals: { file_count: @path.descend.count, full_path: @path }
json.time Time.now.to_i
json.error_message alert if alert
| 35.36 | 160 | 0.727376 |
9144cae1b1e2559a18b02cb2a34f775f9e3a3236 | 4,478 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Labservices::Mgmt::V2018_10_15
#
# The Managed Labs Client.
#
class Operations
include MsRestAzure
#
# Creates and initializes a new instance of the Operations class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [ManagedLabsClient] reference to the ManagedLabsClient
attr_reader :client
#
# Get operation
#
# @param location_name [String] The name of the location.
# @param operation_name [String] The name of the operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [OperationResult] operation results.
#
def get(location_name, operation_name, custom_headers:nil)
# Synchronous wrapper: block on the async call and unwrap the body.
response = get_async(location_name, operation_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Get operation
#
# @param location_name [String] The name of the location.
# @param operation_name [String] The name of the operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(location_name, operation_name, custom_headers:nil)
get_async(location_name, operation_name, custom_headers:custom_headers).value!
end
#
# Get operation
#
# @param location_name [String] The name of the location.
# @param operation_name [String] The name of the operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(location_name, operation_name, custom_headers:nil)
# Validate required inputs before building the request.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'location_name is nil' if location_name.nil?
fail ArgumentError, 'operation_name is nil' if operation_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# ARM URL template; the {tokens} are substituted from path_params below.
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.LabServices/locations/{locationName}/operations/{operationName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'locationName' => location_name,'operationName' => operation_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
# Issue the GET asynchronously; the promise below post-processes the result.
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything other than 200 is surfaced as an Azure operation error.
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Labservices::Mgmt::V2018_10_15::Models::OperationResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
# NOTE(review): rescuing Exception is overly broad (catches SignalException
# etc.); left as-is because this file is AutoRest-generated.
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
end
end
| 38.273504 | 138 | 0.690487 |
b96b6e505e20e7cd3df834b016956ee1a2ac4c15 | 4,676 | class Lua < Formula
desc "Powerful, lightweight programming language"
homepage "https://www.lua.org/"
url "https://www.lua.org/ftp/lua-5.4.4.tar.gz"
sha256 "164c7849653b80ae67bec4b7473b884bf5cc8d2dca05653475ec2ed27b9ebf61"
license "MIT"
revision 1
# How `brew livecheck` discovers new upstream releases on the FTP index.
livecheck do
url "https://www.lua.org/ftp/"
regex(/href=.*?lua[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
# Prebuilt binary bottle hosted by this tap.
bottle do
root_url "https://github.com/gromgit/homebrew-core-mojave/releases/download/lua"
rebuild 1
sha256 cellar: :any, mojave: "70c6fa7b6c9e2e31c5f203c2502451a76486be261cb403fa1f7d69a2b6bef3b3"
end
uses_from_macos "unzip" => :build
on_macos do
# Be sure to build a dylib, or else runtime modules will pull in another static copy of liblua = crashy
# See: https://github.com/Homebrew/legacy-homebrew/pull/5043
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/11c8360432f471f74a9b2d76e012e3b36f30b871/lua/lua-dylib.patch"
sha256 "a39e2ae1066f680e5c8bf1749fe09b0e33a0215c31972b133a73d43b00bf29dc"
end
end
on_linux do
depends_on "readline"
# Add shared library for linux. Equivalent to the mac patch above.
# Inspired from https://www.linuxfromscratch.org/blfs/view/cvs/general/lua.html
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/0dcd11880c7d63eb395105a5cdddc1ca05b40f4a/lua/lua-so.patch"
sha256 "522dc63a0c1d87bf127c992dfdf73a9267890fd01a5a17e2bcf06f7eb2782942"
end
end
# Fix crash issue in luac when invoked with multiple files.
# http://lua-users.org/lists/lua-l/2022-02/msg00113.html
patch :DATA
# Builds Lua from source, installs it into the Homebrew prefix, and ships
# a hand-written pkg-config file plus versioned symlinks for compatibility.
def install
if OS.linux?
# Fix: /usr/bin/ld: lapi.o: relocation R_X86_64_32 against `luaO_nilobject_' can not be used
# when making a shared object; recompile with -fPIC
# See https://www.linuxfromscratch.org/blfs/view/cvs/general/lua.html
ENV.append_to_cflags "-fPIC"
end
# Substitute formula prefix in `src/Makefile` for install name (dylib ID).
# Use our CC/CFLAGS to compile.
inreplace "src/Makefile" do |s|
s.gsub! "@OPT_LIB@", opt_lib if OS.mac?
s.remove_make_var! "CC"
s.change_make_var! "MYCFLAGS", ENV.cflags
s.change_make_var! "MYLDFLAGS", ENV.ldflags
end
# Fix path in the config header
inreplace "src/luaconf.h", "/usr/local", HOMEBREW_PREFIX
# Upstream Makefile target name for the current platform.
os = if OS.mac?
"macosx"
else
"linux-readline"
end
# Build, then install into the Homebrew prefix.
system "make", os, "INSTALL_TOP=#{prefix}"
system "make", "install", "INSTALL_TOP=#{prefix}"
# We ship our own pkg-config file as Lua no longer provide them upstream.
libs = %w[-llua -lm]
libs << "-ldl" if OS.linux?
(lib/"pkgconfig/lua.pc").write <<~EOS
V= #{version.major_minor}
R= #{version}
prefix=#{HOMEBREW_PREFIX}
INSTALL_BIN= ${prefix}/bin
INSTALL_INC= ${prefix}/include/lua
INSTALL_LIB= ${prefix}/lib
INSTALL_MAN= ${prefix}/share/man/man1
INSTALL_LMOD= ${prefix}/share/lua/${V}
INSTALL_CMOD= ${prefix}/lib/lua/${V}
exec_prefix=${prefix}
libdir=${exec_prefix}/lib
includedir=${prefix}/include/lua
Name: Lua
Description: An Extensible Extension Language
Version: #{version}
Requires:
Libs: -L${libdir} #{libs.join(" ")}
Cflags: -I${includedir}
EOS
# Fix some software potentially hunting for different pc names.
bin.install_symlink "lua" => "lua#{version.major_minor}"
bin.install_symlink "lua" => "lua-#{version.major_minor}"
bin.install_symlink "luac" => "luac#{version.major_minor}"
bin.install_symlink "luac" => "luac-#{version.major_minor}"
(include/"lua#{version.major_minor}").install_symlink Dir[include/"lua/*"]
lib.install_symlink shared_library("liblua", version.major_minor) => shared_library("liblua#{version.major_minor}")
(lib/"pkgconfig").install_symlink "lua.pc" => "lua#{version.major_minor}.pc"
(lib/"pkgconfig").install_symlink "lua.pc" => "lua-#{version.major_minor}.pc"
lib.install Dir[shared_library("src/liblua", "*")] if OS.linux?
end
# Post-install hint displayed to the user after `brew install lua`.
def caveats
  message = <<~EOS
    You may also want luarocks:
    brew install luarocks
  EOS
  message
end
# Smoke test: run the installed interpreter and verify it prints output.
test do
assert_match "Homebrew is awesome!", shell_output("#{bin}/lua -e \"print ('Homebrew is awesome!')\"")
end
end
__END__
diff --git a/src/luac.c b/src/luac.c
index f6db9cf..ba0a81e 100644
--- a/src/luac.c
+++ b/src/luac.c
@@ -156,6 +156,7 @@ static const Proto* combine(lua_State* L, int n)
if (f->p[i]->sizeupvalues>0) f->p[i]->upvalues[0].instack=0;
}
luaM_freearray(L,f->lineinfo,f->sizelineinfo);
+ f->lineinfo = NULL;
f->sizelineinfo=0;
return f;
}
| 33.884058 | 131 | 0.679855 |
91e044d342348c911c56045e756b3f0ae6c595b6 | 506 | class Article < ActiveRecord::Base
# An article owns its comments; deleting the article removes them too.
has_many :comments, dependent: :destroy

# Title and body are mandatory and must be at least 5 characters long.
validates :title, presence: true, length: { minimum: 5 }
validates :text, presence: true, length: { minimum: 5 }
# It returns the articles whose titles contain one or more words that form the query
# (case-insensitive substring match).
def self.search(query)
  # Parameter binding already prevents SQL injection; sanitize_sql_like
  # additionally escapes LIKE wildcards (% and _) in the user-supplied
  # term so they match literally instead of acting as patterns.
  # `to_s` keeps a nil query behaving like the old code (matches all).
  where("lower(title) LIKE ?", "%#{sanitize_sql_like(query.to_s.downcase)}%")
end
end
| 26.631579 | 86 | 0.62253 |
1ad1ee79c378d371e3127f8777f104ff76e241c9 | 243 | class PictureSet
# %%mo%%ps
# Shared SQL ORDER BY clause: newest year first, then ascending weight,
# then newest sequence first. Consumed by PictureSet.get below.
def self.order
'year DESC, weight, sequence DESC'
end
# Pictures ordered by PictureSet.order, optionally restricted to those
# tagged with the given tag name.
# @param tag [String, nil] tag name to filter by; nil returns everything
def self.get(tag=nil)
  # The original code aliased this scope to an unused `relation` local;
  # dropped, and DSL calls are now parenthesized for clarity.
  scope = Picture.order(PictureSet.order)
  scope = scope.joins(:tags).where(:tags => { :name => tag }) if tag
  scope.all
end
end
| 16.2 | 57 | 0.633745 |
03a7513aa403acb7104c14d6814899f741c12569 | 266 | class CreateComments < ActiveRecord::Migration
# Adds the comments table; each comment belongs to an article.
def change
create_table :comments do |t|
t.string :name
t.text :body
# article_id column plus an index for lookups by article
t.references :article, index: true
# created_at / updated_at, both NOT NULL
t.timestamps null: false
end
# Enforce referential integrity at the database level as well.
add_foreign_key :comments, :articles
end
end
| 20.461538 | 46 | 0.672932 |
f7598ab3d9b096f6f86498c67f95f303eb232c5e | 1,046 | Pod::Spec.new do |s|
# CocoaPods spec metadata; `s` is the Pod::Spec instance being configured.
s.name = "NGTabBarController"
s.version = "0.1"
s.summary = "A custom TabBarController implementation for iPhone and iPad."
# NOTE(review): "bottom, top, left or top" repeats "top"; likely meant
# "right" — user-facing string, flagged rather than silently changed.
s.description = 'A custom TabBarController which can be positioned on the bottom, top, left or top. ' \
'Utilizes iOS 5 Containment API if possible, but works on iOS 4 too. ' \
'The TabBar is fully customizable with a tintColor or background image as well as ' \
'the possibility to show/hide the item highlight and the possibility to change the ' \
'text colors, have image-only tabBar items etc.'
s.homepage = "https://github.com/NOUSguide/NGTabBarController"
s.author = { "NOUSguide Inc. / NOUS Wissensmanagement GmbH" => "[email protected]" }
s.source = { :git => "https://github.com/NOUSguide/NGTabBarController.git", :tag => "0.1" }
s.license = 'MIT'
s.platform = :ios
# Sources live directly under the NGTabBarController directory; ARC required.
s.source_files = 'NGTabBarController'
s.requires_arc = true
end
| 55.052632 | 106 | 0.624283 |
6a246f5b1c2f790d51d4a1f8729d97acbb025651 | 680 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module MaintenanceManagementRailsApp
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
# Opts in to the framework defaults that shipped with Rails 5.2.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 34 | 82 | 0.772059 |
ab60ad07ac196c2b88ce1f1e4db73c6be808cc1d | 1,621 | # Copyright (C) 2015-2019 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
  module Operation

    # Shared behavior of operations that support read preference.
    #
    # @since 2.5.2
    module ReadPreferenceSupported

      private

      # Wire-protocol flag requesting that reads from a secondary be allowed.
      SLAVE_OK = :slave_ok

      # Extends the operation options (from the including class via super)
      # with the slave_ok flag when the deployment/read preference calls for it.
      #
      # @param server [ Object ] The server the operation targets.
      #
      # @return [ Hash ] The options to use.
      def options(server)
        update_options_for_slave_ok(super, server)
      end

      # Wraps the selector in a $query document carrying $readPreference
      # when dispatching through a mongos and the read preference maps to one.
      #
      # @param sel [ Hash ] The operation selector.
      # @param server [ Object ] The server the operation targets.
      #
      # @return [ Hash ] The selector, wrapped if necessary.
      def update_selector_for_read_pref(sel, server)
        if read && server.mongos? && read_pref = read.to_mongos
          Mongo::Lint.validate_camel_case_read_preference(read_pref)
          sel = sel[:$query] ? sel : {:$query => sel}
          sel = sel.merge(:$readPreference => read_pref)
        else
          sel
        end
      end

      # Returns a copy of opts with SLAVE_OK appended to :flags when the
      # cluster is a direct connection to a non-mongos server, or the read
      # preference permits secondary reads.
      #
      # Bug fix: the previous implementation did `opts.dup.tap { |o|
      # (o[:flags] ||= []) << SLAVE_OK }`. Object#dup is shallow, so when
      # opts already contained a :flags array, the << mutated the caller's
      # array through the shared reference. A fresh array is built instead.
      #
      # @param opts [ Hash ] The operation options.
      # @param server [ Object ] The server the operation targets.
      #
      # @return [ Hash ] The options to use.
      def update_options_for_slave_ok(opts, server)
        if (server.cluster.single? && !server.mongos?) || (read && read.slave_ok?)
          opts.merge(flags: Array(opts[:flags]) + [SLAVE_OK])
        else
          opts
        end
      end

      # Applies the read-preference selector rewrite to the command built
      # by the including class (obtained via super).
      def command(server)
        sel = super
        update_selector_for_read_pref(sel, server)
      end
    end
  end
end
| 27.948276 | 82 | 0.646514 |
3373de7c05cc6bbf050a3e518c8e5e350a6d6f77 | 1,976 | require 'test_helper'
module ActionController
module Serialization
class AdapterSelectorTest < ActionController::TestCase
# Verifies that `render json:` uses the configured default adapter, honors
# a per-call `adapter:` override, and can bypass AMS with `adapter: false`.
class Profile < Model
attributes :id, :name, :description
associations :comments
end
class ProfileSerializer < ActiveModel::Serializer
type 'profiles'
attributes :name, :description
end
class AdapterSelectorTestController < ActionController::Base
def render_using_default_adapter
@profile = Profile.new(name: 'Name 1', description: 'Description 1', comments: 'Comments 1')
render json: @profile
end
def render_using_adapter_override
@profile = Profile.new(id: 'render_using_adapter_override', name: 'Name 1', description: 'Description 1', comments: 'Comments 1')
render json: @profile, adapter: :json_api
end
def render_skipping_adapter
@profile = Profile.new(id: 'render_skipping_adapter_id', name: 'Name 1', description: 'Description 1', comments: 'Comments 1')
render json: @profile, adapter: false
end
end
# Route the functional tests below to this controller.
tests AdapterSelectorTestController
def test_render_using_default_adapter
get :render_using_default_adapter
# Default adapter emits the serializer's flat attributes.
assert_equal '{"name":"Name 1","description":"Description 1"}', response.body
end
def test_render_using_adapter_override
get :render_using_adapter_override
# json_api adapter wraps the attributes in a `data` envelope.
expected = {
data: {
id: 'render_using_adapter_override',
type: 'profiles',
attributes: {
name: 'Name 1',
description: 'Description 1'
}
}
}
assert_equal expected.to_json, response.body
end
def test_render_skipping_adapter
get :render_skipping_adapter
# adapter: false serializes the model directly, id included.
assert_equal '{"id":"render_skipping_adapter_id","name":"Name 1","description":"Description 1"}', response.body
end
end
end
end
| 31.365079 | 139 | 0.6417 |
18107d4a9107050449eaf50e4ea2d94a541d0e9d | 695 | class Dungeons::API
require 'httparty'
# GET /api/races — fetches and returns the parsed list of all races.
def race_call
  HTTParty.get('http://www.dnd5eapi.co/api/races').parsed_response
end
# GET /api/races/<input> — fetches parsed details for a single race.
def race_info_call(input)
  base = 'http://www.dnd5eapi.co/api/races'
  HTTParty.get(base + "/#{input}").parsed_response
end
# GET /api/classes — fetches and returns the parsed list of all classes.
def klass_call
  HTTParty.get('http://www.dnd5eapi.co/api/classes').parsed_response
end
# GET /api/classes/<input> — fetches parsed details for a single class.
def klass_info_call(input)
  base = 'http://www.dnd5eapi.co/api/classes'
  HTTParty.get(base + "/#{input}").parsed_response
end
end
| 23.166667 | 50 | 0.607194 |
01382866b91383c142d7e849dba01891c343b16e | 289 | ENV['SINATRA_ENV'] ||= "development"
require 'bundler/setup'
# Load every gem from the Gemfile group matching the current environment.
Bundler.require(:default, ENV['SINATRA_ENV'])
# Legacy inline connection setup, superseded by database.yml below:
# ActiveRecord::Base.establish_connection(
# :adapter => "sqlite3",
# :database => "db/#{ENV['SINATRA_ENV']}.sqlite"
# )
# sinatra-activerecord reads the connection settings from this file.
set :database_file, "./database.yml"
require_all 'app' | 24.083333 | 50 | 0.692042 |
7a47bc612cd39e29b2b1d875593a86561035d311 | 907 | # frozen_string_literal: true
# User authenticate operation (server step 1)
class Auth::Challenge < AuthTransaction
  CONTRACT = Auth::ChallengeContract
  DECORATOR = Auth::ChallengeDecorator

  step :get_user_from_email_param
  step :start_sirp_verifier
  step :get_challenge_and_proof
  step :store_proof
  step :challenge_response

  # SIRP step: derive the challenge/proof pair for the user from params[:A].
  def get_challenge_and_proof(input)
    user = ctx[:user]
    ctx[:challenge] = ctx[:verifier].get_challenge_and_proof(
      user.email, user.verifier, user.salt, input[:params][:A]
    )
    return Success(input) if ctx[:challenge]

    Failure(ErrorService.bad_request_fail(message: 'Failed Challenge'))
  end

  # Persist the server-side proof on the user record as Base64-encoded JSON.
  def store_proof(input)
    ctx[:user].proof = Base64.encode64(JSON.dump(ctx[:challenge][:proof]))
    ctx[:user].save
    Success(input)
  end

  # Expose the challenge portion as the operation's model for rendering.
  def challenge_response(input)
    ctx[:model] = ctx[:challenge][:challenge]
    Success(input)
  end
end
| 25.914286 | 106 | 0.716648 |
e2521908cc37b011d9083e03468b6b5faced03dc | 28,032 | # frozen_string_literal: true
module Razorpay
module IFSC
module Bank
AACX = :AACX
ABBL = :ABBL
ABCX = :ABCX
ABDX = :ABDX
ABEX = :ABEX
ABHY = :ABHY
ABNA = :ABNA
ABPB = :ABPB
ABSB = :ABSB
ABUX = :ABUX
ACAX = :ACAX
ACBX = :ACBX
ACCX = :ACCX
ACKX = :ACKX
ACOX = :ACOX
ACUB = :ACUB
ACUX = :ACUX
ADBX = :ADBX
ADCB = :ADCB
ADCC = :ADCC
ADCX = :ADCX
ADDX = :ADDX
AGCX = :AGCX
AGDX = :AGDX
AGRX = :AGRX
AGSX = :AGSX
AGUX = :AGUX
AGVX = :AGVX
AHMX = :AHMX
AHUX = :AHUX
AIRP = :AIRP
AJAR = :AJAR
AJHC = :AJHC
AJKB = :AJKB
AJMX = :AJMX
AJNX = :AJNX
AJPX = :AJPX
AJSX = :AJSX
AJUX = :AJUX
AKJB = :AKJB
AKKB = :AKKB
AKMX = :AKMX
AKOX = :AKOX
ALAX = :ALAX
ALIX = :ALIX
ALLA = :ALLA
ALLX = :ALLX
ALWX = :ALWX
AMAX = :AMAX
AMBX = :AMBX
AMCB = :AMCB
AMCX = :AMCX
AMDN = :AMDN
AMMX = :AMMX
AMNX = :AMNX
AMRX = :AMRX
AMSB = :AMSB
AMSX = :AMSX
ANBX = :ANBX
ANDB = :ANDB
ANDX = :ANDX
ANMX = :ANMX
ANSX = :ANSX
ANUX = :ANUX
ANZB = :ANZB
APBL = :APBL
APCX = :APCX
APGB = :APGB
APGV = :APGV
APGX = :APGX
APJX = :APJX
APMC = :APMC
APMX = :APMX
APNX = :APNX
APRX = :APRX
APSX = :APSX
ARCX = :ARCX
ARMX = :ARMX
ARYX = :ARYX
ASBL = :ASBL
ASBX = :ASBX
ASHX = :ASHX
ASKX = :ASKX
ASNX = :ASNX
ASOX = :ASOX
ASSX = :ASSX
AUBL = :AUBL
AUBX = :AUBX
AUCB = :AUCB
AUCX = :AUCX
AUGX = :AUGX
AURX = :AURX
AVDX = :AVDX
AWCX = :AWCX
AWUX = :AWUX
AZAX = :AZAX
AZPX = :AZPX
AZSX = :AZSX
AZUX = :AZUX
BACB = :BACB
BACX = :BACX
BADX = :BADX
BALX = :BALX
BANX = :BANX
BARA = :BARA
BARB = :BARB
BARC = :BARC
BARX = :BARX
BASX = :BASX
BAUX = :BAUX
BAVX = :BAVX
BBKM = :BBKM
BBLX = :BBLX
BBRX = :BBRX
BBSX = :BBSX
BBUX = :BBUX
BBVX = :BBVX
BCBM = :BCBM
BCBX = :BCBX
BCCB = :BCCB
BCCX = :BCCX
BCEX = :BCEX
BCEY = :BCEY
BCOX = :BCOX
BCUB = :BCUB
BCUX = :BCUX
BDBB = :BDBB
BDBL = :BDBL
BDBX = :BDBX
BDCX = :BDCX
BDDX = :BDDX
BDIX = :BDIX
BDNX = :BDNX
BDOX = :BDOX
BDUX = :BDUX
BEDX = :BEDX
BELX = :BELX
BFUX = :BFUX
BGBX = :BGBX
BGCX = :BGCX
BGGX = :BGGX
BGUX = :BGUX
BGVX = :BGVX
BHAX = :BHAX
BHBX = :BHBX
BHCX = :BHCX
BHDX = :BHDX
BHEX = :BHEX
BHGX = :BHGX
BHIX = :BHIX
BHJX = :BHJX
BHMX = :BHMX
BHOX = :BHOX
BHRX = :BHRX
BHSX = :BHSX
BHTX = :BHTX
BHUX = :BHUX
BHWX = :BHWX
BJUX = :BJUX
BKCX = :BKCX
BKDN = :BKDN
BKDX = :BKDX
BKID = :BKID
BKSX = :BKSX
BLGX = :BLGX
BMBL = :BMBL
BMCB = :BMCB
BMCX = :BMCX
BMPX = :BMPX
BMSX = :BMSX
BNBX = :BNBX
BNCX = :BNCX
BNPA = :BNPA
BNSB = :BNSB
BNSX = :BNSX
BODX = :BODX
BOFA = :BOFA
BORX = :BORX
BOTM = :BOTM
BOTX = :BOTX
BPCX = :BPCX
BPSX = :BPSX
BRCX = :BRCX
BRDX = :BRDX
BRGX = :BRGX
BRMX = :BRMX
BRSX = :BRSX
BRUX = :BRUX
BSBX = :BSBX
BSCX = :BSCX
BTCX = :BTCX
BTUX = :BTUX
BUBX = :BUBX
BUCL = :BUCL
BUCX = :BUCX
BUGX = :BUGX
BUNX = :BUNX
BURX = :BURX
BUSX = :BUSX
BUZX = :BUZX
BVNX = :BVNX
BVSX = :BVSX
BWCX = :BWCX
CALX = :CALX
CBHX = :CBHX
CBIN = :CBIN
CCBL = :CCBL
CCBX = :CCBX
CCCX = :CCCX
CCMX = :CCMX
CCOB = :CCOB
CCUX = :CCUX
CDCX = :CDCX
CEBX = :CEBX
CGBX = :CGBX
CGGX = :CGGX
CHAS = :CHAS
CHAX = :CHAX
CHBX = :CHBX
CHCX = :CHCX
CHDX = :CHDX
CHIX = :CHIX
CHKX = :CHKX
CHPX = :CHPX
CHRX = :CHRX
CHSX = :CHSX
CHTX = :CHTX
CIDX = :CIDX
CITI = :CITI
CITX = :CITX
CIUB = :CIUB
CJAX = :CJAX
CJMX = :CJMX
CLBL = :CLBL
CMCB = :CMCB
CMCX = :CMCX
CMDX = :CMDX
CMLX = :CMLX
CMPX = :CMPX
CNRB = :CNRB
CNSX = :CNSX
COCX = :COCX
COLX = :COLX
COMX = :COMX
CONX = :CONX
CORP = :CORP
COAS = :COAS
COSB = :COSB
CPDX = :CPDX
CPSN = :CPSN
CRBX = :CRBX
CRES = :CRES
CRLY = :CRLY
CRSX = :CRSX
CRUB = :CRUB
CSBK = :CSBK
CSBX = :CSBX
CTBA = :CTBA
CTBX = :CTBX
CTCB = :CTCB
CTOX = :CTOX
CTUX = :CTUX
CUBX = :CUBX
CUCX = :CUCX
CURX = :CURX
CZCX = :CZCX
CZUX = :CZUX
DAAX = :DAAX
DAHX = :DAHX
DAUX = :DAUX
DBAX = :DBAX
DBSS = :DBSS
DCBL = :DCBL
DCBX = :DCBX
DCCX = :DCCX
DCDX = :DCDX
DCEX = :DCEX
DCKX = :DCKX
DCMX = :DCMX
DCNX = :DCNX
DCPX = :DCPX
DCSX = :DCSX
DCTX = :DCTX
DCUX = :DCUX
DDBX = :DDBX
DDCX = :DDCX
DDDX = :DDDX
DDHX = :DDHX
DEGX = :DEGX
DENS = :DENS
DEOB = :DEOB
DEOX = :DEOX
DEUT = :DEUT
DEUX = :DEUX
DEVX = :DEVX
DGBX = :DGBX
DHBX = :DHBX
DHKX = :DHKX
DHUX = :DHUX
DIBX = :DIBX
DICG = :DICG
DICX = :DICX
DIUX = :DIUX
DJCX = :DJCX
DKCL = :DKCL
DKSX = :DKSX
DLSC = :DLSC
DLXB = :DLXB
DMCB = :DMCB
DMCX = :DMCX
DMKB = :DMKB
DMKJ = :DMKJ
DNDC = :DNDC
DNSB = :DNSB
DNSX = :DNSX
DOBX = :DOBX
DOHB = :DOHB
DRGX = :DRGX
DSBX = :DSBX
DSCB = :DSCB
DSHX = :DSHX
DSPX = :DSPX
DSUX = :DSUX
DTCX = :DTCX
DTPX = :DTPX
DUCX = :DUCX
DUMX = :DUMX
DUNX = :DUNX
DURG = :DURG
DVDX = :DVDX
DYPX = :DYPX
EBIL = :EBIL
ECBL = :ECBL
EDBX = :EDBX
EDCX = :EDCX
EDSX = :EDSX
EIBI = :EIBI
ESAF = :ESAF
ESFB = :ESFB
ESMF = :ESMF
ETCX = :ETCX
ETDX = :ETDX
EUCX = :EUCX
EWCX = :EWCX
FCBX = :FCBX
FCCX = :FCCX
FCOX = :FCOX
FDFX = :FDFX
FDRL = :FDRL
FEKX = :FEKX
FGCB = :FGCB
FINF = :FINF
FINO = :FINO
FINX = :FINX
FIRN = :FIRN
FIRX = :FIRX
FMCX = :FMCX
FRIX = :FRIX
FSCX = :FSCX
FSFB = :FSFB
FZCX = :FZCX
FZSX = :FZSX
GACX = :GACX
GADX = :GADX
GANX = :GANX
GBCB = :GBCB
GCBX = :GCBX
GCCX = :GCCX
GCUL = :GCUL
GCUX = :GCUX
GDCB = :GDCB
GDCX = :GDCX
GDDX = :GDDX
GDUX = :GDUX
GGBK = :GGBK
GGCX = :GGCX
GHPX = :GHPX
GKNX = :GKNX
GMBX = :GMBX
GMCX = :GMCX
GMUX = :GMUX
GNCX = :GNCX
GNSX = :GNSX
GODX = :GODX
GOSX = :GOSX
GPCX = :GPCX
GPOX = :GPOX
GRAX = :GRAX
GSBL = :GSBL
GSBX = :GSBX
GSCB = :GSCB
GSCX = :GSCX
GSSX = :GSSX
GTCX = :GTCX
GUBX = :GUBX
GUCX = :GUCX
GUNX = :GUNX
GUOX = :GUOX
HAMX = :HAMX
HANX = :HANX
HARC = :HARC
HCBL = :HCBL
HCBX = :HCBX
HCCX = :HCCX
HCLX = :HCLX
HDCL = :HDCL
HDCX = :HDCX
HDFC = :HDFC
HGBX = :HGBX
HINX = :HINX
HISX = :HISX
HMBX = :HMBX
HMNX = :HMNX
HOCX = :HOCX
HOOX = :HOOX
HPCX = :HPCX
HPSC = :HPSC
HPSX = :HPSX
HSBC = :HSBC
HSBM = :HSBM
HSBX = :HSBX
HSCX = :HSCX
HSDX = :HSDX
HSSX = :HSSX
HUBX = :HUBX
HUCH = :HUCH
HUCB = :HUCB
HUCX = :HUCX
HUTX = :HUTX
HVBK = :HVBK
IBBK = :IBBK
IBKL = :IBKL
IBKO = :IBKO
ICBK = :ICBK
ICBL = :ICBL
ICHX = :ICHX
ICIC = :ICIC
ICMX = :ICMX
IDFB = :IDFB
IDIB = :IDIB
IDUK = :IDUK
IDUX = :IDUX
ILCB = :ILCB
IMCX = :IMCX
IMPX = :IMPX
INCX = :INCX
INDB = :INDB
INDX = :INDX
IOBA = :IOBA
IPCX = :IPCX
IPOS = :IPOS
IPPB = :IPPB
IPSX = :IPSX
ISBX = :ISBX
ISMX = :ISMX
ITBL = :ITBL
ITCX = :ITCX
ITDX = :ITDX
IUCB = :IUCB
IUCX = :IUCX
JACX = :JACX
JAKA = :JAKA
JALX = :JALX
JAMX = :JAMX
JANA = :JANA
JANX = :JANX
JASB = :JASB
JASX = :JASX
JAUX = :JAUX
JBHX = :JBHX
JBIX = :JBIX
JBMX = :JBMX
JCBX = :JCBX
JCCB = :JCCB
JCCX = :JCCX
JCDX = :JCDX
JCHX = :JCHX
JCPX = :JCPX
JCUX = :JCUX
JDCX = :JDCX
JDEX = :JDEX
JGBX = :JGBX
JGCX = :JGCX
JGWX = :JGWX
JHAX = :JHAX
JHSX = :JHSX
JHUX = :JHUX
JIBX = :JIBX
JICX = :JICX
JIDX = :JIDX
JIGX = :JIGX
JIKX = :JIKX
JIMX = :JIMX
JINX = :JINX
JIOP = :JIOP
JIOX = :JIOX
JIRX = :JIRX
JISX = :JISX
JIVX = :JIVX
JJCX = :JJCX
JJHX = :JJHX
JJSB = :JJSB
JKAX = :JKAX
JKCX = :JKCX
JKDX = :JKDX
JKEX = :JKEX
JKHX = :JKHX
JKMX = :JKMX
JKRX = :JKRX
JKSX = :JKSX
JLCX = :JLCX
JLDX = :JLDX
JLNX = :JLNX
JLSX = :JLSX
JLWX = :JLWX
JMAX = :JMAX
JMBX = :JMBX
JMCX = :JMCX
JMDX = :JMDX
JMHX = :JMHX
JMMX = :JMMX
JMOX = :JMOX
JMPX = :JMPX
JMSX = :JMSX
JMYX = :JMYX
JNAX = :JNAX
JNDX = :JNDX
JNSX = :JNSX
JODX = :JODX
JONX = :JONX
JOWX = :JOWX
JPAX = :JPAX
JPCB = :JPCB
JPCX = :JPCX
JRAX = :JRAX
JRKX = :JRKX
JRNX = :JRNX
JRSX = :JRSX
JSAB = :JSAB
JSAX = :JSAX
JSBL = :JSBL
JSBP = :JSBP
JSBX = :JSBX
JSCX = :JSCX
JSDX = :JSDX
JSEX = :JSEX
JSFB = :JSFB
JSHX = :JSHX
JSKX = :JSKX
JSMX = :JSMX
JSOX = :JSOX
JSRX = :JSRX
JSTX = :JSTX
JSVX = :JSVX
JSWX = :JSWX
JTIX = :JTIX
JTSX = :JTSX
JUCX = :JUCX
JUSX = :JUSX
JVCX = :JVCX
KAAX = :KAAX
KACE = :KACE
KACX = :KACX
KADX = :KADX
KAGX = :KAGX
KAIJ = :KAIJ
KALX = :KALX
KAMX = :KAMX
KANG = :KANG
KANX = :KANX
KARB = :KARB
KARX = :KARX
KASX = :KASX
KATX = :KATX
KAYX = :KAYX
KBCX = :KBCX
KBKB = :KBKB
KBNX = :KBNX
KBSX = :KBSX
KCBL = :KCBL
KCBX = :KCBX
KCCB = :KCCB
KCCX = :KCCX
KCDX = :KCDX
KCEX = :KCEX
KCOB = :KCOB
KCUB = :KCUB
KCUX = :KCUX
KDBX = :KDBX
KDCB = :KDCB
KDCX = :KDCX
KDIX = :KDIX
KDNX = :KDNX
KDUX = :KDUX
KEMX = :KEMX
KESX = :KESX
KGBX = :KGBX
KGDX = :KGDX
KGRB = :KGRB
KGSX = :KGSX
KHAX = :KHAX
KHCX = :KHCX
KHDX = :KHDX
KHNX = :KHNX
KHUX = :KHUX
KICX = :KICX
KJSB = :KJSB
KJSX = :KJSX
KKBK = :KKBK
KKMX = :KKMX
KKSX = :KKSX
KLGB = :KLGB
KLMX = :KLMX
KMCB = :KMCB
KMCX = :KMCX
KMNX = :KMNX
KMSX = :KMSX
KNBX = :KNBX
KNCX = :KNCX
KNNX = :KNNX
KNPX = :KNPX
KNSB = :KNSB
KNSX = :KNSX
KOBX = :KOBX
KOCX = :KOCX
KODX = :KODX
KOEX = :KOEX
KOLH = :KOLH
KOSX = :KOSX
KOTX = :KOTX
KOYX = :KOYX
KPCX = :KPCX
KRCX = :KRCX
KRDX = :KRDX
KRIX = :KRIX
KRMX = :KRMX
KRNX = :KRNX
KRTH = :KRTH
KRTX = :KRTX
KSBK = :KSBK
KSBX = :KSBX
KSCB = :KSCB
KSCX = :KSCX
KSMX = :KSMX
KSNX = :KSNX
KSTX = :KSTX
KSUX = :KSUX
KTBX = :KTBX
KTCX = :KTCX
KTDX = :KTDX
KTTX = :KTTX
KUBX = :KUBX
KUCB = :KUCB
KUCX = :KUCX
KUKX = :KUKX
KULX = :KULX
KUMX = :KUMX
KUNS = :KUNS
KURX = :KURX
KVBL = :KVBL
KVCX = :KVCX
KVGB = :KVGB
KYDX = :KYDX
LACX = :LACX
LATX = :LATX
LAVB = :LAVB
LBMX = :LBMX
LCBX = :LCBX
LCCX = :LCCX
LDCX = :LDCX
LDPX = :LDPX
LDRX = :LDRX
LECX = :LECX
LICB = :LICB
LKBL = :LKBL
LKCX = :LKCX
LKHX = :LKHX
LKMX = :LKMX
LMNX = :LMNX
LNSX = :LNSX
LOKX = :LOKX
LONX = :LONX
LUCX = :LUCX
LULX = :LULX
MABL = :MABL
MACX = :MACX
MADX = :MADX
MAGX = :MAGX
MAHB = :MAHB
MAHG = :MAHG
MAHX = :MAHX
MAJX = :MAJX
MAKX = :MAKX
MALX = :MALX
MAMX = :MAMX
MANX = :MANX
MAPX = :MAPX
MASX = :MASX
MAUX = :MAUX
MAVX = :MAVX
MAWX = :MAWX
MAYX = :MAYX
MBCX = :MBCX
MBGX = :MBGX
MBLX = :MBLX
MCAX = :MCAX
MCBL = :MCBL
MCBX = :MCBX
MCCX = :MCCX
MCDX = :MCDX
MCLX = :MCLX
MCOX = :MCOX
MCSX = :MCSX
MCUX = :MCUX
MDBK = :MDBK
MDCB = :MDCB
MDCX = :MDCX
MDEX = :MDEX
MDGX = :MDGX
MDIX = :MDIX
MDMX = :MDMX
MDPX = :MDPX
MEDX = :MEDX
MERX = :MERX
MEUX = :MEUX
MFCX = :MFCX
MFUX = :MFUX
MGBX = :MGBX
MGCB = :MGCB
MGCX = :MGCX
MGDX = :MGDX
MGRB = :MGRB
MGSX = :MGSX
MGUX = :MGUX
MHCB = :MHCB
MHCX = :MHCX
MHEX = :MHEX
MHLX = :MHLX
MHMX = :MHMX
MHNX = :MHNX
MHSX = :MHSX
MHUX = :MHUX
MIZX = :MIZX
MJCX = :MJCX
MKPB = :MKPB
MKUX = :MKUX
MKYX = :MKYX
MLCG = :MLCG
MLDX = :MLDX
MMCX = :MMCX
MMMX = :MMMX
MNBX = :MNBX
MNCX = :MNCX
MNSX = :MNSX
MOGX = :MOGX
MOUX = :MOUX
MPCX = :MPCX
MPDX = :MPDX
MPRX = :MPRX
MRBX = :MRBX
MRTX = :MRTX
MSAX = :MSAX
MSBL = :MSBL
MSBX = :MSBX
MSCI = :MSCI
MSCX = :MSCX
MSHQ = :MSHQ
MSLM = :MSLM
MSNU = :MSNU
MSNX = :MSNX
MSOX = :MSOX
MSSX = :MSSX
MUBL = :MUBL
MUBX = :MUBX
MUCG = :MUCG
MUCX = :MUCX
MUDX = :MUDX
MUNX = :MUNX
MUPX = :MUPX
MURX = :MURX
MUSX = :MUSX
MVCB = :MVCB
MVCX = :MVCX
MVIX = :MVIX
MYAX = :MYAX
MYSX = :MYSX
MZCX = :MZCX
MZRX = :MZRX
NABX = :NABX
NACX = :NACX
NAGX = :NAGX
NAIX = :NAIX
NALX = :NALX
NANX = :NANX
NASX = :NASX
NATA = :NATA
NAUX = :NAUX
NAVX = :NAVX
NAWX = :NAWX
NBAD = :NBAD
NBBX = :NBBX
NBCX = :NBCX
NBMX = :NBMX
NBRD = :NBRD
NCBL = :NCBL
NCBX = :NCBX
NCCX = :NCCX
NCOX = :NCOX
NCUB = :NCUB
NCUX = :NCUX
NDCB = :NDCB
NDCX = :NDCX
NDDX = :NDDX
NDGX = :NDGX
NDIX = :NDIX
NESF = :NESF
NEYX = :NEYX
NGBX = :NGBX
NGKX = :NGKX
NGNX = :NGNX
NGRX = :NGRX
NGSB = :NGSB
NGSX = :NGSX
NICB = :NICB
NIDX = :NIDX
NILX = :NILX
NIRX = :NIRX
NIUX = :NIUX
NJBK = :NJBK
NJCX = :NJCX
NJGX = :NJGX
NJMX = :NJMX
NJSX = :NJSX
NKGS = :NKGS
NLGX = :NLGX
NLUX = :NLUX
NMCB = :NMCB
NMCX = :NMCX
NMGB = :NMGB
NNCX = :NNCX
NNSB = :NNSB
NNSX = :NNSX
NOBX = :NOBX
NOIX = :NOIX
NOSC = :NOSC
NPCX = :NPCX
NPKX = :NPKX
NRDX = :NRDX
NRMX = :NRMX
NSBB = :NSBB
NSBX = :NSBX
NSCX = :NSCX
NSGX = :NSGX
NSIX = :NSIX
NSJX = :NSJX
NSMX = :NSMX
NSPB = :NSPB
NSPX = :NSPX
NSRX = :NSRX
NTBL = :NTBL
NUBX = :NUBX
NUCB = :NUCB
NUCX = :NUCX
NVCX = :NVCX
NVNM = :NVNM
NVSX = :NVSX
NWCX = :NWCX
ODCX = :ODCX
ODGB = :ODGB
OIBA = :OIBA
OMCX = :OMCX
ONSX = :ONSX
ORBC = :ORBC
ORCB = :ORCB
OSMX = :OSMX
PABX = :PABX
PACX = :PACX
PADX = :PADX
PALX = :PALX
PANX = :PANX
PARX = :PARX
PASX = :PASX
PATX = :PATX
PAYX = :PAYX
PBGX = :PBGX
PCBL = :PCBL
PCBX = :PCBX
PCCB = :PCCB
PCCX = :PCCX
PCLX = :PCLX
PCMX = :PCMX
PCOX = :PCOX
PCPX = :PCPX
PCSX = :PCSX
PCTX = :PCTX
PCUX = :PCUX
PDBX = :PDBX
PDCX = :PDCX
PDNX = :PDNX
PDSX = :PDSX
PDUX = :PDUX
PGBX = :PGBX
PGCX = :PGCX
PGRX = :PGRX
PGTX = :PGTX
PITX = :PITX
PJSB = :PJSB
PKBX = :PKBX
PKDX = :PKDX
PKGB = :PKGB
PLOX = :PLOX
PLUX = :PLUX
PMCB = :PMCB
PMCX = :PMCX
PMEC = :PMEC
PMNX = :PMNX
PNCX = :PNCX
PNMX = :PNMX
PNPX = :PNPX
PNSX = :PNSX
PPBX = :PPBX
PPCX = :PPCX
PRCX = :PRCX
PREX = :PREX
PROX = :PROX
PRPX = :PRPX
PRSX = :PRSX
PRTH = :PRTH
PSBX = :PSBX
PSCX = :PSCX
PSIB = :PSIB
PSRX = :PSRX
PSSX = :PSSX
PTCX = :PTCX
PTNX = :PTNX
PTSX = :PTSX
PUBX = :PUBX
PUCB = :PUCB
PUCX = :PUCX
PUDX = :PUDX
PUGX = :PUGX
PUNB = :PUNB
PUNX = :PUNX
PURX = :PURX
PUSD = :PUSD
PUSX = :PUSX
PUUX = :PUUX
PVAX = :PVAX
PVCX = :PVCX
PVVX = :PVVX
PWUX = :PWUX
PYCX = :PYCX
PYTM = :PYTM
QNBA = :QNBA
QUCX = :QUCX
RABO = :RABO
RACX = :RACX
RAEX = :RAEX
RAJX = :RAJX
RAKX = :RAKX
RAMX = :RAMX
RANX = :RANX
RATN = :RATN
RAUX = :RAUX
RBBX = :RBBX
RBCX = :RBCX
RBIH = :RBIH
RBIN = :RBIN
RBIS = :RBIS
RCBX = :RCBX
RCCX = :RCCX
RCDX = :RCDX
RCMX = :RCMX
RCUX = :RCUX
RDCX = :RDCX
RDNX = :RDNX
REBX = :REBX
RECX = :RECX
REWX = :REWX
RGCX = :RGCX
RGSX = :RGSX
RHMX = :RHMX
RJCX = :RJCX
RJJX = :RJJX
RJNX = :RJNX
RJTX = :RJTX
RKCX = :RKCX
RLUX = :RLUX
RMGB = :RMGB
RNBX = :RNBX
RNDX = :RNDX
RNGX = :RNGX
RNSB = :RNSB
RNSX = :RNSX
ROCX = :ROCX
ROHX = :ROHX
RPUX = :RPUX
RRBP = :RRBP
RRSX = :RRSX
RSBL = :RSBL
RSBX = :RSBX
RSCB = :RSCB
RSSB = :RSSB
RSSX = :RSSX
RSUX = :RSUX
RSVX = :RSVX
RUCX = :RUCX
RUKX = :RUKX
RUMX = :RUMX
RZSX = :RZSX
SABR = :SABR
SABX = :SABX
SACB = :SACB
SACX = :SACX
SADX = :SADX
SAGX = :SAGX
SAHE = :SAHE
SAHX = :SAHX
SAIX = :SAIX
SALX = :SALX
SAMX = :SAMX
SANT = :SANT
SANX = :SANX
SAPX = :SAPX
SARX = :SARX
SASA = :SASA
SASX = :SASX
SATX = :SATX
SAUX = :SAUX
SAVX = :SAVX
SAWX = :SAWX
SBBJ = :SBBJ
SBCR = :SBCR
SBCX = :SBCX
SBHY = :SBHY
SBIN = :SBIN
SBKX = :SBKX
SBLD = :SBLD
SBLS = :SBLS
SBLX = :SBLX
SBMX = :SBMX
SBMY = :SBMY
SBNX = :SBNX
SBPS = :SBPS
SBPX = :SBPX
SBSX = :SBSX
SBTR = :SBTR
SBUJ = :SBUJ
SBUX = :SBUX
SCBL = :SCBL
SCBX = :SCBX
SCCX = :SCCX
SCDX = :SCDX
SCIX = :SCIX
SCNX = :SCNX
SCOB = :SCOB
SCOX = :SCOX
SCPX = :SCPX
SCSX = :SCSX
SCUX = :SCUX
SDBX = :SDBX
SDCB = :SDCB
SDCE = :SDCE
SDCX = :SDCX
SDHX = :SDHX
SDSX = :SDSX
SDTC = :SDTC
SDTX = :SDTX
SDUX = :SDUX
SEMX = :SEMX
SENX = :SENX
SEUX = :SEUX
SEWX = :SEWX
SGCX = :SGCX
SGLX = :SGLX
SGSX = :SGSX
SGUX = :SGUX
SHAX = :SHAX
SHBK = :SHBK
SHBX = :SHBX
SHCX = :SHCX
SHEX = :SHEX
SHGX = :SHGX
SHIX = :SHIX
SHKX = :SHKX
SHMX = :SHMX
SHNX = :SHNX
SHOX = :SHOX
SHRX = :SHRX
SHSX = :SHSX
SHUB = :SHUB
SHUX = :SHUX
SIBL = :SIBL
SIBX = :SIBX
SICX = :SICX
SIDB = :SIDB
SIDC = :SIDC
SIDX = :SIDX
SIGX = :SIGX
SIHX = :SIHX
SIKX = :SIKX
SINX = :SINX
SIRX = :SIRX
SISX = :SISX
SITX = :SITX
SIWX = :SIWX
SJGX = :SJGX
SJSB = :SJSB
SJSX = :SJSX
SKCX = :SKCX
SKKX = :SKKX
SKNX = :SKNX
SKSB = :SKSB
SKUX = :SKUX
SLAX = :SLAX
SLCX = :SLCX
SMBC = :SMBC
SMBX = :SMBX
SMCB = :SMCB
SMCX = :SMCX
SMEX = :SMEX
SMMX = :SMMX
SMNX = :SMNX
SMPX = :SMPX
SMSX = :SMSX
SMTX = :SMTX
SMUX = :SMUX
SMVC = :SMVC
SMWX = :SMWX
SNAX = :SNAX
SNBK = :SNBK
SNBX = :SNBX
SNCX = :SNCX
SNDX = :SNDX
SNGX = :SNGX
SNKX = :SNKX
SNLX = :SNLX
SNPX = :SNPX
SNSV = :SNSV
SNSX = :SNSX
SOBX = :SOBX
SOGE = :SOGE
SOLX = :SOLX
SONX = :SONX
SPBX = :SPBX
SPCB = :SPCB
SPCX = :SPCX
SPNX = :SPNX
SPSX = :SPSX
SPTX = :SPTX
SRCB = :SRCB
SRCX = :SRCX
SREX = :SREX
SRGX = :SRGX
SRHX = :SRHX
SRSX = :SRSX
SSBL = :SSBL
SSBX = :SSBX
SSDX = :SSDX
SSHX = :SSHX
SSKX = :SSKX
SSLX = :SSLX
SSNX = :SSNX
SSOX = :SSOX
SSSX = :SSSX
SSWX = :SSWX
STBP = :STBP
STCB = :STCB
STCX = :STCX
STDX = :STDX
STRX = :STRX
SUBX = :SUBX
SUCX = :SUCX
SUDX = :SUDX
SULX = :SULX
SUMX = :SUMX
SUNB = :SUNB
SURX = :SURX
SURY = :SURY
SUSB = :SUSB
SUSX = :SUSX
SUTB = :SUTB
SUVX = :SUVX
SVAX = :SVAX
SVBL = :SVBL
SVCB = :SVCB
SVCX = :SVCX
SVGX = :SVGX
SVMC = :SVMC
SVNX = :SVNX
SVOX = :SVOX
SVRX = :SVRX
SVSH = :SVSH
SVSX = :SVSX
SWMX = :SWMX
SWSX = :SWSX
SYNB = :SYNB
TACX = :TACX
TADX = :TADX
TAMX = :TAMX
TAPX = :TAPX
TASX = :TASX
TBCX = :TBCX
TBDX = :TBDX
TBHX = :TBHX
TBMX = :TBMX
TBNS = :TBNS
TBPX = :TBPX
TBSB = :TBSB
TBSX = :TBSX
TBTX = :TBTX
TBUX = :TBUX
TCBX = :TCBX
TCCX = :TCCX
TCHX = :TCHX
TCPX = :TCPX
TCUB = :TCUB
TCUX = :TCUX
TDBX = :TDBX
TDCB = :TDCB
TDCX = :TDCX
TDIX = :TDIX
TDMX = :TDMX
TDPX = :TDPX
TDSX = :TDSX
TECX = :TECX
TEHX = :TEHX
TEMX = :TEMX
TESX = :TESX
TETX = :TETX
TFCX = :TFCX
TGBX = :TGBX
TGCG = :TGCG
TGCX = :TGCX
TGDX = :TGDX
TGMB = :TGMB
TGNX = :TGNX
TGUX = :TGUX
THCX = :THCX
THOX = :THOX
THRS = :THRS
THRX = :THRX
THWX = :THWX
TIDX = :TIDX
TIRX = :TIRX
TJAX = :TJAX
TJBX = :TJBX
TJCX = :TJCX
TJDX = :TJDX
TJMX = :TJMX
TJNX = :TJNX
TJSB = :TJSB
TKAX = :TKAX
TKCX = :TKCX
TKDX = :TKDX
TKTX = :TKTX
TKUX = :TKUX
TLPX = :TLPX
TMAX = :TMAX
TMBL = :TMBL
TMBX = :TMBX
TMCX = :TMCX
TMNX = :TMNX
TMPX = :TMPX
TMSC = :TMSC
TMSX = :TMSX
TMTX = :TMTX
TMUX = :TMUX
TNBX = :TNBX
TNCX = :TNCX
TNDC = :TNDC
TNEX = :TNEX
TNHX = :TNHX
TNIX = :TNIX
TNKX = :TNKX
TNMX = :TNMX
TNSC = :TNSC
TNUX = :TNUX
TOCX = :TOCX
TPCX = :TPCX
TPDX = :TPDX
TPSC = :TPSC
TPSX = :TPSX
TPUX = :TPUX
TRAX = :TRAX
TRDX = :TRDX
TSAB = :TSAB
TSAX = :TSAX
TSBX = :TSBX
TSCX = :TSCX
TSDX = :TSDX
TSIX = :TSIX
TSMX = :TSMX
TSNX = :TSNX
TSPX = :TSPX
TSSB = :TSSB
TSSX = :TSSX
TSUX = :TSUX
TTBX = :TTBX
TTCB = :TTCB
TTCX = :TTCX
TTGX = :TTGX
TTLX = :TTLX
TTUX = :TTUX
TUBX = :TUBX
TUCL = :TUCL
TUCX = :TUCX
TUDX = :TUDX
TUMX = :TUMX
TUNX = :TUNX
TUOX = :TUOX
TUPX = :TUPX
TURX = :TURX
TVBX = :TVBX
TVDX = :TVDX
TVPX = :TVPX
TVUX = :TVUX
TYCX = :TYCX
UBBX = :UBBX
UBGX = :UBGX
UBIN = :UBIN
UCBA = :UCBA
UCBS = :UCBS
UCBX = :UCBX
UCCX = :UCCX
UCDX = :UCDX
UCUX = :UCUX
UGBX = :UGBX
UICX = :UICX
UJSX = :UJSX
UJVN = :UJVN
UKGX = :UKGX
UMAX = :UMAX
UMCX = :UMCX
UMSX = :UMSX
UMUX = :UMUX
UNAX = :UNAX
UNIX = :UNIX
UNMX = :UNMX
UNSX = :UNSX
UOVB = :UOVB
UPCB = :UPCB
UPCX = :UPCX
URBN = :URBN
URCX = :URCX
URDX = :URDX
URMX = :URMX
UROX = :UROX
USFB = :USFB
USNX = :USNX
UTBI = :UTBI
UTBX = :UTBX
UTCX = :UTCX
UTGX = :UTGX
UTIB = :UTIB
UTKS = :UTKS
UTKX = :UTKX
UTZX = :UTZX
UUCB = :UUCB
UUCX = :UUCX
VADX = :VADX
VAIX = :VAIX
VANX = :VANX
VARA = :VARA
VASJ = :VASJ
VASX = :VASX
VAUX = :VAUX
VCAX = :VCAX
VCBX = :VCBX
VCCX = :VCCX
VCNB = :VCNB
VCOB = :VCOB
VCOX = :VCOX
VDCX = :VDCX
VDYX = :VDYX
VEDX = :VEDX
VERX = :VERX
VGBX = :VGBX
VHDX = :VHDX
VICX = :VICX
VIDX = :VIDX
VIJB = :VIJB
VIJX = :VIJX
VIKX = :VIKX
VIMX = :VIMX
VIRX = :VIRX
VISX = :VISX
VJSX = :VJSX
VKCX = :VKCX
VKSX = :VKSX
VMCX = :VMCX
VMMX = :VMMX
VMUX = :VMUX
VNSX = :VNSX
VRDX = :VRDX
VSBL = :VSBL
VSBX = :VSBX
VSCX = :VSCX
VSSX = :VSSX
VSVX = :VSVX
VUCX = :VUCX
VVCX = :VVCX
VVSB = :VVSB
VYAX = :VYAX
VYPX = :VYPX
VYSA = :VYSA
WACX = :WACX
WAIX = :WAIX
WARX = :WARX
WAUX = :WAUX
WBSC = :WBSC
WCBX = :WCBX
WDCX = :WDCX
WKGX = :WKGX
WNBX = :WNBX
WPAC = :WPAC
WRCX = :WRCX
WUCX = :WUCX
WZUX = :WZUX
XJKG = :XJKG
YADX = :YADX
YAVX = :YAVX
YCBX = :YCBX
YDCX = :YDCX
YESB = :YESB
YLNX = :YLNX
YMSX = :YMSX
YNCX = :YNCX
YNSX = :YNSX
ZBBX = :ZBBX
ZBSX = :ZBSX
ZCBL = :ZCBL
ZIBX = :ZIBX
ZILX = :ZILX
ZIMX = :ZIMX
ZISX = :ZISX
ZLLX = :ZLLX
ZMMX = :ZMMX
ZRNB = :ZRNB
ZSAX = :ZSAX
ZSBG = :ZSBG
ZSBL = :ZSBL
ZSBX = :ZSBX
ZSGX = :ZSGX
ZSHX = :ZSHX
ZSJX = :ZSJX
ZSKX = :ZSKX
ZSLX = :ZSLX
ZSMX = :ZSMX
class << self
  # Looks up the details hash for +code+ (a Symbol matching one of the
  # constants defined above) in the memoized JSON table and adds a
  # :bank_code entry derived from characters 4-6 of the :micr value
  # (set to nil when no :micr key is present).
  #
  # NOTE(review): this writes :bank_code into the hash cached by `data`,
  # mutating the memoized table in place, and it raises NoMethodError for
  # unknown codes (data[code] is nil) — confirm callers expect both.
  def get_details(code)
    h = data[code]
    h[:bank_code] = (h[:micr][3..5] if h.key? :micr)
    h
  end

  # Parses "../<file>.json" relative to this source file's directory and
  # returns the parsed structure with all hash keys symbolized.
  def parse_json_file(file)
    file = "../#{file}.json"
    JSON.parse(File.read(File.join(__dir__, file)), symbolize_names: true)
  end

  # Memoized bank data table loaded from ../banks.json; keyed by the
  # bank-code symbols declared above.
  def data
    @data ||= parse_json_file 'banks'
  end
end
end
end
end
| 19.004746 | 80 | 0.422945 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: subdir/messages.proto
require 'google/protobuf'
# Registers the descriptors for every message and enum declared in
# subdir/messages.proto with the generated descriptor pool. This block is
# emitted by protoc — regenerate it from the .proto instead of hand-editing.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("subdir/messages.proto", :syntax => :proto3) do
    add_message "testdata.subdir.IntegerMessage" do
      optional :value, :int32, 1
    end
    add_message "testdata.subdir.IntegerMessage.InnerNestedMessage" do
      optional :value, :float, 1
    end
    add_message "testdata.subdir.IntegerMessage.NestedEmpty" do
    end
    add_message "testdata.subdir.Empty" do
    end
    # Exercises every scalar field type plus enums, nested / repeated /
    # map fields, a proto3 optional and a oneof.
    add_message "testdata.subdir.AllTypes" do
      optional :double_value, :double, 1
      optional :float_value, :float, 2
      optional :int32_value, :int32, 3
      optional :int64_value, :int64, 4
      optional :uint32_value, :uint32, 5
      optional :uint64_value, :uint64, 6
      optional :sint32_value, :sint32, 7
      optional :sint64_value, :sint64, 8
      optional :fixed32_value, :fixed32, 9
      optional :fixed64_value, :fixed64, 10
      optional :sfixed32_value, :sfixed32, 11
      optional :sfixed64_value, :sfixed64, 12
      optional :bool_value, :bool, 13
      optional :string_value, :string, 14
      optional :bytes_value, :bytes, 15
      optional :enum_value, :enum, 16, "testdata.subdir.AllTypes.Corpus"
      optional :alias_enum_value, :enum, 17, "testdata.subdir.AllTypes.EnumAllowingAlias"
      optional :nested_value, :message, 18, "testdata.subdir.IntegerMessage"
      repeated :repeated_nested_value, :message, 19, "testdata.subdir.IntegerMessage"
      repeated :repeated_int32_value, :int32, 20
      repeated :repeated_enum, :enum, 21, "testdata.subdir.AllTypes.Corpus"
      optional :inner_value, :message, 22, "testdata.subdir.AllTypes.InnerMessage"
      optional :inner_nested_value, :message, 23, "testdata.subdir.IntegerMessage.InnerNestedMessage"
      map :string_map_value, :string, :message, 26, "testdata.subdir.IntegerMessage"
      map :int32_map_value, :int32, :message, 27, "testdata.subdir.IntegerMessage"
      map :enum_map_value, :string, :enum, 28, "testdata.subdir.AllTypes.Corpus"
      proto3_optional :optional_bool, :bool, 29
      oneof :test_oneof do
        optional :name, :string, 24
        optional :sub_message, :bool, 25
      end
    end
    add_message "testdata.subdir.AllTypes.InnerMessage" do
      optional :value, :string, 1
    end
    add_enum "testdata.subdir.AllTypes.Corpus" do
      value :UNIVERSAL, 0
      value :WEB, 1
      value :IMAGES, 2
      value :LOCAL, 3
      value :NEWS, 4
      value :PRODUCTS, 5
      value :VIDEO, 6
      value :END, 7
    end
    # :STARTED and :RUNNING deliberately share value 1 (allow_alias).
    add_enum "testdata.subdir.AllTypes.EnumAllowingAlias" do
      value :UNKNOWN, 0
      value :STARTED, 1
      value :RUNNING, 1
    end
  end
end
# Ruby-side constants resolving each registered descriptor: msgclass for
# messages, enummodule for enums.
module Testdata
  module Subdir
    IntegerMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.IntegerMessage").msgclass
    IntegerMessage::InnerNestedMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.IntegerMessage.InnerNestedMessage").msgclass
    IntegerMessage::NestedEmpty = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.IntegerMessage.NestedEmpty").msgclass
    Empty = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.Empty").msgclass
    AllTypes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.AllTypes").msgclass
    AllTypes::InnerMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.AllTypes.InnerMessage").msgclass
    AllTypes::Corpus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.AllTypes.Corpus").enummodule
    AllTypes::EnumAllowingAlias = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("testdata.subdir.AllTypes.EnumAllowingAlias").enummodule
  end
end
| 46.02381 | 159 | 0.724263 |
# encoding: utf-8
#
# Cookbook Name:: zap
# HWRP:: zap
#
# Author:: Joseph J. Nuspl Jr. <[email protected]>
#
# Copyright:: 2014, Joseph J. Nuspl Jr.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
# resource
  # Resource side of the `zap` HWRP: declares which resource classes to
  # purge, how to discover existing items (collect), how to map declared
  # Chef resources to item names (select), and whether cleanup runs
  # immediately or as a delayed pass at the end of the run.
  class Resource::Zap < Resource
    def initialize(name, run_context = nil)
      super
      # supported features
      @supports = []
      @delayed = false
      @pattern = '*'
      @filter = lambda { |_| true }
      # Set the resource name and provider
      @resource_name = :zap
      @provider = Provider::Zap
      # Set default actions and allowed actions
      @action = :delete
      @allowed_actions.push(:delete, :remove)
    end

    # Glob pattern (File.fnmatch syntax) that collected item names must
    # match to be considered for zapping. Defaults to '*' (everything).
    def pattern(arg = nil)
      set_or_return(:pattern, arg, kind_of: String)
    end

    # Optional filter block. NOTE(review): @supports is initialized empty
    # above, so passing any block triggers the warning — presumably
    # subclasses are expected to push :filter into @supports; confirm.
    def filter(&block)
      if !block.nil? && [email protected]?(:filter)
        Chef::Log.warn "#{@resource_name} does not support filter"
      end
      set_or_return(:filter, block, kind_of: Proc)
    end

    # Block returning the names of items that currently exist on the node.
    def collect(&block)
      set_or_return(:collect, block, kind_of: Proc)
    end

    # Block mapping a declared Chef resource to its item name (or nil to
    # ignore it); names returned here are kept, everything else is zapped.
    def select(&block)
      set_or_return(:select, block, kind_of: Proc)
    end

    # Called with no argument from a recipe: appends a duplicate of this
    # resource (flagged delayed) to the end of the resource collection so
    # the zap pass runs after all other resources have been declared.
    # Called with `true` on the duplicate itself to mark it as the delayed
    # copy. Returns the current delayed flag.
    def delayed(arg = nil)
      if arg == true
        @delayed = true
      elsif @delayed == false && immediately == false
        r = dup
        r.delayed(true)
        @run_context.resource_collection.all_resources << r
      end
      @delayed
    end

    # Accepts a Class, a class-name String (e.g. "Chef::Resource::File"),
    # or an array of either; memoizes the resolved Class objects in @klass.
    def klass(arg = nil)
      return @klass if arg.nil?
      @klass = [arg].flatten.map do |obj|
        if obj.is_a?(Class)
          obj
        else
          begin
            obj.split('::').reduce(Module, :const_get)
          rescue
            fail "Cannot convert #{obj.inspect} into Class"
          end
        end
      end
    end

    # When true, zapping happens during this resource's own converge
    # instead of via the delayed duplicate.
    def immediately(arg = nil)
      set_or_return(:immediately, arg, equal_to: [true, false], default: false)
    end
  end
# provider
class Provider::Zap < Provider::LWRPBase
def load_current_resource
@name = @new_resource.name
@klass = @new_resource.klass
@pattern = @new_resource.pattern
@filter = @new_resource.filter
@collector = @new_resource.collect || method(:collect)
@selector = @new_resource.select || method(:select)
end
def whyrun_supported?
true
end
def action_delete
iterate(:delete)
end
def action_remove
iterate(:remove)
end
private
def collect
[]
end
def select(r)
r.name if @klass.include?(r.class)
end
# rubocop:disable MethodLength
def iterate(act)
return unless @new_resource.delayed || @new_resource.immediately
# collect all existing resources
extraneous = @collector.call
# keep only those that match the specified pattern
extraneous.select! { |name| ::File.fnmatch(@pattern, name) }
@run_context.resource_collection.each do |r|
name = @selector.call(r)
if name && extraneous.delete(name)
Chef::Log.debug "#{@new_resource} keeping #{name}"
end
end
converge_by(@new_resource.to_s) do
extraneous.each do |name|
r = zap(name, act)
Chef::Log.debug "#{@new_resource} zapping #{r}"
if @new_resource.immediately
r.run_action(act)
else
@run_context.resource_collection << r
end
end
end unless extraneous.empty?
end
# rubocop:enable MethodLength
def zap(name, act, klass = nil)
klass = @klass.first if klass.nil?
r = klass.new(name, @run_context)
r.cookbook_name = @new_resource.cookbook_name
r.recipe_name = @new_resource.recipe_name
r.action(act)
r
end
end
end
| 24.87574 | 79 | 0.616794 |
=begin
#Telegram bot API
#This is a swagger defenition for [Telegram bot API](https://core.telegram.org/bots/api).
OpenAPI spec version: 3.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.2.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Teleswagger::ResponseMessage
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Smoke tests for the swagger-codegen-generated Teleswagger::ResponseMessage
# model; the per-attribute examples are placeholders awaiting assertions.
describe 'ResponseMessage' do
  before do
    # run before each test: build a fresh, empty model instance
    @instance = Teleswagger::ResponseMessage.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of ResponseMessage' do
    it 'should create an instance of ResponseMessage' do
      expect(@instance).to be_instance_of(Teleswagger::ResponseMessage)
    end
  end

  describe 'test attribute "result"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "error_code"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "ok"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "description"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 25.868852 | 103 | 0.722433 |
require_relative 'base'
module RailsRoutesAnalyzer
  module RouteIssue
    # Issue raised for a `resources`/`resource` call in routes.rb that
    # generates routes for controller actions which are not implemented.
    # Carries the suggested only:/except: parameter and knows how to
    # rewrite the offending routes line.
    class Resources < Base
      fields :suggested_param

      # Full human-readable description of the problem and suggested fix.
      def human_readable_error_message
        "`#{route_creation_method}' call at #{file_location} for #{controller_class_name} should use #{suggested_param}"
      end

      # Short suggestion text; names the controller only when the call
      # maps to more than one controller.
      def error_suggestion(num_controllers:, **)
        "use #{suggested_param}".tap do |message|
          message << " only for #{controller_class_name}" if num_controllers > 1
        end
      end

      # Extra detail listing the actions that have no implementation.
      def verbose_message
        "This route currently covers unimplemented actions: #{format_actions(missing_actions.sort)}"
      end

      # This is horrible but just maybe works well enough most of the time to be useful.
      RESOURCES_PARSE_REGEX = %r%
        \A
        (?<beginning>        # contains the part that looks like: resources :some_things
          \s*
          resources?
          \(?
          \s*
          :\w+               # the name of the resource as a symbol
        )
        (?<separator>,\s*)?  # something optional that separates "resources :some_things" from its parameters
        (?<params>.*?)       # all the parameters whatever they might be, if present at all
        (?<end>
          \)?
          (\s+(do|{))?       # optional block, either " do" or " {"
          [\t ]*             # any whitespace, except linebreak (not sure why it's matched without 'm' modifier here)
          (\#.*)?            # an optional comment
        )
        $
      %x

      # Matches an existing only:/except: parameter (both hash syntaxes),
      # with either an array or a single symbol as its value.
      ONLY_EXCEPT_PARAM_REGEX = %r{
        (
          (:(?<key>only|except)\s*=>) # ":only =>" or ":except =>"
          |
          (?<key>only|except):        # "only:" or "except:"
        )
        \s*
        (
          \[[^\]]*\]                  # anything between [ and ]
          |
          :\w+                        # or a symbol
        )
      }x

      # Matches any other "key: simple_value" resource option whose value
      # is a literal we know how to skip over safely.
      RESOURCE_OTHER_PARAM_REGEX = %r{
        (
          (:(?<key>\w+)\s*=>)
          |
          (?<key>\w+):
        )
        \s*
        (
          \[[^\]]*\]  # anything between [ and ]
          |
          :\w+        # or a symbol
          |
          '[^']*'     # a limited single-quote string
          |
          "[^"]*"     # a limited double-quote string
          |
          true
          |
          false
        )
      }x

      # Attempts to rewrite `line` so it carries `suggestion` as its
      # only:/except: parameter; returns nil when no safe fix applies.
      def try_to_fix_line(line, suggestion: suggested_param)
        self.class.try_to_fix_resources_line(line, suggestion)
      end

      # Rewrites a "resources :foo, ..." routes line to carry `suggestion`.
      # Handles four cases: no params at all, an existing only:/except:
      # to replace, a bare options hash to append to, and a braced hash
      # to insert into. Returns nil when none of them applies safely.
      def self.try_to_fix_resources_line(line, suggestion)
        data = line.match(RESOURCES_PARSE_REGEX)
        line_break = line[/$(.*)\z/m, 1]
        return unless data
        separator = data[:separator].presence || ', '
        params = \
          if [nil, ''].include?(data[:params])
            suggestion
          elsif (existing = data[:params][ONLY_EXCEPT_PARAM_REGEX]).present?
            # We get here if the only/except parameter already exists and
            # our only task is to replace it, should generally be ok.
            data[:params].sub(existing, suggestion)
          elsif does_params_look_like_a_safe_hash?(data[:params])
            # If params looks like a hash it should be safe to append the suggestion
            "#{data[:params]}, #{suggestion}"
          elsif (match = data[:params].match(/\A(?<opening>\s*{\s*)(?<inner_data>.*?)(?<closing>\s*}\s*)\z/))
            # If params looks like a safe hash between { and } then add they key inside the hash
            if does_params_look_like_a_safe_hash?(match[:inner_data])
              "#{match[:opening]}#{match[:inner_data]}, #{suggestion}#{match[:closing]}"
            end
          end
        return unless params
        "#{data[:beginning]}#{separator}#{params}#{data[:end]}#{line_break}"
      end

      # Check if the parameter string contains only a limited set of known
      # resource hash keys in which case it should generally be safe to
      # append only:/except: to it.
      def self.does_params_look_like_a_safe_hash?(params)
        return false if params =~ /[{}]/ # definitely can't handle: "resources :name, { key: val }"
        # Replace all known "key: simple_value" pairs with 'X'
        result = params.gsub(RESOURCE_OTHER_PARAM_REGEX, 'X')
        # Remove all whitespace
        result.gsub!(/\s/, '')
        # check that the result string looks like: "X" or "X,X", "X,X,X" depending on how many parameters there were
        result.split(',').uniq == %w(X)
      end
    end
  end
end
| 33.19403 | 120 | 0.545414 |
# CocoaPods spec for UMeng's closed-source Social SDK: static libraries,
# bundled resources/xibs, ARC disabled, iOS 4.3+.
Pod::Spec.new do |s|
  s.name        = 'UMengSocial'
  s.version     = '2.1.3'
  s.summary     = "UMeng's official Social SDK for iOS."
  s.description = 'UMeng Social SDK,You can easily share any content to Sina Weibo, Tencent Weibo, Renren, Qzone, Doubban, Wechat and so on using our unified APIs.'
  s.homepage    = 'http://dev.umeng.com/doc/document_social_ios.html'
  s.author      = { 'UMeng' => '[email protected]' }
  s.license     = {
    type: 'Copyright',
    text: <<-LICENSE
    Copyright 2011 - 2013 UMeng.com. All rights reserved.
    LICENSE
  }

  s.platform = :ios, '4.3'
  s.source   = { http: 'http://dev.umeng.com/files/download/UMSocial_Sdk_All_2.1.0.zip' }

  s.source_files = [
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/Header/*.h',
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/frameworks/*/*.{h,m}'
  ]
  s.resources = [
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/UMSocialSDKResources.bundle',
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/SocialSDKXib/*.xib'
  ]
  s.preserve_paths = [
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/libUMSocial_Sdk_2.1.3.a',
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/libUMSocial_Sdk_Comment_2.1.3.a',
    'UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/frameworks/Wechat/libWeChatSDK_armv7_armv7s.a'
  ]

  s.library   = %w[UMSocial_Sdk_2.1.3 UMSocial_Sdk_Comment_2.1.3 WeChatSDK_armv7_armv7s z]
  s.framework = %w[SystemConfiguration MobileCoreServices]
  s.xcconfig  = { 'LIBRARY_SEARCH_PATHS' => '$(PODS_ROOT)/UmengSocial/UMSocial_Sdk_All_2.1.3/UMSocial_Sdk_2.1.3/**' }

  s.requires_arc = false
end
| 62.92 | 255 | 0.699936 |
require_relative '../../../lib/gitlab/build/info'
require 'chef_helper'
# Unit specs for Build::Info: package naming (CE vs EE), release/docker
# version strings, latest git tags, and source repository selection.
# NOTE(review): stub_is_ee*, stub_env_var etc. are helpers supplied by
# chef_helper — confirm when moving these specs.
describe Build::Info do
  describe '.package' do
    describe 'shows EE' do
      it 'when ee=true' do
        stub_is_ee_env(true)
        expect(described_class.package).to eq('gitlab-ee')
      end

      it 'when env var is not present, checks VERSION file' do
        stub_is_ee_version(true)
        expect(described_class.package).to eq('gitlab-ee')
      end
    end

    describe 'shows CE' do
      it 'by default' do
        stub_is_ee(false)
        expect(described_class.package).to eq('gitlab-ce')
      end
    end
  end

  describe '.release_version' do
    before do
      allow(Build::Check).to receive(:on_tag?).and_return(true)
      allow_any_instance_of(Omnibus::BuildVersion).to receive(:semver).and_return('12.121.12')
      allow_any_instance_of(Gitlab::BuildIteration).to receive(:build_iteration).and_return('ce.1')
    end

    it 'returns build version and iteration' do
      expect(described_class.release_version).to eq('12.121.12-ce.1')
    end

    describe 'with env variables' do
      # Cache/CI related variables must not leak into the version string.
      it 'returns build version and iteration with env variable' do
        stub_env_var('USE_S3_CACHE', 'false')
        stub_env_var('CACHE_AWS_ACCESS_KEY_ID', 'NOT-KEY')
        stub_env_var('CACHE_AWS_SECRET_ACCESS_KEY', 'NOT-SECRET-KEY')
        stub_env_var('CACHE_AWS_BUCKET', 'bucket')
        stub_env_var('CACHE_AWS_S3_REGION', 'moon-west1')
        stub_env_var('CACHE_S3_ACCELERATE', 'sure')
        stub_env_var('NIGHTLY', 'true')
        stub_env_var('CI_PIPELINE_ID', '5555')
        expect(described_class.release_version).to eq('12.121.12-ce.1')
      end
    end
  end

  describe '.docker_tag' do
    before do
      allow(Build::Check).to receive(:on_tag?).and_return(true)
      allow_any_instance_of(Omnibus::BuildVersion).to receive(:semver).and_return('12.121.12')
      allow_any_instance_of(Gitlab::BuildIteration).to receive(:build_iteration).and_return('ce.1')
    end

    it 'returns package version when regular build' do
      expect(described_class.docker_tag).to eq('12.121.12-ce.1')
    end
  end

  # Specs for latest_tag and for latest_stable_tag are really useful since we
  # are stubbing out shell out to git.
  # However, they are showing what we expect to see.
  describe '.latest_tag' do
    describe 'for CE' do
      before do
        stub_is_ee(false)
        allow(described_class).to receive(:`).with("git -c versionsort.prereleaseSuffix=rc tag -l '*[+.]ce.*' --sort=-v:refname | head -1").and_return('12.121.12+rc7.ce.0')
      end

      it 'returns the version of correct edition' do
        expect(described_class.latest_tag).to eq('12.121.12+rc7.ce.0')
      end
    end

    describe 'for EE' do
      before do
        stub_is_ee(true)
        allow(described_class).to receive(:`).with("git -c versionsort.prereleaseSuffix=rc tag -l '*[+.]ee.*' --sort=-v:refname | head -1").and_return('12.121.12+rc7.ee.0')
      end

      it 'returns the version of correct edition' do
        expect(described_class.latest_tag).to eq('12.121.12+rc7.ee.0')
      end
    end
  end

  describe '.latest_stable_tag' do
    describe 'for CE' do
      before do
        # NOTE(review): uses stub_is_ee(nil) where '.latest_tag' uses
        # stub_is_ee(false) — confirm whether the difference is intended.
        stub_is_ee(nil)
        allow(described_class).to receive(:`).with("git -c versionsort.prereleaseSuffix=rc tag -l '*[+.]ce.*' --sort=-v:refname | awk '!/rc/' | head -1").and_return('12.121.12+ce.0')
      end

      it 'returns the version of correct edition' do
        expect(described_class.latest_stable_tag).to eq('12.121.12+ce.0')
      end
    end

    describe 'for EE' do
      before do
        stub_is_ee(true)
        allow(described_class).to receive(:`).with("git -c versionsort.prereleaseSuffix=rc tag -l '*[+.]ee.*' --sort=-v:refname | awk '!/rc/' | head -1").and_return('12.121.12+ee.0')
      end

      it 'returns the version of correct edition' do
        expect(described_class.latest_stable_tag).to eq('12.121.12+ee.0')
      end
    end
  end

  describe '.gitlab_version' do
    describe 'GITLAB_VERSION variable specified' do
      it 'returns passed value' do
        allow(ENV).to receive(:[]).with("GITLAB_VERSION").and_return("9.0.0")
        expect(described_class.gitlab_version).to eq('9.0.0')
      end
    end

    describe 'GITLAB_VERSION variable not specified' do
      it 'returns content of VERSION' do
        allow(File).to receive(:read).with("VERSION").and_return("8.5.6")
        expect(described_class.gitlab_version).to eq('8.5.6')
      end
    end
  end

  describe '.gitlab_rails repo' do
    describe 'ALTERNATIVE_SOURCES variable specified' do
      before do
        allow(ENV).to receive(:[]).with("ALTERNATIVE_SOURCES").and_return("true")
      end

      it 'returns public mirror for GitLab CE' do
        allow(Build::Info).to receive(:package).and_return("gitlab-ce")
        expect(described_class.gitlab_rails_repo).to eq("https://gitlab.com/gitlab-org/gitlab-ce.git")
      end

      it 'returns public mirror for GitLab EE' do
        allow(Build::Info).to receive(:package).and_return("gitlab-ee")
        expect(described_class.gitlab_rails_repo).to eq("https://gitlab.com/gitlab-org/gitlab-ee.git")
      end
    end

    describe 'ALTERNATIVE_SOURCES variable not specified' do
      it 'returns dev repo for GitLab CE' do
        allow(Build::Info).to receive(:package).and_return("gitlab-ce")
        expect(described_class.gitlab_rails_repo).to eq("[email protected]:gitlab/gitlabhq.git")
      end

      it 'returns dev repo for GitLab EE' do
        allow(Build::Info).to receive(:package).and_return("gitlab-ee")
        expect(described_class.gitlab_rails_repo).to eq("[email protected]:gitlab/gitlab-ee.git")
      end
    end
  end
end
| 35.490683 | 182 | 0.661883 |
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# End-to-end admin flow for OAuth application management: create (with
# redirect-URI validation), edit, show (secret hidden after first view),
# and delete.
describe 'OAuth applications management', type: :feature, js: true do
  let(:admin) { FactoryBot.create(:admin) }

  before do
    login_as(admin)
    visit oauth_applications_path
  end

  it 'can create, update, show and delete applications' do
    # Initially empty
    expect(page).to have_selector('.generic-table--empty-row', text: 'There is currently nothing to display')

    # Create application
    find('.button', text: 'Add').click
    fill_in 'application_name', with: 'My API application'

    # Fill invalid redirect_uri — must be rejected with a validation error
    fill_in 'application_redirect_uri', with: "not a url!"
    click_on 'Create'
    expect(page).to have_selector('.errorExplanation', text: 'Redirect URI must be an absolute URI.')

    # Multiple redirect URIs are accepted, one per line
    fill_in 'application_redirect_uri', with: "urn:ietf:wg:oauth:2.0:oob\nhttps://localhost/my/callback"
    click_on 'Create'

    expect(page).to have_selector('.flash.notice', text: 'Successful creation.')
    expect(page).to have_selector('.attributes-key-value--key', text: 'Client ID')
    expect(page).to have_selector('.attributes-key-value--value', text: "urn:ietf:wg:oauth:2.0:oob\nhttps://localhost/my/callback")

    # Should print secret on initial visit
    expect(page).to have_selector('.attributes-key-value--key', text: 'Client secret')
    expect(page.first('.attributes-key-value--value code').text).to match /\w+/

    # Edit again
    click_on 'Edit'
    fill_in 'application_redirect_uri', with: "urn:ietf:wg:oauth:2.0:oob"
    click_on 'Save'

    # Show application — the secret is only revealed right after creation
    find('td a', text: 'My API application').click
    expect(page).to have_no_selector('.attributes-key-value--key', text: 'Client secret')
    expect(page).to have_no_selector('.attributes-key-value--value code')
    expect(page).to have_selector('.attributes-key-value--key', text: 'Client ID')
    expect(page).to have_selector('.attributes-key-value--value', text: "urn:ietf:wg:oauth:2.0:oob")

    # Delete, confirming the browser alert
    click_on 'Delete'
    page.driver.browser.switch_to.alert.accept

    # Table is empty again
    expect(page).to have_selector('.generic-table--empty-row', text: 'There is currently nothing to display')
  end
end
| 40 | 131 | 0.72619 |
class OauthController < ApplicationController
skip_before_action :authenticate_request
def auth_oauth
#try to signup
res = check_token
if res == "Not exists"
command = RegisterUserOauth.call(params[:email], params[:token_oauth])
if command.success?
render json: {
response: "success",
type: "signup",
conf_token: command.result
}, status: :ok
else
render json: {
response: "failure",
type: "signup",
error: command.errors
}, status: :conflict
end
#try to login
elsif res == "Exists"
command = AuthOauthUser.call(params[:email], params[:token_oauth])
if command.success?
user_info = User.find(command.result)
token = JsonWebToken::encode(user_id: command.result)
render json: {
response: "success",
type: "login",
auth_token: token,
email: user_info.email,
name: user_info.name,
surname: user_info.surname,
gender: user_info.gender,
photo: user_info.img
}, status: :ok
else
render json: {
response: "failure",
type: "login",
error: command.errors,
conf_token: command.result
}, status: :unauthorized
end
else
render json: {
response: "failure",
error: "Token not valid"
}, status: :bad_request
end
end
private
attr_accessor :token_oauth
# Function to check if there is an user with user_id equalt to token's one
def check_token
id_oauth = get_id_oauth
if not id_oauth.nil?
if User.exists?(id_oauth: id_oauth)
return "Exists"
else
return "Not exists"
end
else
return "Error"
end
end
# Function to check if the oauth token is valid
def get_id_oauth
response = HTTParty.get('https://graph.facebook.com/debug_token?input_token='+params[:token_oauth]+'&access_token='+ENV["APP_ID_FACEBOOK"]+'|'+ENV["SECRET_KEY_FACEBOOK"])
if response.code == 200
body = JSON.parse(response.body)
return body['data']['user_id'] if body['data']['is_valid']
end
nil
end
end | 33.012048 | 178 | 0.485401 |
ffc4dcbbc68324a8d22a37a680c1ee24728bd592 | 461 | # frozen_string_literal: true
# Adds the four Paperclip-style attachment columns for the compressed image
# archive on spree_product_imports.
class AddCompressImageFileNameToSpreeProductImports < SolidusSupport::Migration[4.2]
  def change
    {
      compress_image_file_file_name: :string,
      compress_image_file_content_type: :string,
      compress_image_file_file_size: :integer,
      compress_image_file_updated_at: :datetime
    }.each do |column_name, column_type|
      add_column :spree_product_imports, column_name, column_type
    end
  end
end
87ddaca9b885b1c7d1ddbf2366bed8cfe56e03ec | 5,457 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'fileutils'
require 'java_buildpack'
require 'java_buildpack/util/application_cache'
require 'java_buildpack/util/library_utils'
require 'java_buildpack/util/shell'
module JavaBuildpack

  # A convenience base class for all components in the buildpack. This base class ensures that the contents of the
  # +context+ are assigned to instance variables matching their keys. It also ensures that all contract methods are
  # implemented.
  class BaseComponent
    include JavaBuildpack::Util::Shell

    # @!attribute [r] component_name
    # @return [String] the name of this component
    attr_reader :component_name

    # Creates an instance. The contents of +context+ are assigned to instance variables matching their keys.
    # +component_name+ and +context+ are exposed via +@component_name+ and +@context+ respectively for any component
    # that wishes to use them. An additional +@parsable_component_name+ is exposed that is a lowercased and space-
    # removed version of +component_name+.
    #
    # @param [String] component_name The name of the component
    # @param [Hash] context A shared context provided to all components
    def initialize(component_name, context)
      @component_name = component_name
      @parsable_component_name = component_name.gsub(/ /, '-').downcase
      @context = context
      @context.each { |key, value| instance_variable_set("@#{key}", value) }
    end

    # If the component should be used when staging an application
    #
    # @return [Array<String>, String, nil] If the component should be used when staging the application, a +String+ or
    #                                      an +Array<String>+ that uniquely identifies the component (e.g.
    #                                      +openjdk=1.7.0_40+). Otherwise, +nil+.
    def detect
      fail "Method 'detect' must be defined"
    end

    # Modifies the application's file system. The component is expected to transform the application's file system in
    # whatever way is necessary (e.g. downloading files or creating symbolic links) to support the function of the
    # component. Status output written to +STDOUT+ is expected as part of this invocation.
    #
    # @return [void]
    def compile
      fail "Method 'compile' must be defined"
    end

    # Modifies the application's runtime configuration. The component is expected to transform members of the +context+
    # (e.g. +@java_home+, +@java_opts+, etc.) in whatever way is necessary to support the function of the component.
    #
    # Container components are also expected to create the command required to run the application. These components
    # are expected to read the +context+ values and take them into account when creating the command.
    #
    # @return [void, String] components other than containers are not expected to return any value. Container
    #                        components are expected to return the command required to run the application.
    def release
      fail "Method 'release' must be defined"
    end

    protected

    # Downloads an item with the given name and version from the given URI, then yields the resultant file to the given
    # block. The block is consumed via +yield+.
    #
    # @param [JavaBuildpack::Util::TokenizedVersion] version
    # @param [String] uri
    # @param [String] description an optional description for the download.  Defaults to +@component_name+.
    # @return [void]
    def download(version, uri, description = @component_name)
      download_start_time = Time.now
      print "-----> Downloading #{description} #{version} from #{uri} "

      JavaBuildpack::Util::ApplicationCache.new.get(uri) do |file| # TODO: Use global cache #50175265
        puts "(#{(Time.now - download_start_time).duration})"
        yield file
      end
    end

    # Downloads a given JAR and copies it to a given destination.
    #
    # @param [JavaBuildpack::Util::TokenizedVersion] version the version of the item
    # @param [String] uri the URI of the item
    # @param [String] jar_name the filename of the item
    # @param [String] target_directory the path of the directory into which to download the item. Defaults to
    #                                  +@lib_directory+
    # @param [String] description an optional description for the download.  Defaults to +@component_name+.
    def download_jar(version, uri, jar_name, target_directory = @lib_directory, description = @component_name)
      download(version, uri, description) { |file| FileUtils.cp file.path, File.join(target_directory, jar_name) }
    end

    # Returns the additional libraries.
    #
    # @param [Array<String>] the paths of JARs in the additional libraries directory
    def additional_libraries
      JavaBuildpack::Util::LibraryUtils.lib_jars @lib_directory
    end

  end

end
| 45.099174 | 119 | 0.703134 |
e85e02d9cface9d0435a363ea209d8dd0fedb1ef | 47 | module BrowserifyRails
VERSION = "0.4.2"
end
| 11.75 | 22 | 0.723404 |
1a625a89d7df6f421b8bcb43dedec7d1a9819312 | 2,293 | # frozen_string_literal: true
# Handles user signup and account maintenance.
class UsersController < ApplicationController
  layout 'sites'

  # NOTE(review): require_user does not cover :update_account — confirm that
  # action is protected elsewhere (routes/constraints) or intentionally open.
  before_action :require_user, only: [:show, :edit, :update]
  before_action :set_user, except: :create
  before_action :complain_if_non_gov_email, only: [:show, :edit]

  # Flash notice shown to unapproved users without a .gov/.mil address.
  NON_GOV_EMAIL_MESSAGE = <<~MESSAGE
    Because you don't have a .gov or .mil email address, we need additional information.
    If you are a contractor on an active contract, please use your .gov or .mil email
    address on this account, or have your federal POC email [email protected]
    to confirm your status.
  MESSAGE

  # Signs a user up. The save only proceeds when the reCAPTCHA check passes;
  # the success flash copy depends on whether the email is government-affiliated.
  def create
    @user = User.new(user_params)
    if verify_recaptcha(model: @user, message: 'Word verification is incorrect') && @user.save
      if @user.has_government_affiliated_email?
        flash[:success] = 'Thank you for signing up. To continue the signup process, check your inbox, so we may verify your email address.'
      else
        flash[:success] = "Sorry! You don't have a .gov or .mil email address so we need some more information from you before approving your account."
      end
      redirect_to account_path
    else
      # Drop the raw recaptcha error; the model-level message above is shown instead.
      flash.delete(:recaptcha_error)
      render action: :new, layout: 'application'
    end
  end

  # Saves profile changes under the :update_account validation context.
  def update_account
    @user.attributes = user_params
    if @user.save(context: :update_account)
      flash[:success] = 'Account updated!'
      redirect_to account_url
    else
      render :edit
    end
  end

  def show; end

  def edit; end

  # Default-context variant of update_account.
  def update
    if @user.update(user_params)
      flash[:success] = 'Account updated!'
      redirect_to account_url
    else
      render :edit
    end
  end

  # Renders the static page developers are redirected to (see require_user).
  def developer_redirect; end

  private

  # Shows NON_GOV_EMAIL_MESSAGE until the user is approved or proves a
  # government-affiliated email.
  def complain_if_non_gov_email
    return if @user.has_government_affiliated_email? ||
              @user.approval_status == 'approved'

    flash[:notice] = NON_GOV_EMAIL_MESSAGE
  end

  # Extends the base auth filter: developer accounts are bounced to the
  # developer landing page when super performed no redirect (returned nil).
  def require_user
    redirect_to developer_redirect_url if super.nil? && current_user.is_developer?
  end

  # Prefers @current_user when a filter has populated it; falls back to
  # current_user. NOTE(review): @current_user may be unset for actions that
  # skip require_user — confirm current_user is non-nil there.
  def set_user
    @user = @current_user.presence || current_user
  end

  # Strong params for both signup and profile updates.
  def user_params
    params.require(:user).permit(:first_name,
                                 :last_name,
                                 :organization_name,
                                 :email).to_h
  end
end
| 28.6625 | 151 | 0.67597 |
ff3b2bee005f7a0db563e3db509d9aa91d6a7fbd | 1,238 | # frozen_string_literal: true
require 'active_record'
require 'zombie_battleground/api/validation_helper'
require 'zombie_battleground/api/requests/request_helper'
module ZombieBattleground
  class Api
    class Requests
      ##
      # Builds and validates a request against the GetMatch endpoint.
      class GetMatchRequest
        include ActiveModel::Validations
        include ZombieBattleground::Api::ValidationHelper
        include ZombieBattleground::Api::Requests::RequestHelper

        ##
        # @!attribute [rw] id
        #   identifier of the match to fetch
        #
        # @return [Integer]
        #
        # @example
        #   request.id #=> 1
        #
        # @api public
        attr_accessor :id

        validate :id_is_a_non_negative_integer

        ##
        # Endpoint path for this request
        #
        # @return [String]
        #
        # @example
        #   request.uri # => "match/1"
        #
        # @api public
        def uri
          format('match/%s', id)
        end

        ##
        # The match endpoint takes no query parameters, so this is always empty
        #
        # @return [Hash]
        #
        # @example
        #   request.params # => {}
        #
        # @api public
        def params
          {}
        end
      end
    end
  end
end
| 20.295082 | 64 | 0.523425 |
# Gem version constant for em-http-request.
module EventMachine
  class HttpRequest
    VERSION = "1.0.3"
  end
end
| 12.833333 | 22 | 0.649351 |
# Rails test-environment configuration for the Userland application.
Userland::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Configure static asset server for tests with Cache-Control for performance.
  config.serve_static_assets  = true
  config.static_cache_control = "public, max-age=3600"

  # Show full error reports and disable caching.
  config.consider_all_requests_local       = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr
end
| 42.216216 | 85 | 0.774648 |
262b932ce78d8851cea9c91deea68525d2040eac | 1,528 | require 'adyen/signature'
module Adyen
  module HPP

    # The Signature module can sign and verify HMAC SHA-256 signatures for Hosted Payment Pages
    module Signature
      extend self

      # Sign the parameters with the given shared secret
      # @param [Hash] params The set of parameters to sign.
      # @param [String] shared_secret The shared secret for signing/verification. Can also be sent in the
      #   params hash with the `sharedSecret` key.
      # @return [Hash] params The params that were passed in plus a new `merchantSig` param
      def sign(params, shared_secret = nil)
        params["sharedSecret"] ||= shared_secret
        params.merge('merchantSig' => Adyen::Signature.sign(params))
      end

      # Verify the parameters with the given shared secret
      # @param [Hash] params The set of parameters to verify. Must include a `merchantSig`
      #   param that will be compared to the signature we calculate.
      # @param [String] shared_secret The shared secret for signing/verification. Can also be sent in the
      #   params hash with the `sharedSecret` key.
      # @raise [ArgumentError] when `merchantSig` is absent or blank
      # @return [Boolean] true if the `merchantSig` in the params matches our calculated signature
      def verify(params, shared_secret = nil)
        params["sharedSecret"] ||= shared_secret
        their_sig = params.delete('merchantSig')
        # `delete` returns nil when the key is missing; the previous
        # `their_sig.empty?` check crashed with NoMethodError in that case
        # instead of raising the intended ArgumentError.
        raise ArgumentError, "params must include 'merchantSig' for verification" if their_sig.nil? || their_sig.empty?
        Adyen::Signature.verify(params, their_sig)
      end
    end
  end
end
| 44.941176 | 105 | 0.695681 |
# Homebrew formula for librcsc, the RoboCup Soccer Simulator support library.
class Librcsc < Formula
  desc "RoboCup Soccer Simulator library"
  homepage "https://osdn.net/projects/rctools/"
  # Canonical: https://osdn.net/dl/rctools/librcsc-4.1.0.tar.gz
  url "https://dotsrc.dl.osdn.net/osdn/rctools/51941/librcsc-4.1.0.tar.gz"
  sha256 "1e8f66927b03fb921c5a2a8c763fb7297a4349c81d1411c450b180178b46f481"

  bottle do
    cellar :any
    rebuild 1
    sha256 "0eeb0dfb16662da2760d8c753dc23049afdd9a8da0a5ae3eba9c5ac56ed00a41" => :mojave
    sha256 "4bd96acb6e78620e25b3b33e745e7770ea812cde22a3d756ac978c778d3b993c" => :high_sierra
    sha256 "c8b9dc2887f771f07b33bb70cec9ab62b4cee067f8b3a2d7ae57296428881031" => :sierra
    sha256 "c2093c232c857c15bea5dd6c1c6df14aa4b00ed0c6eb3ab7e4d0d3f8c72b54c6" => :el_capitan
    sha256 "c339890cbed4a1ca1b0a14d4375ece92ccee44a1f29023e1f633e9a9e0d6b6d5" => :yosemite
    sha256 "db8f74fadedc34da92c2109c1bbb90971c494e104c6041f1c8429def7f14dbc9" => :mavericks
  end

  depends_on "boost"

  def install
    system "./configure", "--disable-debug",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  # Compiles and runs a minimal program against librcsc_rcg to verify headers
  # and linkage.
  test do
    (testpath/"test.cpp").write <<~EOS
      #include <rcsc/rcg.h>
      int main() {
        rcsc::rcg::PlayerT p;
        return 0;
      }
    EOS
    system ENV.cxx, "test.cpp", "-o", "test", "-L#{lib}", "-lrcsc_rcg"
    system "./test"
  end
end
| 34.666667 | 93 | 0.715237 |
# Stores the Slack message timestamp ("ts") of the bot message posted for a
# recognition, so the message can be referenced later via the Slack API.
# NOTE(review): purpose inferred from the column name — confirm against callers.
class AddRecognitionBotMessageTs < ActiveRecord::Migration[5.0]
  def change
    add_column :recognitions, :bot_msg_ts, :string
  end
end
| 23 | 63 | 0.775362 |
# ICNB (bacteriological code) status: the name was not effectively published.
class TaxonNameClassification::Icnb::NotEffectivelyPublished < TaxonNameClassification::Icnb

  # NOMEN ontology URI identifying this nomenclatural status.
  NOMEN_URI='http://purl.obolibrary.org/obo/NOMEN_0000082'.freeze

  # Classes that cannot be applied together with this one: everything the
  # parent already excludes, plus EffectivelyPublished and its descendants.
  def self.disjoint_taxon_name_classes
    self.parent.disjoint_taxon_name_classes +
        self.collect_descendants_and_itself_to_s(TaxonNameClassification::Icnb::EffectivelyPublished)
  end

  # GBIF nomenclatural status string corresponding to this classification.
  def self.gbif_status
    'invalidum'
  end

  # This status can be assigned directly to a taxon name.
  def self.assignable
    true
  end

end
| 21.190476 | 101 | 0.788764 |
e9b0765ed4834965e76f5a22b2958e2561261b38 | 19,794 | # frozen_string_literal: true
# name: discourse-calendar
# about: Display a calendar in the first post of a topic
# version: 0.2
# author: Daniel Waterworth, Joffrey Jaffeux
# url: https://github.com/discourse/discourse-calendar
libdir = File.join(File.dirname(__FILE__), "vendor/holidays/lib")
$LOAD_PATH.unshift(libdir) unless $LOAD_PATH.include?(libdir)
gem 'rrule', '0.4.2', require: false
load File.expand_path('../lib/calendar_settings_validator.rb', __FILE__)
enabled_site_setting :calendar_enabled
register_asset "javascripts/initializers/event-relative-date.js.es6"
register_asset 'stylesheets/vendor/fullcalendar.min.css'
register_asset 'stylesheets/common/discourse-calendar.scss'
register_asset 'stylesheets/common/upcoming-events-calendar.scss'
register_asset 'stylesheets/common/discourse-post-event.scss'
register_asset 'stylesheets/common/discourse-post-event-preview.scss'
register_asset 'stylesheets/common/discourse-post-event-builder.scss'
register_asset 'stylesheets/common/discourse-post-event-invitees.scss'
register_asset 'stylesheets/common/discourse-post-event-upcoming-events.scss'
register_asset 'stylesheets/common/discourse-post-event-core-ext.scss'
register_asset 'stylesheets/mobile/discourse-post-event-core-ext.scss', :mobile
register_asset 'stylesheets/common/discourse-post-event-bulk-invite-modal.scss'
register_asset 'stylesheets/mobile/discourse-calendar.scss', :mobile
register_asset 'stylesheets/mobile/discourse-post-event.scss', :mobile
register_asset 'stylesheets/desktop/discourse-calendar.scss', :desktop
register_asset 'stylesheets/colors.scss', :color_definitions
register_svg_icon 'fas fa-calendar-day'
register_svg_icon 'fas fa-clock'
register_svg_icon 'fas fa-file-csv'
register_svg_icon 'fas fa-star'
register_svg_icon 'fas fa-file-upload'
after_initialize do
module ::DiscourseCalendar
PLUGIN_NAME ||= 'discourse-calendar'
# Type of calendar ('static' or 'dynamic')
CALENDAR_CUSTOM_FIELD ||= 'calendar'
# User custom field set when user is on holiday
HOLIDAY_CUSTOM_FIELD ||= 'on_holiday'
# List of all users on holiday
USERS_ON_HOLIDAY_KEY ||= 'users_on_holiday'
# User region used in finding holidays
REGION_CUSTOM_FIELD ||= 'holidays-region'
# List of groups
GROUP_TIMEZONES_CUSTOM_FIELD ||= 'group-timezones'
def self.users_on_holiday
PluginStore.get(PLUGIN_NAME, USERS_ON_HOLIDAY_KEY)
end
def self.users_on_holiday=(usernames)
PluginStore.set(PLUGIN_NAME, USERS_ON_HOLIDAY_KEY, usernames)
end
class Engine < ::Rails::Engine
engine_name PLUGIN_NAME
isolate_namespace DiscourseCalendar
end
end
module ::DiscoursePostEvent
PLUGIN_NAME ||= 'discourse-post-event'
# Topic where op has a post event custom field
TOPIC_POST_EVENT_STARTS_AT ||= 'TopicEventStartsAt'
TOPIC_POST_EVENT_ENDS_AT ||= 'TopicEventEndsAt'
class Engine < ::Rails::Engine
engine_name PLUGIN_NAME
isolate_namespace DiscoursePostEvent
end
end
# DISCOURSE POST EVENT
%w[
../app/controllers/discourse_post_event_controller.rb
../app/controllers/discourse_post_event/invitees_controller.rb
../app/controllers/discourse_post_event/events_controller.rb
../app/controllers/discourse_post_event/upcoming_events_controller.rb
../app/models/discourse_post_event/event.rb
../app/models/discourse_post_event/event_date.rb
../app/models/discourse_post_event/invitee.rb
../lib/discourse_post_event/event_parser.rb
../lib/discourse_post_event/event_validator.rb
../lib/discourse_post_event/rrule_generator.rb
../jobs/regular/discourse_post_event/bulk_invite.rb
../jobs/regular/discourse_post_event/send_reminder.rb
../lib/discourse_post_event/event_finder.rb
../app/serializers/discourse_post_event/invitee_serializer.rb
../app/serializers/discourse_post_event/event_serializer.rb
].each { |path| load File.expand_path(path, __FILE__) }
::ActionController::Base.prepend_view_path File.expand_path(
'../app/views',
__FILE__
)
Discourse::Application.routes.append do
mount ::DiscoursePostEvent::Engine, at: '/'
end
DiscoursePostEvent::Engine.routes.draw do
get '/discourse-post-event/events' => 'events#index',
constraints: { format: /(json|ics)/ }
get '/discourse-post-event/events/:id' => 'events#show'
delete '/discourse-post-event/events/:id' => 'events#destroy'
post '/discourse-post-event/events' => 'events#create'
post '/discourse-post-event/events/:id/csv-bulk-invite' =>
'events#csv_bulk_invite'
post '/discourse-post-event/events/:id/bulk-invite' => 'events#bulk_invite',
format: :json
post '/discourse-post-event/events/:id/invite' => 'events#invite'
put '/discourse-post-event/events/:post_id/invitees/:id' =>
'invitees#update'
post '/discourse-post-event/events/:post_id/invitees' => 'invitees#create'
get '/discourse-post-event/events/:post_id/invitees' => 'invitees#index'
delete '/discourse-post-event/events/:post_id/invitees/:id' =>
'invitees#destroy'
get '/upcoming-events' => 'upcoming_events#index'
end
reloadable_patch do
Post.class_eval do
has_one :event,
dependent: :destroy,
class_name: 'DiscoursePostEvent::Event',
foreign_key: :id
validate :valid_event
def valid_event
return unless self.raw_changed?
validator = DiscoursePostEvent::EventValidator.new(self)
validator.validate_event
end
end
end
add_to_class(:user, :can_create_discourse_post_event?) do
if defined?(@can_create_discourse_post_event)
return @can_create_discourse_post_event
end
@can_create_discourse_post_event = begin
return true if staff?
allowed_groups = SiteSetting.discourse_post_event_allowed_on_groups.to_s.split('|').compact
allowed_groups.present? && groups.where(id: allowed_groups).exists?
rescue StandardError
false
end
end
add_to_class(:guardian, :can_act_on_invitee?) do |invitee|
user && (user.staff? || user.id == invitee.user_id)
end
add_to_class(:guardian, :can_create_discourse_post_event?) do
user && user.can_create_discourse_post_event?
end
add_to_serializer(:current_user, :can_create_discourse_post_event) do
object.can_create_discourse_post_event?
end
add_to_class(:user, :can_act_on_discourse_post_event?) do |event|
if defined?(@can_act_on_discourse_post_event)
return @can_act_on_discourse_post_event
end
@can_act_on_discourse_post_event = begin
return true if staff?
can_create_discourse_post_event? && event.post.user_id == id
rescue StandardError
false
end
end
add_to_class(:guardian, :can_act_on_discourse_post_event?) do |event|
user && user.can_act_on_discourse_post_event?(event)
end
add_class_method(:group, :discourse_post_event_allowed_groups) do
where(
id: SiteSetting.discourse_post_event_allowed_on_groups.split('|').compact
)
end
# TODO: Switch to an official plugin API once support for it has landed.
if TopicView.respond_to?(:on_preload)
TopicView.on_preload do |topic_view|
if SiteSetting.discourse_post_event_enabled
topic_view.instance_variable_set(:@posts, topic_view.posts.includes(:event))
end
end
end
add_to_serializer(:post, :event) do
DiscoursePostEvent::EventSerializer.new(
object.event,
scope: scope, root: false
)
end
add_to_serializer(:post, :include_event?) do
SiteSetting.discourse_post_event_enabled && !object.nil? &&
!object.deleted_at.present?
end
on(:post_process_cooked) do |doc, post|
DiscoursePostEvent::Event.update_from_raw(post)
end
on(:post_destroyed) do |post|
if SiteSetting.discourse_post_event_enabled && post.event
post.event.update!(deleted_at: Time.now)
end
end
on(:post_recovered) do |post|
if SiteSetting.discourse_post_event_enabled && post.event
post.event.update!(deleted_at: nil)
end
end
TopicList.preloaded_custom_fields << DiscoursePostEvent::TOPIC_POST_EVENT_STARTS_AT
add_to_serializer(:topic_view, :event_starts_at, false) do
object.topic.custom_fields[DiscoursePostEvent::TOPIC_POST_EVENT_STARTS_AT]
end
add_to_serializer(:topic_view, 'include_event_starts_at?') do
SiteSetting.discourse_post_event_enabled &&
SiteSetting.display_post_event_date_on_topic_title &&
object.topic.custom_fields.keys.include?(
DiscoursePostEvent::TOPIC_POST_EVENT_STARTS_AT
)
end
add_to_class(:topic, :event_starts_at) do
@event_starts_at ||= custom_fields[DiscoursePostEvent::TOPIC_POST_EVENT_STARTS_AT]
end
add_to_serializer(:topic_list_item, :event_starts_at, false) do
object.event_starts_at
end
add_to_serializer(:topic_list_item, 'include_event_starts_at?') do
SiteSetting.discourse_post_event_enabled &&
SiteSetting.display_post_event_date_on_topic_title &&
object.event_starts_at
end
TopicList.preloaded_custom_fields << DiscoursePostEvent::TOPIC_POST_EVENT_ENDS_AT
add_to_serializer(:topic_view, :event_ends_at, false) do
object.topic.custom_fields[DiscoursePostEvent::TOPIC_POST_EVENT_ENDS_AT]
end
add_to_serializer(:topic_view, 'include_event_ends_at?') do
SiteSetting.discourse_post_event_enabled &&
SiteSetting.display_post_event_date_on_topic_title &&
object.topic.custom_fields.keys.include?(
DiscoursePostEvent::TOPIC_POST_EVENT_ENDS_AT
)
end
add_to_class(:topic, :event_ends_at) do
@event_ends_at ||= custom_fields[DiscoursePostEvent::TOPIC_POST_EVENT_ENDS_AT]
end
add_to_serializer(:topic_list_item, :event_ends_at, false) do
object.event_ends_at
end
add_to_serializer(:topic_list_item, 'include_event_ends_at?') do
SiteSetting.discourse_post_event_enabled &&
SiteSetting.display_post_event_date_on_topic_title &&
object.event_ends_at
end
# DISCOURSE CALENDAR
%w[
../app/models/calendar_event.rb
../app/serializers/user_timezone_serializer.rb
../jobs/scheduled/create_holiday_events.rb
../jobs/scheduled/destroy_past_events.rb
../jobs/scheduled/update_holiday_usernames.rb
../jobs/scheduled/monitor_event_dates.rb
../lib/calendar_validator.rb
../lib/calendar.rb
../lib/event_validator.rb
../lib/group_timezones.rb
../lib/time_sniffer.rb
].each { |path| load File.expand_path(path, __FILE__) }
register_post_custom_field_type(
DiscourseCalendar::CALENDAR_CUSTOM_FIELD,
:string
)
register_post_custom_field_type(
DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD,
:json
)
TopicView.default_post_custom_fields <<
DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD
register_user_custom_field_type(
DiscourseCalendar::HOLIDAY_CUSTOM_FIELD,
:boolean
)
# TODO Drop after Discourse 2.6.0 release
if respond_to?(:allow_staff_user_custom_field)
allow_staff_user_custom_field(DiscourseCalendar::HOLIDAY_CUSTOM_FIELD)
else
whitelist_staff_user_custom_field(DiscourseCalendar::HOLIDAY_CUSTOM_FIELD)
end
register_editable_user_custom_field(DiscourseCalendar::REGION_CUSTOM_FIELD)
# TODO Drop after Discourse 2.6.0 release
if respond_to?(:allow_staff_user_custom_field)
allow_staff_user_custom_field(DiscourseCalendar::REGION_CUSTOM_FIELD)
else
whitelist_staff_user_custom_field(DiscourseCalendar::REGION_CUSTOM_FIELD)
end
on(:site_setting_changed) do |name, old_value, new_value|
unless %i[all_day_event_start_time all_day_event_end_time].include? name
next
end
Post.where(id: CalendarEvent.select(:post_id).distinct).each do |post|
CalendarEvent.update(post)
end
end
on(:post_process_cooked) do |doc, post|
DiscourseCalendar::Calendar.update(post)
DiscourseCalendar::GroupTimezones.update(post)
CalendarEvent.update(post)
end
on(:post_recovered) do |post, _, _|
DiscourseCalendar::Calendar.update(post)
DiscourseCalendar::GroupTimezones.update(post)
CalendarEvent.update(post)
end
on(:post_destroyed) do |post, _, _|
DiscourseCalendar::Calendar.destroy(post)
CalendarEvent.where(post_id: post.id).destroy_all
end
validate(:post, :validate_calendar) do |force = nil|
return unless self.raw_changed? || force
validator = DiscourseCalendar::CalendarValidator.new(self)
validator.validate_calendar
end
validate(:post, :validate_event) do |force = nil|
return unless self.raw_changed? || force
return if self.is_first_post?
# Skip if not a calendar topic
if !self&.topic&.first_post&.custom_fields&.[](
DiscourseCalendar::CALENDAR_CUSTOM_FIELD
)
return
end
validator = DiscourseCalendar::EventValidator.new(self)
validator.validate_event
end
add_to_class(:post, :has_group_timezones?) do
custom_fields[DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD].present?
end
add_to_class(:post, :group_timezones) do
custom_fields[DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD] || {}
end
add_to_class(:post, :group_timezones=) do |val|
if val.present?
custom_fields[DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD] = val
else
custom_fields.delete(DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD)
end
end
add_to_serializer(:post, :calendar_details) do
grouped = {}
standalones = []
CalendarEvent.where(topic_id: object.topic_id).order(:start_date, :end_date).each do |event|
if event.post_id
standalones << {
type: :standalone,
post_number: event.post_number,
message: event.description,
from: event.start_date,
to: event.end_date,
username: event.username,
recurring: event.recurrence,
post_url: Post.url("-", event.topic_id, event.post_number)
}
else
identifier = "#{event.region.split("_").first}-#{event.start_date.strftime("%j")}"
grouped[identifier] ||= {
type: :grouped,
from: event.start_date,
name: [],
usernames: []
}
grouped[identifier][:name] << event.description
grouped[identifier][:usernames] << event.username
end
end
grouped.each do |_, v|
v[:name].sort!.uniq!
v[:name] = v[:name].join(", ")
v[:usernames].sort!.uniq!
end
standalones + grouped.values
end
add_to_serializer(:post, :include_calendar_details?) { object.is_first_post? }
add_to_serializer(:post, :group_timezones) do
result = {}
group_timezones = post_custom_fields[DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD] || {}
group_names = group_timezones['groups'] || []
if group_names.present?
users =
User.joins(:groups, :user_option).where("groups.name": group_names)
.select(
'users.*',
'groups.name AS group_name',
'user_options.timezone'
)
users.each do |u|
result[u.group_name] ||= []
result[u.group_name] << UserTimezoneSerializer.new(u, root: false).as_json
end
end
result
end
add_to_serializer(:post, :include_group_timezones?) do
post_custom_fields[DiscourseCalendar::GROUP_TIMEZONES_CUSTOM_FIELD].present?
end
add_to_serializer(:site, :users_on_holiday) do
DiscourseCalendar.users_on_holiday
end
add_to_serializer(:site, :include_users_on_holiday?) { scope.is_staff? }
reloadable_patch do
module DiscoursePostEvent::ExportCsvControllerExtension
def export_entity
if post_event_export? && ensure_can_export_post_event
Jobs.enqueue(
:export_csv_file,
entity: export_params[:entity],
user_id: current_user.id,
args: export_params[:args]
)
StaffActionLogger.new(current_user).log_entity_export(
export_params[:entity]
)
render json: success_json
else
super
end
end
private
def export_params
if post_event_export?
@_export_params ||=
begin
params.require(:entity)
params.permit(:entity, args: %i[id]).to_h
end
else
super
end
end
def post_event_export?
params[:entity] === 'post_event'
end
def ensure_can_export_post_event
return if !SiteSetting.discourse_post_event_enabled
post_event = DiscoursePostEvent::Event.find(export_params[:args][:id])
post_event && guardian.can_act_on_discourse_post_event?(post_event)
end
end
require_dependency 'export_csv_controller'
class ::ExportCsvController
prepend DiscoursePostEvent::ExportCsvControllerExtension
end
module ExportPostEventCsvReportExtension
def post_event_export(&block)
return enum_for(:post_event_export) unless block_given?
guardian = Guardian.new(current_user)
event = DiscoursePostEvent::Event.includes(invitees: :user).find(@extra[:id])
guardian.ensure_can_act_on_discourse_post_event!(event)
event.invitees.order(:id).each do |invitee|
yield [
invitee.user.username,
DiscoursePostEvent::Invitee.statuses[invitee.status],
invitee.created_at,
invitee.updated_at
]
end
end
def get_header(entity)
if SiteSetting.discourse_post_event_enabled && entity === 'post_event'
%w[username status first_answered_at last_updated_at]
else
super
end
end
end
class Jobs::ExportCsvFile
prepend ExportPostEventCsvReportExtension
end
on(:reduce_cooked) do |fragment, post|
if SiteSetting.discourse_post_event_enabled
fragment.css('.discourse-post-event').each do |event_node|
starts_at = event_node['data-start']
ends_at = event_node['data-end']
dates = "#{starts_at} (UTC)"
dates = "#{dates} → #{ends_at} (UTC)" if ends_at
event_name = event_node['data-name'] || post.topic.title
event_node.replace <<~TXT
<div style='border:1px solid #dedede'>
<p><a href="#{
Discourse.base_url
}#{post.url}">#{event_name}</a></p>
<p>#{
dates
}</p>
</div>
TXT
end
end
end
on(:user_destroyed) do |user|
DiscoursePostEvent::Invitee.where(user_id: user.id).destroy_all
end
if respond_to?(:add_post_revision_notifier_recipients)
add_post_revision_notifier_recipients do |post_revision|
# next if no modifications
next if !post_revision.modifications.present?
# do no notify recipients when only updating tags
next if post_revision.modifications.keys == ['tags']
ids = []
post = post_revision.post
if post && post.is_first_post? && post.event
ids.concat(post.event.on_going_event_invitees.pluck(:user_id))
end
ids
end
end
on(:site_setting_changed) do |name, old_val, new_val|
next if name != :discourse_post_event_allowed_custom_fields
previous_fields = old_val.split('|')
new_fields = new_val.split('|')
removed_fields = previous_fields - new_fields
next if removed_fields.empty?
DiscoursePostEvent::Event.all.find_each do |event|
removed_fields.each { |field| event.custom_fields.delete(field) }
event.save
end
end
end
end
| 31.721154 | 97 | 0.705163 |
1dabfbfaf9d1c09e63948a161bbb2af5130113c1 | 203 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "Paysecure" do
  # Placeholder generated by the gem scaffold: intentionally fails until real
  # specs replace it.
  it "fails" do
    fail "hey buddy, you should probably rename this file and start specing for real"
  end
end
| 25.375 | 85 | 0.743842 |
0309d5edf880f2b06a34c5949d0c86d6ae899df1 | 9,362 | # -*- coding: binary -*-
module Msf
module Ui
module Console
###
#
# Module-specific command dispatcher.
#
###
module ModuleCommandDispatcher
include Msf::Ui::Console::CommandDispatcher
# Dispatch table mapping console command names to their help text.
def commands
  cmds = {}
  cmds["reload"] = "Reload the current module from disk"
  cmds["check"] = "Check to see if a target is vulnerable"
  cmds
end
#
# Returns the active driver module, if any.
#
def mod
  driver.active_module
end
#
# Makes the given module the active driver module.
#
def mod=(m)
  driver.active_module = m
end
# Percentage (Float) of hosts checked so far in a multi-host sweep.
# Returns 0 when no sweep is in progress (counters not yet initialized).
def check_progress
  # && instead of `and`: `and` is for control flow, not boolean logic.
  return 0 unless @range_done && @range_count
  (@range_done / @range_count.to_f) * 100
end
# Prints a progress line once the sweep has advanced by at least
# @show_percent since the last report; advances @range_percent so the
# next line only prints after another @show_percent step.
def check_show_progress
  pct = check_progress
  if(pct >= (@range_percent + @show_percent))
    @range_percent = @range_percent + @show_percent
    # Pad the done-count to the width of the total host count.
    tdlen = @range_count.to_s.length
    print_status("Checked #{"%.#{tdlen}d" % @range_done} of #{@range_count} hosts (#{"%.3d" % pct.to_i}% complete)")
  end
end
# Runs check_simple against every host produced by the given
# Rex::Socket::RangeWalker, using a pool of up to THREADS worker threads
# (capped at 16 on Windows and 200 on Cygwin). Progress output is
# controlled by the ShowProgress / ShowProgressPercent datastore options.
def check_multiple(hosts)
  # This part of the code is mostly from scanner.rb
  @show_progress = framework.datastore['ShowProgress'] || mod.datastore['ShowProgress'] || false
  @show_percent = ( framework.datastore['ShowProgressPercent'] || mod.datastore['ShowProgressPercent'] ).to_i
  @range_count = hosts.length || 0
  @range_done = 0
  @range_percent = 0
  # Set the default thread to 1. The same behavior as before.
  threads_max = (framework.datastore['THREADS'] || mod.datastore['THREADS'] || 1).to_i
  @tl = []
  if Rex::Compat.is_windows
    if threads_max > 16
      print_warning("Thread count has been adjusted to 16")
      threads_max = 16
    end
  end
  if Rex::Compat.is_cygwin
    if threads_max > 200
      print_warning("Thread count has been adjusted to 200")
      threads_max = 200
    end
  end
  loop do
    # Top up the pool until it is full or the range is exhausted.
    while (@tl.length < threads_max)
      ip = hosts.next_ip
      break unless ip
      @tl << framework.threads.spawn("CheckHost-#{ip}", false, ip.dup) { |tip|
        # Make sure this is thread-safe when assigning an IP to the RHOST
        # datastore option
        instance = mod.replicant
        instance.datastore['RHOST'] = tip.dup
        Msf::Simple::Framework.simplify_module(instance, false)
        check_simple(instance)
      }
    end
    break if @tl.length == 0
    tla = @tl.length
    # This exception handling is necessary, the first thread with errors can kill the
    # whole check_multiple and leave the rest of the threads running in background and
    # only accessible with the threads command (Thread.list)
    begin
      @tl.first.join
    rescue ::Exception => exception
      if exception.kind_of?(::Interrupt)
        raise exception
      else
        elog('Error encountered with first Thread', error: exception)
      end
    end
    # Reap finished threads and count them toward the progress total.
    @tl.delete_if { |t| not t.alive? }
    tlb = @tl.length
    @range_done += (tla - tlb)
    check_show_progress if @show_progress
  end
end
#
# Checks to see if a target is vulnerable.
#
# Usage: check [-h] [IP range]. With no argument, falls back to the
# module's (then the framework's) RHOSTS datastore value.
#
def cmd_check(*args)
  if args.first =~ /^\-h$/i
    cmd_check_help
    return
  end
  # Everything after "check" is treated as the target range.
  ip_range_arg = args.join(' ') unless args.empty?
  ip_range_arg ||= mod.datastore['RHOSTS'] || framework.datastore['RHOSTS'] || ''
  opt = Msf::OptAddressRange.new('RHOSTS')
  begin
    if !ip_range_arg.blank? && opt.valid?(ip_range_arg)
      hosts = Rex::Socket::RangeWalker.new(opt.normalize(ip_range_arg))
      # Check multiple hosts
      # Remember the current RHOST/RHOSTS so they can be restored below.
      last_rhost_opt = mod.datastore['RHOST']
      last_rhosts_opt = mod.datastore['RHOSTS']
      mod.datastore['RHOSTS'] = ip_range_arg
      begin
        if hosts.length > 1
          check_multiple(hosts)
        # Short-circuit check_multiple if it's a single host
        else
          mod.datastore['RHOST'] = hosts.next_ip
          check_simple
        end
      ensure
        # Restore the original rhost if set
        mod.datastore['RHOST'] = last_rhost_opt
        mod.datastore['RHOSTS'] = last_rhosts_opt
        mod.cleanup
      end
    # XXX: This is basically dead code now that exploits use RHOSTS
    else
      # Check a single rhost
      unless Msf::OptAddress.new('RHOST').valid?(mod.datastore['RHOST'])
        raise Msf::OptionValidateError.new(['RHOST'])
      end
      check_simple
    end
  rescue ::Interrupt
    # When the user sends interrupt trying to quit the task, some threads will still be active.
    # This means even though the console tells the user the task has aborted (or at least they
    # assume so), the checks are still running. Because of this, as soon as we detect interrupt,
    # we force the threads to die.
    if @tl
      @tl.each { |t| t.kill }
    end
    print_status("Caught interrupt from the console...")
    return
  end
end
# Prints usage, options and examples for the check command.
def cmd_check_help
  print_line('Usage: check [option] [IP Range]')
  print_line
  print_line('Options:')
  print_line('-h You are looking at it.')
  print_line
  print_line('Examples:')
  print_line('')
  print_line('Normally, if a RHOST is already specified, you can just run check.')
  print_line('But here are different ways to use the command:')
  print_line
  print_line('Against a single host:')
  print_line('check 192.168.1.123')
  print_line
  print_line('Against a range of IPs:')
  print_line('check 192.168.1.1-192.168.1.254')
  print_line
  print_line('Against a range of IPs loaded from a file:')
  print_line('check file:///tmp/ip_list.txt')
  print_line
  print_line('Multi-threaded checks:')
  print_line('1. set THREADS 10')
  print_line('2. check')
  print_line
end
# Records a vulnerability in the database for the host the given module
# instance just checked (called when check_simple returns Vulnerable).
#
# instance - the module whose workspace/rhost/name/references describe the finding.
def report_vuln(instance)
  framework.db.report_vuln(
    workspace: instance.workspace,
    host: instance.rhost,
    name: instance.name,
    info: "This was flagged as vulnerable by the explicit check of #{instance.fullname}.",
    refs: instance.references
  )
end
# Runs the check method of a single module instance and reports the result
# to the console; vulnerable results are also recorded via report_vuln.
#
# instance - module to check; defaults to the active module when nil.
def check_simple(instance=nil)
  unless instance
    instance = mod
  end
  rhost = instance.datastore['RHOST']
  rport = instance.datastore['RPORT']
  # Build a "host:port - " prefix for console messages when possible.
  peer = rhost
  if rport
    rport = instance.rport if instance.respond_to?(:rport)
    peer = "#{rhost}:#{rport}"
  end
  peer_msg = peer ? "#{peer} - " : ''
  begin
    if instance.respond_to?(:check_simple)
      code = instance.check_simple(
        'LocalInput' => driver.input,
        'LocalOutput' => driver.output
      )
    else
      msg = "Check failed: #{instance.type.capitalize} modules do not support check."
      raise NotImplementedError, msg
    end
    if (code && code.kind_of?(Msf::Exploit::CheckCode))
      if (code == Msf::Exploit::CheckCode::Vulnerable)
        print_good("#{peer_msg}#{code[1]}")
        # Restore RHOST for report_vuln
        instance.datastore['RHOST'] ||= rhost
        report_vuln(instance)
      else
        print_status("#{peer_msg}#{code[1]}")
      end
    else
      # The module returned something other than a CheckCode.
      msg = "#{peer_msg}Check failed: The state could not be determined."
      print_error(msg)
      elog("#{msg}\n#{caller.join("\n")}")
    end
  rescue ::Rex::ConnectionError, ::Rex::ConnectionProxyError, ::Errno::ECONNRESET, ::Errno::EINTR, ::Rex::TimeoutError, ::Timeout::Error => e
    # Connection issues while running check should be handled by the module
    print_error("Check failed: #{e.class} #{e}")
    elog('Check Failed', error: e)
  rescue ::Msf::Exploit::Failed => e
    # Handle fail_with and other designated exploit failures
    print_error("Check failed: #{e.class} #{e}")
    elog('Check Failed', error: e)
  rescue ::RuntimeError => e
    # Some modules raise RuntimeError but we don't necessarily care about those when we run check()
    elog('Check Failed', error: e)
  rescue ::NotImplementedError => e
    print_error(e.message)
    elog('Check Failed', error: e)
  rescue ::Exception => e
    print_error("Check failed: #{e.class} #{e}")
    elog('Check Failed', error: e)
  end
end
#
# Reloads the active module, logging (not raising) any failure.
#
def cmd_reload(*args)
  reload
rescue => e
  # rescue => e instead of a bare rescue + $!: avoids the implicit global
  # and makes the handled exception explicit.
  log_error("Failed to reload: #{e}")
end
# Option parser for the reload command; its usage text is printed by
# cmd_reload_help below.
@@reload_opts = Rex::Parser::Arguments.new(
  '-k' => [ false, 'Stop the current job before reloading.' ],
  '-h' => [ false, 'Help banner.' ])
# Prints usage information for the reload command.
def cmd_reload_help
  print_line "Usage: reload [-k]"
  print_line
  print_line "Reloads the current module."
  print @@reload_opts.usage
end
#
# Reload the current module, optionally stopping existing job
#
# Returns the reloaded module, or nil when reloading failed (in which case
# the original module is kept active and the load error is printed).
#
def reload(should_stop_job=false)
  if should_stop_job and mod.job_id
    print_status('Stopping existing job...')
    framework.jobs.stop_job(mod.job_id)
    mod.job_id = nil
  end
  print_status('Reloading module...')
  original_mod = self.mod
  reloaded_mod = framework.modules.reload_module(original_mod)
  unless reloaded_mod
    error = framework.modules.module_load_error_by_path[original_mod.file_path]
    print_error("Failed to reload module: #{error}")
    self.mod = original_mod
  else
    self.mod = reloaded_mod
    # Re-bind the reloaded module's UI to the console's input/output.
    self.mod.init_ui(driver.input, driver.output)
  end
  reloaded_mod
end
end
end end end
| 28.455927 | 143 | 0.629673 |
385540661336b63d96e02c0235e38191cce97be6 | 1,660 | # Copyright 2012 The ham21/radio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Radio
  class PSK31
    # PSK31 receive chain: mixes the signal at the given audio frequency
    # down to baseband, low-pass filters and decimates it, recovers bit
    # timing, and decodes the resulting symbols into text.
    class Rx
      include Utils
      # 16 Hz bw LP filter for data recovery
      # (65-tap FirPM design; band edges are fractions of the sample rate.)
      FIR_BIT = FirPM.new numtaps: 65, type: :bandpass,
        bands: [0.0,0.03125,0.0625,0.5], desired: [1,1,0,0], weights: [1,286]
      # frequency - audio center frequency of the PSK31 signal in Hz.
      # The mix ratio divides by 8000, so the input stream is presumably
      # sampled at 8 kHz -- confirm against the capture source.
      def initialize frequency
        mix = frequency.to_f / 8000
        # Low-pass applied while decimating the input by 16.
        fir_500hz = firpm numtaps: 35, type: :bandpass,
          bands: [0,0.0125,0.125,0.5], desired: [1,1,0,0], weights: [1,10]
        @dec_filter = Filter.new mix:mix, decimate:16, fir:fir_500hz
        @bit_filter = Filter.new fir:FIR_BIT
        @bit_detect = BitDetect.new
        @decoder = Decoder.new
      end
      # Processes one buffer of samples and yields the text decoded from it
      # (possibly an empty string when no complete symbols were recovered).
      def call data
        decoded = ''
        @dec_filter.call data do |data|
          @bit_filter.call data do |data|
            data.each do |iq|
              @bit_detect.call iq do |iq|
                @decoder.call iq do |symbol|
                  decoded += symbol
                end
              end
            end
          end
        end
        yield decoded
      end
    end
  end
end
| 28.62069 | 77 | 0.601807 |
module LanguageServer
  module Protocol
    module Interface
      #
      # Document link options.
      #
      class DocumentLinkOptions
        # Frozen hash of the wire-format attributes for this structure.
        attr_reader :attributes

        def initialize(resolve_provider: nil)
          attrs = {}
          # Only serialize the flag when it is truthy, matching the protocol's
          # optional-field semantics.
          attrs[:resolveProvider] = resolve_provider if resolve_provider
          @attributes = attrs.freeze
        end

        #
        # Document links have a resolve provider as well.
        #
        # @return [boolean]
        def resolve_provider
          attributes.fetch(:resolveProvider)
        end

        def to_hash
          attributes
        end

        def to_json(*args)
          to_hash.to_json(*args)
        end
      end
    end
  end
end
| 19.351351 | 78 | 0.568436 |
# Version constant for the forestrap-sass gem.
module ForestrapSass
  VERSION = "0.0.1.pre2"
end
| 12.5 | 24 | 0.72 |
625afe8c8a415afb9bb8a9d29adc2b89ee5e54be | 297 | module Administer
  module Fields
    # Field descriptor for a belongs_to association, extending the generic
    # Association field with the key used to reference the related record.
    class BelongsTo < Administer::Fields::Association
      # Key column used to resolve the associated record
      # (presumably the association's primary/foreign key -- confirm in callers).
      attr_reader :primary_key

      # name              - field name
      # primary_key       - key used to look up the associated record
      # association_class - class of the associated model
      # parent            - owning model/field context (passed through to super)
      def initialize(name, primary_key, association_class, parent)
        super(name, association_class, parent)
        @primary_key = primary_key
      end
    end
  end
end
| 22.846154 | 66 | 0.700337 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.